diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml
index 223cb18155..fc4f66d19e 100644
--- a/.github/workflows/audit.yaml
+++ b/.github/workflows/audit.yaml
@@ -29,7 +29,7 @@ jobs:
         run: |
           yarn
           cd meteor
-          meteor yarn validate:prod-dependencies
+          meteor npm run validate:prod-dependencies
        env:
          CI: true
@@ -57,7 +57,7 @@ jobs:
         run: |
           yarn
           cd meteor
-          meteor yarn run validate:all-dependencies
+          meteor npm run validate:all-dependencies
        env:
          CI: true
diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml
index b47a4d456c..aa76ac1464 100644
--- a/.github/workflows/node.yaml
+++ b/.github/workflows/node.yaml
@@ -46,7 +46,7 @@ jobs:
           # setup zodern:types. No linters are setup, so this simply installs the packages
           meteor lint
-          meteor yarn ci:lint
+          meteor npm run ci:lint
         env:
           CI: true
@@ -85,12 +85,12 @@ jobs:
           # setup zodern:types. No linters are setup, so this simply installs the packages
           meteor lint
-          NODE_OPTIONS="--max-old-space-size=6144" meteor yarn unitci --force-exit
+          NODE_OPTIONS="--max-old-space-size=6144" meteor npm run unitci --force-exit
         env:
           CI: true
       - name: Send coverage
         if: ((github.event_name == 'pull_request') && (!startsWith(github.head_ref, 'release'))) || ((github.event_name == 'push') && (!startsWith(github.ref_name, 'release')))
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -246,7 +246,9 @@ jobs:
           echo "image=$image" >> $GITHUB_OUTPUT
       - name: Trivy scanning
         if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
-        uses: aquasecurity/trivy-action@0.24.0
+        uses: aquasecurity/trivy-action@0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db
         with:
           image-ref: "${{ steps.trivy-image.outputs.image }}"
           format: "table"
@@ -395,7 +397,9 @@ jobs:
           echo "image=$image" >> $GITHUB_OUTPUT
       - name: Trivy scanning
         if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
-        uses: aquasecurity/trivy-action@0.24.0
+        uses: aquasecurity/trivy-action@0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db
         with:
           image-ref: "${{ steps.trivy-image.outputs.image }}"
           format: "table"
@@ -587,13 +591,17 @@ jobs:
           yarn
         env:
           CI: true
-      - name: Run generator
+      - name: Build OpenAPI client library
+        run: |
+          cd packages/openapi
+          yarn build
+        env:
+          CI: true
+      - name: Generate OpenAPI docs and server
         run: |
           cd packages/openapi
-          yarn gendocs
           yarn genserver
-          yarn genclient:ts
         env:
           CI: true
@@ -682,7 +690,7 @@ jobs:
           else
             # make dependencies of `determine-npm-tag` available
             yarn install --mode=skip-build
-
+            cd packages
             PACKAGE_NAME="@sofie-automation/shared-lib"
             PUBLISHED_VERSION=$(yarn npm info --json $PACKAGE_NAME | jq -c '.version' -r)
@@ -700,19 +708,11 @@ jobs:
           yarn build
         env:
           CI: true
-      - name: Generate OpenAPI client library
-        if: ${{ steps.do-publish.outputs.tag }}
-        uses: hatamiarash7/openapi-generator@v0.3.0
-        with:
-          generator: typescript-fetch
-          openapi-file: ./packages/openapi/api/actions.yaml
-          output-dir: ./packages/openapi/client/ts
-          command-args: -p supportsES6=true
       - name: Build OpenAPI client library
         if: ${{ steps.do-publish.outputs.tag }}
         run: |
           cd packages/openapi
-          yarn build:main
+          yarn build
         env:
           CI: true
       - name: Modify dependencies to use npm packages
diff --git a/.github/workflows/prerelease-libs.yml b/.github/workflows/prerelease-libs.yml
index b87ee4492c..e9750028c4 100644
--- a/.github/workflows/prerelease-libs.yml
+++ b/.github/workflows/prerelease-libs.yml
@@ -131,19 +131,12 @@ jobs:
           yarn build
         env:
           CI: true
-      - name: Generate OpenAPI client library
-        if: ${{ steps.do-publish.outputs.tag }}
-        uses: hatamiarash7/openapi-generator@v0.3.0
-        with:
-          generator: typescript-fetch
-          openapi-file: ./packages/openapi/api/actions.yaml
-          output-dir: ./packages/openapi/client/ts
-          command-args: -p supportsES6=true
+
       - name: Build OpenAPI client library
-        if: ${{ steps.do-publish.outputs.tag }}
+        if: ${{ steps.do-publish.outputs.publish }}
        run: |
          cd packages/openapi
-          yarn build:main
+          yarn build
        env:
          CI: true
       - name: Modify dependencies to use npm packages
diff --git a/.github/workflows/prune-container-images.yml b/.github/workflows/prune-container-images.yml
index b7c5625124..11f35daef9 100644
--- a/.github/workflows/prune-container-images.yml
+++ b/.github/workflows/prune-container-images.yml
@@ -7,6 +7,8 @@ on:
 jobs:
   prune-container-images:
+    if: ${{ github.repository_owner == 'nrkno' }}
+
     uses: nrkno/sofie-github-workflows/.github/workflows/prune-container-images.yml@main
     strategy:
       max-parallel: 1
diff --git a/.github/workflows/prune-tags.yml b/.github/workflows/prune-tags.yml
index e9d9a5bbbc..2bd28d2b81 100644
--- a/.github/workflows/prune-tags.yml
+++ b/.github/workflows/prune-tags.yml
@@ -16,6 +16,8 @@ on:
 jobs:
   prune-tags:
+    if: ${{ github.repository_owner == 'nrkno' }}
+
     name: Prune tags
     runs-on: ubuntu-latest
     timeout-minutes: 15
diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml
index 4c89694188..885d015d92 100644
--- a/.github/workflows/trivy.yml
+++ b/.github/workflows/trivy.yml
@@ -2,10 +2,12 @@ name: Scheduled Trivy Scan
 on:
   workflow_dispatch:
   schedule:
-    - cron: '0 10 * * 1'
+    - cron: "0 10 * * 1"

 jobs:
   trivy:
+    if: ${{ github.repository_owner == 'nrkno' }}
+
     name: Trivy scan
     runs-on: ubuntu-latest
     strategy:
@@ -15,17 +17,21 @@ jobs:
     steps:
       - name: Run Trivy vulnerability scanner (json)
-        uses: aquasecurity/trivy-action@0.24.0
+        uses: aquasecurity/trivy-action@0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db
         with:
           image-ref: ghcr.io/nrkno/sofie-core-${{ matrix.image }}:latest
           format: json
-          output: '${{ matrix.image }}-trivy-scan-results.json'
+          output: "${{ matrix.image }}-trivy-scan-results.json"

       - name: Run Trivy vulnerability scanner (table)
-        uses: aquasecurity/trivy-action@0.24.0
+        uses: aquasecurity/trivy-action@0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db
         with:
           image-ref: ghcr.io/nrkno/sofie-core-${{ matrix.image }}:latest
-          output: '${{ matrix.image }}-trivy-scan-results.txt'
+          output: "${{ matrix.image }}-trivy-scan-results.txt"

       - name: Post all scan results to Github Summary as a table
         env:
@@ -38,10 +44,12 @@
           echo $CODE_BLOCK >> $GITHUB_STEP_SUMMARY

       - name: Run Trivy in GitHub SBOM mode and submit results to Dependency Graph
-        uses: aquasecurity/trivy-action@0.24.0
+        uses: aquasecurity/trivy-action@0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db
         with:
-          format: 'github'
-          output: 'dependency-results-${{ matrix.image }}.sbom.json'
+          format: "github"
+          output: "dependency-results-${{ matrix.image }}.sbom.json"
           image-ref: ghcr.io/nrkno/sofie-core-${{ matrix.image }}:latest
           github-pat: ${{ secrets.GITHUB_TOKEN }}
@@ -56,46 +64,31 @@
           echo ${{ env.SUMMARY }}

       - name: Send Slack Notification
-        uses: slackapi/slack-github-action@v1.27.0
+        uses: slackapi/slack-github-action@v2.0.0
         with:
+          webhook: ${{ secrets.SLACK_WEBHOOK_URL }}
+          webhook-type: incoming-webhook
           payload: |
-            {
-              "text": "Trivy scan results",
-              "blocks": [
-                {
-                  "type": "header",
-                  "text": {
-                    "type": "plain_text",
-                    "text": "Trivy scan results for sofie-core-${{ matrix.image }}:latest"
-                  }
-                },
-                {
-                  "type": "section",
-                  "text": {
-                    "type": "mrkdwn",
-                    "text": ":thisisfine: ${{ env.SUMMARY }}"
-                  }
-                },
-                {
-                  "type": "section",
-                  "text": {
-                    "type": "mrkdwn",
-                    "text": "Read the full scan results on Github"
-                  },
-                  "accessory": {
-                    "type": "button",
-                    "text": {
-                      "type": "plain_text",
-                      "text": ":github: Scan results",
-                      "emoji": true
-                    },
-                    "value": "workflow_run",
-                    "url": "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-                    "action_id": "button-action"
-                  }
-                }
-              ]
-            }
-        env:
-          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
+            text: "Trivy scan results"
+            blocks:
+              - type: "header"
+                text:
+                  type: "plain_text"
+                  text: "Trivy scan results for sofie-core-${{ matrix.image }}:latest"
+              - type: "section"
+                text:
+                  type: "mrkdwn"
+                  text: ":thisisfine: ${{ env.SUMMARY }}"
+              - type: "section"
+                text:
+                  type: "mrkdwn"
+                  text: "Read the full scan results on Github"
+                accessory:
+                  type: "button"
+                  text:
+                    type: "plain_text"
+                    text: ":github: Scan results"
+                    emoji: true
+                  value: "workflow_run"
+                  url: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+                  action_id: "button-action"
diff --git a/.vscode/settings.json.default b/.vscode/settings.json.default
index 29742098b9..bbed2d7f28 100644
--- a/.vscode/settings.json.default
+++ b/.vscode/settings.json.default
@@ -9,7 +9,8 @@
 		"packages/shared-lib",
 		"packages/job-worker",
 		"packages/openapi",
-		"packages/live-status-gateway"
+		"packages/live-status-gateway",
+		"packages/webui"
 	],
 	"prettier.enable": true,
 	"sonarlint.connectedMode.project": { "projectKey": "nrkno_sofie-core" },
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index beced98156..b609a43658 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -9,4 +9,5 @@ This repository uses the following branches:
 * **_master_** is our main branch. We consider it stable and it is used in production.
 * The **_releaseXX_** branches are our in-development branches. When a release is ready, we decide to “freeze” that branch and create a new **_releaseXX+1_** branch.

-We encourage you to base your contributions on the latest **_releaseXX_** branch, alternatively the **_master_** branch or a recently frozen **_releaseXX_** branch. The [_Sofie Releases_](https://nrkno.github.io/sofie-core/releases) page collects the status and timeline of the releases.
+We require contributions to be based on the latest **_release\*_** branch.
+The [_Sofie Releases_](https://nrkno.github.io/sofie-core/releases) page collects the status and timeline of the releases.
diff --git a/DEVELOPER.md b/DEVELOPER.md
index 43b1749c7f..df3d084cb0 100644
--- a/DEVELOPER.md
+++ b/DEVELOPER.md
@@ -31,6 +31,7 @@ Follow these instructions to start up Sofie Core in development mode. (For produ
 ```bash
 git clone -b master https://github.com/nrkno/sofie-core.git
 cd sofie-core
+yarn
 yarn start
 ```
@@ -138,12 +139,12 @@ Then submit this as a PR.
 ### ConfigManifests

-The ConfigManifests for Blueprints and Gateways was replaced with JSONSchema in R50. 
+The ConfigManifests for Blueprints and Gateways was replaced with JSONSchema in R50.
 However, one usage by AdlibActions for their userDataManifest remains as this is not something we are actively using.

 ## Blueprint Migrations

-In R49, a replacement flow was added consisting of `validateConfig` and `applyConfig`. 
+In R49, a replacement flow was added consisting of `validateConfig` and `applyConfig`.
 It is no longer recommended to use the old migrations flow for showstyle and studio blueprints.
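As a reading aid for the `validateConfig`/`applyConfig` flow mentioned above, here is a rough illustrative sketch. The names and shapes are simplified assumptions for demonstration only; the authoritative types live in `@sofie-automation/blueprints-integration`.

```ts
// Hypothetical, simplified shapes -- not the real blueprints-integration types.
interface ConfigMessage {
	level: 'error' | 'warning'
	message: string
}

interface MyStudioConfig {
	mediaFlowId: string
	fallbackPartDuration: number
}

// validateConfig reports problems without mutating anything
function validateConfig(config: MyStudioConfig): ConfigMessage[] {
	const messages: ConfigMessage[] = []
	if (!config.mediaFlowId) messages.push({ level: 'error', message: 'mediaFlowId must be set' })
	if (config.fallbackPartDuration <= 0)
		messages.push({ level: 'warning', message: 'fallbackPartDuration should be positive' })
	return messages
}

// applyConfig only runs once validation has passed; derive or normalize values here
function applyConfig(config: MyStudioConfig): MyStudioConfig {
	return { ...config, fallbackPartDuration: config.fallbackPartDuration || 1000 }
}
```

The split keeps validation a pure check while all side effects happen in the apply step, which is the stated reason the old migrations flow is no longer recommended.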
 ### ExpectedMediaItems
diff --git a/meteor/.meteor/packages b/meteor/.meteor/packages
index 32ca94185a..4e5355b070 100644
--- a/meteor/.meteor/packages
+++ b/meteor/.meteor/packages
@@ -9,28 +9,18 @@
 # but you can also edit it by hand.

 meteor-base@1.5.1             # Packages every Meteor app needs to have
-mobile-experience@1.1.0       # Packages for a great mobile UX
-mongo@1.16.7                  # The database Meteor supports right now
+mongo@1.16.10                 # The database Meteor supports right now
 reactive-var@1.0.12           # Reactive variable for tracker
-standard-minifier-css@1.9.2   # CSS minifier run for production mode
-standard-minifier-js@2.8.1    # JS minifier run for production mode
-es5-shim@4.8.0                # ECMAScript 5 compatibility for older browsers
-ecmascript@0.16.7             # Enable ECMAScript2015+ syntax in app code
-typescript@4.9.4              # Enable TypeScript syntax in .ts and .tsx modules
+ecmascript@0.16.8             # Enable ECMAScript2015+ syntax in app code
+typescript@4.9.5              # Enable TypeScript syntax in .ts and .tsx modules
 shell-server@0.5.0            # Server-side component of the `meteor shell` command
-modern-browsers@0.1.9         # Select when to allow use of the "modern" bundle
-static-html@1.3.2             # Define static page content in .html files
-react-meteor-data             # React higher-order component for reactively tracking Meteor data
-
-tracker@1.3.2                 # Meteor's client-side reactive programming library
+tracker@1.3.3                 # Meteor's client-side reactive programming library
-session@1.2.1
-fourseven:scss

 dynamic-import@0.7.3
 ostrio:meteor-root
-accounts-password@2.3.4
+accounts-password@2.4.0
 julusian:meteor-elastic-apm@2.5.2
 zodern:types
diff --git a/meteor/.meteor/release b/meteor/.meteor/release
index 6641d0478a..5152abe9d5 100644
--- a/meteor/.meteor/release
+++ b/meteor/.meteor/release
@@ -1 +1 @@
-METEOR@2.13.3
+METEOR@2.16
diff --git a/meteor/.meteor/versions b/meteor/.meteor/versions
index bd092097f2..23b868e06f 100644
--- a/meteor/.meteor/versions
+++ b/meteor/.meteor/versions
@@ -1,84 +1,65 @@
-accounts-base@2.2.8
-accounts-password@2.3.4
+accounts-base@2.2.11
+accounts-password@2.4.0
 allow-deny@1.1.1
 autoupdate@1.8.0
-babel-compiler@7.10.4
+babel-compiler@7.10.5
 babel-runtime@1.5.1
 base64@1.0.12
 binary-heap@1.0.11
-blaze-tools@1.1.3
-boilerplate-generator@1.7.1
-caching-compiler@1.2.2
-caching-html-compiler@1.2.1
+boilerplate-generator@1.7.2
 callback-hook@1.5.1
-check@1.3.2
+check@1.4.1
 ddp@1.4.1
-ddp-client@2.6.1
-ddp-common@1.4.0
-ddp-rate-limiter@1.2.0
-ddp-server@2.6.2
+ddp-client@2.6.2
+ddp-common@1.4.1
+ddp-rate-limiter@1.2.1
+ddp-server@2.7.1
 diff-sequence@1.1.2
 dynamic-import@0.7.3
-ecmascript@0.16.7
+ecmascript@0.16.8
 ecmascript-runtime@0.8.1
 ecmascript-runtime-client@0.12.1
 ecmascript-runtime-server@0.11.0
 ejson@1.1.3
-email@2.2.5
+email@2.2.6
 es5-shim@4.8.0
-fetch@0.1.3
-fourseven:scss@4.15.0
+fetch@0.1.4
 geojson-utils@1.0.11
 hot-code-push@1.0.4
-html-tools@1.1.3
-htmljs@1.1.1
 id-map@1.1.1
 inter-process-messaging@0.1.1
 julusian:meteor-elastic-apm@2.5.2
 kschingiz:meteor-measured@1.0.3
-launch-screen@1.3.0
 localstorage@1.2.0
-logging@1.3.2
-meteor@1.11.3
+logging@1.3.4
+meteor@1.11.5
 meteor-base@1.5.1
-minifier-css@1.6.4
-minifier-js@2.7.5
-minimongo@1.9.3
-mobile-experience@1.1.0
-mobile-status-bar@1.1.0
-modern-browsers@0.1.9
-modules@0.19.0
+minimongo@1.9.4
+modern-browsers@0.1.10
+modules@0.20.0
 modules-runtime@0.13.1
-mongo@1.16.7
+mongo@1.16.10
 mongo-decimal@0.1.3
 mongo-dev-server@1.1.0
 mongo-id@1.0.8
-npm-mongo@4.16.0
+npm-mongo@4.17.2
 ordered-dict@1.1.0
 ostrio:meteor-root@1.1.1
 promise@0.12.2
 random@1.2.1
 rate-limit@1.1.1
-react-fast-refresh@0.2.7
-react-meteor-data@2.5.1
-reactive-dict@1.3.1
+react-fast-refresh@0.2.8
 reactive-var@1.0.12
 reload@1.3.1
 retry@1.1.0
 routepolicy@1.1.1
-session@1.2.1
 sha@1.0.9
 shell-server@0.5.0
-socket-stream-client@0.5.1
-spacebars-compiler@1.3.1
-standard-minifier-css@1.9.2
-standard-minifier-js@2.8.1
-static-html@1.3.2
-templating-tools@1.2.2
-tracker@1.3.2
-typescript@4.9.4
-underscore@1.0.13
+socket-stream-client@0.5.2
+tracker@1.3.3
+typescript@4.9.5
+underscore@1.6.1
 url@1.3.2
-webapp@1.13.5
+webapp@1.13.8
 webapp-hashing@1.1.1
 zodern:types@1.0.9
diff --git a/meteor/CHANGELOG.md b/meteor/CHANGELOG.md
index df0291d5b7..be59164e1b 100644
--- a/meteor/CHANGELOG.md
+++ b/meteor/CHANGELOG.md
@@ -2,6 +2,27 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

+### [1.51.2](https://github.com/nrkno/tv-automation-server-core/compare/v1.51.1...v1.51.2) (2024-11-21)
+
+
+### Bug Fixes
+
+* Include previousPartInstance in check to orphan segments rather than remove them. ([2c113b5](https://github.com/nrkno/tv-automation-server-core/commit/2c113b58b205198d13f0fc7e2114704311eb915b))
+* updatePartInstancesSegmentIds: take into account when multiple segments have been merged into one. ([bdab8c4](https://github.com/nrkno/tv-automation-server-core/commit/bdab8c4e4ee1e67a3568cccc98106bb7f1258673))
+
+## [1.51.0-in-testing.3](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.2...v1.51.0-in-testing.3) (2024-09-25)
+
+## [1.51.0-in-testing.2](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.1...v1.51.0-in-testing.2) (2024-09-24)
+
+
+### Bug Fixes
+
+* allow replacement in replaceInfinitesFromPreviousPlayhead ([ebb154d](https://github.com/nrkno/sofie-core/commit/ebb154d6b59369588da400d8d921a00e41b84dc8))
+* **BucketPanel:** Bucket AdLibs don't trigger when created before Rundown activation (SOFIE-3478) ([a16d977](https://github.com/nrkno/sofie-core/commit/a16d9777a301a6d7d69ea00be02b70c53cb9bdcc))
+* **LinePartTimeline:** make rules for findMainPiece consistent, make infinite graphics Pieces display correctly ([153d100](https://github.com/nrkno/sofie-core/commit/153d100fb659546201a654af5c566b513951df88))
+* **NoraFloatingInspector:** prevent Segment crash when trying to show a Piece with an invalid Nora `previewPayload` ([4a3a2e7](https://github.com/nrkno/sofie-core/commit/4a3a2e779c144b1c9e88c187cce2e5c80d34626d))
+* resolve an issue with prompter moving when Parts become PartInstances and the prompter position is juuuust right ([a670a73](https://github.com/nrkno/sofie-core/commit/a670a73fa6bfb8331921a2bedd9c927952cfffcf))
+
 ## [1.51.0-in-testing.0](https://github.com/nrkno/sofie-core/compare/v1.50.4...v1.51.0-in-testing.0) (2024-08-19)
diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts
index 163c569f38..2831f96b14 100644
--- a/meteor/__mocks__/defaultCollectionObjects.ts
+++ b/meteor/__mocks__/defaultCollectionObjects.ts
@@ -110,11 +110,14 @@ export function defaultStudio(_id: StudioId): DBStudio {
 			mediaPreviewsUrl: '',
 			minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN,
 			fallbackPartDuration: DEFAULT_FALLBACK_PART_DURATION,
+			allowHold: false,
+			allowPieceDirectPlay: false,
+			enableBuckets: false,
 		},
 		_rundownVersionHash: '',
-		routeSets: {},
-		routeSetExclusivityGroups: {},
-		packageContainers: {},
+		routeSetsWithOverrides: wrapDefaultObject({}),
+		routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}),
+		packageContainersWithOverrides: wrapDefaultObject({}),
 		previewContainerIds: [],
 		thumbnailContainerIds: [],
 		peripheralDeviceSettings: {
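For readers unfamiliar with the `*WithOverrides` migration above, here is a minimal sketch of the "defaults plus overrides" shape that `wrapDefaultObject()` is used for. It is simplified from the `objectWithOverrides` helpers in `@sofie-automation/corelib`; the real override-op union has more members than the single `set` op shown here.

```ts
// Simplified assumption of the ObjectWithOverrides shape.
interface ObjectOverrideSetOp {
	op: 'set'
	path: string
	value: unknown
}

interface ObjectWithOverrides<T extends object> {
	defaults: T
	overrides: ObjectOverrideSetOp[]
}

function wrapDefaultObject<T extends object>(obj: T): ObjectWithOverrides<T> {
	// A fresh wrapper starts with no user overrides at all
	return { defaults: obj, overrides: [] }
}

// Mirrors the mock above: empty defaults that user overrides can layer onto later
const routeSets = wrapDefaultObject({})
```

Keeping the system defaults and the user's edits separate is what lets blueprint upgrades change the defaults without clobbering user configuration.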
@@ -134,7 +137,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment
 		externalId: unprotectString(_id),
 		rundownId: rundownId,
 		name: 'Default Segment',
-		externalModified: 1,
 	}
 }
diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts
index e881d8a1f5..6abd5a60bf 100644
--- a/meteor/__mocks__/helpers/database.ts
+++ b/meteor/__mocks__/helpers/database.ts
@@ -14,13 +14,11 @@ import {
 	SourceLayerType,
 	StudioBlueprintManifest,
 	BlueprintManifestType,
-	IngestRundown,
 	BlueprintManifestBase,
 	ShowStyleBlueprintManifest,
 	IShowStyleContext,
 	BlueprintResultRundown,
 	BlueprintResultSegment,
-	IngestSegment,
 	IBlueprintAdLibPiece,
 	IBlueprintRundown,
 	IBlueprintSegment,
@@ -32,6 +30,8 @@ import {
 	StatusCode,
 	IBlueprintPieceType,
 	IBlueprintActionManifest,
+	SofieIngestSegment,
+	SofieIngestRundown,
 } from '@sofie-automation/blueprints-integration'
 import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase'
 import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant'
@@ -429,7 +429,10 @@ export async function setupMockShowStyleBlueprint(
 		getShowStyleVariantId: (): string | null => {
 			return SHOW_STYLE_VARIANT_ID
 		},
-		getRundown: (_context: IShowStyleContext, ingestRundown: IngestRundown): BlueprintResultRundown => {
+		getRundown: (
+			_context: IShowStyleContext,
+			ingestRundown: SofieIngestRundown
+		): BlueprintResultRundown => {
 			const rundown: IBlueprintRundown = {
 				externalId: ingestRundown.externalId,
 				name: ingestRundown.name,
@@ -452,7 +455,10 @@ export async function setupMockShowStyleBlueprint(
 				baseline: { timelineObjects: [] },
 			}
 		},
-		getSegment: (_context: unknown, ingestSegment: IngestSegment): BlueprintResultSegment => {
+		getSegment: (
+			_context: unknown,
+			ingestSegment: SofieIngestSegment
+		): BlueprintResultSegment => {
 			const segment: IBlueprintSegment = {
 				name: ingestSegment.name ? ingestSegment.name : ingestSegment.externalId,
 				privateData: ingestSegment.payload,
@@ -654,7 +660,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_0',
 		rundownId: rundown._id,
 		name: 'Segment 0',
-		externalModified: 1,
 	}
 	await Segments.mutableCollection.insertAsync(segment0)
 	/* tslint:disable:ter-indent*/
@@ -764,7 +769,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_2',
 		rundownId: rundown._id,
 		name: 'Segment 1',
-		externalModified: 1,
 	}
 	await Segments.mutableCollection.insertAsync(segment1)
@@ -807,7 +811,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_2',
 		rundownId: rundown._id,
 		name: 'Segment 2',
-		externalModified: 1,
 	}
 	await Segments.mutableCollection.insertAsync(segment2)
diff --git a/meteor/package.json b/meteor/package.json
index bc2115b774..f23ba75397 100644
--- a/meteor/package.json
+++ b/meteor/package.json
@@ -10,18 +10,18 @@
 		"debug": "meteor run",
 		"libs:syncVersions": "node scripts/libs-sync-version.js",
 		"libs:syncVersionsAndChangelogs": "node scripts/libs-sync-version-and-changelog.js",
-		"postinstall": "meteor yarn run prepareForTest",
+		"postinstall": "meteor npm run prepareForTest",
 		"prepareForTest": "node ../scripts/fixTestFibers.js",
 		"inject-git-hash": "node ./scripts/generate-version-file.js",
 		"unit": "jest",
 		"unitci": "jest --maxWorkers 2 --coverage",
 		"unitcov": "jest --coverage",
-		"test": "meteor yarn check-types && meteor yarn unit",
+		"test": "meteor npm run check-types && meteor npm run unit",
 		"watch": "jest --watch",
 		"update-snapshots": "jest --updateSnapshot",
 		"ci:lint": "meteor yarn check-types && meteor yarn lint",
 		"cov-open": "open-cli coverage/lcov-report/index.html",
-		"cov": "meteor yarn unitcov && meteor yarn cov-open",
+		"cov": "meteor npm run unitcov && meteor npm run cov-open",
 		"license-validate": "node ../scripts/checkLicenses.js --allowed=\"MIT,BSD,ISC,Apache,Unlicense,CC0,LGPL,CC BY 3.0,CC BY 4.0,MPL 2.0,Python 2.0\" --excludePackages=timecode,rxjs/ajax,rxjs/fetch,rxjs/internal-compatibility,nw-pre-gyp-module-test,rxjs/operators,rxjs/testing,rxjs/webSocket,undefined,i18next-conv,@fortawesome/fontawesome-common-types,argv,indexof,custom-license,private,public-domain-module,@sofie-automation/corelib,@sofie-automation/shared-lib,@sofie-automation/job-worker",
 		"lint": "run lint:raw .",
 		"lint:raw": "eslint --ext .ts --ext .js --ext .tsx --ext .jsx",
@@ -67,7 +67,7 @@
 		"object-path": "^0.11.8",
 		"p-lazy": "^3.1.0",
 		"semver": "^7.5.4",
-		"superfly-timeline": "9.0.1",
+		"superfly-timeline": "9.0.2",
 		"threadedclass": "^1.2.2",
 		"timecode": "0.0.4",
 		"type-fest": "^3.13.1",
diff --git a/meteor/server/Connections.ts b/meteor/server/Connections.ts
index d97d44d5fa..a07bd33d3e 100644
--- a/meteor/server/Connections.ts
+++ b/meteor/server/Connections.ts
@@ -1,4 +1,4 @@
-import { deferAsync, getCurrentTime } from './lib/lib'
+import { deferAsync, getCurrentTime, MeteorStartupAsync } from './lib/lib'
 import { Meteor } from 'meteor/meteor'
 import { logger } from './logging'
 import { sendTrace } from './api/integration/influx'
@@ -83,20 +83,19 @@ function traceConnections() {
 	}, 1000)
 }

-Meteor.startup(() => {
+MeteorStartupAsync(async () => {
 	// Reset the connection status of the devices
-	deferAsync(async () => {
-		await PeripheralDevices.updateAsync(
-			{
-				connected: true,
-				lastSeen: { $lt: getCurrentTime() - 60 * 1000 },
-			},
-			{
-				$set: {
-					connected: false,
-				},
+
+	await PeripheralDevices.updateAsync(
+		{
+			connected: true,
+			lastSeen: { $lt: getCurrentTime() - 60 * 1000 },
+		},
+		{
+			$set: {
+				connected: false,
 			},
-			{ multi: true }
-		)
-	})
+		},
+		{ multi: true }
+	)
 })
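The diff imports `MeteorStartupAsync` from `./lib/lib` but never shows its implementation. A plausible sketch of such a helper follows; this is an assumption, not the repository's code. The idea is to run an async task inside `Meteor.startup` and surface rejections instead of letting them vanish, which is why the `deferAsync` wrapper above becomes unnecessary.

```ts
// Hypothetical sketch of a MeteorStartupAsync helper (assumed, not shown in this PR).
import { Meteor } from 'meteor/meteor'

export function MeteorStartupAsync(fcn: () => Promise<void>): void {
	Meteor.startup(() => {
		// Log rejections so async startup failures are not silently dropped
		fcn().catch((e) => {
			console.error('Error in Meteor startup function:', e)
		})
	})
}
```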
diff --git a/meteor/server/__tests__/_testEnvironment.test.ts b/meteor/server/__tests__/_testEnvironment.test.ts
index 7ff4542025..549cba2c9b 100644
--- a/meteor/server/__tests__/_testEnvironment.test.ts
+++ b/meteor/server/__tests__/_testEnvironment.test.ts
@@ -11,7 +11,7 @@ import {
 	Evaluations,
 	ExpectedMediaItems,
 	ExternalMessageQueue,
-	IngestDataCache,
+	NrcsIngestDataCache,
 	MediaObjects,
 	MediaWorkFlows,
 	MediaWorkFlowSteps,
@@ -74,7 +74,7 @@ describe('Basic test of test environment', () => {
 		// @ts-ignore
 		expect(ExternalMessageQueue._isMock).toBeTruthy()
 		// @ts-ignore
-		expect(IngestDataCache._isMock).toBeTruthy()
+		expect(NrcsIngestDataCache._isMock).toBeTruthy()
 		// @ts-ignore
 		expect(MediaObjects._isMock).toBeTruthy()
 		// @ts-ignore
@@ -153,7 +153,7 @@ describe('Basic test of test environment', () => {
 		const studios = await Studios.findFetchAsync({})
 		expect(studios).toHaveLength(1)

-		const observer = Studios.observeChanges({ _id: protectString('abc') }, {})
+		const observer = await Studios.observeChanges({ _id: protectString('abc') }, {})
 		expect(observer).toBeTruthy()

 		await Studios.insertAsync({
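A recurring theme in this PR is that `observeChanges()` on the server collections becomes awaitable, so callers receive the live-query handle only once the observer is ready. A distilled sketch of that calling convention (the signatures are assumptions for illustration):

```ts
// Assumed shapes, for illustration of the awaited-observer pattern only.
interface LiveQueryHandle {
	stop(): void
}

type ObserveCallbacks = {
	added?: (id: string) => void
	removed?: (id: string) => void
}

declare function observeChanges(
	selector: Record<string, unknown>,
	callbacks: ObserveCallbacks
): Promise<LiveQueryHandle>

async function watchAll(): Promise<() => void> {
	// The await guarantees the observer is fully initialized before use
	const handle = await observeChanges({}, { added: (id) => console.log('added', id) })
	// Hand back a teardown function, mirroring the observer classes in this PR
	return () => handle.stop()
}
```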
diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts
index a083184179..2c189d38af 100644
--- a/meteor/server/__tests__/cronjobs.test.ts
+++ b/meteor/server/__tests__/cronjobs.test.ts
@@ -19,7 +19,8 @@ import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
 import { Meteor } from 'meteor/meteor'
 import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import {
-	IngestDataCacheObjId,
+	NrcsIngestDataCacheObjId,
+	SofieIngestDataCacheObjId,
 	PartId,
 	PeripheralDeviceId,
 	RundownId,
@@ -42,7 +43,7 @@ import '../cronjobs'
 import '../api/peripheralDevice'
 import {
 	CoreSystem,
-	IngestDataCache,
+	NrcsIngestDataCache,
 	PartInstances,
 	Parts,
 	PeripheralDeviceCommands,
@@ -51,8 +52,9 @@ import {
 	Snapshots,
 	UserActionsLog,
 	Segments,
+	SofieIngestDataCache,
 } from '../collections'
-import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache'
+import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache'
 import { JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob'
 import {
 	DefaultEnvironment,
@@ -61,6 +63,7 @@ import {
 } from '../../__mocks__/helpers/database'
 import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
 import { Settings } from '../Settings'
+import { SofieIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'

 describe('cronjobs', () => {
 	let env: DefaultEnvironment
@@ -170,45 +173,95 @@ describe('cronjobs', () => {
 		await PeripheralDevices.removeAsync({})
 	})

-	testInFiber('Remove IngestDataCache objects that are not connected to any Rundown', async () => {
-		// Set up a mock rundown, a detached IngestDataCache object and an object attached to the mock rundown
-		// Detached IngestDataCache object 0
-		const dataCache0Id = protectString(getRandomString())
-		await IngestDataCache.mutableCollection.insertAsync({
+	testInFiber('Remove NrcsIngestDataCache objects that are not connected to any Rundown', async () => {
+		// Set up a mock rundown, a detached NrcsIngestDataCache object and an object attached to the mock rundown
+		// Detached NrcsIngestDataCache object 0
+		const dataCache0Id = protectString(getRandomString())
+		await NrcsIngestDataCache.mutableCollection.insertAsync({
 			_id: dataCache0Id,
 			data: {
 				externalId: '',
 				name: '',
 				segments: [],
 				type: '',
+				rundownSource: {} as any,
+				payload: undefined,
 			},
 			modified: new Date(2000, 0, 1, 0, 0, 0).getTime(),
 			// this one is attached to rundown0
 			rundownId: getRandomId(),
-			type: IngestCacheType.RUNDOWN,
+			type: NrcsIngestCacheType.RUNDOWN,
 		})
-		// Attached IngestDataCache object 1
-		const dataCache1Id = protectString(getRandomString())
-		await IngestDataCache.mutableCollection.insertAsync({
+		// Attached NrcsIngestDataCache object 1
+		const dataCache1Id = protectString(getRandomString())
+		await NrcsIngestDataCache.mutableCollection.insertAsync({
 			_id: dataCache1Id,
 			data: {
 				externalId: '',
 				name: '',
 				segments: [],
 				type: '',
+				rundownSource: {} as any,
+				payload: undefined,
 			},
 			modified: new Date(2000, 0, 1, 0, 0, 0).getTime(),
 			// just some random ID
 			rundownId: rundownId,
-			type: IngestCacheType.RUNDOWN,
+			type: NrcsIngestCacheType.RUNDOWN,
 		})

 		await runCronjobs()

-		expect(await IngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({
+		expect(await NrcsIngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({
 			_id: dataCache1Id,
 		})
-		expect(await IngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined()
+		expect(await NrcsIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined()
+	})
+	testInFiber('Remove SofieIngestDataCache objects that are not connected to any Rundown', async () => {
+		// Set up a mock rundown, a detached SofieIngestDataCache object and an object attached to the mock rundown
+		// Detached SofieIngestDataCache object 0
+		const dataCache0Id = protectString(getRandomString())
+		await SofieIngestDataCache.mutableCollection.insertAsync({
+			_id: dataCache0Id,
+			data: {
+				externalId: '',
+				name: '',
+				segments: [],
+				type: '',
+				rundownSource: {} as any,
+				userEditStates: {},
+				payload: undefined,
+			},
+			modified: new Date(2000, 0, 1, 0, 0, 0).getTime(),
+			// this one is attached to rundown0
+			rundownId: getRandomId(),
+			type: SofieIngestCacheType.RUNDOWN,
+		})
+		// Attached SofieIngestDataCache object 1
+		const dataCache1Id = protectString(getRandomString())
+		await SofieIngestDataCache.mutableCollection.insertAsync({
+			_id: dataCache1Id,
+			data: {
+				externalId: '',
+				name: '',
+				segments: [],
+				type: '',
+				rundownSource: {} as any,
+				userEditStates: {},
+				payload: undefined,
+			},
+			modified: new Date(2000, 0, 1, 0, 0, 0).getTime(),
+			// just some random ID
+			rundownId: rundownId,
+			type: SofieIngestCacheType.RUNDOWN,
+		})
+
+		await runCronjobs()
+
+		expect(await SofieIngestDataCache.findOneAsync(dataCache1Id)).toMatchObject({
+			_id: dataCache1Id,
+		})
+		expect(await SofieIngestDataCache.findOneAsync(dataCache0Id)).toBeUndefined()
 	})
 	testInFiber('Removes old PartInstances and PieceInstances', async () => {
 		// nightlyCronjobInner()
@@ -217,7 +270,6 @@ describe('cronjobs', () => {
 			_id: getRandomId(),
 			_rank: 0,
 			externalId: '',
-			externalModified: 0,
 			rundownId,
 			name: 'mock segment',
 		}
diff --git a/meteor/server/api/ExternalMessageQueue.ts b/meteor/server/api/ExternalMessageQueue.ts
index 8f1fe9c284..51e0800985 100644
--- a/meteor/server/api/ExternalMessageQueue.ts
+++ b/meteor/server/api/ExternalMessageQueue.ts
@@ -1,7 +1,7 @@
 import { Meteor } from 'meteor/meteor'
 import { check } from '../lib/check'
 import { StatusCode } from '@sofie-automation/blueprints-integration'
-import { deferAsync, getCurrentTime } from '../lib/lib'
+import { deferAsync, getCurrentTime, MeteorStartupAsync } from '../lib/lib'
 import { registerClassToMeteorMethods } from '../methods'
 import {
 	NewExternalMessageQueueAPI,
@@ -50,18 +50,19 @@ function updateExternalMessageQueueStatus(): void {
 	}
 }

-ExternalMessageQueue.observeChanges(
-	{
-		sent: { $not: { $gt: 0 } },
-		tryCount: { $gt: 3 },
-	},
-	{
-		added: updateExternalMessageQueueStatus,
-		changed: updateExternalMessageQueueStatus,
-		removed: updateExternalMessageQueueStatus,
-	}
-)
-Meteor.startup(() => {
+MeteorStartupAsync(async () => {
+	await ExternalMessageQueue.observeChanges(
+		{
+			sent: { $not: { $gt: 0 } },
+			tryCount: { $gt: 3 },
+		},
+		{
+			added: updateExternalMessageQueueStatus,
+			changed: updateExternalMessageQueueStatus,
+			removed: updateExternalMessageQueueStatus,
+		}
+	)
+
 	updateExternalMessageQueueStatus()
 	// triggerdoMessageQueue(5000)
 })
diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts
index 75cc3e5f35..6aecf74097 100644
--- a/meteor/server/api/__tests__/cleanup.test.ts
+++ b/meteor/server/api/__tests__/cleanup.test.ts
@@ -32,7 +32,7 @@ import {
 	ExpectedPackageWorkStatuses,
 	ExpectedPlayoutItems,
 	ExternalMessageQueue,
-	IngestDataCache,
+	NrcsIngestDataCache,
 	PackageContainerPackageStatuses,
 	PackageInfos,
 	PeripheralDeviceCommands,
@@ -45,6 +45,7 @@ import {
 	TranslationsBundles,
 	PackageContainerStatuses,
 	TimelineDatastore,
+	SofieIngestDataCache,
 } from '../../collections'
 import { Collections } from '../../collections/lib'
 import { generateTranslationBundleOriginId } from '../translationsBundles'
@@ -300,7 +301,14 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) {
 		tryCount: 0,
 		type: '' as any,
 	})
-	await IngestDataCache.mutableCollection.insertAsync({
+	await NrcsIngestDataCache.mutableCollection.insertAsync({
+		_id: getRandomId(),
+		data: {} as any,
+		modified: 0,
+		rundownId,
+		type: '' as any,
+	})
+	await SofieIngestDataCache.mutableCollection.insertAsync({
 		_id: getRandomId(),
 		data: {} as any,
 		modified: 0,
diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts
index 3048dd3d17..7299084e52 100644
--- a/meteor/server/api/__tests__/peripheralDevice.test.ts
+++ b/meteor/server/api/__tests__/peripheralDevice.test.ts
@@ -120,7 +120,6 @@ describe('test peripheralDevice general API methods', () => {
 			_rank: 0,
 			rundownId: rundownID,
 			name: 'Fire',
-			externalModified: 1,
 		})
 		await Parts.mutableCollection.insertAsync({
 			_id: protectString('part000'),
@@ -164,7 +163,6 @@ describe('test peripheralDevice general API methods', () => {
 			externalId: 'segment01',
 			rundownId: rundownID,
 			name: 'Water',
-			externalModified: 1,
 		})
 		await Segments.mutableCollection.insertAsync({
 			_id: protectString('segment2'),
@@ -172,7 +170,6 @@ describe('test peripheralDevice general API methods', () => {
 			externalId: 'segment02',
 			rundownId: rundownID,
 			name: 'Earth',
-			externalModified: 1,
 		})
 	})
 	beforeEach(async () => {
diff --git a/meteor/server/api/cleanup.ts b/meteor/server/api/cleanup.ts
index f763388407..a15f3b49f1 100644
--- a/meteor/server/api/cleanup.ts
+++ b/meteor/server/api/cleanup.ts
@@ -37,7 +37,7 @@ import {
 	ExpectedPackageWorkStatuses,
 	ExpectedPlayoutItems,
 	ExternalMessageQueue,
-	IngestDataCache,
+	NrcsIngestDataCache,
 	MediaObjects,
 	MediaWorkFlows,
 	MediaWorkFlowSteps,
@@ -69,6 +69,7 @@ import {
 	UserActionsLog,
 	Workers,
 	WorkerThreadStatuses,
+	SofieIngestDataCache,
 } from '../collections'
 import { AsyncOnlyMongoCollection, AsyncOnlyReadOnlyMongoCollection } from '../collections/collection'
 import { getCollectionKey } from '../collections/lib'
@@ -276,7 +277,8 @@ export async function cleanupOldDataInner(actuallyCleanup = false): Promise
 	removedParts.add(id))
-	await ownedByRundownId(IngestDataCache)
+	await ownedByRundownId(NrcsIngestDataCache)
+	await ownedByRundownId(SofieIngestDataCache)
 	await ownedByRundownId(RundownBaselineAdLibActions)
 	await ownedByRundownId(RundownBaselineAdLibPieces)
diff --git a/meteor/server/api/client.ts b/meteor/server/api/client.ts
index 9aeb3a7156..38839f5502 100644
--- a/meteor/server/api/client.ts
+++ b/meteor/server/api/client.ts
@@ -422,6 +422,8 @@ export namespace ServerClientAPI {
 	class ServerClientAPIClass extends MethodContextAPI implements NewClientAPI {
 		async clientLogger(type: string, ...args: string[]): Promise<void> {
+			triggerWriteAccessBecauseNoCheckNecessary()
+
 			const loggerFunction: LeveledLogMethodFixed = (logger as any)[type] || logger.log

 			loggerFunction(args.join(', '))
diff --git a/meteor/server/api/deviceTriggers/RundownContentObserver.ts b/meteor/server/api/deviceTriggers/RundownContentObserver.ts
index 3758160320..04e73f9987 100644
--- a/meteor/server/api/deviceTriggers/RundownContentObserver.ts
+++ b/meteor/server/api/deviceTriggers/RundownContentObserver.ts
@@ -23,6 +23,7 @@ import {
 	rundownPlaylistFieldSpecifier,
 	segmentFieldSpecifier,
 } from './reactiveContentCache'
+import { waitForAllObserversReady } from '../../publications/lib/lib'

 const REACTIVITY_DEBOUNCE = 20

@@ -37,13 +38,7 @@ export class RundownContentObserver {
 	}
 	#disposed = false

-	constructor(
-		rundownPlaylistId: RundownPlaylistId,
-		showStyleBaseId: ShowStyleBaseId,
-		rundownIds: RundownId[],
-		onChanged: ChangedHandler
-	) {
-		logger.silly(`Creating RundownContentObserver for playlist "${rundownPlaylistId}"`)
+	private constructor(onChanged: ChangedHandler) {
 		const { cache, cancel: cancelCache } = createReactiveContentCache(() => {
 			this.#cleanup = onChanged(cache)
 			if (this.#disposed) this.#cleanup()
@@ -51,19 +46,40 @@ export class RundownContentObserver {

 		this.#cache = cache
 		this.#cancelCache = cancelCache
+	}
+
+	static async create(
+		rundownPlaylistId: RundownPlaylistId,
+		showStyleBaseId: ShowStyleBaseId,
+		rundownIds: RundownId[],
+		onChanged: ChangedHandler
+	): Promise<RundownContentObserver> {
+		logger.silly(`Creating RundownContentObserver for playlist "${rundownPlaylistId}"`)
+
+		const observer = new RundownContentObserver(onChanged)
+
+		await observer.initObservers(rundownPlaylistId, showStyleBaseId, rundownIds)

-		this.#observers = [
-			RundownPlaylists.observeChanges(rundownPlaylistId, cache.RundownPlaylists.link(), {
+		return observer
+	}
+
+	private async initObservers(
+		rundownPlaylistId: RundownPlaylistId,
+		showStyleBaseId: ShowStyleBaseId,
+		rundownIds: RundownId[]
+	) {
+		this.#observers = await waitForAllObserversReady([
+			RundownPlaylists.observeChanges(rundownPlaylistId, this.#cache.RundownPlaylists.link(), {
 				projection: rundownPlaylistFieldSpecifier,
 			}),
-			ShowStyleBases.observeChanges(showStyleBaseId, cache.ShowStyleBases.link()),
+			ShowStyleBases.observeChanges(showStyleBaseId, this.#cache.ShowStyleBases.link()),
 			TriggeredActions.observeChanges(
 				{
 					showStyleBaseId: {
 						$in: [showStyleBaseId, null],
 					},
 				},
-				cache.TriggeredActions.link()
+				this.#cache.TriggeredActions.link()
 			),
 			Segments.observeChanges(
 				{
@@ -71,7 +87,7 @@ export class RundownContentObserver {
 					$in: rundownIds,
 				},
 			},
-				cache.Segments.link(),
+				this.#cache.Segments.link(),
 				{
 					projection: segmentFieldSpecifier,
 				}
 			),
 			Parts.observeChanges(
 				{
 					rundownId: {
 						$in: rundownIds,
 					},
 				},
-				cache.Parts.link(),
+				this.#cache.Parts.link(),
 				{
 					projection: partFieldSpecifier,
 				}
 			),
 			PartInstances.observeChanges(
 				{
 					rundownId: {
 						$in: rundownIds,
 					},
 					reset: {
 						$ne: true,
 					},
 				},
-				cache.PartInstances.link(),
+				this.#cache.PartInstances.link(),
 				{
 					projection: partInstanceFieldSpecifier,
 				}
 			),
@@ -107,7 +123,7 @@ export class RundownContentObserver {
 					$in: rundownIds,
 				},
 			},
-				cache.RundownBaselineAdLibActions.link(),
+				this.#cache.RundownBaselineAdLibActions.link(),
 				{
 					projection: adLibActionFieldSpecifier,
 				}
@@ -118,7 +134,7 @@ export class RundownContentObserver {
 					$in: rundownIds,
 				},
 			},
-				cache.RundownBaselineAdLibPieces.link(),
+				this.#cache.RundownBaselineAdLibPieces.link(),
 				{
 					projection: adLibPieceFieldSpecifier,
 				}
@@ -129,7 +145,7 @@ export class RundownContentObserver {
 					$in: rundownIds,
 				},
 			},
-				cache.AdLibActions.link(),
+				this.#cache.AdLibActions.link(),
 				{
 					projection: adLibActionFieldSpecifier,
 				}
@@ -140,12 +156,12 @@ export class RundownContentObserver {
 					$in: rundownIds,
 				},
 			},
-				cache.AdLibPieces.link(),
+				this.#cache.AdLibPieces.link(),
 				{
 					projection: adLibPieceFieldSpecifier,
 				}
 			),
-		]
+		])
 	}

 	public get cache(): ContentCache {
diff --git a/meteor/server/api/deviceTriggers/RundownsObserver.ts b/meteor/server/api/deviceTriggers/RundownsObserver.ts
index 9c915d7f84..eb88b76119 100644
--- a/meteor/server/api/deviceTriggers/RundownsObserver.ts
+++ b/meteor/server/api/deviceTriggers/RundownsObserver.ts
@@ -1,14 +1,14 @@
 import { Meteor } from 'meteor/meteor'
 import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
-import _ from 'underscore'
 import { Rundowns } from '../../collections'
 import { literal } from '@sofie-automation/corelib/dist/lib'
 import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo'
 import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
+import { PromiseDebounce } from '../../publications/lib/PromiseDebounce'

 const REACTIVITY_DEBOUNCE = 20

-type ChangedHandler = (rundownIds: RundownId[]) => () => void
+type ChangedHandler = (rundownIds: RundownId[]) => Promise<() => void>

 type RundownFields = '_id'
 const rundownFieldSpecifier = literal<MongoFieldSpecifierOnesStrict<Pick<DBRundown, RundownFields>>>({
@@ -16,53 +16,68 @@
 export class RundownsObserver {
 	#rundownsLiveQuery!: Meteor.LiveQueryHandle
 	#rundownIds: Set<RundownId> = new Set()
 	#changed: ChangedHandler | undefined
 	#cleanup: (() => void) | undefined

-	constructor(activePlaylistId: RundownPlaylistId, onChanged: ChangedHandler) {
+	#disposed = false
+
+	readonly #triggerUpdateRundownContent = new PromiseDebounce(async () => {
+		if (this.#disposed) return
+
+		if (!this.#changed) return
+		this.#cleanup?.()
+
+		const changed = this.#changed
+		this.#cleanup = await changed(this.rundownIds)
+
+		if (this.#disposed) this.#cleanup?.()
+	}, REACTIVITY_DEBOUNCE)
+
+	private constructor(onChanged: ChangedHandler) {
 		this.#changed = onChanged
-		this.#rundownsLiveQuery = Rundowns.observeChanges(
+	}
+
+	static async create(playlistId: RundownPlaylistId, onChanged: ChangedHandler): Promise<RundownsObserver> {
+		const observer = new RundownsObserver(onChanged)
+
+		await observer.init(playlistId)
+
+		return observer
+	}
+
+	private async init(activePlaylistId: RundownPlaylistId) {
+		this.#rundownsLiveQuery = await Rundowns.observeChanges(
 			{
 				playlistId: activePlaylistId,
 			},
 			{
 				added: (rundownId) => {
 					this.#rundownIds.add(rundownId)
-					this.updateRundownContent()
+					this.#triggerUpdateRundownContent.trigger()
 				},
 				removed: (rundownId) => {
 					this.#rundownIds.delete(rundownId)
-					this.updateRundownContent()
+					this.#triggerUpdateRundownContent.trigger()
 				},
 			},
 			{
 				projection: rundownFieldSpecifier,
 			}
 		)
-		this.updateRundownContent()
+
+		this.#triggerUpdateRundownContent.trigger()
 	}

 	public get rundownIds(): RundownId[] {
 		return Array.from(this.#rundownIds)
 	}
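The `PromiseDebounce` imported from `server/publications/lib/PromiseDebounce` replaces the old `_.debounce` + `Meteor.bindEnvironment` combination. Its implementation is not part of this diff; the following is a minimal sketch of the same contract (`trigger()` / `cancelWaiting()`), under the assumption that the real class is richer:

```ts
// Minimal sketch, not the repository's implementation: coalesce bursts of
// trigger() calls into one async execution per debounce window.
export class PromiseDebounce<TResult = void, TArgs extends unknown[] = []> {
	#timeout: ReturnType<typeof setTimeout> | undefined

	constructor(private readonly fn: (...args: TArgs) => Promise<TResult>, private readonly waitMs: number) {}

	trigger(...args: TArgs): void {
		// Restart the window on every call; only the last args win
		if (this.#timeout) clearTimeout(this.#timeout)
		this.#timeout = setTimeout(() => {
			this.#timeout = undefined
			// Surface errors instead of silently dropping the rejection
			this.fn(...args).catch((e) => console.error('PromiseDebounce error:', e))
		}, this.waitMs)
	}

	cancelWaiting(): void {
		if (this.#timeout) {
			clearTimeout(this.#timeout)
			this.#timeout = undefined
		}
	}
}
```

A promise-aware debounce matters here because the debounced handler is now async: the class can await the previous run's cleanup before starting the next one, which a plain `_.debounce` cannot do.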
-	private innerUpdateRundownContent = () => {
-		if (!this.#changed) return
-		this.#cleanup?.()
-
-		const changed = this.#changed
-		this.#cleanup = changed(this.rundownIds)
-	}
-
-	public updateRundownContent = _.debounce(
-		Meteor.bindEnvironment(this.innerUpdateRundownContent),
-		REACTIVITY_DEBOUNCE
-	)
-
 	public stop = (): void => {
-		this.updateRundownContent.cancel()
+		this.#disposed = true
+
+		this.#triggerUpdateRundownContent.cancelWaiting()
 		this.#rundownsLiveQuery.stop()
 		this.#changed = undefined
 		this.#cleanup?.()
diff --git a/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts b/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts
index c1ea42f0b3..03efd5c753 100644
--- a/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts
+++ b/meteor/server/api/deviceTriggers/StudioDeviceTriggerManager.ts
@@ -43,7 +43,7 @@ export class StudioDeviceTriggerManager {
 		StudioActionManagers.set(studioId, new StudioActionManager())
 	}

-	updateTriggers(cache: ContentCache, showStyleBaseId: ShowStyleBaseId): void {
+	async updateTriggers(cache: ContentCache, showStyleBaseId: ShowStyleBaseId): Promise<void> {
 		const studioId = this.studioId
 		this.#lastShowStyleBaseId = showStyleBaseId
@@ -56,7 +56,7 @@ export class StudioDeviceTriggerManager {
 			return
 		}

-		const context = createCurrentContextFromCache(cache)
+		const context = createCurrentContextFromCache(cache, studioId)
 		const actionManager = StudioActionManagers.get(studioId)
 		if (!actionManager)
 			throw new Meteor.Error(
@@ -88,7 +88,7 @@ export class StudioDeviceTriggerManager {
 		const addedPreviewIds: PreviewWrappedAdLibId[] = []

-		Object.entries(triggeredAction.actions).forEach(([key, action]) => {
+		for (const [key, action] of Object.entries(triggeredAction.actions)) {
 			// Since the compiled action is cached using this actionId as a key, having the action
 			// and the filterChain allows for a quicker invalidation without doing a deepEquals
 			const actionId = protectString(
@@ -106,9 +106,9 @@ export class StudioDeviceTriggerManager {
 			}
 			touchedActionIds.push(actionId)

-			Object.entries(triggeredAction.triggers).forEach(([key, trigger]) => {
+			for (const [key, trigger] of Object.entries(triggeredAction.triggers)) {
 				if (!isDeviceTrigger(trigger)) {
-					return
+					continue
 				}

 				let deviceActionArguments: ShiftRegisterActionArguments | undefined = undefined
@@ -141,7 +141,7 @@ export class StudioDeviceTriggerManager {
 					},
 				})
 				upsertedDeviceTriggerMountedActionIds.push(deviceTriggerMountedActionId)
-			})
+			}

 			if (!isPreviewableAction(thisAction)) {
 				const adLibPreviewId = protectString(`${actionId}_preview`)
@@ -165,7 +165,7 @@ export class StudioDeviceTriggerManager {
 				addedPreviewIds.push(adLibPreviewId)
 			} else {
-				const previewedAdLibs = thisAction.preview(context)
+				const previewedAdLibs = await thisAction.preview(context, null)

 				previewedAdLibs.forEach((adLib) => {
 					const adLibPreviewId = protectString(
@@ -195,7 +195,7 @@ export class StudioDeviceTriggerManager {
 					addedPreviewIds.push(adLibPreviewId)
 				})
 			}
-		})
+		}

 		DeviceTriggerMountedActionAdlibsPreview.remove({
 			triggeredActionId: triggeredAction._id,
@@ -271,7 +271,7 @@ function convertDocument(doc: ReadonlyObjectDeep): UITrigger
 	})
 }

-function createCurrentContextFromCache(cache: ContentCache): ReactivePlaylistActionContext {
+function createCurrentContextFromCache(cache: ContentCache, studioId: StudioId): ReactivePlaylistActionContext {
 	const rundownPlaylist = cache.RundownPlaylists.findOne({
 		activationId: {
 			$exists: true,
@@ -301,6 +301,7 @@ function createCurrentContextFromCache(cache: ContentCache): ReactivePlaylistActionContext {
 		: []

 	return {
+		studioId: new DummyReactiveVar(studioId),
 		currentPartInstanceId: new DummyReactiveVar(currentPartInstance?._id ?? null),
 		currentPartId: new DummyReactiveVar(currentPartInstance?.part._id ?? null),
 		nextPartId: new DummyReactiveVar(nextPartInstance?.part._id ?? null),
diff --git a/meteor/server/api/deviceTriggers/StudioObserver.ts b/meteor/server/api/deviceTriggers/StudioObserver.ts
index a6b38c239b..e9b7250963 100644
--- a/meteor/server/api/deviceTriggers/StudioObserver.ts
+++ b/meteor/server/api/deviceTriggers/StudioObserver.ts
@@ -20,6 +20,7 @@ import { ContentCache } from './reactiveContentCache'
 import { RundownContentObserver } from './RundownContentObserver'
 import { RundownsObserver } from './RundownsObserver'
 import { RundownPlaylists, Rundowns, ShowStyleBases } from '../../collections'
+import { PromiseDebounce } from '../../publications/lib/PromiseDebounce'

 type ChangedHandler = (showStyleBaseId: ShowStyleBaseId, cache: ContentCache) => () => void

@@ -66,6 +67,8 @@ export class StudioObserver extends EventEmitter {

 	#changed: ChangedHandler

+	#disposed = false
+
 	constructor(studioId: StudioId, onChanged: ChangedHandler) {
 		super()
 		this.#changed = onChanged
@@ -93,6 +96,8 @@ export class StudioObserver extends EventEmitter {
 			activePlaylist: Pick
 		} | null
 	): void => {
+		if (this.#disposed) return
+
 		const activePlaylistId = state?.activePlaylist?._id
 		const activationId = state?.activePlaylist?.activationId
 		const currentRundownId =
@@ -146,73 +151,72 @@ export class StudioObserver extends EventEmitter {
 				  ) as Promise)
 				: null
 		)
-			.end(this.updateShowStyle)
+			.end(this.updateShowStyle.call)
 	}

-	private updateShowStyle = _.debounce(
-		Meteor.bindEnvironment(
-			(
-				state: {
-					currentRundown: Pick
-					showStyleBase: Pick
-				} | null
-			) => {
-				const showStyleBaseId = state?.showStyleBase._id
-
-				if (
-					showStyleBaseId === undefined ||
-					!this.nextProps?.activePlaylistId ||
-					!this.nextProps?.activationId
-				) {
-					this.currentProps = undefined
-					this.#rundownsLiveQuery?.stop()
-					this.#rundownsLiveQuery = undefined
-					this.showStyleBaseId = showStyleBaseId
-					return
-				}
-
-				if (
-					showStyleBaseId === this.showStyleBaseId &&
-					this.nextProps?.activationId === this.currentProps?.activationId &&
-					this.nextProps?.activePlaylistId === this.currentProps?.activePlaylistId &&
-					this.nextProps?.currentRundownId === this.currentProps?.currentRundownId
-				)
-					return
-
-				this.#rundownsLiveQuery?.stop()
-				this.#rundownsLiveQuery = undefined
-
-				this.currentProps = this.nextProps
-				this.nextProps = undefined
-
-				const { activePlaylistId } = this.currentProps
-
-				this.showStyleBaseId = showStyleBaseId
-
-				let cleanupChanges: (() => void) | undefined = undefined
-
-				this.#rundownsLiveQuery = new RundownsObserver(activePlaylistId, (rundownIds) => {
-					logger.silly(`Creating new RundownContentObserver`)
-					const obs1 = new RundownContentObserver(activePlaylistId, showStyleBaseId, rundownIds, (cache) => {
-						cleanupChanges = this.#changed(showStyleBaseId, cache)
-
-						return () => {
-							void 0
-						}
-					})
-
-					return () => {
-						obs1.stop()
-						cleanupChanges?.()
-					}
-				})
+	private readonly updateShowStyle = new PromiseDebounce<
+		void,
+		[
+			{
+				currentRundown: Pick
+				showStyleBase: Pick
+			} | null
+		]
+	>(async (state): Promise<void> => {
+		if (this.#disposed) return
+
+		const showStyleBaseId = state?.showStyleBase._id
+
+		if (showStyleBaseId === undefined || !this.nextProps?.activePlaylistId || !this.nextProps?.activationId) {
+			this.currentProps = undefined
+			this.#rundownsLiveQuery?.stop()
+			this.#rundownsLiveQuery = undefined
+			this.showStyleBaseId = showStyleBaseId
+			return
+		}
+
+		if (
+			showStyleBaseId === this.showStyleBaseId &&
+			this.nextProps?.activationId === this.currentProps?.activationId &&
+			this.nextProps?.activePlaylistId === this.currentProps?.activePlaylistId &&
+			this.nextProps?.currentRundownId === this.currentProps?.currentRundownId
+		)
+			return
+
+		this.#rundownsLiveQuery?.stop()
+		this.#rundownsLiveQuery = undefined
+
+		this.currentProps = this.nextProps
+		this.nextProps = undefined
+
+		const { activePlaylistId } = this.currentProps
+
+		this.showStyleBaseId = showStyleBaseId
+
+		this.#rundownsLiveQuery = await RundownsObserver.create(activePlaylistId, async (rundownIds) => {
+			logger.silly(`Creating new RundownContentObserver`)
+
+			const obs1 = await RundownContentObserver.create(activePlaylistId, showStyleBaseId, rundownIds, (cache) => {
+				return this.#changed(showStyleBaseId, cache)
+			})
+
+			return () => {
+				obs1.stop()
 			}
-		),
-		REACTIVITY_DEBOUNCE
-	)
+		})
+
+		if (this.#disposed) {
+			// If we were disposed of while waiting for the observer to be created, stop it immediately
+			this.#rundownsLiveQuery.stop()
+		}
+	}, REACTIVITY_DEBOUNCE)

 	public stop = (): void => {
+		this.#disposed = true
+
+		this.updateShowStyle.cancelWaiting()
 		this.#playlistInStudioLiveQuery.stop()
 		this.updatePlaylistInStudio.cancel()
+		this.#rundownsLiveQuery?.stop()
 	}
 }
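The `#disposed` checks above guard a race that async factories introduce: `stop()` can arrive while a `create()` is still awaiting, so the late-arriving observer must be torn down immediately rather than leaked. A distilled sketch of that pattern, with assumed names rather than the repository's API:

```ts
// Illustrative sketch of the disposal-race guard used by these observers.
interface Stoppable {
	stop(): void
}

class ObserverHolder {
	#disposed = false
	#inner: Stoppable | undefined

	async start(create: () => Promise<Stoppable>): Promise<void> {
		const inner = await create()
		if (this.#disposed) {
			// stop() ran while we were awaiting: tear the late arrival down now
			inner.stop()
			return
		}
		this.#inner = inner
	}

	stop(): void {
		this.#disposed = true
		this.#inner?.stop()
	}
}
```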
diff --git a/meteor/server/api/deviceTriggers/observer.ts b/meteor/server/api/deviceTriggers/observer.ts
index b23ba371ae..fb1448f24e 100644
--- a/meteor/server/api/deviceTriggers/observer.ts
+++ b/meteor/server/api/deviceTriggers/observer.ts
@@ -18,14 +18,14 @@ import { StudioObserver } from './StudioObserver'
 import { Studios } from '../../collections'
 import { ReactiveCacheCollection } from '../../publications/lib/ReactiveCacheCollection'
 import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
+import { MeteorStartupAsync } from '../../lib/lib'

 type ObserverAndManager = {
 	observer: StudioObserver
 	manager: StudioDeviceTriggerManager
 }

-Meteor.startup(() => {
-	if (!Meteor.isServer) return
+MeteorStartupAsync(async () => {
 	const studioObserversAndManagers = new Map<StudioId, ObserverAndManager>()
 	const jobQueue = new JobQueueWithClasses({
 		autoStart: true,
@@ -44,7 +44,7 @@
 			const manager = new StudioDeviceTriggerManager(studioId)
 			const observer = new StudioObserver(studioId, (showStyleBaseId, cache) => {
 				workInQueue(async () => {
-					manager.updateTriggers(cache, showStyleBaseId)
+					await manager.updateTriggers(cache, showStyleBaseId)
 				})

 				return () => {
@@ -69,7 +69,7 @@
 		}
 	}

-	Studios.observeChanges(
+	await Studios.observeChanges(
 		{},
 		{
 			added: (studioId) => {
@@ -117,10 +117,12 @@ export async function receiveInputDeviceTrigger(
 	if (!actionManager)
 		throw new Meteor.Error(500, `No Studio Action Manager available to handle trigger in Studio "${studioId}"`)

-	DeviceTriggerMountedActions.find({
+	const mountedActions = DeviceTriggerMountedActions.find({
 		deviceId,
 		deviceTriggerId: triggerId,
-	}).forEach((mountedAction) => {
+	}).fetch()
+
+	for (const mountedAction of mountedActions) {
 		if (values && !_.isMatch(values, mountedAction.values)) return
 		const executableAction = actionManager.getAction(mountedAction.actionId)
 		if (!executableAction)
@@ -132,6 +134,6 @@ export async function receiveInputDeviceTrigger(
 		const context = actionManager.getContext()
 		if (!context) throw new Meteor.Error(500, `Undefined Device Trigger context for studio "${studioId}"`)

-		executableAction.execute((t: ITranslatableMessage) => t.key ?? t, `${deviceId}: ${triggerId}`, context)
-	})
+		await executableAction.execute((t: ITranslatableMessage) => t.key ?? t, `${deviceId}: ${triggerId}`, context)
+	}
 }
diff --git a/meteor/server/api/deviceTriggers/triggersContext.ts b/meteor/server/api/deviceTriggers/triggersContext.ts
index 85397cc531..ec563c562b 100644
--- a/meteor/server/api/deviceTriggers/triggersContext.ts
+++ b/meteor/server/api/deviceTriggers/triggersContext.ts
@@ -1,51 +1,68 @@
-import { TriggersContext } from '@sofie-automation/meteor-lib/dist/triggers/triggersContext'
+import {
+	TriggersAsyncCollection,
+	TriggersContext,
+	TriggerTrackerComputation,
+} from '@sofie-automation/meteor-lib/dist/triggers/triggersContext'
 import { SINGLE_USE_TOKEN_SALT } from '@sofie-automation/meteor-lib/dist/api/userActions'
-import { assertNever, getHash, Time } from '../../lib/tempLib'
+import { assertNever, getHash, ProtectedString, Time } from '../../lib/tempLib'
 import { getCurrentTime } from '../../lib/lib'
 import { MeteorCall } from '../methods'
 import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client'
 import { UserAction } from '@sofie-automation/meteor-lib/dist/userAction'
 import { TFunction } from 'i18next'
-import { Tracker } from 'meteor/tracker'
-
 import { logger } from '../../logging'
 import { IBaseFilterLink, IRundownPlaylistFilterLink } from '@sofie-automation/blueprints-integration'
 import { PartId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { DummyReactiveVar } from '@sofie-automation/meteor-lib/dist/triggers/reactive-var'
 import { ReactivePlaylistActionContext } from '@sofie-automation/meteor-lib/dist/triggers/actionFactory'
-import { MongoQuery } from '@sofie-automation/corelib/dist/mongo'
-import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections'
-import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
-import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
-import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances'
+import { FindOneOptions, FindOptions, MongoQuery } from '@sofie-automation/corelib/dist/mongo'
+import { DBRundownPlaylist, SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+import {
+	AdLibActions,
+	AdLibPieces,
+	PartInstances,
+	Parts,
+	RundownBaselineAdLibActions,
+	RundownBaselineAdLibPieces,
+	RundownPlaylists,
+	Rundowns,
+	Segments,
+} from '../../collections'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
-import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction'
-import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece'
-import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
-import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
-import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
-import { createSyncReadOnlyMongoCollection } from './triggersContextCollection'
+import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'
+import { AsyncOnlyReadOnlyMongoCollection } from '../../collections/collection'

 export function hashSingleUseToken(token: string): string {
 	return getHash(SINGLE_USE_TOKEN_SALT + token)
 }

-/**
- * Some synchronous read-only collections to satisfy the TriggersContext interface
- */
-const AdLibActions = createSyncReadOnlyMongoCollection<AdLibAction>(CollectionName.AdLibActions)
-const AdLibPieces = createSyncReadOnlyMongoCollection<AdLibPiece>(CollectionName.AdLibPieces)
-const PartInstances = createSyncReadOnlyMongoCollection<PartInstance>(CollectionName.PartInstances)
-const Parts = createSyncReadOnlyMongoCollection<DBPart>(CollectionName.Parts)
-const RundownBaselineAdLibActions = createSyncReadOnlyMongoCollection<RundownBaselineAdLibAction>(
-	CollectionName.RundownBaselineAdLibActions
-)
-const RundownBaselineAdLibPieces = createSyncReadOnlyMongoCollection<RundownBaselineAdLibItem>(
-	CollectionName.RundownBaselineAdLibPieces
-)
-const RundownPlaylists = createSyncReadOnlyMongoCollection<DBRundownPlaylist>(CollectionName.RundownPlaylists)
-const Rundowns = createSyncReadOnlyMongoCollection<DBRundown>(CollectionName.Rundowns)
-const Segments = createSyncReadOnlyMongoCollection<DBSegment>(CollectionName.Segments)
+class MeteorTriggersCollectionWrapper<DBInterface extends { _id: ProtectedString<any> }>
+	implements TriggersAsyncCollection<DBInterface>
+{
+	readonly #collection: AsyncOnlyReadOnlyMongoCollection<DBInterface>
+
+	constructor(collection: AsyncOnlyReadOnlyMongoCollection<DBInterface>) {
+		this.#collection = collection
+	}
+
+	async findFetchAsync(
+		_computation: TriggerTrackerComputation | null,
+		selector: MongoQuery<DBInterface>,
+		options?: FindOptions<DBInterface>
+	): Promise<Array<DBInterface>> {
+		// Note: the _computation is not used, since we are not using Tracker server-side
+		return this.#collection.findFetchAsync(selector, options)
+	}
+
+	async findOneAsync(
+		_computation: TriggerTrackerComputation | null,
+		selector: MongoQuery<DBInterface> | DBInterface['_id'],
+		options?: FindOneOptions<DBInterface>
+	): Promise<DBInterface | undefined> {
+		// Note: the _computation is not used, since we are not using Tracker server-side
+		return this.#collection.findOneAsync(selector, options)
+	}
+}

 export const MeteorTriggersContext: TriggersContext = {
 	MeteorCall,
@@ -54,14 +71,14 @@ export const MeteorTriggersContext: TriggersContext = {

 	isClient: false,

-	AdLibActions,
-	AdLibPieces,
-	Parts,
-	RundownBaselineAdLibActions,
-	RundownBaselineAdLibPieces,
-	RundownPlaylists,
-	Rundowns,
-	Segments,
+	AdLibActions: new MeteorTriggersCollectionWrapper(AdLibActions),
+	AdLibPieces: new MeteorTriggersCollectionWrapper(AdLibPieces),
+	Parts: new MeteorTriggersCollectionWrapper(Parts),
+	RundownBaselineAdLibActions: new MeteorTriggersCollectionWrapper(RundownBaselineAdLibActions),
+	RundownBaselineAdLibPieces: new MeteorTriggersCollectionWrapper(RundownBaselineAdLibPieces),
+	RundownPlaylists: new MeteorTriggersCollectionWrapper(RundownPlaylists),
+	Rundowns: new MeteorTriggersCollectionWrapper(Rundowns),
+	Segments: new MeteorTriggersCollectionWrapper(Segments),

 	hashSingleUseToken,
@@ -81,72 +98,92 @@ export const MeteorTriggersContext: TriggersContext = {
 		)
 	},

-	nonreactiveTracker: Tracker.nonreactive,
+	withComputation: async (_computation, func) => {
+		// Note: the _computation is not used, since we are not using Tracker server-side
+		return func()
+	},

-	memoizedIsolatedAutorun: <T extends (...args: any) => any>(
-		fnc: T,
+	memoizedIsolatedAutorun: async <TArgs extends any[], TRes>(
+		computation: TriggerTrackerComputation | null,
+		fnc: (computation: TriggerTrackerComputation | null, ...args: TArgs) => Promise<TRes>,
 		_functionName: string,
-		...params: Parameters<T>
-	): ReturnType<T> => {
-		return fnc(...(params as any))
+		...params: TArgs
+	): Promise<TRes> => {
+		return fnc(computation, ...params)
 	},

 	createContextForRundownPlaylistChain,
 }
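To illustrate the calling convention these async trigger collections impose on consumers, here is a sketch with assumed document shapes and names: on the server the computation argument is simply threaded through as `null`, since Tracker only exists client-side, while the client can pass a real computation for reactivity.

```ts
// Assumed shapes, for illustration only.
import type { TriggerTrackerComputation } from '@sofie-automation/meteor-lib/dist/triggers/triggersContext'

interface PartDoc {
	_id: string
	segmentId: string
}

interface AsyncCollectionLike<T> {
	findFetchAsync(
		computation: TriggerTrackerComputation | null,
		selector: Record<string, unknown>,
		options?: { projection?: Record<string, number> }
	): Promise<T[]>
}

async function partIdsInSegment(
	parts: AsyncCollectionLike<PartDoc>,
	segmentId: string,
	computation: TriggerTrackerComputation | null = null
): Promise<string[]> {
	// A projection keeps the fetch cheap when only _id is needed
	const docs = await parts.findFetchAsync(computation, { segmentId }, { projection: { _id: 1 } })
	return docs.map((p) => p._id)
}
```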
-function createContextForRundownPlaylistChain( +async function createContextForRundownPlaylistChain( studioId: StudioId, filterChain: IBaseFilterLink[] -): ReactivePlaylistActionContext | undefined { +): Promise<ReactivePlaylistActionContext | undefined> { - const playlist = rundownPlaylistFilter( + const playlist = await rundownPlaylistFilter( studioId, filterChain.filter((link) => link.object === 'rundownPlaylist') as IRundownPlaylistFilterLink[] ) if (!playlist) return undefined - let currentPartId: PartId | null = null, - nextPartId: PartId | null = null, - currentPartInstance: PartInstance | null = null, - currentSegmentPartIds: PartId[] = [], - nextSegmentPartIds: PartId[] = [] - - if (playlist.currentPartInfo) { - currentPartInstance = PartInstances.findOne(playlist.currentPartInfo.partInstanceId) ?? null - const currentPart = currentPartInstance?.part ?? null - if (currentPart) { - currentPartId = currentPart._id - currentSegmentPartIds = Parts.find({ - segmentId: currentPart.segmentId, - }).map((part) => part._id) - } - } - if (playlist.nextPartInfo) { - const nextPart = PartInstances.findOne(playlist.nextPartInfo.partInstanceId)?.part ?? null - if (nextPart) { - nextPartId = nextPart._id - nextSegmentPartIds = Parts.find({ - segmentId: nextPart.segmentId, - }).map((part) => part._id) - } - } + const [currentPartInfo, nextPartInfo] = await Promise.all([ + fetchInfoForSelectedPart(playlist.currentPartInfo), + fetchInfoForSelectedPart(playlist.nextPartInfo), + ]) return { + studioId: new DummyReactiveVar(studioId), rundownPlaylistId: new DummyReactiveVar(playlist?._id), rundownPlaylist: new DummyReactiveVar(playlist), - currentRundownId: new DummyReactiveVar(currentPartInstance?.rundownId ?? playlist.rundownIdsInOrder[0] ?? null), - currentPartId: new DummyReactiveVar(currentPartId), - currentSegmentPartIds: new DummyReactiveVar(currentSegmentPartIds), - nextPartId: new DummyReactiveVar(nextPartId), - nextSegmentPartIds: new DummyReactiveVar(nextSegmentPartIds), + currentRundownId: new DummyReactiveVar( + playlist.currentPartInfo?.rundownId ?? playlist.rundownIdsInOrder[0] ?? null + ), + currentPartId: new DummyReactiveVar(currentPartInfo?.partId ?? null), + currentSegmentPartIds: new DummyReactiveVar(currentPartInfo?.segmentPartIds ?? []), + nextPartId: new DummyReactiveVar(nextPartInfo?.partId ?? null), + nextSegmentPartIds: new DummyReactiveVar(nextPartInfo?.segmentPartIds ?? []), currentPartInstanceId: new DummyReactiveVar(playlist.currentPartInfo?.partInstanceId ?? 
null), } } -function rundownPlaylistFilter( +async function fetchInfoForSelectedPart(partInfo: SelectedPartInstance | null): Promise<{ + partId: PartId + segmentPartIds: PartId[] +} | null> { + if (!partInfo) return null + + const partInstance = (await PartInstances.findOneAsync(partInfo.partInstanceId, { + projection: { + // @ts-expect-error deep property + 'part._id': 1, + segmentId: 1, + }, + })) as (Pick<DBPartInstance, '_id' | 'segmentId'> & { part: Pick<DBPart, '_id'> }) | null + + if (!partInstance) return null + + const partId = partInstance.part._id + const segmentPartIds = await Parts.findFetchAsync( + { + segmentId: partInstance.segmentId, + }, + { + projection: { + _id: 1, + }, + } + ).then((parts) => parts.map((part) => part._id)) + + return { + partId, + segmentPartIds, + } +} + +async function rundownPlaylistFilter( studioId: StudioId, filterChain: IRundownPlaylistFilterLink[] -): DBRundownPlaylist | undefined { +): Promise<DBRundownPlaylist | undefined> { const selector: MongoQuery<DBRundownPlaylist> = { $and: [ { @@ -180,5 +217,5 @@ function rundownPlaylistFilter( } }) - return RundownPlaylists.findOne(selector) + return RundownPlaylists.findOneAsync(selector) } diff --git a/meteor/server/api/deviceTriggers/triggersContextCollection.ts b/meteor/server/api/deviceTriggers/triggersContextCollection.ts deleted file mode 100644 index 23711d92bb..0000000000 --- a/meteor/server/api/deviceTriggers/triggersContextCollection.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Mongo } from 'meteor/mongo' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' -import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { - MongoReadOnlyCollection, - MongoCursor, - FindOptions, - FindOneOptions, -} from '@sofie-automation/meteor-lib/dist/collections/lib' -import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' -import { getOrCreateMongoCollection } from '../../collections/collection' - -/** - * Create a Mongo Collection for use in the client (has sync apis) - * @param name Name of the collection - */ -export function createSyncReadOnlyMongoCollection<DBInterface extends { _id: ProtectedString<any> }>( - name: CollectionName -): MongoReadOnlyCollection<DBInterface> { - const collection = getOrCreateMongoCollection(name) - const wrapped = new WrappedMongoReadOnlyCollection(collection, name) - - // registerClientCollection(name, wrapped) - - return wrapped -} - -class WrappedMongoReadOnlyCollection<DBInterface extends { _id: ProtectedString<any> }> - implements MongoReadOnlyCollection<DBInterface> -{ - protected readonly _collection: Mongo.Collection<DBInterface> - - public readonly name: string | null - - constructor(collection: Mongo.Collection<DBInterface>, name: string | null) { - this._collection = collection - this.name = name - } - - protected get _isMock() { - // @ts-expect-error re-export private property - return this._collection._isMock - } - - public get mockCollection() { - return this._collection - } - - protected wrapMongoError(e: any): never { - const str = stringifyError(e) || 'Unknown MongoDB Error' - throw new Meteor.Error((e && e.error) || 500, `Collection "${this.name}": ${str}`) - } - - find( - selector?: MongoQuery<DBInterface> | DBInterface['_id'], - options?: FindOptions<DBInterface> - ): MongoCursor<DBInterface> { - try { - return this._collection.find((selector ?? {}) as any, options as any) as MongoCursor<DBInterface> - } catch (e) { - this.wrapMongoError(e) - } - } - findOne( - selector?: MongoQuery<DBInterface> | DBInterface['_id'], - options?: FindOneOptions<DBInterface> - ): DBInterface | undefined { - try { - return this._collection.findOne((selector ?? {}) as any, options as any) - } catch (e) { - this.wrapMongoError(e) - } - } -}
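The ingestCache.ts diff that follows reworks RundownIngestDataCache so its fetch methods return plain IngestRundown/IngestSegment objects rather than the LocalIngest* wrappers that carried modified-timestamps. Typical consumption, sketched under the assumption that a rundownId and segmentId are in scope (create() does one bulk fetch from the collection and the fetch* methods then assemble results in memory):

	const ingestCache = await RundownIngestDataCache.create(rundownId)
	const ingestRundown = ingestCache.fetchRundown() // IngestRundown | undefined, with segments and parts re-assembled
	const ingestSegment = ingestCache.fetchSegment(segmentId) // IngestSegment | undefined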
diff --git a/meteor/server/api/ingest/ingestCache.ts b/meteor/server/api/ingest/ingestCache.ts index e8f2cc74c9..36ca3a381f 100644 --- a/meteor/server/api/ingest/ingestCache.ts +++ b/meteor/server/api/ingest/ingestCache.ts @@ -1,58 +1,55 @@ import * as _ from 'underscore' import { Meteor } from 'meteor/meteor' -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' +import { IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { profiler } from '../profiler' import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestDataCache } from '../../collections' -import { IngestCacheType, IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCache } from '../../collections' +import { + NrcsIngestCacheType, + NrcsIngestDataCacheObj, + NrcsIngestDataCacheObjRundown, + NrcsIngestDataCacheObjSegment, +} from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { groupByToMap } from '@sofie-automation/corelib/dist/lib' -interface LocalIngestBase { - modified: number -} -export interface LocalIngestRundown extends IngestRundown, LocalIngestBase { - segments: LocalIngestSegment[] -} -export interface LocalIngestSegment extends IngestSegment, LocalIngestBase { - parts: LocalIngestPart[] -} -export interface LocalIngestPart extends IngestPart, LocalIngestBase {} - +/** + * This class provides a few convenience methods for fetching IngestRundown + * (or its child types) from the NrcsIngestDataCache collection + */ export class RundownIngestDataCache { - private constructor(private readonly rundownId: RundownId, private readonly documents: IngestDataCacheObj[]) {} + private constructor(private readonly rundownId: RundownId, private readonly documents: NrcsIngestDataCacheObj[]) {} static async create(rundownId: RundownId): Promise<RundownIngestDataCache> { - const docs = await IngestDataCache.findFetchAsync({ rundownId }) + const docs = await NrcsIngestDataCache.findFetchAsync({ rundownId }) return new RundownIngestDataCache(rundownId, docs) } - fetchRundown(): LocalIngestRundown | undefined { + fetchRundown(): IngestRundown | undefined { const span = profiler.startSpan('ingest.ingestCache.loadCachedRundownData') - const cachedRundown = this.documents.find((e) => e.type === IngestCacheType.RUNDOWN) + const cachedRundown = this.documents.find( + (e): e is NrcsIngestDataCacheObjRundown => e.type === NrcsIngestCacheType.RUNDOWN + ) if (!cachedRundown) { span?.end() return undefined } - const ingestRundown = cachedRundown.data as LocalIngestRundown - ingestRundown.modified = cachedRundown.modified + const ingestRundown = cachedRundown.data const segmentMap = groupByToMap(this.documents, 'segmentId') for (const objs of segmentMap.values()) { - const segmentEntry = objs.find((e) => e.type === IngestCacheType.SEGMENT) + const segmentEntry = objs.find( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) if (segmentEntry) { - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified + const ingestSegment = segmentEntry.data for (const entry of objs) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - ingestSegment.parts.push(ingestPart) + if (entry.type === 
NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) } } @@ -67,10 +64,12 @@ export class RundownIngestDataCache { return ingestRundown } - fetchSegment(segmentId: SegmentId): LocalIngestSegment | undefined { + fetchSegment(segmentId: SegmentId): IngestSegment | undefined { const cacheEntries = this.documents.filter((d) => d.segmentId && d.segmentId === segmentId) - const segmentEntries = cacheEntries.filter((e) => e.type === IngestCacheType.SEGMENT) + const segmentEntries = cacheEntries.filter( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) if (segmentEntries.length > 1) logger.warn( `There are multiple segments (${cacheEntries.length}) in IngestDataCache for rundownId: "${this.rundownId}", segmentId: "${segmentId}"` @@ -78,17 +77,14 @@ export class RundownIngestDataCache { const segmentEntry = segmentEntries[0] if (!segmentEntry) return undefined - if (segmentEntry.type !== IngestCacheType.SEGMENT) throw new Meteor.Error(500, 'Wrong type on cached segment') + if (segmentEntry.type !== NrcsIngestCacheType.SEGMENT) + throw new Meteor.Error(500, 'Wrong type on cached segment') - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified + const ingestSegment = segmentEntry.data for (const entry of cacheEntries) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - ingestSegment.parts.push(ingestPart) + if (entry.type === NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) } } diff --git a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts index ba2d158797..5de794580a 100644 --- a/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts +++ b/meteor/server/api/ingest/mosDevice/__tests__/actions.test.ts @@ -54,7 +54,7 @@ describe('Test sending mos actions', () => { const fakeRundown = { _id: rundownId, externalId: getRandomString(), studioId: studioId } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { @@ -96,7 +96,7 @@ describe('Test sending mos actions', () => { } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { @@ -153,7 +153,7 @@ describe('Test sending mos actions', () => { } // Listen for changes - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { deviceId: device._id }, { added: (id: PeripheralDeviceCommandId) => { diff --git a/meteor/server/api/ingest/mosDevice/actions.ts b/meteor/server/api/ingest/mosDevice/actions.ts index d719916249..c2bc59275c 100644 --- a/meteor/server/api/ingest/mosDevice/actions.ts +++ b/meteor/server/api/ingest/mosDevice/actions.ts @@ -1,4 +1,3 @@ -import { MOS } from '@sofie-automation/corelib' import { logger } from '../../../logging' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { Meteor } from 'meteor/meteor' @@ -13,7 +12,7 @@ import { generateRundownSource, getPeripheralDeviceFromRundown, runIngestOperati import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { DEFAULT_MOS_TIMEOUT_TIME } from 
'@sofie-automation/shared-lib/dist/core/constants' import { executePeripheralDeviceFunctionWithCustomTimeout } from '../../peripheralDevice/executeFunction' -import { getMosTypes } from '@mos-connection/helper' +import { MOS } from '@sofie-automation/meteor-lib/dist/mos' export namespace MOSDeviceActions { export async function reloadRundown( @@ -73,7 +72,7 @@ export namespace MOSDeviceActions { if (!mosPayload.Body) throw new Meteor.Error(500, `Part Cache for "${partCache.externalId}" missing FullStory content!`) - const mosTypes = getMosTypes(false) + const mosTypes = MOS.getMosTypes(false) const story = mosPayload.Body.find( (item) => diff --git a/meteor/server/api/ingest/rundownInput.ts b/meteor/server/api/ingest/rundownInput.ts index bacdd0fd55..6534daaf34 100644 --- a/meteor/server/api/ingest/rundownInput.ts +++ b/meteor/server/api/ingest/rundownInput.ts @@ -1,9 +1,9 @@ import { Meteor } from 'meteor/meteor' import { check } from '../../lib/check' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { IngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' +import { NrcsIngestDataCache, MediaObjects, Parts, Rundowns, Segments } from '../../collections' import { literal } from '../../lib/tempLib' -import { lazyIgnore } from '../../lib/lib' +import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' import { IngestRundown, IngestSegment, IngestPart, IngestPlaylist } from '@sofie-automation/blueprints-integration' import { logger } from '../../logging' import { RundownIngestDataCache } from './ingestCache' @@ -17,7 +17,7 @@ import { MethodContext } from '../methodContext' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' import { MediaObject } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' import { PeripheralDeviceId, RundownId, SegmentId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' export namespace RundownInput { @@ -363,17 +363,15 @@ async function listIngestRundowns(peripheralDevice: PeripheralDevice): Promise { - if (Meteor.isServer) { - MediaObjects.observe( - {}, - { - added: onMediaObjectChanged, - changed: onMediaObjectChanged, - }, - { fields: { _id: 1, mediaId: 1, mediainfo: 1, studioId: 1 } } - ) - } +MeteorStartupAsync(async () => { + await MediaObjects.observe( + {}, + { + added: onMediaObjectChanged, + changed: onMediaObjectChanged, + }, + { fields: { _id: 1, mediaId: 1, mediainfo: 1, studioId: 1 } } + ) }) interface MediaObjectUpdatedIds { @@ -415,9 +413,9 @@ async function onMediaObjectChanged(newDocument: MediaObject, oldDocument?: Medi const validSegmentIds = new Set( ( - await IngestDataCache.findFetchAsync( + await NrcsIngestDataCache.findFetchAsync( { - type: IngestCacheType.SEGMENT, + type: NrcsIngestCacheType.SEGMENT, rundownId: { $in: updateIds.map((obj) => obj.rundownId) }, }, { @@ -431,19 +429,19 @@ async function onMediaObjectChanged(newDocument: MediaObject, oldDocument?: Medi for (const mediaObjectUpdatedIds of updateIds) { if (validSegmentIds.has(mediaObjectUpdatedIds.segmentId)) { - try { - lazyIgnore( - `updateSegmentFromMediaObject_${mediaObjectUpdatedIds.segmentId}`, - async () => updateSegmentFromCache(newDocument.studioId, 
mediaObjectUpdatedIds), - 200 - ) - } catch (exception) { - logger.error( - `Error thrown while updating Segment from cache after MediaObject changed: ${stringifyError( - exception - )}` - ) - } + lazyIgnore( + `updateSegmentFromMediaObject_${mediaObjectUpdatedIds.segmentId}`, + () => { + updateSegmentFromCache(newDocument.studioId, mediaObjectUpdatedIds).catch((e) => { + logger.error( + `Error thrown while updating Segment from cache after MediaObject changed: ${stringifyError( + e + )}` + ) + }) + }, + 200 + ) } } } diff --git a/meteor/server/api/peripheralDevice/executeFunction.ts b/meteor/server/api/peripheralDevice/executeFunction.ts index e89c638ad1..e3e7d4de8e 100644 --- a/meteor/server/api/peripheralDevice/executeFunction.ts +++ b/meteor/server/api/peripheralDevice/executeFunction.ts @@ -126,7 +126,7 @@ export async function executePeripheralDeviceFunctionWithCustomTimeout( }) } - observer = PeripheralDeviceCommands.observeChanges( + observer = await PeripheralDeviceCommands.observeChanges( { _id: commandId, }, diff --git a/meteor/server/api/playout/playout.ts b/meteor/server/api/playout/playout.ts index f5b11df21f..20fb5e40c3 100644 --- a/meteor/server/api/playout/playout.ts +++ b/meteor/server/api/playout/playout.ts @@ -1,12 +1,10 @@ /* tslint:disable:no-use-before-declare */ -import { Meteor } from 'meteor/meteor' -import * as _ from 'underscore' -import { StudioRouteBehavior } from '@sofie-automation/corelib/dist/dataModel/Studio' import { PackageInfo } from '../../coreSystem' import { StudioContentAccess } from '../../security/studio' import { shouldUpdateStudioBaselineInner } from '@sofie-automation/corelib/dist/studio/baseline' -import { logger } from '../../logging' -import { Blueprints, RundownPlaylists, Studios, Timeline } from '../../collections' +import { Blueprints, RundownPlaylists, Timeline } from '../../collections' +import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' +import { QueueStudioJob } from '../../worker/worker' export namespace ServerPlayoutAPI { export async function shouldUpdateStudioBaseline(access: StudioContentAccess): Promise { @@ -38,32 +36,12 @@ export namespace ServerPlayoutAPI { export async function switchRouteSet( access: StudioContentAccess, routeSetId: string, - state: boolean + state: boolean | 'toggle' ): Promise { - logger.debug(`switchRouteSet "${access.studioId}" "${routeSetId}"=${state}`) - - const studio = access.studio - - if (studio.routeSets[routeSetId] === undefined) - throw new Meteor.Error(404, `RouteSet "${routeSetId}" not found!`) - const routeSet = studio.routeSets[routeSetId] - if (routeSet.behavior === StudioRouteBehavior.ACTIVATE_ONLY && state === false) - throw new Meteor.Error(400, `RouteSet "${routeSetId}" is ACTIVATE_ONLY`) - - const modification: Record = {} - modification[`routeSets.${routeSetId}.active`] = state - - if (studio.routeSets[routeSetId].exclusivityGroup && state === true) { - _.each(studio.routeSets, (otherRouteSet, otherRouteSetId) => { - if (otherRouteSetId === routeSetId) return - if (otherRouteSet.exclusivityGroup === routeSet.exclusivityGroup) { - modification[`routeSets.${otherRouteSetId}.active`] = false - } - }) - } - - await Studios.updateAsync(studio._id, { - $set: modification, + const queuedJob = await QueueStudioJob(StudioJobs.SwitchRouteSet, access.studioId, { + routeSetId, + state, }) + await queuedJob.complete } } diff --git a/meteor/server/api/rest/koa.ts b/meteor/server/api/rest/koa.ts index 00ecd85ac4..3a8c54dc0a 100644 --- a/meteor/server/api/rest/koa.ts +++ 
b/meteor/server/api/rest/koa.ts @@ -18,6 +18,7 @@ declare module 'http' { } const rootRouter = new KoaRouter() +const boundRouterPaths: string[] = [] Meteor.startup(() => { const koaApp = new Koa() @@ -51,7 +52,8 @@ Meteor.startup(() => { // serve the webui through koa // This is to avoid meteor injecting anything into the served html - koaApp.use(staticServe(public_dir)) + const webuiServer = staticServe(public_dir) + koaApp.use(webuiServer) logger.debug(`Serving static files from ${public_dir}`) // Serve the meteor runtime config @@ -66,9 +68,31 @@ Meteor.startup(() => { }) koaApp.use(rootRouter.routes()).use(rootRouter.allowedMethods()) + + koaApp.use(async (ctx, next) => { + if (ctx.method !== 'GET') return next() + + // Don't use the fallback for certain paths + if (ctx.path.startsWith('/assets/')) return next() + + // Don't use the fallback for anything handled by another router + // This does not feel efficient, but koa doesn't appear to have any shared state between the router handlers + for (const bindPath of boundRouterPaths) { + if (ctx.path.startsWith(bindPath)) return next() + } + + // fallback to the root file + ctx.path = '/' + return webuiServer(ctx, next) + }) }) export function bindKoaRouter(koaRouter: KoaRouter, bindPath: string): void { + // Track this path as having a router + let bindPathFull = bindPath + if (!bindPathFull.endsWith('/')) bindPathFull += '/' + boundRouterPaths.push(bindPathFull) + rootRouter.use(bindPath, koaRouter.routes()).use(bindPath, koaRouter.allowedMethods()) } diff --git a/meteor/server/api/rest/v0/index.ts b/meteor/server/api/rest/v0/index.ts index 721a3a803c..a2a150a51f 100644 --- a/meteor/server/api/rest/v0/index.ts +++ b/meteor/server/api/rest/v0/index.ts @@ -8,7 +8,7 @@ import * as _ from 'underscore' import { Meteor } from 'meteor/meteor' import { MeteorMethodSignatures } from '../../../methods' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { MeteorPublications, MeteorPublicationSignatures } from '../../../publications/lib' +import { MeteorPublications, MeteorPublicationSignatures } from '../../../publications/lib/lib' import { UserActionAPIMethods } from '@sofie-automation/meteor-lib/dist/api/userActions' import { logger } from '../../../logging' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' diff --git a/meteor/server/api/rest/v1/index.ts b/meteor/server/api/rest/v1/index.ts index c1fbb091b0..c935f83fa4 100644 --- a/meteor/server/api/rest/v1/index.ts +++ b/meteor/server/api/rest/v1/index.ts @@ -1,6 +1,7 @@ import KoaRouter from '@koa/router' import { interpollateTranslation, translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' +import { IConfigMessage, NoteSeverity } from '@sofie-automation/blueprints-integration' import Koa from 'koa' import bodyParser from 'koa-bodyparser' import { Meteor } from 'meteor/meteor' @@ -92,6 +93,30 @@ function extractErrorDetails(e: unknown): string[] | undefined { } } +export const checkValidation = (method: string, configValidationMsgs: IConfigMessage[]): void => { + /** + * Throws if any of the configValidationMsgs indicates that the config has errors. 
+ * Will log any messages with severity WARNING or INFO + */ + const configValidationOK = configValidationMsgs.reduce((acc, msg) => acc && msg.level !== NoteSeverity.ERROR, true) + if (!configValidationOK) { + const details = JSON.stringify( + configValidationMsgs.filter((msg) => msg.level === NoteSeverity.ERROR).map((msg) => msg.message.key), + null, + 2 + ) + logger.error(`${method} failed blueprint config validation with errors: ${details}`) + throw new Meteor.Error(409, `${method} has failed blueprint config validation`, details) + } else { + const details = JSON.stringify( + configValidationMsgs.map((msg) => msg.message.key), + null, + 2 + ) + logger.info(`${method} received messages from blueprint config validation: ${details}`) + } +} + interface APIRequestError { status: number message: string
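The checkValidation helper above only throws once an ERROR-level message is present; WARNING and INFO messages are logged and the request proceeds. A self-contained sketch of that contract (the enum, logging and error primitives are stubbed so the snippet runs standalone; the real code uses NoteSeverity from blueprints-integration, logger and Meteor.Error, and the enum numbering is irrelevant to the logic):

	enum NoteSeverity { WARNING = 1, ERROR = 2, INFO = 3 } // stub for the sketch
	interface IConfigMessage { level: NoteSeverity; message: { key: string } }

	function checkValidationSketch(method: string, msgs: IConfigMessage[]): void {
		const errors = msgs.filter((msg) => msg.level === NoteSeverity.ERROR)
		if (errors.length > 0) {
			// real code: logger.error(...) and throw new Meteor.Error(409, ...)
			throw new Error(`${method} has failed blueprint config validation: ${JSON.stringify(errors.map((m) => m.message.key))}`)
		}
		// real code: logger.info(...)
		console.info(`${method} received messages from blueprint config validation`, msgs.map((m) => m.message.key))
	}

	// proceeds, warning is only logged:
	checkValidationSketch('addStudio', [{ level: NoteSeverity.WARNING, message: { key: 'mediaPreviewsUrl not set' } }])
	// throws, error present:
	checkValidationSketch('addStudio', [{ level: NoteSeverity.ERROR, message: { key: 'frameRate is required' } }])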
diff --git a/meteor/server/api/rest/v1/playlists.ts b/meteor/server/api/rest/v1/playlists.ts index 93c621e224..30513751c4 100644 --- a/meteor/server/api/rest/v1/playlists.ts +++ b/meteor/server/api/rest/v1/playlists.ts @@ -96,7 +96,8 @@ class PlaylistsServerAPI implements PlaylistsRestAPI { event: string, rundownPlaylistId: RundownPlaylistId, adLibId: AdLibActionId | RundownBaselineAdLibActionId | PieceId | BucketAdLibId, - triggerMode?: string | null + triggerMode?: string | null, + adLibOptions?: { [key: string]: any } ): Promise<ClientAPI.ClientResponse<object>> { const baselineAdLibPiece = RundownBaselineAdLibPieces.findOneAsync(adLibId as PieceId, { projection: { _id: 1 }, @@ -204,6 +205,7 @@ class PlaylistsServerAPI implements PlaylistsRestAPI { actionId: adLibActionDoc.actionId, userData: adLibActionDoc.userData, triggerMode: triggerMode ?? undefined, + actionOptions: adLibOptions, } ) } else { @@ -576,7 +578,7 @@ export function registerRoutes(registerRoute: APIRegisterHook<PlaylistsRestAPI>) } ) - registerRoute<{ playlistId: string }, { adLibId: string; actionType?: string }, object>( + registerRoute<{ playlistId: string }, { adLibId: string; actionType?: string; adLibOptions?: any }, object>( 'post', '/playlists/:playlistId/execute-adlib', new Map([ @@ -591,12 +593,24 @@ ) const actionTypeObj = body const triggerMode = actionTypeObj ? (actionTypeObj as { actionType: string }).actionType : undefined - logger.info(`API POST: execute-adlib ${rundownPlaylistId} ${adLibId} - triggerMode: ${triggerMode}`) + const adLibOptions = actionTypeObj ? actionTypeObj.adLibOptions : undefined + logger.info( + `API POST: execute-adlib ${rundownPlaylistId} ${adLibId} - actionType: ${triggerMode} - options: ${ + adLibOptions ? JSON.stringify(adLibOptions) : 'undefined' + }` + ) check(adLibId, String) check(rundownPlaylistId, String) - return await serverAPI.executeAdLib(connection, event, rundownPlaylistId, adLibId, triggerMode) + return await serverAPI.executeAdLib( + connection, + event, + rundownPlaylistId, + adLibId, + triggerMode, + adLibOptions + ) } ) diff --git a/meteor/server/api/rest/v1/showstyles.ts b/meteor/server/api/rest/v1/showstyles.ts index 0723679042..9c434b6ac0 100644 --- a/meteor/server/api/rest/v1/showstyles.ts +++ b/meteor/server/api/rest/v1/showstyles.ts @@ -20,13 +20,14 @@ import { APIShowStyleVariantFrom, showStyleBaseFrom, showStyleVariantFrom, + validateAPIBlueprintConfigForShowStyle, } from './typeConversion' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { runUpgradeForShowStyleBase, validateConfigForShowStyleBase } from '../../../migration/upgrades' -import { NoteSeverity } from '@sofie-automation/blueprints-integration' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { assertNever } from '@sofie-automation/corelib/dist/lib' +import { checkValidation } from '.' class ShowStylesServerAPI implements ShowStylesRestAPI { async getShowStyleBases( @@ -42,9 +43,15 @@ async addShowStyleBase( _connection: Meteor.Connection, _event: string, - showStyleBase: APIShowStyleBase + apiShowStyleBase: APIShowStyleBase ): Promise<ClientAPI.ClientResponse<string>> { - const showStyle = await showStyleBaseFrom(showStyleBase) + const blueprintConfigValidation = await validateAPIBlueprintConfigForShowStyle( + apiShowStyleBase, + protectString(apiShowStyleBase.blueprintId) + ) + checkValidation(`addShowStyleBase`, blueprintConfigValidation) + + const showStyle = await showStyleBaseFrom(apiShowStyleBase) if (!showStyle) throw new Meteor.Error(400, `Invalid ShowStyleBase`) const showStyleId = showStyle._id await ShowStyleBases.insertAsync(showStyle) @@ -60,16 +67,22 @@ class ShowStylesServerAPI implements ShowStylesRestAPI { const showStyleBase = await ShowStyleBases.findOneAsync(showStyleBaseId) if (!showStyleBase) throw new Meteor.Error(404, `ShowStyleBase ${showStyleBaseId} does not exist`) - return ClientAPI.responseSuccess(APIShowStyleBaseFrom(showStyleBase)) + return ClientAPI.responseSuccess(await APIShowStyleBaseFrom(showStyleBase)) } async addOrUpdateShowStyleBase( _connection: Meteor.Connection, _event: string, showStyleBaseId: ShowStyleBaseId, - showStyleBase: APIShowStyleBase + apiShowStyleBase: APIShowStyleBase ): Promise<ClientAPI.ClientResponse<void>> { - const showStyle = await showStyleBaseFrom(showStyleBase, showStyleBaseId) + const blueprintConfigValidation = await validateAPIBlueprintConfigForShowStyle( + apiShowStyleBase, + protectString(apiShowStyleBase.blueprintId) + ) + checkValidation(`addOrUpdateShowStyleBase ${showStyleBaseId}`, blueprintConfigValidation) + + const showStyle = await showStyleBaseFrom(apiShowStyleBase, showStyleBaseId) if (!showStyle) throw new Meteor.Error(400, `Invalid ShowStyleBase`) const existingShowStyle = await ShowStyleBases.findOneAsync(showStyleBaseId) @@ -96,17 +109,73 @@ class ShowStylesServerAPI implements ShowStylesRestAPI { await ShowStyleBases.upsertAsync(showStyleBaseId, showStyle) + // wait for the upsert to complete before validation and upgrade read from the showStyleBases collection + await new Promise<void>((resolve) => setTimeout(() => resolve(), 200)) + const validation = 
await validateConfigForShowStyleBase(showStyleBaseId) - const validateOK = validation.messages.reduce((acc, msg) => acc && msg.level === NoteSeverity.INFO, true) - if (!validateOK) { - const details = JSON.stringify( - validation.messages.filter((msg) => msg.level < NoteSeverity.INFO).map((msg) => msg.message.key), - null, - 2 - ) - logger.error(`addOrUpdateShowStyleBase failed validation with errors: ${details}`) - throw new Meteor.Error(409, `ShowStyleBase ${showStyleBaseId} has failed validation`, details) - } + checkValidation(`addOrUpdateShowStyleBase ${showStyleBaseId}`, validation.messages) + + return ClientAPI.responseSuccess(await runUpgradeForShowStyleBase(showStyleBaseId)) + } + + async getShowStyleConfig( + _connection: Meteor.Connection, + _event: string, + showStyleBaseId: ShowStyleBaseId + ): Promise> { + const showStyleBase = await ShowStyleBases.findOneAsync(showStyleBaseId) + if (!showStyleBase) throw new Meteor.Error(404, `ShowStyleBase ${showStyleBaseId} does not exist`) + + return ClientAPI.responseSuccess((await APIShowStyleBaseFrom(showStyleBase)).config) + } + + async updateShowStyleConfig( + _connection: Meteor.Connection, + _event: string, + showStyleBaseId: ShowStyleBaseId, + config: object + ): Promise> { + const existingShowStyleBase = await ShowStyleBases.findOneAsync(showStyleBaseId) + if (existingShowStyleBase) { + const rundowns = (await Rundowns.findFetchAsync( + { showStyleBaseId }, + { projection: { playlistId: 1 } } + )) as Array> + const playlists = (await RundownPlaylists.findFetchAsync( + { _id: { $in: rundowns.map((r) => r.playlistId) } }, + { + projection: { + activationId: 1, + }, + } + )) as Array> + if (playlists.some((playlist) => playlist.activationId !== undefined)) { + throw new Meteor.Error( + 412, + `Cannot update ShowStyleBase ${showStyleBaseId} as it is in use by an active Playlist` + ) + } + } else throw new Meteor.Error(404, `ShowStyleBase ${showStyleBaseId} not found`) + + const apiShowStyleBase = await APIShowStyleBaseFrom(existingShowStyleBase) + apiShowStyleBase.config = config + + const blueprintConfigValidation = await validateAPIBlueprintConfigForShowStyle( + apiShowStyleBase, + protectString(apiShowStyleBase.blueprintId) + ) + checkValidation(`updateShowStyleConfig ${showStyleBaseId}`, blueprintConfigValidation) + + const showStyle = await showStyleBaseFrom(apiShowStyleBase, showStyleBaseId) + if (!showStyle) throw new Meteor.Error(400, `Invalid ShowStyleBase`) + + await ShowStyleBases.upsertAsync(showStyleBaseId, showStyle) + + // wait for the upsert to complete before validation and upgrade read from the showStyleBases collection + await new Promise((resolve) => setTimeout(() => resolve(), 200)) + + const validation = await validateConfigForShowStyleBase(showStyleBaseId) + checkValidation(`updateShowStyleConfig ${showStyleBaseId}`, validation.messages) return ClientAPI.responseSuccess(await runUpgradeForShowStyleBase(showStyleBaseId)) } @@ -185,7 +254,7 @@ class ShowStylesServerAPI implements ShowStylesRestAPI { const variant = await ShowStyleVariants.findOneAsync(showStyleVariantId) if (!variant) throw new Meteor.Error(404, `ShowStyleVariant ${showStyleVariantId} not found`) - return ClientAPI.responseSuccess(APIShowStyleVariantFrom(variant)) + return ClientAPI.responseSuccess(await APIShowStyleVariantFrom(showStyleBase, variant)) } async addOrUpdateShowStyleVariant( @@ -193,12 +262,18 @@ class ShowStylesServerAPI implements ShowStylesRestAPI { _event: string, showStyleBaseId: ShowStyleBaseId, showStyleVariantId: 
ShowStyleVariantId, - showStyleVariant: APIShowStyleVariant + apiShowStyleVariant: APIShowStyleVariant ): Promise> { const showStyleBase = await ShowStyleBases.findOneAsync(showStyleBaseId) if (!showStyleBase) throw new Meteor.Error(404, `ShowStyleBase ${showStyleBaseId} does not exist`) - const showStyle = showStyleVariantFrom(showStyleVariant, showStyleVariantId) + const blueprintConfigValidation = await validateAPIBlueprintConfigForShowStyle( + apiShowStyleVariant, + showStyleBase.blueprintId + ) + checkValidation(`addOrUpdateShowStyleVariant ${showStyleVariantId}`, blueprintConfigValidation) + + const showStyle = showStyleVariantFrom(apiShowStyleVariant, showStyleVariantId) if (!showStyle) throw new Meteor.Error(400, `Invalid ShowStyleVariant`) const existingShowStyle = await ShowStyleVariants.findOneAsync(showStyleVariantId) @@ -335,6 +410,37 @@ export function registerRoutes(registerRoute: APIRegisterHook } ) + registerRoute<{ showStyleBaseId: string }, never, object>( + 'get', + '/showstyles/:showStyleBaseId/config', + new Map([[404, [UserErrorMessage.ShowStyleBaseNotFound]]]), + showStylesAPIFactory, + async (serverAPI, connection, event, params, _) => { + const showStyleBaseId = protectString(params.showStyleBaseId) + logger.info(`API GET: ShowStyleBase config ${showStyleBaseId}`) + + check(showStyleBaseId, String) + return await serverAPI.getShowStyleConfig(connection, event, showStyleBaseId) + } + ) + + registerRoute<{ showStyleBaseId: string }, object, void>( + 'put', + '/showstyles/:showStyleBaseId/config', + new Map([ + [404, [UserErrorMessage.ShowStyleBaseNotFound]], + [409, [UserErrorMessage.ValidationFailed]], + ]), + showStylesAPIFactory, + async (serverAPI, connection, event, params, body) => { + const showStyleBaseId = protectString(params.showStyleBaseId) + logger.info(`API PUT: Update ShowStyleBase config ${showStyleBaseId}`) + + check(showStyleBaseId, String) + return await serverAPI.updateShowStyleConfig(connection, event, showStyleBaseId, body) + } + ) + registerRoute<{ showStyleBaseId: string }, never, void>( 'delete', '/showstyles/:showStyleBaseId', diff --git a/meteor/server/api/rest/v1/studios.ts b/meteor/server/api/rest/v1/studios.ts index 798892966a..30e57b14ba 100644 --- a/meteor/server/api/rest/v1/studios.ts +++ b/meteor/server/api/rest/v1/studios.ts @@ -8,9 +8,8 @@ import { APIStudio, StudioAction, StudioActionType, StudiosRestAPI } from '../.. import { Meteor } from 'meteor/meteor' import { ClientAPI } from '@sofie-automation/meteor-lib/dist/api/client' import { PeripheralDevices, RundownPlaylists, Studios } from '../../../collections' -import { APIStudioFrom, studioFrom } from './typeConversion' +import { APIStudioFrom, studioFrom, validateAPIBlueprintConfigForStudio } from './typeConversion' import { runUpgradeForStudio, validateConfigForStudio } from '../../../migration/upgrades' -import { NoteSeverity } from '@sofie-automation/blueprints-integration' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ServerClientAPI } from '../../client' import { assertNever } from '../../../lib/tempLib' @@ -20,6 +19,7 @@ import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { StudioContentWriteAccess } from '../../../security/studio' import { ServerPlayoutAPI } from '../../playout/playout' +import { checkValidation } from '.' 
class StudiosServerAPI implements StudiosRestAPI { constructor(private context: ServerAPIContext) {} @@ -36,24 +36,18 @@ class StudiosServerAPI implements StudiosRestAPI { async addStudio( _connection: Meteor.Connection, _event: string, - studio: APIStudio + apiStudio: APIStudio ): Promise> { - const newStudio = await studioFrom(studio) + const blueprintConfigValidation = await validateAPIBlueprintConfigForStudio(apiStudio) + checkValidation(`addStudio`, blueprintConfigValidation) + + const newStudio = await studioFrom(apiStudio) if (!newStudio) throw new Meteor.Error(400, `Invalid Studio`) const newStudioId = await Studios.insertAsync(newStudio) const validation = await validateConfigForStudio(newStudioId) - const validateOK = validation.messages.reduce((acc, msg) => acc && msg.level === NoteSeverity.INFO, true) - if (!validateOK) { - const details = JSON.stringify( - validation.messages.filter((msg) => msg.level < NoteSeverity.INFO).map((msg) => msg.message.key), - null, - 2 - ) - logger.error(`addStudio failed validation with errors: ${details}`) - throw new Meteor.Error(409, `Studio ${newStudioId} has failed validation`, details) - } + checkValidation(`addStudio ${newStudioId}`, validation.messages) await runUpgradeForStudio(newStudioId) return ClientAPI.responseSuccess(unprotectString(newStudioId), 200) @@ -67,16 +61,19 @@ class StudiosServerAPI implements StudiosRestAPI { const studio = await Studios.findOneAsync(studioId) if (!studio) throw new Meteor.Error(404, `Studio ${studioId} not found`) - return ClientAPI.responseSuccess(APIStudioFrom(studio)) + return ClientAPI.responseSuccess(await APIStudioFrom(studio)) } async addOrUpdateStudio( _connection: Meteor.Connection, _event: string, studioId: StudioId, - studio: APIStudio + apiStudio: APIStudio ): Promise> { - const newStudio = await studioFrom(studio, studioId) + const blueprintConfigValidation = await validateAPIBlueprintConfigForStudio(apiStudio) + checkValidation(`addOrUpdateStudio ${studioId}`, blueprintConfigValidation) + + const newStudio = await studioFrom(apiStudio, studioId) if (!newStudio) throw new Meteor.Error(400, `Invalid Studio`) const existingStudio = await Studios.findOneAsync(studioId) @@ -96,18 +93,54 @@ class StudiosServerAPI implements StudiosRestAPI { await Studios.upsertAsync(studioId, newStudio) + // wait for the upsert to complete before validation and upgrade read from the studios collection + await new Promise((resolve) => setTimeout(() => resolve(), 200)) + const validation = await validateConfigForStudio(studioId) - const validateOK = validation.messages.reduce((acc, msg) => acc && msg.level === NoteSeverity.INFO, true) - if (!validateOK) { - const details = JSON.stringify( - validation.messages.filter((msg) => msg.level < NoteSeverity.INFO).map((msg) => msg.message.key), - null, - 2 - ) - logger.error(`addOrUpdateStudio failed validation with errors: ${details}`) - throw new Meteor.Error(409, `Studio ${studioId} has failed validation`, details) + checkValidation(`addOrUpdateStudio ${studioId}`, validation.messages) + + return ClientAPI.responseSuccess(await runUpgradeForStudio(studioId)) + } + + async getStudioConfig( + _connection: Meteor.Connection, + _event: string, + studioId: StudioId + ): Promise> { + const studio = await Studios.findOneAsync(studioId) + if (!studio) throw new Meteor.Error(404, `Studio ${studioId} not found`) + + return ClientAPI.responseSuccess((await APIStudioFrom(studio)).config) + } + + async updateStudioConfig( + _connection: Meteor.Connection, + _event: string, + 
studioId: StudioId, + config: object + ): Promise> { + const existingStudio = await Studios.findOneAsync(studioId) + if (!existingStudio) { + throw new Meteor.Error(404, `Studio ${studioId} not found`) } + const apiStudio = await APIStudioFrom(existingStudio) + apiStudio.config = config + + const blueprintConfigValidation = await validateAPIBlueprintConfigForStudio(apiStudio) + checkValidation(`updateStudioConfig ${studioId}`, blueprintConfigValidation) + + const newStudio = await studioFrom(apiStudio, studioId) + if (!newStudio) throw new Meteor.Error(400, `Invalid Studio`) + + await Studios.upsertAsync(studioId, newStudio) + + // wait for the upsert to complete before validation and upgrade read from the studios collection + await new Promise((resolve) => setTimeout(() => resolve(), 200)) + + const validation = await validateConfigForStudio(studioId) + checkValidation(`updateStudioConfig ${studioId}`, validation.messages) + return ClientAPI.responseSuccess(await runUpgradeForStudio(studioId)) } @@ -338,6 +371,37 @@ export function registerRoutes(registerRoute: APIRegisterHook): } ) + registerRoute<{ studioId: string }, never, object>( + 'get', + '/studios/:studioId/config', + new Map([[404, [UserErrorMessage.StudioNotFound]]]), + studiosAPIFactory, + async (serverAPI, connection, event, params, _) => { + const studioId = protectString(params.studioId) + logger.info(`API GET: studio config ${studioId}`) + + check(studioId, String) + return await serverAPI.getStudioConfig(connection, event, studioId) + } + ) + + registerRoute<{ studioId: string }, object, void>( + 'put', + '/studios/:studioId/config', + new Map([ + [404, [UserErrorMessage.StudioNotFound]], + [409, [UserErrorMessage.ValidationFailed]], + ]), + studiosAPIFactory, + async (serverAPI, connection, event, params, body) => { + const studioId = protectString(params.studioId) + logger.info(`API PUT: Update studio config ${studioId}`) + + check(studioId, String) + return await serverAPI.updateStudioConfig(connection, event, studioId, body) + } + ) + registerRoute<{ studioId: string }, never, void>( 'delete', '/studios/:studioId', diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index 91d79a6d1d..75a38a37b2 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -1,22 +1,33 @@ import { BlueprintManifestType, IBlueprintConfig, + IConfigMessage, IOutputLayer, ISourceLayer, + ShowStyleBlueprintManifest, SourceLayerType, StatusCode, + StudioBlueprintManifest, } from '@sofie-automation/blueprints-integration' import { PeripheralDevice, PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' -import { BucketId, ShowStyleBaseId, ShowStyleVariantId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + BlueprintId, + BucketId, + ShowStyleBaseId, + ShowStyleVariantId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' +import { assertNever, Complete, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { applyAndValidateOverrides, ObjectOverrideSetOp, wrapDefaultObject, updateOverrides, + 
convertObjectIntoOverrides, + ObjectWithOverrides, } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { APIBlueprint, @@ -33,6 +44,10 @@ import { import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' import { Blueprints, ShowStyleBases, Studios } from '../../../collections' +import { Meteor } from 'meteor/meteor' +import { evalBlueprint } from '../../blueprints/cache' +import { CommonContext } from '../../../migration/upgrades/context' +import { logger } from '../../../logging' import { DEFAULT_MINIMUM_TAKE_SPAN, DEFAULT_FALLBACK_PART_DURATION, @@ -72,9 +87,20 @@ export async function showStyleBaseFrom( ? updateOverrides(showStyleBase.sourceLayersWithOverrides, newSourceLayers) : wrapDefaultObject({}) - const blueprintConfig = showStyleBase - ? updateOverrides(showStyleBase.blueprintConfigWithOverrides, apiShowStyleBase.config as IBlueprintConfig) - : wrapDefaultObject({}) + const blueprintManifest = evalBlueprint(blueprint) as ShowStyleBlueprintManifest + let blueprintConfig: ObjectWithOverrides + if (typeof blueprintManifest.blueprintConfigFromAPI !== 'function') { + blueprintConfig = showStyleBase + ? updateOverrides(showStyleBase.blueprintConfigWithOverrides, apiShowStyleBase.config as IBlueprintConfig) + : wrapDefaultObject({}) + } else { + blueprintConfig = showStyleBase + ? updateOverrides( + showStyleBase.blueprintConfigWithOverrides, + await ShowStyleBaseBlueprintConfigFromAPI(apiShowStyleBase, blueprintManifest) + ) + : convertObjectIntoOverrides(await ShowStyleBaseBlueprintConfigFromAPI(apiShowStyleBase, blueprintManifest)) + } return { _id: existingId ?? getRandomId(), @@ -91,7 +117,7 @@ export async function showStyleBaseFrom( } } -export function APIShowStyleBaseFrom(showStyleBase: DBShowStyleBase): APIShowStyleBase { +export async function APIShowStyleBaseFrom(showStyleBase: DBShowStyleBase): Promise { return { name: showStyleBase.name, blueprintId: unprotectString(showStyleBase.blueprintId), @@ -102,7 +128,7 @@ export function APIShowStyleBaseFrom(showStyleBase: DBShowStyleBase): APIShowSty sourceLayers: Object.values( applyAndValidateOverrides(showStyleBase.sourceLayersWithOverrides).obj ).map((layer) => APISourceLayerFrom(layer!)), - config: applyAndValidateOverrides(showStyleBase.blueprintConfigWithOverrides).obj, + config: await APIShowStyleBlueprintConfigFrom(showStyleBase, showStyleBase.blueprintId), } } @@ -128,12 +154,16 @@ export function showStyleVariantFrom( } } -export function APIShowStyleVariantFrom(showStyleVariant: DBShowStyleVariant): APIShowStyleVariant { +export async function APIShowStyleVariantFrom( + showStyleBase: DBShowStyleBase, + showStyleVariant: DBShowStyleVariant +): Promise { return { name: showStyleVariant.name, rank: showStyleVariant._rank, showStyleBaseId: unprotectString(showStyleVariant.showStyleBaseId), - config: applyAndValidateOverrides(showStyleVariant.blueprintConfigWithOverrides).obj, + blueprintConfigPresetId: showStyleVariant.blueprintConfigPresetId, + config: await APIShowStyleBlueprintConfigFrom(showStyleVariant, showStyleBase.blueprintId), } } @@ -255,16 +285,27 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P let blueprint: Blueprint | undefined if (apiStudio.blueprintId) { blueprint = await Blueprints.findOneAsync(protectString(apiStudio.blueprintId)) - if (!blueprint) return undefined - if (blueprint.blueprintType !== 
BlueprintManifestType.STUDIO) return undefined + if (blueprint?.blueprintType !== BlueprintManifestType.STUDIO) return undefined } + if (!blueprint) return undefined let studio: DBStudio | undefined if (existingId) studio = await Studios.findOneAsync(existingId) - const blueprintConfig = studio - ? updateOverrides(studio.blueprintConfigWithOverrides, apiStudio.config as IBlueprintConfig) - : wrapDefaultObject({}) + const blueprintManifest = evalBlueprint(blueprint) as StudioBlueprintManifest + let blueprintConfig: ObjectWithOverrides + if (typeof blueprintManifest.blueprintConfigFromAPI !== 'function') { + blueprintConfig = studio + ? updateOverrides(studio.blueprintConfigWithOverrides, apiStudio.config as IBlueprintConfig) + : wrapDefaultObject({}) + } else { + blueprintConfig = studio + ? updateOverrides( + studio.blueprintConfigWithOverrides, + await StudioBlueprintConfigFromAPI(apiStudio, blueprintManifest) + ) + : convertObjectIntoOverrides(await StudioBlueprintConfigFromAPI(apiStudio, blueprintManifest)) + } return { _id: existingId ?? getRandomId(), @@ -276,10 +317,10 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P supportedShowStyleBase: apiStudio.supportedShowStyleBase?.map((id) => protectString(id)) ?? [], organizationId: null, mappingsWithOverrides: wrapDefaultObject({}), - routeSets: {}, + routeSetsWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -292,20 +333,20 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P } } -export function APIStudioFrom(studio: DBStudio): APIStudio { +export async function APIStudioFrom(studio: DBStudio): Promise> { const studioSettings = APIStudioSettingsFrom(studio.settings) return { name: studio.name, blueprintId: unprotectString(studio.blueprintId), blueprintConfigPresetId: studio.blueprintConfigPresetId, - config: applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj, + config: await APIStudioBlueprintConfigFrom(studio), settings: studioSettings, supportedShowStyleBase: studio.supportedShowStyleBase.map((id) => unprotectString(id)), } } -export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): IStudioSettings { +export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): Complete { return { frameRate: apiStudioSettings.frameRate, mediaPreviewsUrl: apiStudioSettings.mediaPreviewsUrl, @@ -321,10 +362,14 @@ export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): IStudi enableQuickLoop: apiStudioSettings.enableQuickLoop, forceQuickLoopAutoNext: forceQuickLoopAutoNextFrom(apiStudioSettings.forceQuickLoopAutoNext), fallbackPartDuration: apiStudioSettings.fallbackPartDuration ?? DEFAULT_FALLBACK_PART_DURATION, + allowAdlibTestingSegment: apiStudioSettings.allowAdlibTestingSegment, + allowHold: apiStudioSettings.allowHold ?? true, // Backwards compatible + allowPieceDirectPlay: apiStudioSettings.allowPieceDirectPlay ?? true, // Backwards compatible + enableBuckets: apiStudioSettings.enableBuckets ?? 
true, // Backwards compatible } } -export function APIStudioSettingsFrom(settings: IStudioSettings): APIStudioSettings { +export function APIStudioSettingsFrom(settings: IStudioSettings): Complete { return { frameRate: settings.frameRate, mediaPreviewsUrl: settings.mediaPreviewsUrl, @@ -340,6 +385,10 @@ export function APIStudioSettingsFrom(settings: IStudioSettings): APIStudioSetti enableQuickLoop: settings.enableQuickLoop, forceQuickLoopAutoNext: APIForceQuickLoopAutoNextFrom(settings.forceQuickLoopAutoNext), fallbackPartDuration: settings.fallbackPartDuration, + allowAdlibTestingSegment: settings.allowAdlibTestingSegment, + allowHold: settings.allowHold, + allowPieceDirectPlay: settings.allowPieceDirectPlay, + enableBuckets: settings.enableBuckets, } } @@ -461,6 +510,157 @@ export function APIOutputLayerFrom(outputLayer: IOutputLayer): APIOutputLayer { } } +async function getBlueprint( + blueprintId: BlueprintId | undefined, + blueprintType: BlueprintManifestType +): Promise { + const blueprint = blueprintId + ? await Blueprints.findOneAsync({ + _id: blueprintId, + blueprintType, + }) + : undefined + if (!blueprint) throw new Meteor.Error(404, `Blueprint "${blueprintId}" not found!`) + + if (!blueprint.blueprintHash) throw new Meteor.Error(500, 'Blueprint is not valid') + + return blueprint +} + +export async function validateAPIBlueprintConfigForShowStyle( + apiShowStyle: APIShowStyleBase | APIShowStyleVariant, + blueprintId: BlueprintId +): Promise> { + if (!apiShowStyle.blueprintConfigPresetId) { + logger.warn(`ShowStyle ${apiShowStyle.name} is missing config preset`) + return [] + } + const blueprint = await getBlueprint(blueprintId, BlueprintManifestType.SHOWSTYLE) + const blueprintManifest = evalBlueprint(blueprint) as ShowStyleBlueprintManifest + + if (typeof blueprintManifest.validateConfigFromAPI !== 'function') { + logger.info(`Blueprint ${blueprintManifest.blueprintId} does not support Config validation`) + return [] + } + + const blueprintContext = new CommonContext( + 'validateAPIBlueprintConfig', + `showStyle:${apiShowStyle.name},blueprint:${blueprint._id}` + ) + + return blueprintManifest.validateConfigFromAPI(blueprintContext, apiShowStyle.config) +} + +export async function ShowStyleBaseBlueprintConfigFromAPI( + apiShowStyleBase: APIShowStyleBase, + blueprintManifest: ShowStyleBlueprintManifest +): Promise { + if (!apiShowStyleBase.blueprintConfigPresetId) { + logger.warn(`ShowStyleBase ${apiShowStyleBase.name} is missing config preset`) + return apiShowStyleBase.config as IBlueprintConfig + } + + if (typeof blueprintManifest.blueprintConfigFromAPI !== 'function') { + return apiShowStyleBase.config as IBlueprintConfig + } + + const blueprintContext = new CommonContext( + 'BlueprintConfigFromAPI', + `showStyleBase:${apiShowStyleBase.name},blueprint:${blueprintManifest.blueprintId}` + ) + + return blueprintManifest.blueprintConfigFromAPI(blueprintContext, apiShowStyleBase.config) +} + +export async function APIShowStyleBlueprintConfigFrom( + showStyle: DBShowStyleBase | DBShowStyleVariant, + blueprintId: BlueprintId +): Promise { + if (!showStyle.blueprintConfigPresetId) { + logger.warn(`ShowStyle ${showStyle._id} is missing config preset`) + return applyAndValidateOverrides(showStyle.blueprintConfigWithOverrides).obj + } + const blueprint = await getBlueprint(blueprintId, BlueprintManifestType.SHOWSTYLE) + const blueprintManifest = evalBlueprint(blueprint) as ShowStyleBlueprintManifest + + if (typeof blueprintManifest.blueprintConfigToAPI !== 'function') + return 
applyAndValidateOverrides(showStyle.blueprintConfigWithOverrides).obj + + const blueprintContext = new CommonContext( + 'APIShowStyleBlueprintConfigFrom', + `showStyleBase:${showStyle._id},blueprint:${blueprint._id}` + ) + + return blueprintManifest.blueprintConfigToAPI( + blueprintContext, + applyAndValidateOverrides(showStyle.blueprintConfigWithOverrides).obj + ) +} + +export async function validateAPIBlueprintConfigForStudio(apiStudio: APIStudio): Promise> { + if (!apiStudio.blueprintConfigPresetId) { + logger.warn(`Studio ${apiStudio.name} is missing config preset`) + return [] + } + const blueprint = await getBlueprint(protectString(apiStudio.blueprintId), BlueprintManifestType.STUDIO) + const blueprintManifest = evalBlueprint(blueprint) as StudioBlueprintManifest + + if (typeof blueprintManifest.validateConfigFromAPI !== 'function') { + logger.info(`Blueprint ${blueprintManifest.blueprintId} does not support Config validation`) + return [] + } + + const blueprintContext = new CommonContext( + 'validateAPIBlueprintConfig', + `studio:${apiStudio.name},blueprint:${blueprint._id}` + ) + + return blueprintManifest.validateConfigFromAPI(blueprintContext, apiStudio.config) +} + +export async function StudioBlueprintConfigFromAPI( + apiStudio: APIStudio, + blueprintManifest: StudioBlueprintManifest +): Promise { + if (!apiStudio.blueprintConfigPresetId) { + logger.warn(`Studio ${apiStudio.name} is missing config preset`) + return apiStudio.config as IBlueprintConfig + } + + if (typeof blueprintManifest.blueprintConfigFromAPI !== 'function') { + return apiStudio.config as IBlueprintConfig + } + + const blueprintContext = new CommonContext( + 'BlueprintConfigFromAPI', + `studio:${apiStudio.name},blueprint:${blueprintManifest.blueprintId}` + ) + + return blueprintManifest.blueprintConfigFromAPI(blueprintContext, apiStudio.config) +} + +export async function APIStudioBlueprintConfigFrom(studio: DBStudio): Promise { + if (!studio.blueprintConfigPresetId) { + logger.warn(`Studio ${studio._id} is missing config preset`) + return applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj + } + const blueprint = await getBlueprint(studio.blueprintId, BlueprintManifestType.STUDIO) + const blueprintManifest = evalBlueprint(blueprint) as StudioBlueprintManifest + + if (typeof blueprintManifest.blueprintConfigToAPI !== 'function') + return applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj + + const blueprintContext = new CommonContext( + 'APIStudioBlueprintConfigFrom', + `studio:${studio.name},blueprint:${blueprint._id}` + ) + + return blueprintManifest.blueprintConfigToAPI( + blueprintContext, + applyAndValidateOverrides(studio.blueprintConfigWithOverrides).obj + ) +} + export function bucketFrom(apiBucket: APIBucket, existingId?: BucketId): Bucket { return { _id: existingId ?? 
getRandomId(), diff --git a/meteor/server/api/snapshot.ts b/meteor/server/api/snapshot.ts index d346e5c3a2..938567dca5 100644 --- a/meteor/server/api/snapshot.ts +++ b/meteor/server/api/snapshot.ts @@ -53,7 +53,6 @@ import { getPackageContainerPackageId, } from '@sofie-automation/corelib/dist/dataModel/PackageContainerPackageStatus' import { PackageInfoDB, getPackageInfoId } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' -import { checkStudioExists } from '../optimizations' import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snapshots' import { QueueStudioJob } from '../worker/worker' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' @@ -93,6 +92,11 @@ import { import { getCoreSystemAsync } from '../coreSystem/collection' import { executePeripheralDeviceFunction } from './peripheralDevice/executeFunction' import { verifyHashedToken } from './singleUseTokens' +import { + NrcsIngestDataCacheObjRundown, + NrcsIngestDataCacheObjSegment, + NrcsIngestDataCacheObjPart, +} from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' interface RundownPlaylistSnapshot extends CoreRundownPlaylistSnapshot { versionExtended: string | undefined @@ -482,6 +486,7 @@ async function retreiveSnapshot(snapshotId: SnapshotId, cred0: Credentials): Pro return readSnapshot } + async function restoreFromSnapshot( /** The snapshot data to restore */ snapshot: AnySnapshot, @@ -490,22 +495,7 @@ async function restoreFromSnapshot( ): Promise { // Determine what kind of snapshot - if (!_.isObject(snapshot)) throw new Meteor.Error(500, `Restore input data is not an object`) - // First, some special (debugging) cases: - // @ts-expect-error is's not really a snapshot here: - if (snapshot.externalId && snapshot.segments && snapshot.type === 'mos') { - // Special: Not a snapshot, but a datadump of a MOS rundown - const studioId: StudioId = Meteor.settings.manualSnapshotIngestStudioId || 'studio0' - const studioExists = await checkStudioExists(studioId) - if (studioExists) { - await importIngestRundown(studioId, snapshot as unknown as IngestRundown) - return - } - throw new Meteor.Error(500, `No Studio found`) - } - // Then, continue as if it's a normal snapshot: - if (!snapshot.snapshot) throw new Meteor.Error(500, `Restore input data is not a snapshot (${_.keys(snapshot)})`) if (snapshot.snapshot.type === SnapshotType.RUNDOWNPLAYLIST) { @@ -518,11 +508,7 @@ async function restoreFromSnapshot( ) } - // TODO: Improve this. This matches the 'old' behaviour - const studios = await Studios.findFetchAsync({}) - const snapshotStudioExists = studios.find((studio) => studio._id === playlistSnapshot.playlist.studioId) - const studioId = snapshotStudioExists ? playlistSnapshot.playlist.studioId : studios[0]?._id - if (!studioId) throw new Meteor.Error(500, `No Studio found`) + const studioId = await getStudioIdFromPlaylistSnapshot(playlistSnapshot) // A snapshot of a rundownPlaylist return restoreFromRundownPlaylistSnapshot(snapshot as RundownPlaylistSnapshot, studioId, restoreDebugData) @@ -534,6 +520,60 @@ async function restoreFromSnapshot( } } +async function getStudioIdFromPlaylistSnapshot(playlistSnapshot: RundownPlaylistSnapshot): Promise { + // TODO: Improve this. This matches the 'old' behaviour + const studios = await Studios.findFetchAsync({}) + const snapshotStudioExists = studios.find((studio) => studio._id === playlistSnapshot.playlist.studioId) + const studioId = snapshotStudioExists ? 
playlistSnapshot.playlist.studioId : studios[0]?._id + if (!studioId) throw new Meteor.Error(500, `No Studio found`) + return studioId +} +/** Read the ingest data from a snapshot and pipe it into blueprints */ +async function ingestFromSnapshot( + /** The snapshot data to restore */ + snapshot: AnySnapshot +): Promise { + // Determine what kind of snapshot + if (!snapshot.snapshot) throw new Meteor.Error(500, `Restore input data is not a snapshot (${_.keys(snapshot)})`) + if (snapshot.snapshot.type === SnapshotType.RUNDOWNPLAYLIST) { + const playlistSnapshot = snapshot as RundownPlaylistSnapshot + + const studioId = await getStudioIdFromPlaylistSnapshot(playlistSnapshot) + + // Read the ingestData from the snapshot + const ingestData = playlistSnapshot.ingestData + + const rundownData = ingestData.filter((e) => e.type === 'rundown') as NrcsIngestDataCacheObjRundown[] + const segmentData = ingestData.filter((e) => e.type === 'segment') as NrcsIngestDataCacheObjSegment[] + const partData = ingestData.filter((e) => e.type === 'part') as NrcsIngestDataCacheObjPart[] + + if (rundownData.length === 0) throw new Meteor.Error(402, `No rundowns found in ingestData`) + + for (const seg of segmentData) { + seg.data.parts = partData + .filter((e) => e.segmentId === seg.segmentId) + .map((e) => e.data) + .sort((a, b) => b.rank - a.rank) + } + + for (let i = 0; i < rundownData.length; i++) { + const rundown = rundownData[i] + + const segmentsInRundown = segmentData.filter((e) => e.rundownId === rundown.rundownId) + + const ingestRundown: IngestRundown = rundown.data + ingestRundown.segments = segmentsInRundown.map((s) => s.data).sort((a, b) => b.rank - a.rank) + + await importIngestRundown(studioId, ingestRundown) + } + } else { + throw new Meteor.Error( + 402, + `Unable to ingest a snapshot of type "${snapshot.snapshot.type}", did you mean to restore it?` + ) + } +} + async function restoreFromRundownPlaylistSnapshot( snapshot: RundownPlaylistSnapshot, studioId: StudioId, @@ -809,8 +849,16 @@ if (!Settings.enableUserAccounts) { if (!snapshot) throw new Meteor.Error(400, 'Restore Snapshot: Missing request body') const restoreDebugData = ctx.headers['restore-debug-data'] === '1' + const ingestSnapshotData = ctx.headers['ingest-snapshot-data'] === '1' + + if (typeof snapshot !== 'object' || snapshot === null) + throw new Meteor.Error(500, `Restore input data is not an object`) - await restoreFromSnapshot(snapshot, restoreDebugData) + if (ingestSnapshotData) { + await ingestFromSnapshot(snapshot) + } else { + await restoreFromSnapshot(snapshot, restoreDebugData) + } ctx.response.status = 200 ctx.response.body = content diff --git a/meteor/server/api/studio/api.ts b/meteor/server/api/studio/api.ts index e6ff0ce43f..4b79da80ff 100644 --- a/meteor/server/api/studio/api.ts +++ b/meteor/server/api/studio/api.ts @@ -4,7 +4,7 @@ import { registerClassToMeteorMethods } from '../../methods' import { NewStudiosAPI, StudiosAPIMethods } from '@sofie-automation/meteor-lib/dist/api/studios' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { literal, getRandomId } from '../../lib/tempLib' -import { lazyIgnore } from '../../lib/lib' +import { lazyIgnore, MeteorStartupAsync } from '../../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { ExpectedPackages, @@ -48,11 +48,14 @@ export async function insertStudioInner(organizationId: OrganizationId | null, n frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + 
allowHold: false, + allowPieceDirectPlay: false, + enableBuckets: true, }, _rundownVersionHash: '', - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), thumbnailContainerIds: [], previewContainerIds: [], peripheralDeviceSettings: { @@ -131,17 +134,20 @@ function triggerUpdateStudioMappingsHash(studioId: StudioId) { 10 ) } -Studios.observeChanges( - {}, - { - added: triggerUpdateStudioMappingsHash, - changed: triggerUpdateStudioMappingsHash, - removed: triggerUpdateStudioMappingsHash, - }, - { - fields: { - mappingsWithOverrides: 1, - routeSets: 1, + +MeteorStartupAsync(async () => { + await Studios.observeChanges( + {}, + { + added: triggerUpdateStudioMappingsHash, + changed: triggerUpdateStudioMappingsHash, + removed: triggerUpdateStudioMappingsHash, }, - } -) + { + fields: { + mappingsWithOverrides: 1, + routeSetsWithOverrides: 1, + }, + } + ) +}) diff --git a/meteor/server/api/userActions.ts b/meteor/server/api/userActions.ts index 10fb20c44e..82a399f5cd 100644 --- a/meteor/server/api/userActions.ts +++ b/meteor/server/api/userActions.ts @@ -5,7 +5,7 @@ import { Time } from '../lib/tempLib' import { ServerPlayoutAPI } from './playout/playout' import { NewUserActionAPI, UserActionAPIMethods } from '@sofie-automation/meteor-lib/dist/api/userActions' import { EvaluationBase } from '@sofie-automation/meteor-lib/dist/collections/Evaluations' -import { IngestPart, IngestAdlib, ActionUserData } from '@sofie-automation/blueprints-integration' +import { IngestPart, IngestAdlib, ActionUserData, UserOperationTarget } from '@sofie-automation/blueprints-integration' import { storeRundownPlaylistSnapshot } from './snapshot' import { registerClassToMeteorMethods, ReplaceOptionalWithNullInMethodArguments } from '../methods' import { ServerRundownAPI } from './rundown' @@ -46,11 +46,12 @@ import { ShowStyleVariantId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestDataCache, Parts, Pieces, Rundowns } from '../collections' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCache, Parts, Pieces, Rundowns } from '../collections' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { verifyHashedToken } from './singleUseTokens' import { QuickLoopMarker } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { runIngestOperation } from './ingest/lib' +import { RundownPlaylistContentWriteAccess } from '../security/rundownPlaylist' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' async function pieceSetInOutPoints( @@ -69,10 +70,10 @@ async function pieceSetInOutPoints( }) if (!rundown) throw new Meteor.Error(501, `Rundown "${part.rundownId}" not found!`) - const partCache = await IngestDataCache.findOneAsync({ + const partCache = await NrcsIngestDataCache.findOneAsync({ rundownId: rundown._id, partId: part._id, - type: IngestCacheType.PART, + type: NrcsIngestCacheType.PART, }) if (!partCache) throw new Meteor.Error(404, `Part Cache for "${partId}" not found!`) const piece = await Pieces.findOneAsync(pieceId) @@ -1114,7 +1115,7 @@ class ServerUserActionAPI eventTime: Time, studioId: StudioId, routeSetId: string, - state: boolean + state: boolean | 'toggle' ): Promise> { return ServerClientAPI.runUserActionInLog( this, @@ -1125,7 
+1126,7 @@ class ServerUserActionAPI async () => { check(studioId, String) check(routeSetId, String) - check(state, Boolean) + check(state, Match.OneOf('toggle', Boolean)) const access = await StudioContentWriteAccess.routeSet(this, studioId) return ServerPlayoutAPI.switchRouteSet(access, routeSetId, state) @@ -1269,6 +1270,51 @@ class ServerUserActionAPI ) } + async executeUserChangeOperation( + userEvent: string, + eventTime: Time, + rundownId: RundownId, + operationTarget: UserOperationTarget, + operation: { id: string; [key: string]: any } + ): Promise> { + return ServerClientAPI.runUserActionInLog( + this, + userEvent, + eventTime, + 'executeUserChangeOperation', + { operationTarget, operation }, + async () => { + const access = await RundownPlaylistContentWriteAccess.rundown(this, rundownId) + if (!access.rundown) throw new Error(`Rundown "${rundownId}" not found`) + + await runIngestOperation(access.rundown.studioId, IngestJobs.UserExecuteChangeOperation, { + rundownExternalId: access.rundown.externalId, + operationTarget, + operation, + }) + } + ) + } + async clearQuickLoop( + userEvent: string, + eventTime: number, + playlistId: RundownPlaylistId + ): Promise> { + return ServerClientAPI.runUserActionInLogForPlaylistOnWorker( + this, + userEvent, + eventTime, + playlistId, + () => { + check(playlistId, String) + }, + StudioJobs.ClearQuickLoopMarkers, + { + playlistId, + } + ) + } + async createAdlibTestingRundownForShowStyleVariant( userEvent: string, eventTime: number, diff --git a/meteor/server/collections/collection.ts b/meteor/server/collections/collection.ts index c889709021..7f13f1839a 100644 --- a/meteor/server/collections/collection.ts +++ b/meteor/server/collections/collection.ts @@ -283,7 +283,7 @@ export interface AsyncOnlyReadOnlyMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle + ): Promise /** * Observe changes on this collection @@ -293,7 +293,7 @@ export interface AsyncOnlyReadOnlyMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle + ): Promise /** * Count the number of documents in a collection that match the selector.
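The hunk above changes `observe`/`observeChanges` on the async server collections to return a `Promise<Meteor.LiveQueryHandle>` (backed by Meteor's `observeChangesAsync`/`observeAsync` in the wrapper implementations below), so every caller now has to await the handle before it can stop it. The following is a minimal sketch of a caller after this change, not part of the patch; it borrows the `Studios` collection and the `mappingsWithOverrides` field from the surrounding hunks, while the helper name and import path are assumptions for illustration:

```ts
import { Meteor } from 'meteor/meteor'
import { Studios } from '../collections' // assumed path, as used elsewhere in this patch

// Hypothetical helper: watch studio mapping changes and run a callback.
// observeChanges now resolves only once the observer is ready, so it must be
// awaited (which is why callers above are wrapped in MeteorStartupAsync).
async function watchStudioMappings(onChange: (id: string) => void): Promise<Meteor.LiveQueryHandle> {
	const handle = await Studios.observeChanges(
		{},
		{
			added: (id) => onChange(String(id)),
			changed: (id) => onChange(String(id)),
			removed: (id) => onChange(String(id)),
		},
		{ fields: { mappingsWithOverrides: 1 } }
	)
	return handle // callers keep the handle and call handle.stop() to tear it down
}
```

The await matters because Meteor's async observers deliver the initial set of `added` callbacks before the returned promise resolves; a caller that needs the observer to be fully primed (as the publication code below does via `waitForAllObserversReady`) cannot treat the handle as synchronously available.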
diff --git a/meteor/server/collections/implementations/asyncCollection.ts b/meteor/server/collections/implementations/asyncCollection.ts index 506e48d5f4..864748a5b7 100644 --- a/meteor/server/collections/implementations/asyncCollection.ts +++ b/meteor/server/collections/implementations/asyncCollection.ts @@ -48,20 +48,20 @@ export class WrappedAsyncMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { - return this.find(selector as any, options).observeChanges(dePromiseObjectOfFunctions(callbacks)) + ): Promise { + return this.find(selector as any, options).observeChangesAsync(dePromiseObjectOfFunctions(callbacks)) } - observe( + async observe( selector: MongoQuery | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { - return this.find(selector as any, options).observe(dePromiseObjectOfFunctions(callbacks)) + ): Promise { + return this.find(selector as any, options).observeAsync(dePromiseObjectOfFunctions(callbacks)) } async insertAsync(doc: DBInterface): Promise { diff --git a/meteor/server/collections/implementations/mock.ts b/meteor/server/collections/implementations/mock.ts index d208efc59b..d8f0b6abbe 100644 --- a/meteor/server/collections/implementations/mock.ts +++ b/meteor/server/collections/implementations/mock.ts @@ -63,19 +63,19 @@ export class WrappedMockCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { + ): Promise { return this.find(selector, options).observeChanges(dePromiseObjectOfFunctions(callbacks)) } - observe( + async observe( selector: MongoQuery | DBInterface['_id'], callbacks: PromisifyCallbacks>, options?: FindOptions - ): Meteor.LiveQueryHandle { + ): Promise { return this.find(selector, options).observe(dePromiseObjectOfFunctions(callbacks)) } diff --git a/meteor/server/collections/implementations/readonlyWrapper.ts b/meteor/server/collections/implementations/readonlyWrapper.ts index eaf4418fd5..d2e0cd8949 100644 --- a/meteor/server/collections/implementations/readonlyWrapper.ts +++ b/meteor/server/collections/implementations/readonlyWrapper.ts @@ -53,13 +53,15 @@ export class WrappedReadOnlyMongoCollection['observeChanges']> - ): Meteor.LiveQueryHandle { + ): Promise { return this.#mutableCollection.observeChanges(...args) } - observe(...args: Parameters['observe']>): Meteor.LiveQueryHandle { + async observe( + ...args: Parameters['observe']> + ): Promise { return this.#mutableCollection.observe(...args) } diff --git a/meteor/server/collections/lib.ts b/meteor/server/collections/lib.ts index 161c3cfa47..5d9d64d9fc 100644 --- a/meteor/server/collections/lib.ts +++ b/meteor/server/collections/lib.ts @@ -60,7 +60,7 @@ export async function ObserveChangesHelper { diff --git a/meteor/server/collections/rundown.ts b/meteor/server/collections/rundown.ts index ff35b31591..270c03c2fa 100644 --- a/meteor/server/collections/rundown.ts +++ b/meteor/server/collections/rundown.ts @@ -1,7 +1,8 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj } from 
'@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' @@ -28,10 +29,23 @@ registerIndex(AdLibPieces, { _rank: 1, }) -export const IngestDataCache = createAsyncOnlyReadOnlyMongoCollection( - CollectionName.IngestDataCache +/** + * The NrcsIngestDataCache collection is used to store data that comes from an NRCS + */ +export const NrcsIngestDataCache = createAsyncOnlyReadOnlyMongoCollection( + CollectionName.NrcsIngestDataCache ) -registerIndex(IngestDataCache, { +registerIndex(NrcsIngestDataCache, { + rundownId: 1, +}) + +/** + * The SofieIngestDataCache collection is used to store data that comes from an NRCS and has been modified by Sofie + */ +export const SofieIngestDataCache = createAsyncOnlyReadOnlyMongoCollection( + CollectionName.SofieIngestDataCache +) +registerIndex(SofieIngestDataCache, { rundownId: 1, }) diff --git a/meteor/server/coreSystem/index.ts b/meteor/server/coreSystem/index.ts index 3d58469eff..fa1bb84d46 100644 --- a/meteor/server/coreSystem/index.ts +++ b/meteor/server/coreSystem/index.ts @@ -78,7 +78,7 @@ async function initializeCoreSystem() { } // Monitor database changes: - CoreSystem.observeChanges(SYSTEM_ID, { + await CoreSystem.observeChanges(SYSTEM_ID, { added: onCoreSystemChanged, changed: onCoreSystemChanged, removed: onCoreSystemChanged, @@ -88,7 +88,7 @@ async function initializeCoreSystem() { checkDatabaseVersions() } - Blueprints.observeChanges( + await Blueprints.observeChanges( {}, { added: observeBlueprintChanges, diff --git a/meteor/server/lib/ReactiveStore.ts b/meteor/server/lib/ReactiveStore.ts deleted file mode 100644 index 429da1ab93..0000000000 --- a/meteor/server/lib/ReactiveStore.ts +++ /dev/null @@ -1,128 +0,0 @@ -import { Meteor } from 'meteor/meteor' -import { Tracker } from 'meteor/tracker' -import * as _ from 'underscore' -import { getRandomString, ProtectedString } from './tempLib' -import { lazyIgnore } from '../lib/lib' - -/** The ReactiveStore is a Reactive key-value store. - * Keeps track of when the reactive values aren't in use anymore and automatically cleans them up. - */ -export class ReactiveStore | string, Value> { - private _store: Record< - string, - { - dep: Tracker.Dependency - computation?: Tracker.Computation - value: Value - } - > = {} - private _depsToBatchInvalidate: Tracker.Dependency[] = [] - private _name = getRandomString() - - constructor( - private options: { - /** Delays all Reactive updates with this time [ms] */ - delayUpdateTime?: number - } = {} - ) {} - /** - * Retrieves a value from the store. - * @param key Key to fetch the value from - * @param callbackGetValue (Optional) A Reactive function. If the value isn't found in the store, set up a Reactive watch for the value using this callback. - */ - getValue(key: Key, callbackGetValue?: () => Value): Value | undefined { - if (Meteor.isServer) { - // Server-side we won't use the cache at all. 
- return callbackGetValue?.() - } - - const key0 = key as unknown as string - let o = this._store[key0] - - if (!o) { - if (callbackGetValue) { - // Set up a Reactive watch for the value: - - this._store[key0] = o = { - dep: new Tracker.Dependency(), - computation: undefined, - value: undefined as any, - } - o.computation = Tracker.nonreactive(() => { - // Set up a new Reactive context for the callback: - return Tracker.autorun(() => { - // This function is invalidated and re-run whenever the value changes. - const newValue = callbackGetValue() - - const o = this._store[key0] - if (o) { - // Do an additional check whether the returned value actually changed: - if (!_.isEqual(o.value, newValue)) { - o.value = newValue - // Invaludate the dependency: - this.invalidateDependency(o.dep) - } - } - }) - }) - } else { - // No callback provided - return undefined - } - } - - if (Tracker.active && Tracker.currentComputation) { - Tracker.currentComputation.onStop(() => { - // Called when the reactive context of the caller of this.getValue is invalidated. - - if (!o.dep.hasDependents()) { - // If no-one is using it anymore, we should clean it out. - // Wait a bit, to give it a change to be reused. - setTimeout(() => { - const o = this._store[key0] - if (o) { - if (!o.dep.hasDependents()) { - this.removeValue(key) - } - } - }, 2000) - } - }) - // Depend, so that the reactive context will be invalidated whenever the value changes. - o.dep.depend() - } - return o.value - } - /** Remove a value from the store */ - private removeValue(key: Key) { - const key0 = key as unknown as string - const o = this._store[key0] - if (o) { - o.computation?.stop() - delete this._store[key0] - } - } - private invalidateDependency(dep: Tracker.Dependency) { - if (this.options.delayUpdateTime) { - // Delay and batch-invalidate all changes that might have come in until then: - this._depsToBatchInvalidate.push(dep) - lazyIgnore( - this._name, - () => { - for (const dep of this._depsToBatchInvalidate) { - dep.changed() - } - this._depsToBatchInvalidate = [] - }, - this.options.delayUpdateTime - ) - } else { - dep.changed() - } - } - clear(): void { - for (const key of Object.keys(this._store)) { - this.removeValue(key as unknown as Key) - } - } -} diff --git a/meteor/server/lib/customPublication/index.ts b/meteor/server/lib/customPublication/index.ts index e06cd0511a..836e015454 100644 --- a/meteor/server/lib/customPublication/index.ts +++ b/meteor/server/lib/customPublication/index.ts @@ -1,5 +1,5 @@ export { CustomPublishCollection } from './customPublishCollection' export { setUpOptimizedObserverArray } from './optimizedObserverArray' -export { TriggerUpdate } from './optimizedObserverBase' +export { TriggerUpdate, SetupObserversResult } from './optimizedObserverBase' export { setUpCollectionOptimizedObserver } from './optimizedObserverCollection' export { meteorCustomPublish, CustomPublish, CustomPublishChanges } from './publish' diff --git a/meteor/server/lib/customPublication/optimizedObserverArray.ts b/meteor/server/lib/customPublication/optimizedObserverArray.ts index bd31ce073b..7a677cc0b7 100644 --- a/meteor/server/lib/customPublication/optimizedObserverArray.ts +++ b/meteor/server/lib/customPublication/optimizedObserverArray.ts @@ -1,10 +1,9 @@ import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { clone, ProtectedString } from '../tempLib' -import { TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' +import { SetupObserversResult, TriggerUpdate, 
setUpOptimizedObserverInner } from './optimizedObserverBase' import { CustomPublish, CustomPublishChanges } from './publish' import { diffObject } from './lib' -import { LiveQueryHandle } from '../lib' /** * This is an optimization to enable multiple listeners that observes (and manipulates) the same data, to only use one observer and manipulator, @@ -29,7 +28,7 @@ export async function setUpOptimizedObserverArray< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, diff --git a/meteor/server/lib/customPublication/optimizedObserverBase.ts b/meteor/server/lib/customPublication/optimizedObserverBase.ts index e25720d109..077259ced4 100644 --- a/meteor/server/lib/customPublication/optimizedObserverBase.ts +++ b/meteor/server/lib/customPublication/optimizedObserverBase.ts @@ -9,6 +9,7 @@ import { logger } from '../../logging' import { ReactiveCacheCollection } from '../../publications/lib/ReactiveCacheCollection' import { LiveQueryHandle, lazyIgnore } from '../lib' import { CustomPublish, CustomPublishChanges } from './publish' +import { waitForAllObserversReady } from '../../publications/lib/lib' const apmNamespace = 'optimizedObserver' @@ -41,6 +42,8 @@ const optimizedObservers: Record> = (updateProps: Partial) => void +export type SetupObserversResult = Array | LiveQueryHandle> + /** * This should not be used directly, and should be used through one of the setUpOptimizedObserverArray or setUpCollectionOptimizedObserver wrappers * @@ -65,7 +68,7 @@ export async function setUpOptimizedObserverInner< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, @@ -186,7 +189,7 @@ async function createOptimizedObserverWorker< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, @@ -324,7 +327,7 @@ async function createOptimizedObserverWorker< try { // Setup the mongo observers - const observers = await setupObservers(args, triggerUpdate) + const observers = await waitForAllObserversReady(await setupObservers(args, triggerUpdate)) thisObserverWorker = { args: args, diff --git a/meteor/server/lib/customPublication/optimizedObserverCollection.ts b/meteor/server/lib/customPublication/optimizedObserverCollection.ts index aad28131b2..5cbd25801a 100644 --- a/meteor/server/lib/customPublication/optimizedObserverCollection.ts +++ b/meteor/server/lib/customPublication/optimizedObserverCollection.ts @@ -1,8 +1,7 @@ import { ReadonlyDeep } from 'type-fest' import { ProtectedString } from '../tempLib' -import { LiveQueryHandle } from '../lib' import { CustomPublishCollection } from './customPublishCollection' -import { TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' +import { SetupObserversResult, TriggerUpdate, setUpOptimizedObserverInner } from './optimizedObserverBase' import { CustomPublish } from './publish' /** @@ -28,7 +27,7 @@ export async function setUpCollectionOptimizedObserver< args: ReadonlyDeep, /** Trigger an update by mutating the context of manipulateData */ triggerUpdate: TriggerUpdate - ) => Promise, + ) => Promise, manipulateData: ( args: ReadonlyDeep, state: Partial, diff --git a/meteor/server/lib/customPublication/publish.ts 
b/meteor/server/lib/customPublication/publish.ts index 0a864a28dc..b9ac5fc402 100644 --- a/meteor/server/lib/customPublication/publish.ts +++ b/meteor/server/lib/customPublication/publish.ts @@ -2,7 +2,7 @@ import { UserId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Meteor } from 'meteor/meteor' import { AllPubSubTypes } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { ProtectedString, unprotectString } from '../tempLib' -import { PublishDocType, SubscriptionContext, meteorPublishUnsafe } from '../../publications/lib' +import { PublishDocType, SubscriptionContext, meteorPublishUnsafe } from '../../publications/lib/lib' export interface CustomPublishChanges }> { added: Array diff --git a/meteor/server/lib/lib.ts b/meteor/server/lib/lib.ts index 860b680496..dee6d8aa8a 100644 --- a/meteor/server/lib/lib.ts +++ b/meteor/server/lib/lib.ts @@ -57,7 +57,7 @@ export function MeteorWrapAsync(func: Function, context?: Object): any { } const lazyIgnoreCache: { [name: string]: number } = {} -export function lazyIgnore(name: string, f1: () => Promise | void, t: number): void { +export function lazyIgnore(name: string, f1: () => void, t: number): void { // Don't execute the function f1 until the time t has passed. // Subsequent calls will extend the laziness and ignore the previous call @@ -66,12 +66,11 @@ export function lazyIgnore(name: string, f1: () => Promise | void, t: numb } lazyIgnoreCache[name] = Meteor.setTimeout(() => { delete lazyIgnoreCache[name] - if (Meteor.isClient) { - f1()?.catch((e) => { - throw new Error(e) - }) - } else { - waitForPromise(f1()) + + try { + f1() + } catch (e) { + logger.error(`Unhandled error in lazyIgnore "${name}": ${stringifyError(e)}`) } }, t) } diff --git a/meteor/server/lib/reactiveMap.ts b/meteor/server/lib/reactiveMap.ts deleted file mode 100644 index 67ec848e42..0000000000 --- a/meteor/server/lib/reactiveMap.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Tracker } from 'meteor/tracker' - -export class ReactiveMap { - private baseMap = new Map() - private dependencyMap = new Map() - private globalDependency = new Tracker.Dependency() - - set(key: string, value: T): void { - const prevVal = this.baseMap.get(key) - this.baseMap.set(key, value) - if (this.dependencyMap.has(key) && prevVal !== value) { - this.dependencyMap.get(key)?.changed() - } else { - this.dependencyMap.set(key, new Tracker.Dependency()) - } - if (prevVal !== value) this.globalDependency.changed() - } - - get(key: string): T | undefined { - if (this.dependencyMap.has(key)) { - this.dependencyMap.get(key)?.depend() - } else { - const dependency = new Tracker.Dependency() - dependency?.depend() - this.dependencyMap.set(key, dependency) - } - return this.baseMap.get(key) - } - - getAll(): { [key: string]: T } { - const result: { [key: string]: T } = {} - for (const [key, value] of this.baseMap.entries()) { - result[key] = value - } - this.globalDependency.depend() - return result - } -} diff --git a/meteor/server/lib/rest/v1/buckets.ts b/meteor/server/lib/rest/v1/buckets.ts index 83f45113f3..bafe7c1136 100644 --- a/meteor/server/lib/rest/v1/buckets.ts +++ b/meteor/server/lib/rest/v1/buckets.ts @@ -114,7 +114,7 @@ export interface APIImportAdlib { externalId: string name: string payloadType: string - payload?: unknown + payload: unknown | undefined showStyleBaseId: string } diff --git a/meteor/server/lib/rest/v1/playlists.ts b/meteor/server/lib/rest/v1/playlists.ts index ec4c21e468..fbbbd8cc8f 100644 --- a/meteor/server/lib/rest/v1/playlists.ts +++ 
b/meteor/server/lib/rest/v1/playlists.ts @@ -74,7 +74,8 @@ export interface PlaylistsRestAPI { event: string, rundownPlaylistId: RundownPlaylistId, adLibId: AdLibActionId | RundownBaselineAdLibActionId | PieceId | BucketAdLibId, - triggerMode?: string + triggerMode?: string, + adLibOptions?: any ): Promise> /** * Executes the requested Bucket AdLib/AdLib Action. This is a Bucket AdLib (Action) that has been previously inserted into a Bucket. diff --git a/meteor/server/lib/rest/v1/showstyles.ts b/meteor/server/lib/rest/v1/showstyles.ts index 19b4c85586..9668629b52 100644 --- a/meteor/server/lib/rest/v1/showstyles.ts +++ b/meteor/server/lib/rest/v1/showstyles.ts @@ -59,6 +59,34 @@ export interface ShowStylesRestAPI { showStyleBaseId: ShowStyleBaseId, showStyleBase: APIShowStyleBase ): Promise> + /** + * Gets a ShowStyle config, if the ShowStyle id exists. + * + * Throws if the specified ShowStyle does not exist. + * @param connection Connection data including client and header details + * @param event User event string + * @param showStyleBaseId ShowStyleBaseId to fetch + */ + getShowStyleConfig( + connection: Meteor.Connection, + event: string, + showStyleBaseId: ShowStyleBaseId + ): Promise> + /** + * Updates a ShowStyle configuration. + * + * Throws if the ShowStyle is in use in an active Rundown. + * @param connection Connection data including client and header details + * @param event User event string + * @param showStyleBaseId Id of the ShowStyleBase to update + * @param config Blueprint configuration object + */ + updateShowStyleConfig( + connection: Meteor.Connection, + event: string, + showStyleBaseId: ShowStyleBaseId, + config: object + ): Promise> /** * Removed a ShowStyleBase. * @@ -192,6 +220,7 @@ export interface APIShowStyleBase { export interface APIShowStyleVariant { name: string showStyleBaseId: string + blueprintConfigPresetId?: string config: object rank: number } diff --git a/meteor/server/lib/rest/v1/studios.ts b/meteor/server/lib/rest/v1/studios.ts index 211ea2c34e..5feda68fa7 100644 --- a/meteor/server/lib/rest/v1/studios.ts +++ b/meteor/server/lib/rest/v1/studios.ts @@ -56,6 +56,34 @@ export interface StudiosRestAPI { studioId: StudioId, studio: APIStudio ): Promise> + /** + * Gets a Studio config, if it exists. + * + * Throws if the specified Studio does not exist. + * @param connection Connection data including client and header details + * @param event User event string + * @param studioId Id of the Studio to fetch + */ + getStudioConfig( + connection: Meteor.Connection, + event: string, + studioId: StudioId + ): Promise> + /** + * Updates a Studio configuration. + * + * Throws if the Studio is in use in an active Rundown. + * @param connection Connection data including client and header details + * @param event User event string + * @param studioId Id of the Studio to update + * @param config Blueprint configuration object + */ + updateStudioConfig( + connection: Meteor.Connection, + event: string, + studioId: StudioId, + config: object + ): Promise> /** * Deletes a Studio.
* @@ -186,4 +214,8 @@ export interface APIStudioSettings { forceQuickLoopAutoNext?: 'disabled' | 'enabled_when_valid_duration' | 'enabled_forcing_min_duration' minimumTakeSpan?: number fallbackPartDuration?: number + allowAdlibTestingSegment?: boolean + allowHold?: boolean + allowPieceDirectPlay?: boolean + enableBuckets?: boolean } diff --git a/meteor/server/main.ts b/meteor/server/main.ts index 7b6b7fa73d..30c7678f5c 100644 --- a/meteor/server/main.ts +++ b/meteor/server/main.ts @@ -3,26 +3,11 @@ */ import { Meteor } from 'meteor/meteor' -import { setMinimumBrowserVersions } from 'meteor/modern-browsers' Meteor.startup(() => { console.log('startup') }) -setMinimumBrowserVersions( - { - chrome: 80, - firefox: 74, - edge: 80, - ie: Infinity, - mobile_safari: [13, 4], - opera: 67, - safari: [13, 1], - electron: 6, - }, - 'optional chaining' -) - // Import all files that register Meteor methods: import './api/blueprints/api' import './api/blueprints/http' diff --git a/meteor/server/migration/0_1_0.ts b/meteor/server/migration/0_1_0.ts index d4fb2930fc..461ce95d6f 100644 --- a/meteor/server/migration/0_1_0.ts +++ b/meteor/server/migration/0_1_0.ts @@ -441,13 +441,16 @@ export const addSteps = addMigrationSteps('0.1.0', [ frameRate: 25, mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: false, + allowPieceDirectPlay: false, + enableBuckets: true, }, mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), thumbnailContainerIds: [], previewContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/migration/1_40_0.ts b/meteor/server/migration/1_40_0.ts index 5e669407df..c418d34278 100644 --- a/meteor/server/migration/1_40_0.ts +++ b/meteor/server/migration/1_40_0.ts @@ -24,8 +24,6 @@ interface ISettingsOld { allowRundownResetOnAir: boolean /** Default duration to use to render parts when no duration is provided */ defaultDisplayDuration: number - /** If true, allows creation of new playlists in the Lobby Gui (rundown list). If false; only pre-existing playlists are allowed. 
*/ - allowMultiplePlaylistsInGUI: boolean /** How many segments of history to show when scrolling back in time (0 = show current segment only) */ followOnAirSegmentsHistory: number /** Clean up stuff that are older than this [ms] */ diff --git a/meteor/server/migration/1_42_0.ts b/meteor/server/migration/1_42_0.ts deleted file mode 100644 index 0d6278e580..0000000000 --- a/meteor/server/migration/1_42_0.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { addMigrationSteps } from './databaseMigration' -import { StudioRouteSet, StudioRouteType } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { Studios } from '../collections' - -// Release 42 - -export const addSteps = addMigrationSteps('1.42.0', [ - { - id: 'Add new routeType property to routeSets where missing', - canBeRunAutomatically: true, - validate: async () => { - return ( - (await Studios.countDocuments({ - routeSets: { $exists: false }, - })) > 0 - ) - }, - migrate: async () => { - const studios = await Studios.findFetchAsync({}) - - for (const studio of studios) { - const routeSets = studio.routeSets - - Object.entries(routeSets).forEach(([routeSetId, routeSet]) => { - routeSet.routes.forEach((route) => { - if (!route.routeType) { - route.routeType = StudioRouteType.REROUTE - } - }) - - routeSets[routeSetId] = routeSet - }) - - await Studios.updateAsync(studio._id, { $set: { routeSets } }) - } - }, - }, -]) diff --git a/meteor/server/migration/1_50_0.ts b/meteor/server/migration/1_50_0.ts index a804a72ad7..9d85f90c98 100644 --- a/meteor/server/migration/1_50_0.ts +++ b/meteor/server/migration/1_50_0.ts @@ -83,10 +83,10 @@ function convertMappingsOverrideOps(studio: DBStudio) { return changed && newOverrides } -function convertRouteSetMappings(studio: DBStudio) { +function convertRouteSetMappings(routeSets: Record) { let changed = false - const newRouteSets = clone(studio.routeSets || {}) + const newRouteSets = clone(routeSets || {}) for (const routeSet of Object.values(newRouteSets)) { for (const route of routeSet.routes) { if (route.remapping && !route.remapping.options) { @@ -95,7 +95,7 @@ function convertRouteSetMappings(studio: DBStudio) { ..._.pick(route.remapping, ...mappingBaseOptions), options: _.omit(route.remapping, ...mappingBaseOptions), } - console.log('new route', route) + // console.log('new route', route) changed = true } } @@ -247,10 +247,13 @@ export const addSteps = addMigrationSteps('1.50.0', [ canBeRunAutomatically: true, validate: async () => { const studios = await Studios.findFetchAsync({ routeSets: { $exists: true } }) - for (const studio of studios) { - const newOverrides = convertRouteSetMappings(studio) - if (newOverrides) { + // Ignore this if the routeSets has been converted into an ObjectWithOverrides: + if (studio.routeSetsWithOverrides) continue + //@ts-expect-error routeSets are not part of the typings: + const plainRouteSets = studio.routeSets as any as Record + const newRouteSets = convertRouteSetMappings(plainRouteSets) + if (newRouteSets) { return `object needs to be updated` } } @@ -261,7 +264,12 @@ export const addSteps = addMigrationSteps('1.50.0', [ const studios = await Studios.findFetchAsync({ routeSets: { $exists: true } }) for (const studio of studios) { - const newRouteSets = convertRouteSetMappings(studio) + // Ignore this if the routeSets has already been converted into an ObjectWithOverrides: + if (studio.routeSetsWithOverrides) continue + //@ts-expect-error routeSets are not part of the typings: + const plainRouteSets = studio.routeSets as any as Record + + const
newRouteSets = convertRouteSetMappings(plainRouteSets) if (newRouteSets) { await Studios.updateAsync(studio._id, { diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index ea8bc303e5..0b7409ea77 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -1,5 +1,12 @@ import { addMigrationSteps } from './databaseMigration' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' +import { Studios } from '../collections' +import { convertObjectIntoOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { + StudioRouteSet, + StudioRouteSetExclusivityGroup, + StudioPackageContainer, +} from '@sofie-automation/corelib/dist/dataModel/Studio' /* * ************************************************************************************** @@ -12,5 +19,209 @@ import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' */ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ - // Add some migrations here: + // Add your migration here + + { + id: `convert routesets to ObjectWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + routeSets: { $exists: true }, + routeSetsWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error routeSets is not typed as ObjectWithOverrides + if (studio.routeSets) { + return 'routesets must be converted to an ObjectWithOverrides' + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + routeSets: { $exists: true }, + routeSetsWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error routeSets is typed as Record + const oldRouteSets = studio.routeSets + + const newRouteSets = convertObjectIntoOverrides(oldRouteSets || {}) + + await Studios.updateAsync(studio._id, { + $set: { + routeSetsWithOverrides: newRouteSets, + }, + $unset: { + routeSets: 1, + }, + }) + } + }, + }, + { + id: `add abPlayers object`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ routeSetsWithOverrides: { $exists: true } }) + + for (const studio of studios) { + const routeSetsDefaults = studio.routeSetsWithOverrides.defaults as any as Record< + string, + StudioRouteSet + > + for (const key of Object.keys(routeSetsDefaults)) { + if (!routeSetsDefaults[key].abPlayers) { + return 'AB players must be added to routeSetsWithOverrides' + } + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ routeSetsWithOverrides: { $exists: true } }) + + for (const studio of studios) { + const newRouteSetsWithOverrides = studio.routeSetsWithOverrides + for (const key of Object.keys(newRouteSetsWithOverrides.defaults)) { + if (!newRouteSetsWithOverrides.defaults[key].abPlayers) { + newRouteSetsWithOverrides.defaults[key].abPlayers = [] + } + } + + await Studios.updateAsync(studio._id, { + $set: { + routeSetsWithOverrides: newRouteSetsWithOverrides, + }, + }) + } + }, + }, + { + id: `convert routeSetExclusivityGroups to ObjectWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + routeSetExclusivityGroups: { $exists: true }, + routeSetExclusivityGroupsWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error routeSetExclusivityGroups is not typed as ObjectWithOverrides + if (studio.routeSetExclusivityGroups) { +
return 'routeSetExclusivityGroups must be converted to an ObjectWithOverrides' + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + routeSetExclusivityGroups: { $exists: true }, + routeSetExclusivityGroupsWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + //@ts-expect-error routeSetExclusivityGroups is typed as Record + const oldRouteSetExclusivityGroups = studio.routeSetExclusivityGroups + + const newRouteSetExclusivityGroups = convertObjectIntoOverrides( + oldRouteSetExclusivityGroups || {} + ) + + await Studios.updateAsync(studio._id, { + $set: { + routeSetExclusivityGroupsWithOverrides: newRouteSetExclusivityGroups, + }, + $unset: { + routeSetExclusivityGroups: 1, + }, + }) + } + }, + }, + { + id: `convert packageContainers to ObjectWithOverrides`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + packageContainers: { $exists: true }, + packageContainersWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + // @ts-expect-error packageContainers is typed as Record + if (studio.packageContainers) { + return 'packageContainers must be converted to an ObjectWithOverrides' + } + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + packageContainers: { $exists: true }, + packageContainersWithOverrides: { $exists: false }, + }) + + for (const studio of studios) { + // @ts-expect-error packageContainers is typed as Record + const oldPackageContainers = studio.packageContainers + + const newPackageContainers = convertObjectIntoOverrides( + oldPackageContainers || {} + ) + + await Studios.updateAsync(studio._id, { + $set: { + packageContainersWithOverrides: newPackageContainers, + }, + $unset: { + packageContainers: 1, + }, + }) + } + }, + }, + + { + id: `add studio settings allowHold & allowPieceDirectPlay`, + canBeRunAutomatically: true, + validate: async () => { + const studios = await Studios.findFetchAsync({ + $or: [ + { 'settings.allowHold': { $exists: false } }, + { 'settings.allowPieceDirectPlay': { $exists: false } }, + ], + }) + + if (studios.length > 0) { + return 'studios must have settings.allowHold and settings.allowPieceDirectPlay defined' + } + + return false + }, + migrate: async () => { + const studios = await Studios.findFetchAsync({ + $or: [ + { 'settings.allowHold': { $exists: false } }, + { 'settings.allowPieceDirectPlay': { $exists: false } }, + ], + }) + + for (const studio of studios) { + // Populate the settings to be backwards compatible + await Studios.updateAsync(studio._id, { + $set: { + 'settings.allowHold': true, + 'settings.allowPieceDirectPlay': true, + }, + }) + } + }, + }, ]) diff --git a/meteor/server/migration/__tests__/migrations.test.ts b/meteor/server/migration/__tests__/migrations.test.ts index e3a69a6498..5e9168964d 100644 --- a/meteor/server/migration/__tests__/migrations.test.ts +++ b/meteor/server/migration/__tests__/migrations.test.ts @@ -126,13 +126,16 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), +
packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -164,13 +167,16 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -202,13 +208,16 @@ describe('Migrations', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, mappingsWithOverrides: wrapDefaultObject({}), blueprintConfigWithOverrides: wrapDefaultObject({}), _rundownVersionHash: '', - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { diff --git a/meteor/server/migration/migrations.ts b/meteor/server/migration/migrations.ts index 578fd6b955..7a7d45f2f5 100644 --- a/meteor/server/migration/migrations.ts +++ b/meteor/server/migration/migrations.ts @@ -12,8 +12,7 @@ addSteps1_40_0() import { addSteps as addSteps1_41_0 } from './1_41_0' addSteps1_41_0() -import { addSteps as addSteps1_42_0 } from './1_42_0' -addSteps1_42_0() +// Note: There were no migrations for Release 42 import { addSteps as addSteps1_44_0 } from './1_44_0' addSteps1_44_0() diff --git a/meteor/server/publications/_publications.ts b/meteor/server/publications/_publications.ts index 30d002268b..8bcb30b0b1 100644 --- a/meteor/server/publications/_publications.ts +++ b/meteor/server/publications/_publications.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import './lib' +import './lib/lib' import './buckets' import './blueprintUpgradeStatus/publication' @@ -28,7 +28,7 @@ import './mountedTriggers' import './deviceTriggersPreview' import { AllPubSubNames } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { MeteorPublications } from './lib' +import { MeteorPublications } from './lib/lib' import { logger } from '../logging' // Ensure all the publications were registered at startup diff --git a/meteor/server/publications/blueprintUpgradeStatus/publication.ts b/meteor/server/publications/blueprintUpgradeStatus/publication.ts index 0218e93e90..568f9b0756 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/publication.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/publication.ts @@ -7,12 +7,12 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' -import { LiveQueryHandle } from '../../lib/lib' import { ContentCache, createReactiveContentCache, ShowStyleBaseFields, StudioFields } from './reactiveContentCache'
import { UpgradesContentObserver } from './upgradesContentObserver' import { BlueprintMapEntry, checkDocUpgradeStatus } from './checkStatus' @@ -41,14 +41,14 @@ interface BlueprintUpgradeStatusUpdateProps { async function setupBlueprintUpgradeStatusPublicationObservers( _args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // TODO - can this be done cheaper? const cache = createReactiveContentCache() // Push update triggerUpdate({ newCache: cache }) - const mongoObserver = new UpgradesContentObserver(cache) + const mongoObserver = await UpgradesContentObserver.create(cache) // Set up observers: return [ diff --git a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts index c26ea657e5..a88ba8575b 100644 --- a/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts +++ b/meteor/server/publications/blueprintUpgradeStatus/upgradesContentObserver.ts @@ -7,16 +7,21 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { Blueprints, ShowStyleBases, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class UpgradesContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor(cache: ContentCache) { - logger.silly(`Creating UpgradesContentObserver`) + constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create(cache: ContentCache): Promise { + logger.silly(`Creating UpgradesContentObserver`) + + const observers = await waitForAllObserversReady([ Studios.observeChanges({}, cache.Studios.link(), { projection: studioFieldSpecifier, }), @@ -26,7 +31,9 @@ export class UpgradesContentObserver { Blueprints.observeChanges({}, cache.Blueprints.link(), { projection: blueprintFieldSpecifier, }), - ] + ]) + + return new UpgradesContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/buckets.ts b/meteor/server/publications/buckets.ts index 8db38ce907..3801f8b467 100644 --- a/meteor/server/publications/buckets.ts +++ b/meteor/server/publications/buckets.ts @@ -1,6 +1,6 @@ import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { BucketSecurity } from '../security/buckets' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { StudioReadAccess } from '../security/studio' diff --git a/meteor/server/publications/deviceTriggersPreview.ts b/meteor/server/publications/deviceTriggersPreview.ts index c5aeed97a5..67e9edbf03 100644 --- a/meteor/server/publications/deviceTriggersPreview.ts +++ b/meteor/server/publications/deviceTriggersPreview.ts @@ -7,7 +7,7 @@ import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { DeviceTriggerArguments, UIDeviceTriggerPreview } from '@sofie-automation/meteor-lib/dist/api/MountedTriggers' import { getCurrentTime } from '../lib/lib' -import { setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' +import { SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate } 
from '../lib/customPublication' import { CustomPublish, meteorCustomPublish } from '../lib/customPublication/publish' import { StudioReadAccess } from '../security/studio' import { PeripheralDevices } from '../collections' @@ -73,7 +73,7 @@ function prepareTriggerBufferForStudio(studioId: string) { async function setupDeviceTriggersPreviewsObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const studioId = unprotectString(args.studioId) const lastTriggersStudio = prepareTriggerBufferForStudio(studioId) diff --git a/meteor/server/publications/lib/PromiseDebounce.ts b/meteor/server/publications/lib/PromiseDebounce.ts new file mode 100644 index 0000000000..5af796d9ab --- /dev/null +++ b/meteor/server/publications/lib/PromiseDebounce.ts @@ -0,0 +1,129 @@ +import { Meteor } from 'meteor/meteor' + +/** + * Based on https://github.com/sindresorhus/p-debounce + * With additional features: + * - `cancelWaiting` method + * - ensures only one execution in progress at a time + */ +export class PromiseDebounce { + readonly #fn: (...args: TArgs) => Promise + readonly #wait: number + + /** Arguments for a pending execution (set when a trigger arrives while waiting or executing) */ + #pendingArgs: TArgs | null = null + #timeout: number | undefined + + #isExecuting = false + #waitingListeners: Listener[] = [] + + constructor(fn: (...args: TArgs) => Promise, wait: number) { + this.#fn = fn + this.#wait = wait + } + + /** + * Trigger an execution, and get the result. + * @returns A promise that resolves with the result of the function + */ + call = async (...args: TArgs): Promise => { + return new Promise((resolve, reject) => { + const listener: Listener = { resolve, reject } + this.#waitingListeners.push(listener) + + // Trigger an execution + this.trigger(...args) + }) + } + + /** + * Trigger an execution, but don't report the result. + */ + trigger = (...args: TArgs): void => { + // If an execution is already pending, just update the args it will use + if (this.#pendingArgs) { + this.#pendingArgs = args + return + } + + // Clear an existing timeout + if (this.#timeout) Meteor.clearTimeout(this.#timeout) + + // Start a new one + this.#timeout = Meteor.setTimeout(() => { + this.#timeout = undefined + + this.executeFn(args) + }, this.#wait) + } + + private executeFn(args: TArgs): void { + // If an execution is still in progress, mark as pending and stop + if (this.#isExecuting) { + this.#pendingArgs = args + return + } + + // We have the all-clear to begin executing + this.#isExecuting = true + this.#pendingArgs = null + + // Collect up the listeners for this execution + const listeners = this.#waitingListeners + this.#waitingListeners = [] + + Promise.resolve() + .then(async () => { + const result = await this.#fn(...args) + for (const listener of listeners) { + listener.resolve(result) + } + }) + .catch((error) => { + for (const listener of listeners) { + listener.reject(error) + } + }) + .finally(() => { + this.#isExecuting = false + + // If there is a pending execution, run that soon + if (this.#pendingArgs) { + const args = this.#pendingArgs + Meteor.setTimeout(() => this.executeFn(args), 0) + } + }) + } + + /** + * Cancel any waiting execution + */ + cancelWaiting = (error?: Error): void => { + this.#pendingArgs = null + + if (this.#timeout) { + Meteor.clearTimeout(this.#timeout) + this.#timeout = undefined + } + + // Inform any listeners + if (this.#waitingListeners.length > 0) { + const listeners = this.#waitingListeners + this.#waitingListeners = [] + + error = error ??
new Error('Cancelled') + + // Inform the listeners in the next tick + Meteor.defer(() => { + for (const listener of listeners) { + listener.reject(error) + } + }) + } + } +} + +interface Listener { + resolve: (value: TResult) => void + reject: (reason?: any) => void +} diff --git a/meteor/server/publications/lib/__tests__/PromiseDebounce.test.ts b/meteor/server/publications/lib/__tests__/PromiseDebounce.test.ts new file mode 100644 index 0000000000..0a0264c1dd --- /dev/null +++ b/meteor/server/publications/lib/__tests__/PromiseDebounce.test.ts @@ -0,0 +1,273 @@ +import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' +import { PromiseDebounce } from '../PromiseDebounce' + +describe('PromiseDebounce', () => { + beforeEach(() => { + jest.useFakeTimers() + }) + + it('trigger', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger()).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Wait a bit more + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(1) + + // No more calls + fn.mockClear() + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('call', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Wait a bit more + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(1) + + // Should resolve without any more timer ticking + await expect(ps).resolves.toBe(undefined) + + // No more calls + fn.mockClear() + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('cancelWaiting - trigger', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger()).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Cancel waiting + debounce.cancelWaiting() + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + }) + + it('cancelWaiting - call', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + ps.catch(() => null) // Add an error handler + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Cancel waiting + debounce.cancelWaiting() + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + + // Should have rejected + await expect(ps).rejects.toThrow('Cancelled') + }) + + it('cancelWaiting - call with error', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + const ps = debounce.call() + ps.catch(() => null) // Add an error handler + expect(ps).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // 
Cancel waiting + debounce.cancelWaiting(new Error('Custom error')) + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(0) + + // Should have rejected + await expect(ps).rejects.toThrow('Custom error') + }) + + it('trigger - multiple', async () => { + const fn = jest.fn() + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for a bit + await jest.advanceTimersByTimeAsync(6) + expect(fn).toHaveBeenCalledTimes(0) + + // Trigger again + expect(debounce.trigger(3)).toBe(undefined) + expect(debounce.trigger(5)).toBe(undefined) + + // Wait until the timer should have fired + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + }) + + it('trigger - during slow execution', async () => { + const fn = jest.fn(async () => sleep(100)) + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + expect(debounce.trigger(3)).toBe(undefined) + await jest.advanceTimersByTimeAsync(20) + expect(debounce.trigger(5)).toBe(undefined) + + // Wait until the second timer should have fired + await jest.advanceTimersByTimeAsync(100) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + }) + + it('call - return value', async () => { + const fn = jest.fn(async (val) => { + await sleep(100) + return val + }) + const debounce = new PromiseDebounce(fn, 10) + + const ps1 = debounce.call(1) + expect(ps1).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + const ps3 = debounce.call(3) + await jest.advanceTimersByTimeAsync(20) + const ps5 = debounce.call(5) + + // Wait until the second timer should have fired + await jest.advanceTimersByTimeAsync(150) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + + await expect(ps1).resolves.toBe(1) + await expect(ps3).resolves.toBe(5) + await expect(ps5).resolves.toBe(5) + }) + + it('call - throw error', async () => { + const fn = jest.fn(async (val) => { + await sleep(100) + throw new Error(`Bad value: ${val}`) + }) + const debounce = new PromiseDebounce(fn, 10) + + const ps1 = debounce.call(1) + ps1.catch(() => null) // Add an error handler + expect(ps1).not.toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + const ps3 = debounce.call(3) + ps3.catch(() => null) // Add an error handler + await jest.advanceTimersByTimeAsync(20) + const ps5 = debounce.call(5) + ps5.catch(() => null) // Add an error handler + + // Wait until the second timer should have fired + await jest.advanceTimersByTimeAsync(150) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(5) + + await expect(ps1).rejects.toThrow('Bad value: 1') + await
expect(ps3).rejects.toThrow('Bad value: 5') + await expect(ps5).rejects.toThrow('Bad value: 5') + }) + + it('cancelWaiting - during slow execution', async () => { + const fn = jest.fn(async () => sleep(100)) + const debounce = new PromiseDebounce(fn, 10) + + // No promise returned + expect(debounce.trigger(1)).toBe(undefined) + // Not called yet + expect(fn).toHaveBeenCalledTimes(0) + + // Wait for it to start executing + await jest.advanceTimersByTimeAsync(50) + expect(fn).toHaveBeenCalledTimes(1) + expect(fn).toHaveBeenCalledWith(1) + + // Trigger again + fn.mockClear() + expect(debounce.trigger(3)).toBe(undefined) + await jest.advanceTimersByTimeAsync(20) + expect(debounce.trigger(5)).toBe(undefined) + + debounce.cancelWaiting() + + // Wait until the second timer should have fired + await jest.advanceTimersByTimeAsync(100) + expect(fn).toHaveBeenCalledTimes(0) + }) +}) diff --git a/meteor/server/publications/lib/__tests__/lib.test.ts b/meteor/server/publications/lib/__tests__/lib.test.ts new file mode 100644 index 0000000000..61a8ed52cc --- /dev/null +++ b/meteor/server/publications/lib/__tests__/lib.test.ts @@ -0,0 +1,63 @@ +import { Meteor } from 'meteor/meteor' +import { waitForAllObserversReady } from '../lib' +import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' + +describe('waitForAllObserversReady', () => { + // beforeEach(() => { + // jest.useFakeTimers() + // }) + + it('no observers', async () => { + await expect(waitForAllObserversReady([])).resolves.toHaveLength(0) + }) + + async function createFakeObserver(waitTime: number, stopFn: () => void): Promise<Meteor.LiveQueryHandle> { + await sleep(waitTime) + + return { + stop: stopFn, + } + } + + async function createBadObserver(waitTime: number): Promise<Meteor.LiveQueryHandle> { + await sleep(waitTime) + + throw new Error('Some error') + } + + function stopAll(observers: Meteor.LiveQueryHandle[]) { + observers.forEach((o) => o.stop()) + } + + it('multiple good observers', async () => { + const stopFn = jest.fn() + + const res = waitForAllObserversReady([ + createFakeObserver(10, stopFn), + createFakeObserver(12, stopFn), + createFakeObserver(10, stopFn), + createFakeObserver(8, stopFn), + ]) + await expect(res).resolves.toHaveLength(4) + + expect(stopFn).toHaveBeenCalledTimes(0) + + stopAll(await res) + expect(stopFn).toHaveBeenCalledTimes(4) + }) + + it('multiple good with a bad observer', async () => { + const stopFn = jest.fn() + + const res = waitForAllObserversReady([ + createFakeObserver(10, stopFn), + createFakeObserver(12, stopFn), + createBadObserver(10), + createFakeObserver(8, stopFn), + ]) + await expect(res).rejects.toThrow('Some error') + + // Successful ones should be stopped + expect(stopFn).toHaveBeenCalledTimes(3) + }) +}) diff --git a/meteor/server/publications/lib/__tests__/observerGroup.test.ts b/meteor/server/publications/lib/__tests__/observerGroup.test.ts index 20a880af98..c48722a08f 100644 --- a/meteor/server/publications/lib/__tests__/observerGroup.test.ts +++ b/meteor/server/publications/lib/__tests__/observerGroup.test.ts @@ -10,7 +10,7 @@ describe('ReactiveMongoObserverGroup', () => { test('cleanup on stop', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () => [handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) @@ -39,7 +39,7 @@ describe('ReactiveMongoObserverGroup', () => { test('restarting', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () =>
[handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) @@ -80,7 +80,7 @@ describe('ReactiveMongoObserverGroup', () => { test('restart debounce', async () => { const handle: LiveQueryHandle = { stop: jest.fn() } - const generator = jest.fn(async () => [handle]) + const generator = jest.fn(async () => [Promise.resolve(handle)]) const observerGroup = await ReactiveMongoObserverGroup(generator) diff --git a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts index 80d90f43e4..d07d69cf51 100644 --- a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts +++ b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts @@ -20,12 +20,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn(() => onChangedCleanup) + const onChanged = jest.fn(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // should now be an observer expect(RundownsMock.observers).toHaveLength(1) @@ -73,12 +73,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn<Promise<() => void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct await waitUntil(async () => { @@ -127,12 +127,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn<Promise<() => void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct @@ -181,12 +181,12 @@ describe('RundownsObserver', () => { const playlistId = protectString('playlist0') const onChangedCleanup = jest.fn() - const onChanged = jest.fn<() => void, [RundownId[]]>(() => onChangedCleanup) + const onChanged = jest.fn<Promise<() => void>, [RundownId[]]>(async () => onChangedCleanup) // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = new RundownsObserver(studioId, playlistId, onChanged) + const observer = await RundownsObserver.create(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct diff --git a/meteor/server/publications/lib.ts b/meteor/server/publications/lib/lib.ts similarity index 75% rename from meteor/server/publications/lib.ts rename to meteor/server/publications/lib/lib.ts index ee05a9b56e..bbb2d0dc32 100644 --- a/meteor/server/publications/lib.ts +++ b/meteor/server/publications/lib/lib.ts @@ -1,9 +1,9 @@ import { Meteor, Subscription } from 'meteor/meteor' import {
AllPubSubCollections, AllPubSubTypes } from '@sofie-automation/meteor-lib/dist/api/pubsub' -import { extractFunctionSignature } from '../lib' +import { extractFunctionSignature } from '../../lib' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' -import { ResolvedCredentials, resolveCredentials } from '../security/lib/credentials' -import { Settings } from '../Settings' +import { ResolvedCredentials, resolveCredentials } from '../../security/lib/credentials' +import { Settings } from '../../Settings' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { @@ -12,10 +12,9 @@ import { ShowStyleBaseId, UserId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { protectStringObject } from '../lib/tempLib' -import { waitForPromise } from '../lib/lib' +import { protectStringObject } from '../../lib/tempLib' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { PeripheralDevices, ShowStyleBases } from '../collections' +import { PeripheralDevices, ShowStyleBases } from '../../collections' import { MetricsGauge } from '@sofie-automation/corelib/dist/prometheus' export const MeteorPublicationSignatures: { [key: string]: string[] } = {} @@ -52,11 +51,13 @@ export function meteorPublishUnsafe( const publicationGauge = MeteorPublicationsGauge.labels({ publication: name }) - Meteor.publish(name, function (...args: any[]): any { + Meteor.publish(name, async function (...args: any[]): Promise<any> { publicationGauge.inc() this.onStop(() => publicationGauge.dec()) - return waitForPromise(callback.apply(protectStringObject(this), args)) || [] + const callbackRes = await callback.apply(protectStringObject(this), args) + // If no value is returned, return an empty array so that meteor marks the subscription as ready + return callbackRes || [] }) } @@ -164,3 +165,33 @@ export namespace AutoFillSelector { return { cred, selector } } } + +/** + * Await each observer, and return the handles + * If an observer throws, this will make sure to stop all the ones that were successfully started, to avoid leaking memory + */ +export async function waitForAllObserversReady( + observers: Array<Promise<Meteor.LiveQueryHandle> | Meteor.LiveQueryHandle> +): Promise<Meteor.LiveQueryHandle[]> { + // Wait for all the promises to complete + // Future: could this fail faster by aborting the rest once the first fails?
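// An illustrative usage sketch for this helper; `studioId`, `onAdded` and `existingHandle` are
// assumed names, not taken from this changeset:
//
//	const handles = await waitForAllObserversReady([
//		Rundowns.observeChanges({ studioId }, { added: onAdded }), // still starting: a Promise of a handle
//		existingHandle, // an already-started Meteor.LiveQueryHandle passes straight through
//	])
//	// ...and on teardown, stop everything that was started:
//	for (const handle of handles) handle.stop()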
+ const results = await Promise.allSettled(observers) + const allSuccessful = results.filter( + (r): r is PromiseFulfilledResult<Meteor.LiveQueryHandle> => r.status === 'fulfilled' + ) + + const firstFailure = results.find((r): r is PromiseRejectedResult => r.status === 'rejected') + if (firstFailure || allSuccessful.length !== observers.length) { + // There was a failure, or not enough successes, so we should stop all the observers + for (const handle of allSuccessful) { + handle.value.stop() + } + if (firstFailure) { + throw firstFailure.reason + } else { + throw new Meteor.Error(500, 'Not all observers were started') + } + } + + return allSuccessful.map((r) => r.value) +} diff --git a/meteor/server/publications/lib/observerChain.ts b/meteor/server/publications/lib/observerChain.ts index 4b51df340a..abbbd49467 100644 --- a/meteor/server/publications/lib/observerChain.ts +++ b/meteor/server/publications/lib/observerChain.ts @@ -3,7 +3,8 @@ import { Meteor } from 'meteor/meteor' import { MongoCursor } from '@sofie-automation/meteor-lib/dist/collections/lib' import { Simplify } from 'type-fest' import { assertNever } from '../../lib/tempLib' -import { waitForPromise } from '../../lib/lib' +import { logger } from '../../logging' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' /** * https://stackoverflow.com/a/66011942 */ @@ -41,18 +42,18 @@ export function observerChain(): Pick, 'next'> { throw new Error('nextChanged: Unfinished observer chain. This is a memory leak.') } - function changedLink(collectorObject: Record) { + async function changedLink(collectorObject: Record) { if (previousObserver) { previousObserver.stop() previousObserver = null } - const cursorResult = waitForPromise(chainedCursor(collectorObject)) + const cursorResult = await chainedCursor(collectorObject) if (cursorResult === null) { nextStop() return } - previousObserver = cursorResult.observe({ + previousObserver = await cursorResult.observeAsync({ added: (doc) => { if (!chainedKey) throw new Error('Chained key needs to be defined') const newCollectorObject: Record = { @@ -96,10 +97,10 @@ } return { - changed: (obj: Record) => { + changed: async (obj: Record) => { switch (mode) { case 'next': - changedLink(obj) + await changedLink(obj) break case 'end': changedEnd(obj) @@ -160,7 +161,9 @@ export function observerChain(): Pick, 'next'> { const nextLink = link.next(key, cursorChain) setImmediate( Meteor.bindEnvironment(() => { - changed({}) + changed({}).catch((e) => { + logger.error(`Error in observerChain: ${stringifyError(e)}`) + }) }) ) return nextLink as any diff --git a/meteor/server/publications/lib/observerGroup.ts b/meteor/server/publications/lib/observerGroup.ts index cce4654e43..20ed35b2e1 100644 --- a/meteor/server/publications/lib/observerGroup.ts +++ b/meteor/server/publications/lib/observerGroup.ts @@ -1,6 +1,7 @@ import { ManualPromise, createManualPromise, getRandomString } from '@sofie-automation/corelib/dist/lib' import { Meteor } from 'meteor/meteor' import { LiveQueryHandle, lazyIgnore } from '../../lib/lib' +import { waitForAllObserversReady } from './lib' export interface ReactiveMongoObserverGroupHandle extends LiveQueryHandle { /** @@ -18,7 +19,7 @@ const REACTIVITY_DEBOUNCE = 20 * @returns Handle to stop and restart the observer group */ export async function ReactiveMongoObserverGroup( - generator: () => Promise<Array<LiveQueryHandle>> + generator: () => Promise<Array<Promise<LiveQueryHandle>>> ): Promise<ReactiveMongoObserverGroupHandle> { let running = true let pendingStop: ManualPromise<void> | undefined @@ -69,8 +70,7 @@
export async function ReactiveMongoObserverGroup( // Start the child observers if (!handles) { - // handles = await generator() - handles = await generator() + handles = await waitForAllObserversReady(await generator()) // check for another pending operation deferCheck() diff --git a/meteor/server/publications/lib/quickLoop.ts b/meteor/server/publications/lib/quickLoop.ts index 73a3b5f1dd..272a554ac9 100644 --- a/meteor/server/publications/lib/quickLoop.ts +++ b/meteor/server/publications/lib/quickLoop.ts @@ -9,7 +9,7 @@ import { MarkerPosition, compareMarkerPositions } from '@sofie-automation/coreli import { ProtectedString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { DEFAULT_FALLBACK_PART_DURATION } from '@sofie-automation/shared-lib/dist/core/constants' import { getCurrentTime } from '../../lib/lib' -import { generateTranslation } from '@sofie-automation/meteor-lib/dist/lib' +import { generateTranslation } from '@sofie-automation/corelib/dist/lib' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' diff --git a/meteor/server/publications/lib/rundownsObserver.ts b/meteor/server/publications/lib/rundownsObserver.ts index 82779b7c47..7e87546a84 100644 --- a/meteor/server/publications/lib/rundownsObserver.ts +++ b/meteor/server/publications/lib/rundownsObserver.ts @@ -1,25 +1,53 @@ import { Meteor } from 'meteor/meteor' import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import _ from 'underscore' import { Rundowns } from '../../collections' +import { PromiseDebounce } from './PromiseDebounce' const REACTIVITY_DEBOUNCE = 20 -type ChangedHandler = (rundownIds: RundownId[]) => () => void +type ChangedHandler = (rundownIds: RundownId[]) => Promise<() => void> /** * A mongo observer/query for the RundownIds in a playlist. 
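 *
 * An illustrative usage sketch (`watchRundownContent` is an assumed stand-in for the real
 * content observers that consumers create):
 * @example
 * const observer = await RundownsObserver.create(studioId, playlistId, async (rundownIds) => {
 * 	const contentSub = await watchRundownContent(rundownIds) // hypothetical inner observer
 * 	return () => contentSub.stop() // cleanup, run before the handler fires for the next change
 * })
 * // ...later: observer.stop()
 *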
* Note: Updates are debounced to avoid rapid updates firing */ export class RundownsObserver implements Meteor.LiveQueryHandle { - #rundownsLiveQuery: Meteor.LiveQueryHandle + #rundownsLiveQuery!: Meteor.LiveQueryHandle #rundownIds: Set = new Set() #changed: ChangedHandler | undefined #cleanup: (() => void) | undefined - constructor(studioId: StudioId, playlistId: RundownPlaylistId, onChanged: ChangedHandler) { + #disposed = false + + readonly #triggerUpdateRundownContent = new PromiseDebounce(async () => { + if (this.#disposed) return + if (!this.#changed) return + this.#cleanup?.() + + const changed = this.#changed + this.#cleanup = await changed(this.rundownIds) + + if (this.#disposed) this.#cleanup?.() + }, REACTIVITY_DEBOUNCE) + + private constructor(onChanged: ChangedHandler) { this.#changed = onChanged - this.#rundownsLiveQuery = Rundowns.observe( + } + + static async create( + studioId: StudioId, + playlistId: RundownPlaylistId, + onChanged: ChangedHandler + ): Promise { + const observer = new RundownsObserver(onChanged) + + await observer.init(studioId, playlistId) + + return observer + } + + private async init(studioId: StudioId, playlistId: RundownPlaylistId) { + this.#rundownsLiveQuery = await Rundowns.observe( { playlistId, studioId, @@ -27,15 +55,15 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { { added: (doc) => { this.#rundownIds.add(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, changed: (doc) => { this.#rundownIds.add(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, removed: (doc) => { this.#rundownIds.delete(doc._id) - this.updateRundownContent() + this.#triggerUpdateRundownContent.trigger() }, }, { @@ -44,28 +72,18 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { }, } ) - this.updateRundownContent() + + this.#triggerUpdateRundownContent.trigger() } public get rundownIds(): RundownId[] { return Array.from(this.#rundownIds) } - private innerUpdateRundownContent = () => { - if (!this.#changed) return - this.#cleanup?.() - - const changed = this.#changed - this.#cleanup = changed(this.rundownIds) - } - - public updateRundownContent = _.debounce( - Meteor.bindEnvironment(this.innerUpdateRundownContent), - REACTIVITY_DEBOUNCE - ) - public stop = (): void => { - this.updateRundownContent.cancel() + this.#disposed = true + + this.#triggerUpdateRundownContent.cancelWaiting() this.#rundownsLiveQuery.stop() this.#changed = undefined this.#cleanup?.() diff --git a/meteor/server/publications/organization.ts b/meteor/server/publications/organization.ts index 58d1305891..f596d8b3c6 100644 --- a/meteor/server/publications/organization.ts +++ b/meteor/server/publications/organization.ts @@ -1,4 +1,4 @@ -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { Blueprint } from '@sofie-automation/corelib/dist/dataModel/Blueprint' import { Evaluation } from '@sofie-automation/meteor-lib/dist/collections/Evaluations' diff --git a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts index dcb2488a82..eb15f97ce7 100644 --- a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts +++ b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts @@ -7,10 +7,10 @@ import { 
pieceInstanceFieldsSpecifier, } from './contentCache' import { ExpectedPackages, PieceInstances, RundownPlaylists } from '../../../collections' -import { waitForPromise } from '../../../lib/lib' import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' +import { waitForAllObserversReady } from '../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -19,35 +19,44 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { #cache: ExpectedPackagesContentCache #partInstanceIds: PartInstanceId[] = [] - #partInstanceIdObserver: ReactiveMongoObserverGroupHandle + #partInstanceIdObserver!: ReactiveMongoObserverGroupHandle - constructor(studioId: StudioId, cache: ExpectedPackagesContentCache) { - logger.silly(`Creating ExpectedPackagesContentObserver for "${studioId}"`) + #disposed = false + + private constructor(cache: ExpectedPackagesContentCache) { this.#cache = cache + } + + static async create( + studioId: StudioId, + cache: ExpectedPackagesContentCache + ): Promise { + logger.silly(`Creating ExpectedPackagesContentObserver for "${studioId}"`) + + const observer = new ExpectedPackagesContentObserver(cache) // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#partInstanceIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - cache.PieceInstances.remove({}) - - return [ - PieceInstances.observeChanges( - { - // We can use the `this.#partInstanceIds` here, as this is restarted every time that property changes - partInstanceId: { $in: this.#partInstanceIds }, - }, - cache.PieceInstances.link(), - { - projection: pieceInstanceFieldsSpecifier, - } - ), - ] - }) - ) + observer.#partInstanceIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.PieceInstances.remove({}) + + return [ + PieceInstances.observeChanges( + { + // We can use the `this.#partInstanceIds` here, as this is restarted every time that property changes + partInstanceId: { $in: observer.#partInstanceIds }, + }, + cache.PieceInstances.link(), + { + projection: pieceInstanceFieldsSpecifier, + } + ), + ] + }) // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + // This takes ownership of the #partInstanceIdObserver, and will stop it if this throws + observer.#observers = await waitForAllObserversReady([ ExpectedPackages.observeChanges( { studioId: studioId, @@ -60,19 +69,23 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { studioId: studioId, }, cache.RundownPlaylists.link(() => { - this.updatePartInstanceIds() + observer.updatePartInstanceIds() }), { fields: rundownPlaylistFieldSpecifier, } ), - this.#partInstanceIdObserver, - ] + observer.#partInstanceIdObserver, + ]) + + return observer } private updatePartInstanceIds = _.debounce( Meteor.bindEnvironment(() => { + if (this.#disposed) return + const newPartInstanceIdsSet = new Set() this.#cache.RundownPlaylists.find({}).forEach((playlist) => { @@ -102,6 +115,8 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { } public stop = (): void => { + this.#disposed = true + this.#observers.forEach((observer) => observer.stop()) } } diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts 
b/meteor/server/publications/packageManager/expectedPackages/generate.ts index 4850ceb301..cdc21bf353 100644 --- a/meteor/server/publications/packageManager/expectedPackages/generate.ts +++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts @@ -31,6 +31,7 @@ export async function updateCollectionForExpectedPackageIds( contentCache: ReadonlyDeep, studio: Pick, layerNameToDeviceIds: Map, + packageContainers: Record, collection: CustomPublishCollection, filterPlayoutDeviceIds: ReadonlyDeep | undefined, regenerateIds: Set @@ -66,7 +67,8 @@ export async function updateCollectionForExpectedPackageIds( }, deviceId, null, - Priorities.OTHER // low priority + Priorities.OTHER, // low priority + packageContainers ) updatedDocIds.add(routedPackage._id) @@ -99,6 +101,7 @@ export async function updateCollectionForPieceInstanceIds( contentCache: ReadonlyDeep, studio: Pick, layerNameToDeviceIds: Map, + packageContainers: Record, collection: CustomPublishCollection, filterPlayoutDeviceIds: ReadonlyDeep | undefined, regenerateIds: Set @@ -140,7 +143,8 @@ export async function updateCollectionForPieceInstanceIds( }, deviceId, pieceInstanceId, - Priorities.OTHER // low priority + Priorities.OTHER, // low priority + packageContainers ) updatedDocIds.add(routedPackage._id) @@ -172,17 +176,21 @@ enum Priorities { } function generateExpectedPackageForDevice( - studio: Pick, + studio: Pick< + StudioLight, + '_id' | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' + >, expectedPackage: PackageManagerExpectedPackageBase, deviceId: PeripheralDeviceId, pieceInstanceId: PieceInstanceId | null, - priority: Priorities + priority: Priorities, + packageContainers: Record ): PackageManagerExpectedPackage { // Lookup Package sources: const combinedSources: PackageContainerOnPackage[] = [] for (const packageSource of expectedPackage.sources) { - const lookedUpSource = studio.packageContainers[packageSource.containerId] + const lookedUpSource = packageContainers[packageSource.containerId] if (lookedUpSource) { combinedSources.push(calculateCombinedSource(packageSource, lookedUpSource)) } else { @@ -199,7 +207,7 @@ function generateExpectedPackageForDevice( } // Lookup Package targets: - const combinedTargets = calculateCombinedTargets(studio, expectedPackage, deviceId) + const combinedTargets = calculateCombinedTargets(expectedPackage, deviceId, packageContainers) if (!combinedSources.length && expectedPackage.sources.length !== 0) { logger.warn(`Pub.expectedPackagesForDevice: No sources found for "${expectedPackage._id}"`) @@ -253,14 +261,14 @@ function calculateCombinedSource( return combinedSource } function calculateCombinedTargets( - studio: Pick, expectedPackage: PackageManagerExpectedPackageBase, - deviceId: PeripheralDeviceId + deviceId: PeripheralDeviceId, + packageContainers: Record ): PackageContainerOnPackage[] { const mappingDeviceId = unprotectString(deviceId) let packageContainerId: string | undefined - for (const [containerId, packageContainer] of Object.entries(studio.packageContainers)) { + for (const [containerId, packageContainer] of Object.entries(packageContainers)) { if (packageContainer.deviceIds.includes(mappingDeviceId)) { // TODO: how to handle if a device has multiple containers? 
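// For illustration: `packageContainers` is the flat record derived via
// applyAndValidateOverrides(studio.packageContainersWithOverrides).obj, with an assumed shape like
//   { source0: { container: { label: 'Source 0', accessors: { ... } }, deviceIds: ['device0'] } }
// and this loop simply takes the first container that lists the device.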
packageContainerId = containerId @@ -270,7 +278,7 @@ function calculateCombinedTargets( const combinedTargets: PackageContainerOnPackage[] = [] if (packageContainerId) { - const lookedUpTarget = studio.packageContainers[packageContainerId] + const lookedUpTarget = packageContainers[packageContainerId] if (lookedUpTarget) { // Todo: should the be any combination of properties here? combinedTargets.push({ diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index bc5bfd262e..1952fb7057 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -1,11 +1,12 @@ import { Meteor } from 'meteor/meteor' import { PeripheralDeviceReadAccess } from '../../../security/peripheralDevice' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import { DBStudio, StudioPackageContainer } from '@sofie-automation/corelib/dist/dataModel/Studio' import { TriggerUpdate, meteorCustomPublish, setUpCollectionOptimizedObserver, CustomPublishCollection, + SetupObserversResult, } from '../../../lib/customPublication' import { literal, omit, protectString } from '../../../lib/tempLib' import { logger } from '../../../logging' @@ -48,22 +49,23 @@ interface ExpectedPackagesPublicationUpdateProps { interface ExpectedPackagesPublicationState { studio: Pick | undefined layerNameToDeviceIds: Map + packageContainers: Record contentCache: ReadonlyDeep } export type StudioFields = | '_id' - | 'routeSets' + | 'routeSetsWithOverrides' | 'mappingsWithOverrides' - | 'packageContainers' + | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' const studioFieldSpecifier = literal>>({ _id: 1, - routeSets: 1, + routeSetsWithOverrides: 1, mappingsWithOverrides: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, previewContainerIds: 1, thumbnailContainerIds: 1, }) @@ -71,7 +73,7 @@ const studioFieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const contentCache = createReactiveContentCache() // Push update @@ -79,7 +81,7 @@ async function setupExpectedPackagesPublicationObservers( // Set up observers: return [ - new ExpectedPackagesContentObserver(args.studioId, contentCache), + ExpectedPackagesContentObserver.create(args.studioId, contentCache), contentCache.ExpectedPackages.find({}).observeChanges({ added: (id) => triggerUpdate({ invalidateExpectedPackageIds: [protectString(id)] }), @@ -102,7 +104,7 @@ async function setupExpectedPackagesPublicationObservers( { fields: { // mappingsHash gets updated when either of these omitted fields changes - ...omit(studioFieldSpecifier, 'mappingsWithOverrides', 'routeSets'), + ...omit(studioFieldSpecifier, 'mappingsWithOverrides', 'routeSetsWithOverrides'), mappingsHash: 1, }, } @@ -122,6 +124,7 @@ async function manipulateExpectedPackagesPublicationData( const invalidateAllItems = !updateProps || updateProps.newCache || updateProps.invalidateStudio if (!state.layerNameToDeviceIds) state.layerNameToDeviceIds = new Map() + if (!state.packageContainers) state.packageContainers = {} if (invalidateAllItems) { // Everything is invalid, reset everything @@ -141,9 +144,14 @@ async function manipulateExpectedPackagesPublicationData( if (!state.studio) { logger.warn(`Pub.expectedPackagesForDevice: studio "${args.studioId}" not found!`) state.layerNameToDeviceIds = new Map() + 
state.packageContainers = {} } else { const studioMappings = applyAndValidateOverrides(state.studio.mappingsWithOverrides).obj - state.layerNameToDeviceIds = buildMappingsToDeviceIdMap(state.studio.routeSets, studioMappings) + state.layerNameToDeviceIds = buildMappingsToDeviceIdMap( + applyAndValidateOverrides(state.studio.routeSetsWithOverrides).obj, + studioMappings + ) + state.packageContainers = applyAndValidateOverrides(state.studio.packageContainersWithOverrides).obj } } @@ -170,6 +178,7 @@ async function manipulateExpectedPackagesPublicationData( state.contentCache, state.studio, state.layerNameToDeviceIds, + state.packageContainers, collection, args.filterPlayoutDeviceIds, regenerateExpectedPackageIds @@ -178,6 +187,7 @@ async function manipulateExpectedPackagesPublicationData( state.contentCache, state.studio, state.layerNameToDeviceIds, + state.packageContainers, collection, args.filterPlayoutDeviceIds, regeneratePieceInstanceIds diff --git a/meteor/server/publications/packageManager/packageContainers.ts b/meteor/server/publications/packageManager/packageContainers.ts index a479f8d66a..0accf66181 100644 --- a/meteor/server/publications/packageManager/packageContainers.ts +++ b/meteor/server/publications/packageManager/packageContainers.ts @@ -8,18 +8,24 @@ import { check } from 'meteor/check' import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { PeripheralDevices, Studios } from '../../collections' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../../lib/customPublication' +import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../../lib/customPublication' import { logger } from '../../logging' import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -type StudioFields = '_id' | 'packageContainers' +type StudioFields = '_id' | 'packageContainersWithOverrides' const studioFieldSpecifier = literal>>({ _id: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, }) interface PackageManagerPackageContainersArgs { @@ -34,7 +40,7 @@ type PackageManagerPackageContainersState = Record async function setupExpectedPackagesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( @@ -68,8 +74,9 @@ async function manipulateExpectedPackagesPublicationData( const packageContainers: { [containerId: string]: PackageContainer } = {} if (studio) { + const studioPackageContainers = applyAndValidateOverrides(studio.packageContainersWithOverrides).obj for (const [containerId, studioPackageContainer] of Object.entries( - studio.packageContainers + studioPackageContainers )) { packageContainers[containerId] = studioPackageContainer.container } diff --git a/meteor/server/publications/packageManager/playoutContext.ts b/meteor/server/publications/packageManager/playoutContext.ts index f3462daa79..08c881fafe 100644 --- a/meteor/server/publications/packageManager/playoutContext.ts +++ b/meteor/server/publications/packageManager/playoutContext.ts @@ -8,7 +8,12 @@ import { check } from 'meteor/check' import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { PeripheralDevices, 
RundownPlaylists, Rundowns } from '../../collections' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../../lib/customPublication' +import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../../lib/customPublication' import { logger } from '../../logging' import { PeripheralDeviceReadAccess } from '../../security/peripheralDevice' import { @@ -36,7 +41,7 @@ type PackageManagerPlayoutContextState = Record async function setupExpectedPackagesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ RundownPlaylists.observeChanges( diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index e85c9b8ec0..553dea9808 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -1,12 +1,8 @@ -import { - PartInstanceId, - RundownId, - RundownPlaylistActivationId, - SegmentId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartInstanceId, RundownPlaylistActivationId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { check } from 'meteor/check' import { CustomPublishCollection, + SetupObserversResult, TriggerUpdate, meteorCustomPublish, setUpCollectionOptimizedObserver, @@ -17,7 +13,6 @@ import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' import { ContentCache, PartInstanceOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' -import { LiveQueryHandle } from '../../lib/lib' import { RundownPlaylists } from '../../collections' import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' @@ -26,7 +21,6 @@ import { RundownsObserver } from '../lib/rundownsObserver' import { RundownContentObserver } from './rundownContentObserver' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { Match } from '../../lib/check' -import { RundownReadAccess } from '../../security/rundown' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { extractRanks, @@ -37,7 +31,6 @@ import { interface UIPartInstancesArgs { readonly playlistActivationId: RundownPlaylistActivationId - readonly rundownIds: RundownId[] } export interface UIPartInstancesState { @@ -65,7 +58,7 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUIPartInstancesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync( { activationId: args.playlistActivationId }, { @@ -74,7 +67,7 @@ async function setupUIPartInstancesPublicationObservers( )) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist with activationId="${args.playlistActivationId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) const cache = createReactiveContentCache() @@ -82,7 +75,12 @@ async function setupUIPartInstancesPublicationObservers( // Push update triggerUpdate({ newCache: 
cache }) - const obs1 = new RundownContentObserver(playlist.studioId, args.playlistActivationId, rundownIds, cache) + const obs1 = await RundownContentObserver.create( + playlist.studioId, + args.playlistActivationId, + rundownIds, + cache + ) const innerQueries = [ cache.Segments.find({}).observeChanges({ @@ -205,32 +203,26 @@ export async function manipulateUIPartInstancesPublicationData( meteorCustomPublish( MeteorPubSub.uiPartInstances, CustomCollectionName.UIPartInstances, - async function (pub, rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null) { - check(rundownIds, [String]) + async function (pub, playlistActivationId: RundownPlaylistActivationId | null) { check(playlistActivationId, Match.Maybe(String)) const credentials = await resolveCredentials({ userId: this.userId, token: undefined }) - if ( - playlistActivationId && - (!credentials || - NoSecurityReadAccess.any() || - (await RundownReadAccess.rundownContent({ $in: rundownIds }, credentials))) - ) { + if (playlistActivationId && (!credentials || NoSecurityReadAccess.any())) { await setUpCollectionOptimizedObserver< Omit, UIPartInstancesArgs, UIPartInstancesState, UIPartInstancesUpdateProps >( - `pub_${MeteorPubSub.uiPartInstances}_${rundownIds.join(',')}_${playlistActivationId}`, - { rundownIds, playlistActivationId }, + `pub_${MeteorPubSub.uiPartInstances}_${playlistActivationId}`, + { playlistActivationId }, setupUIPartInstancesPublicationObservers, manipulateUIPartInstancesPublicationData, pub ) } else { - logger.warn(`Pub.uiPartInstances: Not allowed: [${rundownIds.join(',')}] "${playlistActivationId}"`) + logger.warn(`Pub.uiPartInstances: Not allowed:"${playlistActivationId}"`) } } ) diff --git a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts index aa405e9071..09c782f709 100644 --- a/meteor/server/publications/partInstancesUI/rundownContentObserver.ts +++ b/meteor/server/publications/partInstancesUI/rundownContentObserver.ts @@ -9,21 +9,27 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { PartInstances, RundownPlaylists, Segments, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor( + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { + this.#cache = cache + + this.#observers = observers + } + + static async create( studioId: StudioId, playlistActivationId: RundownPlaylistActivationId, rundownIds: RundownId[], cache: ContentCache - ) { + ): Promise { logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) - this.#cache = cache - this.#observers = [ + const observers = await waitForAllObserversReady([ Studios.observeChanges( { _id: studioId, @@ -66,7 +72,9 @@ export class RundownContentObserver { projection: partInstanceFieldSpecifier, } ), - ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index 1b36fe86a3..31af1ed031 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -2,6 +2,7 @@ import { PartId, RundownPlaylistId, SegmentId } from 
'@sofie-automation/corelib/ import { check } from 'meteor/check' import { CustomPublishCollection, + SetupObserversResult, TriggerUpdate, meteorCustomPublish, setUpCollectionOptimizedObserver, @@ -14,7 +15,6 @@ import { NoSecurityReadAccess } from '../../security/noSecurity' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { ContentCache, PartOmitedFields, createReactiveContentCache } from './reactiveContentCache' import { ReadonlyDeep } from 'type-fest' -import { LiveQueryHandle } from '../../lib/lib' import { RundownPlaylists } from '../../collections' import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' @@ -53,13 +53,13 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUIPartsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync(args.playlistId, { projection: rundownPlaylistFieldSpecifier, })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) const cache = createReactiveContentCache() @@ -67,7 +67,7 @@ async function setupUIPartsPublicationObservers( // Push update triggerUpdate({ newCache: cache }) - const obs1 = new RundownContentObserver(playlist.studioId, playlist._id, rundownIds, cache) + const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) const innerQueries = [ cache.Segments.find({}).observeChanges({ @@ -190,7 +190,7 @@ export async function manipulateUIPartsPublicationData( meteorCustomPublish( MeteorPubSub.uiParts, CustomCollectionName.UIParts, - async function (pub, playlistId: RundownPlaylistId) { + async function (pub, playlistId: RundownPlaylistId | null) { check(playlistId, String) const credentials = await resolveCredentials({ userId: this.userId, token: undefined }) @@ -198,7 +198,7 @@ meteorCustomPublish( if ( !credentials || NoSecurityReadAccess.any() || - (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, credentials)) + (playlistId && (await RundownPlaylistReadAccess.rundownPlaylistContent(playlistId, credentials))) ) { await setUpCollectionOptimizedObserver< Omit, diff --git a/meteor/server/publications/partsUI/rundownContentObserver.ts b/meteor/server/publications/partsUI/rundownContentObserver.ts index e9de9dd780..ee7e92c7d6 100644 --- a/meteor/server/publications/partsUI/rundownContentObserver.ts +++ b/meteor/server/publications/partsUI/rundownContentObserver.ts @@ -9,16 +9,26 @@ import { studioFieldSpecifier, } from './reactiveContentCache' import { Parts, RundownPlaylists, Segments, Studios } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] - constructor(studioId: StudioId, playlistId: RundownPlaylistId, rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { 
this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create( + studioId: StudioId, + playlistId: RundownPlaylistId, + rundownIds: RundownId[], + cache: ContentCache + ): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observers = await waitForAllObserversReady([ Studios.observeChanges( { _id: studioId, @@ -59,7 +69,9 @@ export class RundownContentObserver { projection: partFieldSpecifier, } ), - ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/peripheralDevice.ts b/meteor/server/publications/peripheralDevice.ts index 61588d0e90..1ead8e0e6d 100644 --- a/meteor/server/publications/peripheralDevice.ts +++ b/meteor/server/publications/peripheralDevice.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' diff --git a/meteor/server/publications/peripheralDeviceForDevice.ts b/meteor/server/publications/peripheralDeviceForDevice.ts index bcead00051..f98b37e6ff 100644 --- a/meteor/server/publications/peripheralDeviceForDevice.ts +++ b/meteor/server/publications/peripheralDeviceForDevice.ts @@ -3,7 +3,12 @@ import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' import { PeripheralDevice, PeripheralDeviceCategory } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PeripheralDevices, Studios } from '../collections' -import { TriggerUpdate, meteorCustomPublish, setUpOptimizedObserverArray } from '../lib/customPublication' +import { + SetupObserversResult, + TriggerUpdate, + meteorCustomPublish, + setUpOptimizedObserverArray, +} from '../lib/customPublication' import { PeripheralDeviceForDevice } from '@sofie-automation/shared-lib/dist/core/model/peripheralDevice' import { ReadonlyDeep } from 'type-fest' import { ReactiveMongoObserverGroup } from './lib/observerGroup' @@ -120,7 +125,7 @@ export function convertPeripheralDeviceForGateway( async function setupPeripheralDevicePublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const studioObserver = await ReactiveMongoObserverGroup(async () => { const peripheralDeviceCompact = (await PeripheralDevices.findOneAsync(args.deviceId, { fields: { studioId: 1 }, diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index d691c2c1d2..3f7c1027da 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -5,6 +5,7 @@ import { getMediaObjectMediaId, PieceContentStreamInfo, checkPieceContentStatusAndDependencies, + PieceContentStatusStudio, } from '../checkPieceContentStatus' import { PackageInfo, @@ -31,48 +32,128 @@ import { MediaStream, MediaStreamType, } from '@sofie-automation/shared-lib/dist/core/model/MediaObjects' -import { 
UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { defaultStudio } from '../../../../__mocks__/defaultCollectionObjects' import { testInFiber } from '../../../../__mocks__/helpers/jest' import { MediaObjects } from '../../../collections' import { PieceDependencies } from '../common' -import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects) describe('lib/mediaObjects', () => { - test('buildFormatString', () => { - const format1 = buildFormatString( - PackageInfo.FieldOrder.TFF, - literal({ - width: 1920, - height: 1080, - codec_time_base: '1/25', - }) - ) - expect(format1).toEqual('1920x1080i2500tff') + describe('buildFormatString', () => { + it('deepscan tff, stream unknown', () => { + const format1 = buildFormatString( + PackageInfo.FieldOrder.TFF, + literal({ + width: 1920, + height: 1080, + codec_time_base: '1/25', + }) + ) + expect(format1).toEqual('1920x1080i2500tff') + }) - const format2 = buildFormatString( - PackageInfo.FieldOrder.Progressive, + it('deepscan progressive, stream unknown', () => { + const format2 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 1280, + height: 720, + codec_time_base: '1001/60000', + }) + ) + expect(format2).toEqual('1280x720p5994') + }) - literal({ - width: 1280, - height: 720, - codec_time_base: '1001/60000', - }) - ) - expect(format2).toEqual('1280x720p5994') + it('deepscan bff, stream unknown', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + }) + ) + expect(format3).toEqual('720x576i2500bff') + }) - const format3 = buildFormatString( - PackageInfo.FieldOrder.BFF, - literal({ - width: 720, - height: 576, - codec_time_base: '1/25', - }) - ) - expect(format3).toEqual('720x576i2500bff') + it('deepscan tff, stream bff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.TFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.BFF, + }) + ) + expect(format3).toEqual('720x576i2500bff') + }) + + it('deepscan bff, stream tff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.TFF, + }) + ) + expect(format3).toEqual('720x576i2500tff') + }) + + it('deepscan progressive, stream tff', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.TFF, + }) + ) + expect(format3).toEqual('720x576i2500tff') + }) + + it('deepscan bff, stream progressive', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.BFF, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.Progressive, + }) + ) + expect(format3).toEqual('720x576i2500bff') + }) + + it('deepscan unknown, stream progressive', () => { + const format3 = buildFormatString( + undefined, + literal({ + width: 720, + height: 576, + codec_time_base: '1/25', + field_order: PackageInfo.FieldOrder.Progressive, + }) + ) + expect(format3).toEqual('720x576p2500') + }) + + it('r_frame_rate', () => { + const format3 = buildFormatString( + PackageInfo.FieldOrder.Progressive, + literal({ + width: 
720, + height: 576, + r_frame_rate: '25/1', + }) + ) + expect(format3).toEqual('720x576p2500') + }) }) test('acceptFormat', () => { @@ -86,9 +167,7 @@ describe('lib/mediaObjects', () => { test('getAcceptedFormats', () => { const acceptedFormats = getAcceptedFormats({ supportedMediaFormats: '1920x1080i5000, 1280x720, i5000, i5000tff', - mediaPreviewsUrl: '', frameRate: 25, - minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, }) expect(acceptedFormats).toEqual([ ['1920', '1080', 'i', '5000', undefined], @@ -171,23 +250,20 @@ describe('lib/mediaObjects', () => { supportedAudioStreams: '4', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: false, + allowPieceDirectPlay: false, + enableBuckets: false, } const mockDefaultStudio = defaultStudio(protectString('studio0')) - const mockStudio: Complete< - Pick< - DBStudio, - '_id' | 'settings' | 'packageContainers' | 'previewContainerIds' | 'thumbnailContainerIds' | 'routeSets' - > & - Pick - > = { + const mockStudio: Complete = { _id: mockDefaultStudio._id, settings: mockStudioSettings, - packageContainers: mockDefaultStudio.packageContainers, previewContainerIds: ['previews0'], thumbnailContainerIds: ['thumbnails0'], - routeSets: mockDefaultStudio.routeSets, + routeSets: applyAndValidateOverrides(mockDefaultStudio.routeSetsWithOverrides).obj, mappings: applyAndValidateOverrides(mockDefaultStudio.mappingsWithOverrides).obj, + packageContainers: applyAndValidateOverrides(mockDefaultStudio.packageContainersWithOverrides).obj, } mockMediaObjectsCollection.insert( diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts b/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts index 28ab518c81..e80ab6076b 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/bucketContentObserver.ts @@ -11,11 +11,11 @@ import { } from './bucketContentCache' import { BucketAdLibActions, BucketAdLibs, ShowStyleBases } from '../../../collections' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { waitForPromise } from '../../../lib/lib' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' +import { waitForAllObserversReady } from '../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -31,47 +31,53 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { #cache: BucketContentCache #showStyleBaseIds: ShowStyleBaseId[] = [] - #showStyleBaseIdObserver: ReactiveMongoObserverGroupHandle + #showStyleBaseIdObserver!: ReactiveMongoObserverGroupHandle - constructor(bucketId: BucketId, cache: BucketContentCache) { - logger.silly(`Creating BucketContentObserver for "${bucketId}"`) + #disposed = false + + private constructor(cache: BucketContentCache) { this.#cache = cache + } + + static async create(bucketId: BucketId, cache: BucketContentCache): Promise { + logger.silly(`Creating BucketContentObserver for "${bucketId}"`) + + const observer = new BucketContentObserver(cache) // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#showStyleBaseIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - 
cache.ShowStyleSourceLayers.remove({}) - - return [ - ShowStyleBases.observe( - { - // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes - _id: { $in: this.#showStyleBaseIds }, + observer.#showStyleBaseIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.ShowStyleSourceLayers.remove({}) + + return [ + ShowStyleBases.observe( + { + // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes + _id: { $in: observer.#showStyleBaseIds }, + }, + { + added: (doc) => { + const newDoc = convertShowStyleBase(doc) + cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) + }, + changed: (doc) => { + const newDoc = convertShowStyleBase(doc) + cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - added: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - changed: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - removed: (doc) => { - cache.ShowStyleSourceLayers.remove(doc._id) - }, + removed: (doc) => { + cache.ShowStyleSourceLayers.remove(doc._id) }, - { - projection: showStyleBaseFieldSpecifier, - } - ), - ] - }) - ) + }, + { + projection: showStyleBaseFieldSpecifier, + } + ), + ] + }) // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + // This takes ownership of the #showStyleBaseIdObserver, and will stop it if this throws + observer.#observers = await waitForAllObserversReady([ BucketAdLibs.observeChanges( { bucketId: bucketId, @@ -79,7 +85,7 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { cache.BucketAdLibs.link(() => { // Check if the ShowStyleBaseIds needs updating // TODO - is this over-eager? - this.updateShowStyleBaseIds() + observer.updateShowStyleBaseIds() }), { projection: bucketAdlibFieldSpecifier, @@ -92,19 +98,23 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { cache.BucketAdLibActions.link(() => { // Check if the ShowStyleBaseIds needs updating // TODO - is this over-eager? 
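// Illustrative flow, for context (the exact restart method name is assumed from the
// ReactiveMongoObserverGroup handle's stop/restart contract):
//   BucketAdLibs / BucketAdLibActions change
//     -> updateShowStyleBaseIds() recomputes the #showStyleBaseIds set (debounced)
//     -> if the set changed, #showStyleBaseIdObserver is restarted, re-running the generator
//        above with the fresh `_id: { $in: ... }` selector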
- this.updateShowStyleBaseIds() + observer.updateShowStyleBaseIds() }), { projection: bucketActionFieldSpecifier, } ), - this.#showStyleBaseIdObserver, - ] + observer.#showStyleBaseIdObserver, + ]) + + return observer } private updateShowStyleBaseIds = _.debounce( Meteor.bindEnvironment(() => { + if (this.#disposed) return + const newShowStyleBaseIdsSet = new Set() this.#cache.BucketAdLibs.find({}).forEach((adlib) => newShowStyleBaseIdsSet.add(adlib.showStyleBaseId)) this.#cache.BucketAdLibActions.find({}).forEach((action) => @@ -127,6 +137,8 @@ export class BucketContentObserver implements Meteor.LiveQueryHandle { } public stop = (): void => { + this.#disposed = true + this.#observers.forEach((observer) => observer.stop()) } } diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts index d1ecc15ed8..8661244883 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/publication.ts @@ -18,12 +18,12 @@ import { meteorCustomPublish, setUpCollectionOptimizedObserver, TriggerUpdate, + SetupObserversResult, } from '../../../lib/customPublication' import { logger } from '../../../logging' import { resolveCredentials } from '../../../security/lib/credentials' import { NoSecurityReadAccess } from '../../../security/noSecurity' import { BucketContentCache, createReactiveContentCache } from './bucketContentCache' -import { LiveQueryHandle } from '../../../lib/lib' import { StudioReadAccess } from '../../../security/studio' import { Bucket } from '@sofie-automation/meteor-lib/dist/collections/Buckets' import { @@ -72,7 +72,7 @@ const bucketFieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackMediaObjectChange = (mediaId: string): Partial => ({ invalidateMediaObjectMediaId: [mediaId], }) @@ -103,7 +103,7 @@ async function setupUIBucketContentStatusesPublicationObservers( // Set up observers: return [ - new BucketContentObserver(args.bucketId, contentCache), + BucketContentObserver.create(args.bucketId, contentCache), contentCache.BucketAdLibs.find({}).observeChanges({ added: (id) => triggerUpdate(trackAdlibChange(protectString(id))), diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index e3bc20e68a..dbeb8a4e9e 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -24,6 +24,7 @@ import { MappingsExt, ResultingMappingRoutes, StudioPackageContainer, + StudioRouteSet, } from '@sofie-automation/corelib/dist/dataModel/Studio' import { literal, Complete, assertNever } from '@sofie-automation/corelib/dist/lib' import { ReadonlyDeep } from 'type-fest' @@ -59,9 +60,21 @@ interface ScanInfoForPackage { * formatted string */ export function buildFormatString( - field_order: PackageInfo.FieldOrder | undefined, + scan_field_order: PackageInfo.FieldOrder | undefined, stream: PieceContentStreamInfo ): string { + let field_order: PackageInfo.FieldOrder + if (stream.field_order === PackageInfo.FieldOrder.BFF || stream.field_order === PackageInfo.FieldOrder.TFF) { + // If the stream says it is interlaced, trust that + field_order = stream.field_order + } else if (scan_field_order && scan_field_order !== PackageInfo.FieldOrder.Unknown) { + // Then try the scan if it 
gave a value + field_order = scan_field_order + } else { + // Fallback to whatever the stream has + field_order = stream.field_order || PackageInfo.FieldOrder.Unknown + } + let format = `${stream.width || 0}x${stream.height || 0}` switch (field_order) { case PackageInfo.FieldOrder.Progressive: @@ -134,7 +147,9 @@ export function acceptFormat(format: string, formats: Array>): boo * [undefined, undefined, i, 5000, tff] * ] */ -export function getAcceptedFormats(settings: IStudioSettings | undefined): Array> { +export function getAcceptedFormats( + settings: Pick | undefined +): Array> { const formatsConfigField = settings ? settings.supportedMediaFormats : '' const formatsString: string = (formatsConfigField && formatsConfigField !== '' ? formatsConfigField : '1920x1080i5000') + '' @@ -172,12 +187,15 @@ export type PieceContentStatusPiece = Pick { + extends Pick { /** Mappings between the physical devices / outputs and logical ones */ mappings: MappingsExt + /** Route sets with overrides */ + routeSets: Record + /** Contains settings for which Package Containers are present in the studio. + * (These are used by the Package Manager and the Expected Packages) + */ + packageContainers: Record } export async function checkPieceContentStatusAndDependencies( @@ -333,6 +351,7 @@ async function checkPieceContentMediaObjectStatus( codec_time_base: stream.codec.time_base, channels: stream.channels, r_frame_rate: undefined, + field_order: undefined, }) ), (stream) => buildFormatString(mediainfo.field_order, stream), @@ -557,7 +576,7 @@ async function checkPieceContentExpectedPackageStatus( const sideEffect = getSideEffect(expectedPackage, studio) thumbnailUrl = await getAssetUrlFromPackageContainerStatus( - studio, + studio.packageContainers, getPackageContainerPackageStatus, expectedPackageId, sideEffect.thumbnailContainerId, @@ -569,7 +588,7 @@ async function checkPieceContentExpectedPackageStatus( const sideEffect = getSideEffect(expectedPackage, studio) previewUrl = await getAssetUrlFromPackageContainerStatus( - studio, + studio.packageContainers, getPackageContainerPackageStatus, expectedPackageId, sideEffect.previewContainerId, @@ -716,7 +735,7 @@ async function checkPieceContentExpectedPackageStatus( } async function getAssetUrlFromPackageContainerStatus( - studio: PieceContentStatusStudio, + packageContainers: Record, getPackageContainerPackageStatus: ( packageContainerId: string, expectedPackageId: ExpectedPackageId @@ -727,7 +746,7 @@ async function getAssetUrlFromPackageContainerStatus( ): Promise { if (!assetContainerId || !packageAssetPath) return - const assetPackageContainer = studio.packageContainers[assetContainerId] + const assetPackageContainer = packageContainers[assetContainerId] if (!assetPackageContainer) return const previewPackageOnPackageContainer = await getPackageContainerPackageStatus(assetContainerId, expectedPackageId) @@ -873,7 +892,7 @@ function getPackageWarningMessage( export type PieceContentStreamInfo = Pick< PackageInfo.FFProbeScanStream, - 'width' | 'height' | 'time_base' | 'codec_type' | 'codec_time_base' | 'channels' | 'r_frame_rate' + 'width' | 'height' | 'time_base' | 'codec_type' | 'codec_time_base' | 'channels' | 'r_frame_rate' | 'field_order' > function checkStreamFormatsAndCounts( messages: Array, diff --git a/meteor/server/publications/pieceContentStatusUI/common.ts b/meteor/server/publications/pieceContentStatusUI/common.ts index 2f595f8002..591f1eb16e 100644 --- a/meteor/server/publications/pieceContentStatusUI/common.ts +++ 
b/meteor/server/publications/pieceContentStatusUI/common.ts @@ -14,19 +14,19 @@ import { PieceContentStatusStudio } from './checkPieceContentStatus' export type StudioFields = | '_id' | 'settings' - | 'packageContainers' + | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds' | 'mappingsWithOverrides' - | 'routeSets' + | 'routeSetsWithOverrides' export const studioFieldSpecifier = literal>>({ _id: 1, settings: 1, - packageContainers: 1, + packageContainersWithOverrides: 1, previewContainerIds: 1, thumbnailContainerIds: 1, mappingsWithOverrides: 1, - routeSets: 1, + routeSetsWithOverrides: 1, }) export type PackageContainerPackageStatusLight = Pick @@ -113,10 +113,10 @@ export async function fetchStudio(studioId: StudioId): Promise, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackMediaObjectChange = (mediaId: string): Partial => ({ invalidateMediaObjectMediaId: [mediaId], }) @@ -122,14 +122,14 @@ async function setupUIPieceContentStatusesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.rundownPlaylistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) // TODO - can this be done cheaper? const contentCache = createReactiveContentCache() triggerUpdate({ newCache: contentCache }) - const obs1 = new RundownContentObserver(rundownIds, contentCache) + const obs1 = await RundownContentObserver.create(rundownIds, contentCache) const innerQueries = [ contentCache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts b/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts index 26d836aaa9..6ba425ab3e 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/rundownContentObserver.ts @@ -33,7 +33,7 @@ import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settin import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' import _ from 'underscore' import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' -import { waitForPromise } from '../../../lib/lib' +import { waitForAllObserversReady } from '../../lib/lib' const REACTIVITY_DEBOUNCE = 20 @@ -46,59 +46,73 @@ function convertShowStyleBase(doc: Pick): export class RundownContentObserver { #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #cache: ContentCache #showStyleBaseIds: ShowStyleBaseId[] = [] - #showStyleBaseIdObserver: ReactiveMongoObserverGroupHandle + #showStyleBaseIdObserver!: ReactiveMongoObserverGroupHandle - constructor(rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache) { this.#cache = cache + } + + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + const observer = new RundownContentObserver(cache) + + await observer.initShowStyleBaseIdObserver() + + // This takes ownership of the #showStyleBaseIdObserver, and will stop it if this throws + await 
observer.initContentObservers(rundownIds) + + return observer + } + + private async initShowStyleBaseIdObserver() { // Run the ShowStyleBase query in a ReactiveMongoObserverGroup, so that it can be restarted whenever - this.#showStyleBaseIdObserver = waitForPromise( - ReactiveMongoObserverGroup(async () => { - // Clear already cached data - cache.ShowStyleSourceLayers.remove({}) - - logger.silly(`optimized observer restarting ${this.#showStyleBaseIds}`) - - return [ - ShowStyleBases.observe( - { - // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes - _id: { $in: this.#showStyleBaseIds }, + this.#showStyleBaseIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + this.#cache.ShowStyleSourceLayers.remove({}) + + logger.silly(`optimized observer restarting ${this.#showStyleBaseIds}`) + + return [ + ShowStyleBases.observe( + { + // We can use the `this.#showStyleBaseIds` here, as this is restarted every time that property changes + _id: { $in: this.#showStyleBaseIds }, + }, + { + added: (doc) => { + const newDoc = convertShowStyleBase(doc) + this.#cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - added: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - changed: (doc) => { - const newDoc = convertShowStyleBase(doc) - cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) - }, - removed: (doc) => { - cache.ShowStyleSourceLayers.remove(doc._id) - }, + changed: (doc) => { + const newDoc = convertShowStyleBase(doc) + this.#cache.ShowStyleSourceLayers.upsert(doc._id, { $set: newDoc as Partial }) }, - { - projection: showStyleBaseFieldSpecifier, - } - ), - ] - }) - ) + removed: (doc) => { + this.#cache.ShowStyleSourceLayers.remove(doc._id) + }, + }, + { + projection: showStyleBaseFieldSpecifier, + } + ), + ] + }) + } + private async initContentObservers(rundownIds: RundownId[]) { // Subscribe to the database, and pipe any updates into the ReactiveCacheCollections - this.#observers = [ + this.#observers = await waitForAllObserversReady([ Rundowns.observeChanges( { _id: { $in: rundownIds, }, }, - cache.Rundowns.link(() => { + this.#cache.Rundowns.link(() => { // Check if the ShowStyleBaseIds needs updating this.updateShowStyleBaseIds() }), @@ -114,7 +128,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Segments.link(), + this.#cache.Segments.link(), { projection: segmentFieldSpecifier, } @@ -125,7 +139,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Parts.link(), + this.#cache.Parts.link(), { projection: partFieldSpecifier, } @@ -136,7 +150,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.Pieces.link(), + this.#cache.Pieces.link(), { projection: pieceFieldSpecifier, } @@ -148,7 +162,7 @@ export class RundownContentObserver { }, reset: { $ne: true }, }, - cache.PartInstances.link(), + this.#cache.PartInstances.link(), { projection: partInstanceFieldSpecifier, } @@ -160,7 +174,7 @@ export class RundownContentObserver { }, reset: { $ne: true }, }, - cache.PieceInstances.link(), + this.#cache.PieceInstances.link(), { projection: pieceInstanceFieldSpecifier, } @@ -171,7 +185,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.AdLibPieces.link(), + this.#cache.AdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } @@ -182,7 +196,7 @@ export class RundownContentObserver { $in: 
rundownIds, }, }, - cache.AdLibActions.link(), + this.#cache.AdLibActions.link(), { projection: adLibActionFieldSpecifier, } @@ -193,7 +207,7 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.BaselineAdLibPieces.link(), + this.#cache.BaselineAdLibPieces.link(), { projection: adLibPieceFieldSpecifier, } @@ -204,12 +218,12 @@ export class RundownContentObserver { $in: rundownIds, }, }, - cache.BaselineAdLibActions.link(), + this.#cache.BaselineAdLibActions.link(), { projection: adLibActionFieldSpecifier, } ), - ] + ]) } private updateShowStyleBaseIds = _.debounce( diff --git a/meteor/server/publications/rundown.ts b/meteor/server/publications/rundown.ts index 9c56b681f9..f939a9baff 100644 --- a/meteor/server/publications/rundown.ts +++ b/meteor/server/publications/rundown.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { MongoFieldSpecifierZeroes, MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' @@ -17,7 +17,7 @@ import { AdLibActions, AdLibPieces, ExpectedPlayoutItems, - IngestDataCache, + NrcsIngestDataCache, PartInstances, Parts, PeripheralDevices, @@ -29,7 +29,7 @@ import { Segments, } from '../collections' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { literal } from '@sofie-automation/corelib/dist/lib' import { PartId, @@ -532,16 +532,16 @@ meteorPublish( // Note: this publication is for dev purposes only: meteorPublish( CorelibPubSub.ingestDataCache, - async function (selector: MongoQuery, token: string | undefined) { + async function (selector: MongoQuery, token: string | undefined) { if (!selector) throw new Meteor.Error(400, 'selector argument missing') - const modifier: FindOptions = { + const modifier: FindOptions = { fields: {}, } if ( NoSecurityReadAccess.any() || (await RundownReadAccess.rundownContent(selector.rundownId, { userId: this.userId, token })) ) { - return IngestDataCache.findWithCursor(selector, modifier) + return NrcsIngestDataCache.findWithCursor(selector, modifier) } return null } diff --git a/meteor/server/publications/rundownPlaylist.ts b/meteor/server/publications/rundownPlaylist.ts index b8b3cbe69a..89378b1587 100644 --- a/meteor/server/publications/rundownPlaylist.ts +++ b/meteor/server/publications/rundownPlaylist.ts @@ -1,5 +1,5 @@ import { RundownPlaylistReadAccess } from '../security/rundownPlaylist' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { StudioReadAccess } from '../security/studio' import { OrganizationReadAccess } from '../security/organization' import { NoSecurityReadAccess } from '../security/noSecurity' diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index dd71797d5f..5ab2a86a44 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -12,13 +12,13 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + 
SetupObserversResult, TriggerUpdate, } from '../../lib/customPublication' import { logger } from '../../logging' import { resolveCredentials } from '../../security/lib/credentials' import { NoSecurityReadAccess } from '../../security/noSecurity' import { RundownPlaylistReadAccess } from '../../security/rundownPlaylist' -import { LiveQueryHandle } from '../../lib/lib' import { ContentCache, createReactiveContentCache, @@ -60,13 +60,13 @@ const rundownPlaylistFieldSpecifier = literal< async function setupUISegmentPartNotesPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const playlist = (await RundownPlaylists.findOneAsync(args.playlistId, { projection: rundownPlaylistFieldSpecifier, })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = new RundownsObserver(playlist.studioId, playlist._id, (rundownIds) => { + const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { logger.silly(`Creating new RundownContentObserver`) // TODO - can this be done cheaper? @@ -75,7 +75,7 @@ async function setupUISegmentPartNotesPublicationObservers( // Push update triggerUpdate({ newCache: cache }) - const obs1 = new RundownContentObserver(rundownIds, cache) + const obs1 = await RundownContentObserver.create(rundownIds, cache) const innerQueries = [ cache.Segments.find({}).observeChanges({ diff --git a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts index 43f9c25270..214a5dac96 100644 --- a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts +++ b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts @@ -9,16 +9,21 @@ import { segmentFieldSpecifier, } from './reactiveContentCache' import { PartInstances, Parts, Rundowns, Segments } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' export class RundownContentObserver { - #observers: Meteor.LiveQueryHandle[] = [] - #cache: ContentCache + readonly #observers: Meteor.LiveQueryHandle[] + readonly #cache: ContentCache - constructor(rundownIds: RundownId[], cache: ContentCache) { - logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + private constructor(cache: ContentCache, observers: Meteor.LiveQueryHandle[]) { this.#cache = cache + this.#observers = observers + } - this.#observers = [ + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observers = await waitForAllObserversReady([ Rundowns.observeChanges( { _id: { @@ -57,7 +62,9 @@ export class RundownContentObserver { cache.DeletedPartInstances.link(), { fields: partInstanceFieldSpecifier } ), - ] + ]) + + return new RundownContentObserver(cache, observers) } public get cache(): ContentCache { diff --git a/meteor/server/publications/showStyle.ts b/meteor/server/publications/showStyle.ts index bd75548151..99b3099e50 100644 --- a/meteor/server/publications/showStyle.ts +++ b/meteor/server/publications/showStyle.ts @@ -1,4 +1,4 @@ -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { 
DBShowStyleVariant } from '@sofie-automation/corelib/dist/dataModel/ShowStyleVariant' diff --git a/meteor/server/publications/showStyleUI.ts b/meteor/server/publications/showStyleUI.ts index 7888357843..68309db7d9 100644 --- a/meteor/server/publications/showStyleUI.ts +++ b/meteor/server/publications/showStyleUI.ts @@ -1,19 +1,23 @@ import { ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { Complete, literal } from '../lib/tempLib' -import { meteorCustomPublish, setUpOptimizedObserverArray, TriggerUpdate } from '../lib/customPublication' +import { + meteorCustomPublish, + SetupObserversResult, + setUpOptimizedObserverArray, + TriggerUpdate, +} from '../lib/customPublication' import { logger } from '../logging' import { NoSecurityReadAccess } from '../security/noSecurity' import { OrganizationReadAccess } from '../security/organization' import { ShowStyleReadAccess } from '../security/showStyle' import { ShowStyleBases } from '../collections' -import { AutoFillSelector } from './lib' +import { AutoFillSelector } from './lib/lib' import { check } from 'meteor/check' interface UIShowStyleBaseArgs { @@ -38,7 +42,7 @@ const fieldSpecifier = literal, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ ShowStyleBases.observeChanges( diff --git a/meteor/server/publications/studio.ts b/meteor/server/publications/studio.ts index a717817348..08002e6938 100644 --- a/meteor/server/publications/studio.ts +++ b/meteor/server/publications/studio.ts @@ -1,6 +1,6 @@ import { Meteor } from 'meteor/meteor' import { check, Match } from '../lib/check' -import { meteorPublish, AutoFillSelector } from './lib' +import { meteorPublish, AutoFillSelector } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { getActiveRoutes, getRoutedMappings } from '@sofie-automation/meteor-lib/dist/collections/Studios' import { PeripheralDeviceReadAccess } from '../security/peripheralDevice' @@ -11,6 +11,7 @@ import { NoSecurityReadAccess } from '../security/noSecurity' import { CustomPublish, meteorCustomPublish, + SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate, } from '../lib/customPublication' @@ -170,7 +171,7 @@ interface RoutedMappingsUpdateProps { async function setupMappingsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( @@ -202,7 +203,7 @@ async function manipulateMappingsPublicationData( const studio = await Studios.findOneAsync(args.studioId) if (!studio) return [] - const routes = getActiveRoutes(studio.routeSets) + const routes = getActiveRoutes(applyAndValidateOverrides(studio.routeSetsWithOverrides).obj) const rawMappings = applyAndValidateOverrides(studio.mappingsWithOverrides) const routedMappings = getRoutedMappings(rawMappings.obj, routes) diff --git a/meteor/server/publications/studioUI.ts b/meteor/server/publications/studioUI.ts index 
7d6ae98ad2..b8de6f1b7d 100644 --- a/meteor/server/publications/studioUI.ts +++ b/meteor/server/publications/studioUI.ts @@ -1,7 +1,6 @@ import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' @@ -11,6 +10,7 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' import { logger } from '../logging' @@ -38,25 +38,31 @@ function convertDocument(studio: Pick): UIStudio { settings: studio.settings, - routeSets: studio.routeSets, - routeSetExclusivityGroups: studio.routeSetExclusivityGroups, + routeSets: applyAndValidateOverrides(studio.routeSetsWithOverrides).obj, + routeSetExclusivityGroups: applyAndValidateOverrides(studio.routeSetExclusivityGroupsWithOverrides).obj, }) } -type StudioFields = '_id' | 'name' | 'mappingsWithOverrides' | 'settings' | 'routeSets' | 'routeSetExclusivityGroups' +type StudioFields = + | '_id' + | 'name' + | 'mappingsWithOverrides' + | 'settings' + | 'routeSetsWithOverrides' + | 'routeSetExclusivityGroupsWithOverrides' const fieldSpecifier = literal>>({ _id: 1, name: 1, mappingsWithOverrides: 1, settings: 1, - routeSets: 1, - routeSetExclusivityGroups: 1, + routeSetsWithOverrides: 1, + routeSetExclusivityGroupsWithOverrides: 1, }) async function setupUIStudioPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackChange = (id: StudioId): Partial => ({ invalidateStudioIds: [id], }) diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index bc74aa1ca6..94a8969027 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -1,5 +1,5 @@ import { Meteor } from 'meteor/meteor' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { SystemReadAccess } from '../security/system' import { OrganizationReadAccess } from '../security/organization' diff --git a/meteor/server/publications/timeline.ts b/meteor/server/publications/timeline.ts index c9511ee4d2..15cf679157 100644 --- a/meteor/server/publications/timeline.ts +++ b/meteor/server/publications/timeline.ts @@ -8,12 +8,13 @@ import { serializeTimelineBlob, TimelineBlob, } from '@sofie-automation/corelib/dist/dataModel/Timeline' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { FindOptions } from '@sofie-automation/meteor-lib/dist/collections/lib' import { CustomPublish, meteorCustomPublish, + SetupObserversResult, setUpOptimizedObserverArray, TriggerUpdate, } from '../lib/customPublication' @@ -36,6 +37,7 @@ import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' meteorPublish(CorelibPubSub.timelineDatastore, async function (studioId: 
StudioId, token: string | undefined) { if (!studioId) throw new Meteor.Error(400, 'selector argument missing') @@ -123,7 +125,7 @@ interface RoutedTimelineUpdateProps { async function setupTimelinePublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { // Set up observers: return [ Studios.observeChanges( @@ -209,7 +211,7 @@ async function manipulateTimelinePublicationData( if (!state.routes) { // Routes need recalculating - state.routes = getActiveRoutes(state.studio.routeSets) + state.routes = getActiveRoutes(applyAndValidateOverrides(state.studio.routeSetsWithOverrides).obj) invalidateTimeline = true } diff --git a/meteor/server/publications/translationsBundles.ts b/meteor/server/publications/translationsBundles.ts index 55e98e9503..8173fd3ec5 100644 --- a/meteor/server/publications/translationsBundles.ts +++ b/meteor/server/publications/translationsBundles.ts @@ -1,5 +1,5 @@ import { TranslationsBundlesSecurity } from '../security/translationsBundles' -import { meteorPublish } from './lib' +import { meteorPublish } from './lib/lib' import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { TranslationsBundles } from '../collections' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' diff --git a/meteor/server/publications/triggeredActionsUI.ts b/meteor/server/publications/triggeredActionsUI.ts index 854c20450a..5a431daf10 100644 --- a/meteor/server/publications/triggeredActionsUI.ts +++ b/meteor/server/publications/triggeredActionsUI.ts @@ -1,6 +1,5 @@ import { ShowStyleBaseId, TriggeredActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' -import { Meteor } from 'meteor/meteor' import { ReadonlyDeep } from 'type-fest' import { CustomCollectionName, MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' import { @@ -12,6 +11,7 @@ import { CustomPublishCollection, meteorCustomPublish, setUpCollectionOptimizedObserver, + SetupObserversResult, TriggerUpdate, } from '../lib/customPublication' import { logger } from '../logging' @@ -64,7 +64,7 @@ function convertDocument(doc: DBTriggeredActions): UITriggeredActionsObj { async function setupUITriggeredActionsPublicationObservers( args: ReadonlyDeep, triggerUpdate: TriggerUpdate -): Promise { +): Promise { const trackChange = (id: TriggeredActionId): Partial => ({ invalidateTriggeredActions: [id], }) diff --git a/meteor/yarn.lock b/meteor/yarn.lock index 007a73b429..233d3a8179 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -1211,7 +1211,7 @@ __metadata: node-fetch: ^2.7.0 p-lazy: ^3.1.0 p-timeout: ^4.1.0 - superfly-timeline: 9.0.1 + superfly-timeline: 9.0.2 threadedclass: ^1.2.2 tslib: ^2.6.2 type-fest: ^3.13.1 @@ -1242,7 +1242,7 @@ __metadata: resolution: "@sofie-automation/shared-lib@portal:../packages/shared-lib::locator=automation-core%40workspace%3A." 
dependencies: "@mos-connection/model": v4.2.0-alpha.1 - timeline-state-resolver-types: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 + timeline-state-resolver-types: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 tslib: ^2.6.2 type-fest: ^3.13.1 languageName: node @@ -2349,7 +2349,7 @@ __metadata: prettier: ^2.8.8 semver: ^7.5.4 standard-version: ^9.5.0 - superfly-timeline: 9.0.1 + superfly-timeline: 9.0.2 threadedclass: ^1.2.2 timecode: 0.0.4 ts-jest: ^29.1.2 @@ -10026,12 +10026,12 @@ __metadata: languageName: node linkType: hard -"superfly-timeline@npm:9.0.1": - version: 9.0.1 - resolution: "superfly-timeline@npm:9.0.1" +"superfly-timeline@npm:9.0.2": + version: 9.0.2 + resolution: "superfly-timeline@npm:9.0.2" dependencies: tslib: ^2.6.0 - checksum: 4267eed691fe9ce9f89bf17c8aed1a98206938dd6d850c64b083e4fd3a3dc5329801c76c757450c9520375bad100ce512cc6d6a3e4a997bdfa14a4e7d65f09f2 + checksum: d628d467d5384f5667bc10b877478c5b8b0a91774b5d5c5e9d9d3134b8f1b760225f2fbbb0f9ccd3e55f930c9f3719f81b9347b94ea853fbc0a18bc121d97665 languageName: node linkType: hard @@ -10197,12 +10197,12 @@ __metadata: languageName: node linkType: hard -"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0": - version: 9.2.0-nightly-release52-20240909-111856-517f0ee37.0 - resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240909-111856-517f0ee37.0" +"timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0": + version: 9.2.0-nightly-release52-20240923-122840-58cfbb259.0 + resolution: "timeline-state-resolver-types@npm:9.2.0-nightly-release52-20240923-122840-58cfbb259.0" dependencies: tslib: ^2.6.3 - checksum: 6f9526e3e60021b722fd152272a7697a2b78517fb800a9e0879170388dcfeaaa2d386f80b9868b20bffe058033c9158077f93294cff7907c5d8a6d7b27e186f6 + checksum: c041363201bcfc0daac2ebca021b09fddc1f5b12fdeb932d9c19bfadc3ee308aa81f36c74c005edad2e756ed1c6465de779bfca5ed63ffd940878bf015497231 languageName: node linkType: hard diff --git a/package.json b/package.json index ae7452cddd..eb20c670c5 100644 --- a/package.json +++ b/package.json @@ -20,13 +20,13 @@ "unit:packages": "cd packages && run unit", "check-types:meteor": "cd meteor && run check-types", "test:meteor": "cd meteor && run test", - "lint:meteor": "cd meteor && meteor yarn lint", - "unit:meteor": "cd meteor && meteor yarn unit", - "meteor:run": "cd meteor && meteor yarn start", + "lint:meteor": "cd meteor && meteor npm run lint", + "unit:meteor": "cd meteor && meteor npm run unit", + "meteor:run": "cd meteor && meteor npm run start", "lint": "run lint:meteor && run lint:packages", "unit": "run unit:meteor && run unit:packages", "validate:release": "yarn install && run install-and-build && run validate:release:packages && run validate:release:meteor", - "validate:release:meteor": "cd meteor && meteor yarn validate:prod-dependencies && meteor yarn license-validate && meteor yarn lint && meteor yarn test", + "validate:release:meteor": "cd meteor && meteor npm run validate:prod-dependencies && meteor npm run license-validate && meteor npm run lint && meteor npm run test", "validate:release:packages": "cd packages && run validate:dependencies && run test", "meteor": "cd meteor && meteor", "docs:serve": "cd packages && run docs:serve", diff --git a/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch b/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch new file mode 100644 index 0000000000..ab845f486f --- /dev/null +++ 
b/packages/.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch @@ -0,0 +1,11 @@ +diff --git a/lib/timecode.js b/lib/timecode.js +index ef4028e2ebc3b5480e93d88a38627454553e3502..a521b33cd239c38dfced835410aa57e8be9246ca 100644 +--- a/lib/timecode.js ++++ b/lib/timecode.js +@@ -217,3 +217,5 @@ var exports = exports || window; + + exports.Timecode = Timecode; + ++// Vite needs a more modern export ++module.exports = { Timecode } +\ No newline at end of file diff --git a/packages/blueprints-integration/CHANGELOG.md b/packages/blueprints-integration/CHANGELOG.md index 7647e8c41b..abf1b45e2d 100644 --- a/packages/blueprints-integration/CHANGELOG.md +++ b/packages/blueprints-integration/CHANGELOG.md @@ -3,6 +3,102 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.51.3](https://github.com/nrkno/sofie-core/compare/v1.51.2...v1.51.3) (2024-11-21) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.2](https://github.com/nrkno/sofie-core/compare/v1.51.1...v1.51.2) (2024-11-21) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1](https://github.com/nrkno/sofie-core/compare/v1.51.1-2...v1.51.1) (2024-11-13) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1-2](https://github.com/nrkno/sofie-core/compare/v1.51.1-1...v1.51.1-2) (2024-10-24) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1-2](https://github.com/nrkno/sofie-core/compare/v1.51.1-1...v1.51.1-2) (2024-10-24) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1-1](https://github.com/nrkno/sofie-core/compare/v1.51.1-0...v1.51.1-1) (2024-10-18) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1-0](https://github.com/nrkno/sofie-core/compare/v1.51.0...v1.51.1-0) (2024-10-18) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +## [1.51.1-0](https://github.com/nrkno/sofie-core/compare/v1.51.0...v1.51.1-0) (2024-10-18) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +# [1.51.0](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.3...v1.51.0) (2024-10-07) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +# [1.51.0](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.3...v1.51.0) (2024-10-07) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +# [1.51.0-in-testing.3](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.2...v1.51.0-in-testing.3) (2024-09-25) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + +# [1.51.0-in-testing.2](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.1...v1.51.0-in-testing.2) (2024-09-24) + +**Note:** Version bump only for package @sofie-automation/blueprints-integration + + + + + # [1.51.0-in-testing.1](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.0...v1.51.0-in-testing.1) (2024-09-06) diff --git a/packages/blueprints-integration/src/abPlayback.ts b/packages/blueprints-integration/src/abPlayback.ts index 8a7c782d4b..1cf6943a1b 100644 --- a/packages/blueprints-integration/src/abPlayback.ts +++ 
b/packages/blueprints-integration/src/abPlayback.ts @@ -1,6 +1,9 @@ import type { TimelineObjectAbSessionInfo } from '@sofie-automation/shared-lib/dist/core/model/Timeline' import type { ICommonContext } from './context' import type { OnGenerateTimelineObj, TSR } from './timeline' +import type { AbPlayerId } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' + +export type { AbPlayerId } export interface PieceAbSessionInfo extends TimelineObjectAbSessionInfo { /** @@ -20,7 +23,7 @@ export const AB_MEDIA_PLAYER_AUTO = '__auto__' * Description of a player in an AB pool */ export interface ABPlayerDefinition { - playerId: number | string + playerId: AbPlayerId } /** @@ -30,7 +33,7 @@ export interface ABTimelineLayerChangeRule { /** What AB pools can this rule be used for */ acceptedPoolNames: string[] /** A function to generate the new layer name for a chosen playerId */ - newLayerName: (playerId: number | string) => string + newLayerName: (playerId: AbPlayerId) => string /** Whether this rule can be used for lookaheadObjects */ allowsLookahead: boolean } @@ -60,7 +63,7 @@ export interface ABResolverConfiguration { customApplyToObject?: ( context: ICommonContext, poolName: string, - playerId: number | string, + playerId: AbPlayerId, timelineObject: OnGenerateTimelineObj ) => boolean } diff --git a/packages/blueprints-integration/src/action.ts b/packages/blueprints-integration/src/action.ts index 2639f7f8ef..be134514be 100644 --- a/packages/blueprints-integration/src/action.ts +++ b/packages/blueprints-integration/src/action.ts @@ -41,6 +41,8 @@ export interface IBlueprintActionManifestDisplay { uniquenessId?: string /** When not playing, display in the UI as playing, and vice versa. Useful for Adlibs that toggle something off when taken */ invertOnAirState?: boolean + /** Hide this adLib from the shelf - it is accessible only through the API */ + hidden?: boolean } export interface IBlueprintActionManifestDisplayContent extends IBlueprintActionManifestDisplay { @@ -99,8 +101,8 @@ export interface IBlueprintActionManifest + /** Schema for the executeAdLib adLibOptions property to allow for customising */ + optionsSchema?: JSONBlob // Potential future properties: // /** Execute the action after userData is changed. If not present ActionExecuteAfterChanged.none is assumed. 
*/ // executeOnUserDataChanged?: ActionExecuteAfterChanged diff --git a/packages/blueprints-integration/src/api/showStyle.ts b/packages/blueprints-integration/src/api/showStyle.ts index 52ec4559fd..9a4958ea28 100644 --- a/packages/blueprints-integration/src/api/showStyle.ts +++ b/packages/blueprints-integration/src/api/showStyle.ts @@ -19,7 +19,7 @@ import type { IOnTakeContext, IOnSetAsNextContext, } from '../context' -import type { IngestAdlib, ExtendedIngestRundown, IngestSegment, IngestRundown } from '../ingest' +import type { IngestAdlib, ExtendedIngestRundown, IngestRundown } from '../ingest' import type { IBlueprintExternalMessageQueueObj } from '../message' import type { MigrationStepShowStyle } from '../migrations' import type { @@ -46,6 +46,7 @@ import type { BlueprintConfigCoreConfig, BlueprintManifestBase, BlueprintManifes import type { IBlueprintTriggeredActions } from '../triggers' import type { ExpectedPackage } from '../package' import type { ABResolverConfiguration } from '../abPlayback' +import type { SofieIngestSegment } from '../ingest-types' export type TimelinePersistentState = unknown @@ -85,7 +86,7 @@ export interface ShowStyleBlueprintManifest BlueprintResultSegment | Promise /** @@ -130,8 +131,10 @@ export interface ShowStyleBlueprintManifest Promise + privateData?: unknown, + publicData?: unknown, + actionOptions?: { [key: string]: any } + ) => Promise<{ validationErrors: any } | void> /** Generate adlib piece from ingest data */ getAdlibItem?: ( @@ -169,6 +172,24 @@ export interface ShowStyleBlueprintManifest TProcessedConfig + /** + * Optional method to validate the blueprint config passed to this blueprint according to the API schema. + * Returns a list of messages to the caller that are used for logging or to throw if errors have been found. + */ + validateConfigFromAPI?: (context: ICommonContext, apiConfig: object) => Array + + /** + * Optional method to transform from an API blueprint config to the database blueprint config if these are required to be different. + * If this method is not defined the config object will be used directly + */ + blueprintConfigFromAPI?: (context: ICommonContext, config: object) => TRawConfig + + /** + * Optional method to transform from a database blueprint config to the API blueprint config if these are required to be different. 
+ * If this method is not defined the config object will be used directly + */ + blueprintConfigToAPI?: (context: ICommonContext, config: TRawConfig) => object + // Events onRundownActivate?: (context: IRundownActivationContext, wasActive: boolean) => Promise diff --git a/packages/blueprints-integration/src/api/studio.ts b/packages/blueprints-integration/src/api/studio.ts index a4be296f26..fd0c49c5d2 100644 --- a/packages/blueprints-integration/src/api/studio.ts +++ b/packages/blueprints-integration/src/api/studio.ts @@ -4,13 +4,30 @@ import type { BlueprintConfigCoreConfig, BlueprintManifestBase, BlueprintManifes import type { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaTypes' import type { JSONBlob } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import type { MigrationStepStudio } from '../migrations' -import type { ICommonContext, IFixUpConfigContext, IStudioBaselineContext, IStudioUserContext } from '../context' +import type { + ICommonContext, + IFixUpConfigContext, + IStudioBaselineContext, + IStudioUserContext, + IProcessIngestDataContext, +} from '../context' import type { IBlueprintShowStyleBase } from '../showStyle' -import type { ExtendedIngestRundown } from '../ingest' +import type { + ExtendedIngestRundown, + NrcsIngestChangeDetails, + IngestRundown, + MutableIngestRundown, + UserOperationChange, +} from '../ingest' import type { ExpectedPlayoutItemGeneric, IBlueprintResultRundownPlaylist, IBlueprintRundownDB } from '../documents' import type { BlueprintMappings } from '../studio' import type { TimelineObjectCoreExt, TSR } from '../timeline' import type { ExpectedPackage } from '../package' +import type { + StudioRouteSet, + StudioRouteSetExclusivityGroup, +} from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' +import { StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' export interface StudioBlueprintManifest extends BlueprintManifestBase { @@ -75,6 +92,35 @@ export interface StudioBlueprintManifest TProcessedConfig + + /** + * Optional method to validate the blueprint config passed to this blueprint according to the API schema. + * Returns a list of messages to the caller that are used for logging or to throw if errors have been found. + */ + validateConfigFromAPI?: (context: ICommonContext, apiConfig: object) => Array + + /** + * Optional method to transform from an API blueprint config to the database blueprint config if these are required to be different. + * If this method is not defined the config object will be used directly + */ + blueprintConfigFromAPI?: (context: ICommonContext, config: object) => IBlueprintConfig + + /** + * Optional method to transform from a database blueprint config to the API blueprint config if these are required to be different. 
+ * If this method is not defined the config object will be used directly + */ + blueprintConfigToAPI?: (context: ICommonContext, config: TRawConfig) => object + + /** + * Process an ingest operation, to apply changes to the sofie interpretation of the ingest data + */ + processIngestData?: ( + context: IProcessIngestDataContext, + mutableIngestRundown: MutableIngestRundown, + nrcsIngestRundown: IngestRundown, + previousNrcsIngestRundown: IngestRundown | undefined, + changes: NrcsIngestChangeDetails | UserOperationChange + ) => Promise } export interface BlueprintResultStudioBaseline { @@ -108,6 +154,12 @@ export interface BlueprintResultApplyStudioConfig { ingestDevices: Record /** Input-gateway subdevices */ inputDevices: Record + /** Route Sets */ + routeSets?: Record + /** Route Set Exclusivity Groups */ + routeSetExclusivityGroups?: Record + /** Package Containers */ + packageContainers?: Record } export interface IStudioConfigPreset { diff --git a/packages/blueprints-integration/src/context/adlibActionContext.ts b/packages/blueprints-integration/src/context/adlibActionContext.ts index f6dbd60cc4..ec1b19a4bb 100644 --- a/packages/blueprints-integration/src/context/adlibActionContext.ts +++ b/packages/blueprints-integration/src/context/adlibActionContext.ts @@ -4,6 +4,7 @@ import type { IShowStyleUserContext } from './showStyleContext' import { IPartAndPieceActionContext } from './partsAndPieceActionContext' import { IExecuteTSRActionsContext } from './executeTsrActionContext' import { IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece } from '..' +import { IRouteSetMethods } from './routeSetContext' /** Actions */ export interface IDataStoreMethods { @@ -24,7 +25,8 @@ export interface IActionExecutionContext IEventContext, IDataStoreMethods, IPartAndPieceActionContext, - IExecuteTSRActionsContext { + IExecuteTSRActionsContext, + IRouteSetMethods { /** Fetch the showstyle config for the specified part */ // getNextShowStyleConfig(): Readonly<{ [key: string]: ConfigItemValue }> diff --git a/packages/blueprints-integration/src/context/eventContext.ts b/packages/blueprints-integration/src/context/eventContext.ts index 271ba20871..e5fe6e234d 100644 --- a/packages/blueprints-integration/src/context/eventContext.ts +++ b/packages/blueprints-integration/src/context/eventContext.ts @@ -2,6 +2,7 @@ import type { OnGenerateTimelineObj, TSR } from '../timeline' import type { IBlueprintPartInstance, IBlueprintPieceInstance, IBlueprintSegmentDB } from '../documents' import type { IRundownContext } from './rundownContext' import type { IBlueprintExternalMessageQueueObj } from '../message' +import { BlueprintQuickLookInfo } from './quickLoopInfo' export interface IEventContext { getCurrentTime(): number @@ -12,6 +13,9 @@ export interface ITimelineEventContext extends IEventContext, IRundownContext { readonly nextPartInstance: Readonly | undefined readonly previousPartInstance: Readonly | undefined + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Get the full session id for an ab playback session. 
* Note: sessionName should be unique within the segment unless pieces want to share a session diff --git a/packages/blueprints-integration/src/context/index.ts b/packages/blueprints-integration/src/context/index.ts index 594ecff3de..843436ddd8 100644 --- a/packages/blueprints-integration/src/context/index.ts +++ b/packages/blueprints-integration/src/context/index.ts @@ -5,6 +5,7 @@ export * from './fixUpConfigContext' export * from './onSetAsNextContext' export * from './onTakeContext' export * from './packageInfoContext' +export * from './processIngestDataContext' export * from './rundownContext' export * from './showStyleContext' export * from './studioContext' diff --git a/packages/blueprints-integration/src/context/onSetAsNextContext.ts b/packages/blueprints-integration/src/context/onSetAsNextContext.ts index 0b28371aa2..da6afe52ae 100644 --- a/packages/blueprints-integration/src/context/onSetAsNextContext.ts +++ b/packages/blueprints-integration/src/context/onSetAsNextContext.ts @@ -9,12 +9,16 @@ import { IEventContext, IShowStyleUserContext, } from '..' +import { BlueprintQuickLookInfo } from './quickLoopInfo' /** * Context in which 'current' is the part currently on air, and 'next' is the partInstance being set as Next * This is similar to `IPartAndPieceActionContext`, but has more limits on what is allowed to be changed. */ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContext { + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Data fetching */ @@ -65,4 +69,12 @@ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContext */ /** Remove pieceInstances by id. Returns ids of pieceInstances that were removed. Note: For now we only allow removing from the next, but this might change to include current if there is justification */ removePieceInstances(part: 'next', pieceInstanceIds: string[]): Promise<string[]> + + /** + * Move the next part through the rundown. Can move by either a number of parts, or segments in either direction. + * This will result in the `onSetAsNext` callback being called again following the current call, with the new PartInstance. + * Multiple calls of this inside one call to `onSetAsNext` will replace earlier calls. + * @returns Whether a new Part was found using the provided offset + */ + moveNextPart(partDelta: number, segmentDelta: number): Promise<boolean> } diff --git a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts index 4c68a254f8..d7a213f599 100644 --- a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts +++ b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts @@ -8,8 +8,12 @@ import { IBlueprintResolvedPieceInstance, Time, } from '..'
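// NOTE (hedged sketch, not part of this patch): how a blueprint's onSetAsNext
// implementation might combine the new `quickLoopInfo` and `moveNextPart`
// members of IOnSetAsNextContext above. The inline context type mirrors only
// the members used here.
async function skipNextWhileLoopLocked(context: {
	quickLoopInfo: { running: boolean; locked: boolean } | null
	moveNextPart(partDelta: number, segmentDelta: number): Promise<boolean>
}): Promise<void> {
	// Only intervene while a loop is running and locked from user editing
	if (!context.quickLoopInfo?.running || !context.quickLoopInfo.locked) return

	// Nudge the next-point one part forward; onSetAsNext is invoked again with
	// the new PartInstance, and the promise resolves to whether a Part was found
	await context.moveNextPart(1, 0)
}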
+import { BlueprintQuickLookInfo } from './quickLoopInfo' export interface IPartAndPieceActionContext { + /** Information about the current loop, if there is one */ + readonly quickLoopInfo: BlueprintQuickLookInfo | null + /** * Data fetching */ diff --git a/packages/blueprints-integration/src/context/processIngestDataContext.ts b/packages/blueprints-integration/src/context/processIngestDataContext.ts new file mode 100644 index 0000000000..480732d753 --- /dev/null +++ b/packages/blueprints-integration/src/context/processIngestDataContext.ts @@ -0,0 +1,71 @@ +import type { IngestRundown, IngestSegment } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' +import type { IStudioContext } from './studioContext' +import type { IngestDefaultChangesOptions, MutableIngestRundown, NrcsIngestChangeDetails } from '../ingest' + +export interface IProcessIngestDataContext extends IStudioContext { + /** + * Perform the default syncing of changes from the ingest data to the rundown. + * + * Please note that this may be overly aggressive at removing any changes made by user operations. + * If you are using user operations, you may need to perform some pre and post fixups to ensure + * changes aren't wiped unnecessarily. + * + * @param ingestRundown NRCS version of the IngestRundown to copy from + * @param ingestChanges A description of the changes that have been made to the rundown and should be propagated + * @param options Options for how to apply the changes + */ + defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + ingestRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options?: IngestDefaultChangesOptions + ): void + + /** + * Group the Parts in a MOS Rundown and return a new changes object + * This will group the Parts based on the segment name, using the separator provided to extract the segment name from the part name + * + * Please note that this ignores some of the granularity of the `ingestChanges` object, and relies more on the `previousIngestRundown` instead. + * If you are using user operations, you may need to perform some pre and post fixups to ensure changes aren't wiped unnecessarily. + * + * @param ingestRundown The rundown whose parts need grouping + * @param previousIngestRundown The rundown prior to the changes, if known + * @param ingestChanges The changes which have been performed in `ingestRundown`, that need translating + * @param partNameSeparator A string to split the part name on + * @returns A transformed rundown and changes object + */ + groupMosPartsInRundownAndChangesWithSeparator( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + partNameSeparator: string + ): GroupPartsInMosRundownAndChangesResult + + /** + * Group Parts in a Rundown and return a new changes object + * + * Please note that this ignores some of the granularity of the `ingestChanges` object, and relies more on the `previousIngestRundown` instead. + * If you are using user operations, you may need to perform some pre and post fixups to ensure changes aren't wiped unnecessarily.
+ * + * @param ingestRundown The rundown whose parts need grouping + * @param previousIngestRundown The rundown prior to the changes, if known + * @param ingestChanges The changes which have been performed in `ingestRundown`, that need translating + * @param groupPartsIntoSegments A function to group parts into segments + * @returns A transformed rundown and changes object + */ + groupPartsInRundownAndChanges( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[] + ): GroupPartsInMosRundownAndChangesResult +} + +export interface GroupPartsInMosRundownAndChangesResult< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> { + nrcsIngestRundown: IngestRundown + ingestChanges: NrcsIngestChangeDetails +} diff --git a/packages/blueprints-integration/src/context/quickLoopInfo.ts b/packages/blueprints-integration/src/context/quickLoopInfo.ts new file mode 100644 index 0000000000..7c486b8e2d --- /dev/null +++ b/packages/blueprints-integration/src/context/quickLoopInfo.ts @@ -0,0 +1,6 @@ +export type BlueprintQuickLookInfo = Readonly<{ + /** Whether there is a loop running */ + running: boolean + /** Whether the loop is locked from user editing */ + locked: boolean +}> diff --git a/packages/blueprints-integration/src/context/routeSetContext.ts b/packages/blueprints-integration/src/context/routeSetContext.ts new file mode 100644 index 0000000000..616261d1b3 --- /dev/null +++ b/packages/blueprints-integration/src/context/routeSetContext.ts @@ -0,0 +1,9 @@ +import { StudioRouteSet } from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' + +export interface IRouteSetMethods { + /** Returns a list of the RouteSets */ + listRouteSets(): Promise<Record<string, StudioRouteSet>> + + /** Switch RouteSet State */ + switchRouteSet(routeSetId: string, state: boolean | 'toggle'): Promise<void> +} diff --git a/packages/blueprints-integration/src/documents/adlibPiece.ts b/packages/blueprints-integration/src/documents/adlibPiece.ts index 7e515480d9..a01d53f18a 100644 --- a/packages/blueprints-integration/src/documents/adlibPiece.ts +++ b/packages/blueprints-integration/src/documents/adlibPiece.ts @@ -22,6 +22,8 @@ export interface IBlueprintAdLibPiece diff --git a/packages/blueprints-integration/src/documents/part.ts b/packages/blueprints-integration/src/documents/part.ts index 6f656a8802..72a836f47c 100644 --- a/packages/blueprints-integration/src/documents/part.ts +++ b/packages/blueprints-integration/src/documents/part.ts @@ -1,3 +1,4 @@ +import type { UserEditingDefinition } from '../userEditing' import type { NoteSeverity } from '../lib' import type { ITranslatableMessage } from '../translations' @@ -54,9 +55,6 @@ export interface IBlueprintMutatablePart /** Whether the piece affects the output of the Studio or is describing an invisible state within the Studio */ notInVision?: boolean + + /** + * User editing definitions for this piece + */ + userEditOperations?: UserEditingDefinition[] } export interface IBlueprintPieceDB extends IBlueprintPiece { diff --git a/packages/blueprints-integration/src/documents/rundown.ts b/packages/blueprints-integration/src/documents/rundown.ts index 4c5318b068..8abd22f33c 100644 --- a/packages/blueprints-integration/src/documents/rundown.ts +++ b/packages/blueprints-integration/src/documents/rundown.ts @@ -1,3 +1,4 @@ +import type { UserEditingDefinition } from '../userEditing' import type { RundownPlaylistTiming }
from './playlistTiming' /** The Rundown generated from Blueprint */ @@ -26,6 +27,11 @@ export interface IBlueprintRundown diff --git a/packages/blueprints-integration/src/index.ts b/packages/blueprints-integration/src/index.ts index 5353cc2c9e..d5196e59f7 100644 --- a/packages/blueprints-integration/src/index.ts +++ b/packages/blueprints-integration/src/index.ts @@ -6,6 +6,7 @@ export * from './content' export * from './context' export * from './documents' export * from './ingest' +export * from './ingest-types' export * from './lib' export * from './message' export * from './migrations' @@ -19,9 +20,11 @@ export * from './timeline' export * from './util' export * from './translations' export * from './triggers' +export * from './userEditing' export { MOS } from '@sofie-automation/shared-lib/dist/mos' export { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaTypes' export * from '@sofie-automation/shared-lib/dist/lib/JSONBlob' export * from '@sofie-automation/shared-lib/dist/lib/JSONSchemaUtil' +export * from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' diff --git a/packages/blueprints-integration/src/ingest-types.ts b/packages/blueprints-integration/src/ingest-types.ts new file mode 100644 index 0000000000..0e7a27aa97 --- /dev/null +++ b/packages/blueprints-integration/src/ingest-types.ts @@ -0,0 +1,47 @@ +import { IngestPart, IngestPlaylist, IngestRundown, IngestSegment } from './ingest' + +export interface SofieIngestPlaylist extends IngestPlaylist { + /** Ingest cache of rundowns in this playlist. */ + rundowns: SofieIngestRundown[] +} +export interface SofieIngestRundown + extends IngestRundown { + /** Array of segments in this rundown */ + segments: SofieIngestSegment[] + + /** + * The userEditStates is a key-value store where Blueprints can store persistent data. + * + * Examples of use cases; + * - locks from NRCS updates + * - locks from user changes + * - removedByUser flags + */ + userEditStates: Record +} +export interface SofieIngestSegment + extends IngestSegment { + /** Array of parts in this segment */ + parts: SofieIngestPart[] + + /** + * The userEditStates is a key-value store where Blueprints can store persistent data. + * + * Examples of use cases; + * - locks from NRCS updates + * - locks from user changes + * - removedByUser flags + */ + userEditStates: Record +} +export interface SofieIngestPart extends IngestPart { + /** + * The userEditStates is a key-value store where Blueprints can store persistent data. 
+export interface SofieIngestPart<TPartPayload = unknown> extends IngestPart<TPartPayload> { + /** + * The userEditStates is a key-value store where Blueprints can store persistent data. + * + * Examples of use cases: + * - locks from NRCS updates + * - locks from user changes + * - removedByUser flags + */ + userEditStates: Record<string, boolean> +} diff --git a/packages/blueprints-integration/src/ingest.ts b/packages/blueprints-integration/src/ingest.ts index eb6e8b6162..f2511241fd 100644 --- a/packages/blueprints-integration/src/ingest.ts +++ b/packages/blueprints-integration/src/ingest.ts @@ -1,5 +1,7 @@ -import { IngestRundown } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' +import { IngestPart, IngestSegment } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' import { IBlueprintRundownDBData } from './documents' +import { ReadonlyDeep } from 'type-fest' +import { SofieIngestRundown } from './ingest-types' export { IngestPart, @@ -9,7 +11,414 @@ export { IngestAdlib, } from '@sofie-automation/shared-lib/dist/peripheralDevice/ingest' -/** The IngesteRundown is extended with data from Core */ -export interface ExtendedIngestRundown extends IngestRundown { +/** The IngestRundown is extended with data from Core */ +export interface ExtendedIngestRundown<TRundownPayload = unknown, TSegmentPayload = unknown, TPartPayload = unknown> + extends SofieIngestRundown<TRundownPayload, TSegmentPayload, TPartPayload> { coreData: IBlueprintRundownDBData | undefined } + +/** + * Describes the possible ingest changes that can have been made to a part by the NRCS + */ +export enum NrcsIngestPartChangeDetails { + Inserted = 'inserted', + Deleted = 'deleted', + Updated = 'updated', +} + +/** + * Describes some of the possible ingest changes that can have been made to a segment by the NRCS + */ +export enum NrcsIngestSegmentChangeDetailsEnum { + /** + * The segment has been inserted into the rundown, or the segment has changed sufficiently to require a full regeneration + */ + InsertedOrUpdated = 'inserted-or-updated', + /** + * The segment has been removed from the rundown + */ + Deleted = 'deleted', +} + +/** + * Describes the possible ingest changes that can have been made to the rundown properties by the NRCS + */ +export enum NrcsIngestRundownChangeDetails { + /** + * The payload or name of the rundown has changed. + */ + Payload = 'payload', + + /** + * A full regeneration of the rundown and all segments is required. + * This will typically remove all user driven changes. + */ + Regenerate = 'regenerate', +} + +/** + * Describes the possible ingest changes that can have been made to the contents of a segment by the NRCS + */ +export interface NrcsIngestSegmentChangeDetailsObject { + /** + * True when the payload or name of the segment has changed. + */ + payloadChanged?: boolean + + /** + * True when the rank of any part in the segment has changed. + */ + partOrderChanged?: boolean + + /** + * Describes the changes to the parts in the rundown + */ + partChanges?: Record<string, NrcsIngestPartChangeDetails> +} + +export enum IngestChangeType { + /** Indicate that this change is from ingest operations */ + Ingest = 'ingest', + /** Indicate that this change is from user operations */ + User = 'user', +} + +/** + * Describes the possible ingest changes that can have been made to a segment by the NRCS + */ +export type NrcsIngestSegmentChangeDetails = NrcsIngestSegmentChangeDetailsEnum | NrcsIngestSegmentChangeDetailsObject
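For illustration, a change object of this shape (using the `NrcsIngestChangeDetails` interface declared just below) could express "segment-1 had its payload updated, segment-2 was deleted" — a minimal sketch:

import { IngestChangeType, NrcsIngestChangeDetails, NrcsIngestSegmentChangeDetailsEnum } from '@sofie-automation/blueprints-integration'

const change: NrcsIngestChangeDetails = {
	source: IngestChangeType.Ingest,
	segmentChanges: {
		'segment-1': { payloadChanged: true },
		'segment-2': NrcsIngestSegmentChangeDetailsEnum.Deleted,
	},
}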
+ +export interface NrcsIngestChangeDetails { + /** Indicate that this change is from ingest operations */ + source: IngestChangeType.Ingest + + /** + * True when the rank of any segment in the rundown has changed. + * Expressing what exactly has changed is non-trivial, particularly how to represent that in this structure, + * so for now we just have a simple boolean. + * If this is false, no segments have been reordered, added or removed. + */ + segmentOrderChanged?: boolean + + /** + * Describes the changes to the rundown itself + */ + rundownChanges?: NrcsIngestRundownChangeDetails + + /** + * Describes the changes to the segments in the rundown + */ + segmentChanges?: Record<string, NrcsIngestSegmentChangeDetails> + + /** + * Describes any changes to segment external ids + * This is used to ensure that content belonging to a segment gets moved between segments correctly + * Note: this is not currently defined by Sofie, but is defined by `groupPartsInRundownAndChanges` and `groupMosPartsInRundownAndChangesWithSeparator` + */ + changedSegmentExternalIds?: Record<string, string> +} + +export interface UserOperationTarget { + segmentExternalId: string | undefined + partExternalId: string | undefined + pieceExternalId: string | undefined +} + +export type DefaultUserOperations = { + id: '__sofie-move-segment' // Future: define properly + payload: Record<string, any> +} + +export interface UserOperationChange<TCustomBlueprintOperations = never> { + /** Indicate that this change is from user operations */ + source: IngestChangeType.User + + operationTarget: UserOperationTarget + operation: DefaultUserOperations | TCustomBlueprintOperations +} +/** + * The MutableIngestRundown is used to modify the contents of an IngestRundown during ingest. + * The public properties and methods are used in blueprints to selectively apply incoming changes + * or user operations to the SofieIngestRundown. + */ +export interface MutableIngestRundown<TRundownPayload = unknown, TSegmentPayload = unknown, TPartPayload = unknown> { + /** Id of the rundown as reported by the ingest gateway. Must be unique for each rundown owned by the gateway */ + readonly externalId: string + /** Name of the rundown */ + readonly name: string + + /** Something that identifies the data source. eg "spreadsheet", "mos" */ + readonly type: string + + /** Payload of rundown metadata. For use by other blueprints methods */ + readonly payload: ReadonlyDeep<TRundownPayload> | undefined + + readonly userEditStates: Record<string, boolean> + + /** Array of segments in this rundown */ + readonly segments: ReadonlyArray<MutableIngestSegment<TSegmentPayload, TPartPayload>> + + /** + * Search for a Part through the whole IngestRundown + * @param partExternalId externalId of the Part + */ + findPart(partExternalId: string): MutableIngestPart<TPartPayload> | undefined + + /** + * Search for a Part through the whole IngestRundown + * @param partExternalId externalId of the Part + * @returns The part and segment that the part belongs to + */ + findPartAndSegment(partExternalId: string): + | { + part: MutableIngestPart<TPartPayload> + segment: MutableIngestSegment<TSegmentPayload, TPartPayload> + } + | undefined + + /** + * Returns a Segment with a certain externalId + * @param segmentExternalId + */ + getSegment(segmentExternalId: string): MutableIngestSegment<TSegmentPayload, TPartPayload> | undefined + + /** + * Move a segment to a new position in the rundown + * @param segmentExternalId externalId of the Segment to move + * @param beforeSegmentExternalId externalId of the Segment to position before. If null, position at the end + */ + moveSegmentBefore(segmentExternalId: string, beforeSegmentExternalId: string | null): void + + /** + * Move a segment to a new position in the rundown + * @param segmentExternalId externalId of the Segment to move + * @param afterSegmentExternalId externalId of the Segment to position after. If null, position at the beginning + */ + moveSegmentAfter(segmentExternalId: string, afterSegmentExternalId: string | null): void
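For illustration, a minimal sketch of these methods inside a blueprint ingest hook (the segment and part ids are hypothetical):

import { MutableIngestRundown } from '@sofie-automation/blueprints-integration'

function reorderForBreakingNews(rundown: MutableIngestRundown): void {
	// Move the 'breaking-news' segment to the top of the rundown
	rundown.moveSegmentAfter('breaking-news', null)
	// Flag the segment containing 'part-42' so later NRCS updates can treat it specially
	const found = rundown.findPartAndSegment('part-42')
	if (found) found.segment.setUserEditState('locked-from-nrcs', true)
}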
+ + /** + * Replace a Segment in the Rundown with a new one. If the Segment does not already exist, it will be inserted. + * This will replace all of the Parts in the Segment as well, along with the payload and other properties of the Segment. + * @param segment the new IngestSegment to insert + * @param beforeSegmentExternalId externalId of the Segment to position before. If null, position at the end + * @returns the new MutableIngestSegment + */ + replaceSegment( + segment: Omit<IngestSegment<TSegmentPayload, TPartPayload>, 'rank'>, + beforeSegmentExternalId: string | null + ): MutableIngestSegment<TSegmentPayload, TPartPayload> + + /** + * Change the externalId of a Segment + * @param oldSegmentExternalId Id of the segment to change + * @param newSegmentExternalId New id for the segment + */ + changeSegmentExternalId( + oldSegmentExternalId: string, + newSegmentExternalId: string + ): MutableIngestSegment<TSegmentPayload, TPartPayload> + + /** + * Change the originalExternalId of a Segment + * This allows for tracking of segments that have been renamed, after a Segment has been added or replaced + * @param segmentExternalId Id of the segment to update + * @param originalSegmentExternalId Original id for the segment + */ + changeSegmentOriginalExternalId( + segmentExternalId: string, + originalSegmentExternalId: string + ): MutableIngestSegment<TSegmentPayload, TPartPayload> + + /** + * Remove a Segment from the Rundown + * @param segmentExternalId externalId of the Segment to remove + * @returns true if the segment was removed, false if it was not found + */ + removeSegment(segmentExternalId: string): boolean + + /** + * Remove all Segments from the Rundown + */ + removeAllSegments(): void + + /** + * Force the whole Rundown to be re-run through the ingest blueprints, even if there are no changes + */ + forceFullRegenerate(): void + + /** + * Set name of the Rundown + */ + setName(name: string): void + + /** + * Update the payload of the Rundown + * This will trigger the Rundown and RundownPlaylist to be updated, but not Segments + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep<TRundownPayload> | TRundownPayload): void + + /** + * Update the portion of the payload of the Rundown + * This will trigger the Rundown and RundownPlaylist to be updated, but not Segments + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty<TKey extends keyof TRundownPayload>( + key: TKey, + value: ReadonlyDeep<TRundownPayload[TKey]> | TRundownPayload[TKey] + ): void + + /** + * Set a value in the userEditState + */ + setUserEditState(key: string, value: boolean): void +} + +export interface MutableIngestSegment<TSegmentPayload = unknown, TPartPayload = unknown> { + /** Id of the segment as reported by the ingest gateway. Must be unique for each segment in the rundown */ + readonly externalId: string + /** Name of the segment */ + readonly name: string + + /** If the segment has had its externalId changed, the id before the change */ + readonly originalExternalId: string | undefined + + /** Payload of segment metadata. For use by other blueprints methods */ + readonly payload: ReadonlyDeep<TSegmentPayload> | undefined + + readonly userEditStates: Record<string, boolean> + + /** Array of parts in this segment */ + readonly parts: ReadonlyArray<MutableIngestPart<TPartPayload>> + + /** + * Get a Part from the Segment + * @param partExternalId externalId of the Part + */ + getPart(partExternalId: string): MutableIngestPart<TPartPayload> | undefined
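For illustration, with a typed rundown payload the `setPayloadProperty` method above stays fully type-checked — a minimal sketch (the payload shape is hypothetical):

import { MutableIngestRundown } from '@sofie-automation/blueprints-integration'

interface MyRundownPayload {
	title: string
}

function renameRundown(rundown: MutableIngestRundown<MyRundownPayload>): void {
	// Updates the Rundown (and RundownPlaylist), but does not regenerate Segments
	rundown.setPayloadProperty('title', 'Evening News')
}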
+ + /** + * Move a part to a new position in the segment + * @param partExternalId externalId of the Part to move + * @param beforePartExternalId externalId of the Part to position before. If null, position at the end + */ + movePartBefore(partExternalId: string, beforePartExternalId: string | null): void + + /** + * Move a part to a new position in the segment + * @param partExternalId externalId of the Part to move + * @param afterPartExternalId externalId of the Part to position after. If null, position at the beginning + */ + movePartAfter(partExternalId: string, afterPartExternalId: string | null): void + + /** + * Replace a Part in the Segment with a new one. If the Part does not already exist, it will be inserted. + * This will replace the payload and other properties of the Part. + * @param ingestPart the new IngestPart to insert + * @param beforePartExternalId externalId of the Part to position before. If null, position at the end + * @returns the new MutableIngestPart + */ + replacePart( + ingestPart: Omit<IngestPart<TPartPayload>, 'rank'>, + beforePartExternalId: string | null + ): MutableIngestPart<TPartPayload> + + /** + * Remove a Part from the Segment + * @param partExternalId externalId of the Part to remove + * @returns true if the part was removed, false if it was not found + */ + removePart(partExternalId: string): boolean + + /** + * Force this segment to be regenerated, even if there are no changes + */ + forceRegenerate(): void + + /** + * Set the name of the Segment + */ + setName(name: string): void + + /** + * Update the payload of the Segment + * This will trigger the Segment to be updated + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep<TSegmentPayload> | TSegmentPayload): void + + /** + * Update the portion of the payload of the Segment + * This will trigger the Segment to be updated + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty<TKey extends keyof TSegmentPayload>( + key: TKey, + value: ReadonlyDeep<TSegmentPayload[TKey]> | TSegmentPayload[TKey] + ): void + + setUserEditState(key: string, value: boolean): void +} + +export interface MutableIngestPart<TPartPayload = unknown> { + /** Id of the part as reported by the ingest gateway. Must be unique for each part in the rundown */ + readonly externalId: string + /** Name of the part */ + readonly name: string + + /** Payload of the part. For use by other blueprints methods */ + readonly payload: ReadonlyDeep<TPartPayload> | undefined + + readonly userEditStates: Record<string, boolean> + + /** + * Set the name of the Part + */ + setName(name: string): void + + /** + * Update the payload of the Part + * This will trigger the Segment to be updated + * @param payload the new payload + */ + replacePayload(payload: ReadonlyDeep<TPartPayload> | TPartPayload): void + + /** + * Update the portion of the payload of the Part + * This will trigger the Segment to be updated + * @param key the key of the payload to update + * @param value the new value + */ + setPayloadProperty<TKey extends keyof TPartPayload>( + key: TKey, + value: ReadonlyDeep<TPartPayload[TKey]> | TPartPayload[TKey] + ): void + + setUserEditState(key: string, value: boolean): void +} + +export type TransformPayloadFunction<T> = (payload: any, oldPayload: ReadonlyDeep<T> | undefined) => T | ReadonlyDeep<T>
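For illustration, a transform of this shape (as consumed by `IngestDefaultChangesOptions` below) might normalise a raw NRCS payload into a JS-friendly object — a minimal sketch (the payload shape is hypothetical):

import { TransformPayloadFunction } from '@sofie-automation/blueprints-integration'

interface CleanRundownPayload {
	raw: string
}

const transformRundownPayload: TransformPayloadFunction<CleanRundownPayload> = (payload, oldPayload) => {
	// Keep the previous payload when the NRCS sent nothing new
	if (payload === undefined && oldPayload) return oldPayload
	return { raw: String(payload) }
}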
+ +export interface IngestDefaultChangesOptions< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> { + /** + * A custom transform for the payload of a Rundown. + * Typically this will translate from an NRCS native structure to a JavaScript-friendly structure. + */ + transformRundownPayload: TransformPayloadFunction<TRundownPayload> + /** + * A custom transform for the payload of a Segment. + * Typically this will translate from an NRCS native structure to a JavaScript-friendly structure. + */ + transformSegmentPayload: TransformPayloadFunction<TSegmentPayload> + /** + * A custom transform for the payload of a Part. + * Typically this will translate from an NRCS native structure to a JavaScript-friendly structure. + */ + transformPartPayload: TransformPayloadFunction<TPartPayload> +} diff --git a/packages/blueprints-integration/src/triggers.ts b/packages/blueprints-integration/src/triggers.ts index 3ebaaa9561..3b7a54db85 100644 --- a/packages/blueprints-integration/src/triggers.ts +++ b/packages/blueprints-integration/src/triggers.ts @@ -194,6 +194,13 @@ export interface IRundownPlaylistActivateAdlibTestingAction extends ITriggeredActionBase { filterChain: (IRundownPlaylistFilterLink | IGUIContextFilterLink)[] } +export interface ISwitchRouteSetAction extends ITriggeredActionBase { + action: PlayoutActions.switchRouteSet + filterChain: (IRundownPlaylistFilterLink | IGUIContextFilterLink)[] + routeSetId: string + state: boolean | 'toggle' +} + export interface ITakeAction extends ITriggeredActionBase { action: PlayoutActions.take filterChain: (IRundownPlaylistFilterLink | IGUIContextFilterLink)[] @@ -316,6 +323,7 @@ export type SomeAction = | IShowEntireCurrentSegmentAction | IMiniShelfQueueAdLib | IModifyShiftRegister + | ISwitchRouteSetAction export interface IBlueprintTriggeredActions { _id: string diff --git a/packages/blueprints-integration/src/userEditing.ts b/packages/blueprints-integration/src/userEditing.ts new file mode 100644 index 0000000000..7d2c2718ea --- /dev/null +++ b/packages/blueprints-integration/src/userEditing.ts @@ -0,0 +1,45 @@ +import type { JSONBlob } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' +import type { ITranslatableMessage } from './translations' +import type { JSONSchema } from '@sofie-automation/shared-lib/dist/lib/JSONSchemaTypes' + +/** + * Description of a user-performed editing operation allowed on a document + */ +export type UserEditingDefinition = UserEditingDefinitionAction | UserEditingDefinitionForm + +/** + * A simple 'action' that can be performed + */ +export interface UserEditingDefinitionAction { + type: UserEditingType.ACTION + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** Icon to show when this action is 'active' */ + svgIcon?: string + /** Whether this action should be indicated as being active */ + isActive?: boolean +} + +/** + * A simple form based operation + */ +export interface UserEditingDefinitionForm { + type: UserEditingType.FORM + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** The json schema describing the form to display */ + schema: JSONBlob<JSONSchema> + /** Current values to populate the form with */ + currentValues: Record<string, any> +} + +export enum UserEditingType { + /** Action */ + ACTION = 'action', + /** Form of selections */ + FORM = 'form', +} diff --git a/packages/corelib/src/dataModel/Collections.ts b/packages/corelib/src/dataModel/Collections.ts index 7105b14aa9..670bdfcd44 100644 --- a/packages/corelib/src/dataModel/Collections.ts +++ b/packages/corelib/src/dataModel/Collections.ts @@ -13,7 +13,8 @@ export enum CollectionName { ExpectedPackageWorkStatuses = 'expectedPackageWorkStatuses', ExpectedPlayoutItems = 'expectedPlayoutItems', ExternalMessageQueue = 'externalMessageQueue', - IngestDataCache = 'ingestDataCache', + NrcsIngestDataCache = 'ingestDataCache', // Future: this could be renamed to nrcsIngestDataCache + SofieIngestDataCache = 'sofieIngestDataCache', MediaObjects =
'mediaObjects', MediaWorkFlows = 'mediaWorkFlows', MediaWorkFlowSteps = 'mediaWorkFlowSteps', diff --git a/packages/corelib/src/dataModel/Ids.ts b/packages/corelib/src/dataModel/Ids.ts index 840efbe33e..1e4e544cca 100644 --- a/packages/corelib/src/dataModel/Ids.ts +++ b/packages/corelib/src/dataModel/Ids.ts @@ -35,8 +35,11 @@ export type ExpectedPlayoutItemId = ProtectedString<'ExpectedPlayoutItemId'> /** A string, identifying a ExternalMessageQueueObj */ export type ExternalMessageQueueObjId = ProtectedString<'ExternalMessageQueueObjId'> -/** A string, identifying a IngestDataCacheObj */ -export type IngestDataCacheObjId = ProtectedString<'IngestDataCacheObjId'> +/** A string, identifying a NrcsIngestDataCacheObj */ +export type NrcsIngestDataCacheObjId = ProtectedString<'NrcsIngestDataCacheObjId'> + +/** A string, identifying a SofieIngestDataCacheObj */ +export type SofieIngestDataCacheObjId = ProtectedString<'SofieIngestDataCacheObjId'> /** A string, identifying a Organization */ export type OrganizationId = ProtectedString<'OrganizationId'> diff --git a/packages/corelib/src/dataModel/IngestDataCache.ts b/packages/corelib/src/dataModel/IngestDataCache.ts deleted file mode 100644 index e19bc00d1e..0000000000 --- a/packages/corelib/src/dataModel/IngestDataCache.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' -import { IngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' - -export enum IngestCacheType { - RUNDOWN = 'rundown', - SEGMENT = 'segment', - PART = 'part', -} -export type IngestCacheData = IngestRundown | IngestSegment | IngestPart - -export interface IngestDataCacheObjBase { - _id: IngestDataCacheObjId - modified: number - type: IngestCacheType - - /** Id of the Rundown */ - rundownId: RundownId - segmentId?: SegmentId - partId?: PartId - - data: IngestCacheData -} - -export interface IngestDataCacheObjRundown extends IngestDataCacheObjBase { - type: IngestCacheType.RUNDOWN - rundownId: RundownId - data: IngestRundown -} -export interface IngestDataCacheObjSegment extends IngestDataCacheObjBase { - type: IngestCacheType.SEGMENT - rundownId: RundownId - segmentId: SegmentId - - data: IngestSegment -} -export interface IngestDataCacheObjPart extends IngestDataCacheObjBase { - type: IngestCacheType.PART - rundownId: RundownId - segmentId: SegmentId - partId: PartId - data: IngestPart -} -export type IngestDataCacheObj = IngestDataCacheObjRundown | IngestDataCacheObjSegment | IngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/Notes.ts b/packages/corelib/src/dataModel/Notes.ts index 280a421a19..7d097323b2 100644 --- a/packages/corelib/src/dataModel/Notes.ts +++ b/packages/corelib/src/dataModel/Notes.ts @@ -24,6 +24,11 @@ export interface GenericNote extends INoteBase { name: string } } +export interface RundownPlaylistNote extends INoteBase { + origin: { + name: string + } +} export interface RundownNote extends INoteBase { origin: { name: string diff --git a/packages/corelib/src/dataModel/NrcsIngestDataCache.ts b/packages/corelib/src/dataModel/NrcsIngestDataCache.ts new file mode 100644 index 0000000000..25f3bce1d6 --- /dev/null +++ b/packages/corelib/src/dataModel/NrcsIngestDataCache.ts @@ -0,0 +1,58 @@ +import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' +import { NrcsIngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' +import { RundownSource } from './Rundown' + +/* + The NRCSIngestDataCache collection is 
used to store raw data that comes from an NRCS. + See also ./SofieIngestDataCache.ts for where the ingested data is stored after being processed/modified by Sofie. +*/ + +export enum NrcsIngestCacheType { + RUNDOWN = 'rundown', + SEGMENT = 'segment', + PART = 'part', +} +export type IngestCacheData = IngestRundown | IngestSegment | IngestPart + +export interface IngestRundownWithSource<TRundownPayload = unknown, TSegmentPayload = unknown, TPartPayload = unknown> + extends IngestRundown<TRundownPayload, TSegmentPayload, TPartPayload> { + rundownSource: RundownSource +} + +interface IngestDataCacheObjBase { + _id: NrcsIngestDataCacheObjId + modified: number + type: NrcsIngestCacheType + + /** Id of the Rundown */ + rundownId: RundownId + segmentId?: SegmentId + partId?: PartId + + data: IngestCacheData +} + +export interface NrcsIngestDataCacheObjRundown extends IngestDataCacheObjBase { + type: NrcsIngestCacheType.RUNDOWN + rundownId: RundownId + data: IngestRundownWithSource +} +export interface NrcsIngestDataCacheObjSegment extends IngestDataCacheObjBase { + type: NrcsIngestCacheType.SEGMENT + rundownId: RundownId + segmentId: SegmentId + + data: IngestSegment +} +export interface NrcsIngestDataCacheObjPart extends IngestDataCacheObjBase { + type: NrcsIngestCacheType.PART + rundownId: RundownId + segmentId: SegmentId + partId: PartId + data: IngestPart +} +export type NrcsIngestDataCacheObj = + | NrcsIngestDataCacheObjRundown + | NrcsIngestDataCacheObjSegment + | NrcsIngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/Part.ts b/packages/corelib/src/dataModel/Part.ts index 5792e02342..5194cb98b8 100644 --- a/packages/corelib/src/dataModel/Part.ts +++ b/packages/corelib/src/dataModel/Part.ts @@ -3,6 +3,7 @@ import { ITranslatableMessage } from '../TranslatableMessage' import { PartId, RundownId, SegmentId } from './Ids' import { PartNote } from './Notes' import { ReadonlyDeep } from 'type-fest' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export interface PartInvalidReason { message: ITranslatableMessage @@ -11,7 +12,7 @@ export interface PartInvalidReason { } /** A "Line" in NRK Lingo. */ -export interface DBPart extends IBlueprintPart { +export interface DBPart extends Omit<IBlueprintPart, 'userEditOperations'> { _id: PartId /** * Position inside the segment @@ -35,6 +36,11 @@ export interface DBPart extends IBlueprintPart { /** A modified expectedDuration with the piece/transition derived timings factored in */ expectedDurationWithTransition: number | undefined + + /** + * User editing definitions for this part + */ + userEditOperations?: CoreUserEditingDefinition[] } export function isPartPlayable(part: Pick<DBPart, 'invalid' | 'floated'>): boolean {
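For illustration, since `isPartPlayable` only inspects the `invalid` and `floated` flags, it can be called with a narrowed object — a minimal sketch (the import path assumes the usual corelib `dist` layout):

import { isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part'

// true: the part is neither marked invalid nor floated
const playable = isPartPlayable({ invalid: false, floated: false })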
diff --git a/packages/corelib/src/dataModel/Piece.ts b/packages/corelib/src/dataModel/Piece.ts index a340c45fff..88d8e95865 100644 --- a/packages/corelib/src/dataModel/Piece.ts +++ b/packages/corelib/src/dataModel/Piece.ts @@ -7,6 +7,7 @@ import { } from '@sofie-automation/blueprints-integration' import { ProtectedString, protectString, unprotectString } from '../protectedString' import { PieceId, RundownId, SegmentId, PartId } from './Ids' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' /** A generic list of playback availability statuses for a Piece */ export enum PieceStatusCode { @@ -49,8 +50,7 @@ export interface PieceGeneric extends Omit<IBlueprintPieceGeneric, 'content'> { /** Stringified timelineObjects */ timelineObjectsString: PieceTimelineObjectsBlob } - -export interface Piece extends PieceGeneric, Omit<IBlueprintPieceDB, '_id' | 'continuesRefId'> { +export interface Piece extends PieceGeneric, Omit<IBlueprintPieceDB, '_id' | 'continuesRefId' | 'userEditOperations'> { /** * This is the id of the rundown this piece starts playing in. * Currently this is the only rundown the piece could be playing in @@ -72,6 +72,11 @@ export interface Piece extends PieceGeneric, Omit diff --git a/packages/corelib/src/dataModel/Rundown.ts b/packages/corelib/src/dataModel/Rundown.ts index 1907fb8725..a4fd75f355 100644 --- a/packages/corelib/src/dataModel/Rundown.ts +++ b/packages/corelib/src/dataModel/Rundown.ts @@ -10,6 +10,7 @@ import { } from './Ids' import { RundownNote } from './Notes' import { ReadonlyDeep } from 'type-fest' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export enum RundownOrphanedReason { /** Rundown is deleted from the source but we still need it */ @@ -85,6 +86,10 @@ export interface Rundown { playlistId: RundownPlaylistId /** If the playlistId has been set manually by a user in Sofie */ playlistIdIsSetInSofie?: boolean + /** + * User editing definitions for this rundown + */ + userEditOperations?: CoreUserEditingDefinition[] } /** A description of where a Rundown originated from */ diff --git a/packages/corelib/src/dataModel/RundownPlaylist.ts b/packages/corelib/src/dataModel/RundownPlaylist.ts index 99500fc19f..241e0c3895 100644 --- a/packages/corelib/src/dataModel/RundownPlaylist.ts +++ b/packages/corelib/src/dataModel/RundownPlaylist.ts @@ -10,6 +10,7 @@ import { StudioId, RundownId, } from './Ids' +import { RundownPlaylistNote } from './Notes' /** Details of an ab-session requested by the blueprints in onTimelineGenerate */ export interface ABSessionInfo { @@ -152,6 +153,9 @@ export interface DBRundownPlaylist { */ queuedSegmentId?: SegmentId + /** Holds notes (warnings / errors) thrown by the blueprints during creation */ + notes?: Array<RundownPlaylistNote> + quickLoop?: QuickLoopProps /** Actual time of playback starting */ @@ -160,7 +164,10 @@ export interface DBRundownPlaylist { lastIncorrectPartPlaybackReported?: Time /** Actual time of each rundown starting playback */ rundownsStartedPlayback?: Record<string, Time> - /** Actual time of SOME segments starting playback - usually just the previous and current one */ + /** + * Actual time of SOME segments starting playback - usually just the previous and current one + * This is not using SegmentId, but SegmentPlayoutId + */ segmentsStartedPlayback?: Record<string, Time> /** Time of the last take */ lastTakeTime?: Time diff --git a/packages/corelib/src/dataModel/Segment.ts b/packages/corelib/src/dataModel/Segment.ts index 76996af431..f755b57efb 100644 --- a/packages/corelib/src/dataModel/Segment.ts +++ b/packages/corelib/src/dataModel/Segment.ts @@ -1,11 +1,12 @@ import { SegmentDisplayMode, SegmentTimingInfo } from '@sofie-automation/blueprints-integration' import { SegmentId, RundownId } from './Ids' import { SegmentNote } from './Notes' +import { CoreUserEditingDefinition } from './UserEditingDefinitions' export enum SegmentOrphanedReason { /** Segment is deleted from the NRCS but we still need it */ DELETED = 'deleted', - /** Segment should be hidden, but it is still playing */ + /** Blueprints want the Segment to be hidden, but it is still playing so it must not be hidden right now.
*/ HIDDEN = 'hidden', /** Segment is owned by playout, and is for AdlibTesting in its rundown */ ADLIB_TESTING = 'adlib-testing', @@ -18,8 +19,6 @@ export interface DBSegment { _rank: number /** ID of the source object in the gateway */ externalId: string - /** Timestamp when the externalData was last modified */ - externalModified: number /** The rundown this segment belongs to */ rundownId: RundownId @@ -47,4 +46,9 @@ export interface DBSegment { /** Holds notes (warnings / errors) thrown by the blueprints during creation */ notes?: Array + + /** + * User editing definitions for this segment + */ + userEditOperations?: CoreUserEditingDefinition[] } diff --git a/packages/corelib/src/dataModel/SofieIngestDataCache.ts b/packages/corelib/src/dataModel/SofieIngestDataCache.ts new file mode 100644 index 0000000000..eb41e03838 --- /dev/null +++ b/packages/corelib/src/dataModel/SofieIngestDataCache.ts @@ -0,0 +1,62 @@ +import { SofieIngestRundown, SofieIngestSegment, SofieIngestPart } from '@sofie-automation/blueprints-integration' +import { SofieIngestDataCacheObjId, RundownId, SegmentId, PartId } from './Ids' +import { RundownSource } from './Rundown' + +/* + The SofieIngestDataCache collection is used to store data that comes from an NRCS and has been modified by Sofie. + See also ./NrcsIngestDataCache.ts for the raw data from the NRCS. +*/ + +export enum SofieIngestCacheType { + RUNDOWN = 'rundown', + SEGMENT = 'segment', + PART = 'part', +} +export type SofieIngestCacheData = SofieIngestRundown | SofieIngestSegment | SofieIngestPart + +export interface SofieIngestRundownWithSource< + TRundownPayload = unknown, + TSegmentPayload = unknown, + TPartPayload = unknown +> extends SofieIngestRundown { + rundownSource: RundownSource +} + +interface SofieIngestDataCacheBase { + _id: SofieIngestDataCacheObjId + modified: number + type: SofieIngestCacheType + + /** Id of the Rundown */ + rundownId: RundownId + segmentId?: SegmentId + partId?: PartId + + data: SofieIngestCacheData +} + +export interface SofieIngestDataCacheObjRundown extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.RUNDOWN + rundownId: RundownId + data: SofieIngestRundownWithSource +} + +export interface SofieIngestDataCacheObjSegment extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.SEGMENT + rundownId: RundownId + segmentId: SegmentId + data: SofieIngestSegment +} + +export interface SofieIngestDataCacheObjPart extends SofieIngestDataCacheBase { + type: SofieIngestCacheType.PART + rundownId: RundownId + segmentId: SegmentId + partId: PartId + data: SofieIngestPart +} + +export type SofieIngestDataCacheObj = + | SofieIngestDataCacheObjRundown + | SofieIngestDataCacheObjSegment + | SofieIngestDataCacheObjPart diff --git a/packages/corelib/src/dataModel/Studio.ts b/packages/corelib/src/dataModel/Studio.ts index e0060d6396..ba6d7233d2 100644 --- a/packages/corelib/src/dataModel/Studio.ts +++ b/packages/corelib/src/dataModel/Studio.ts @@ -1,12 +1,37 @@ -import { BlueprintMapping, IBlueprintConfig, PackageContainer, TSR } from '@sofie-automation/blueprints-integration' +import { IBlueprintConfig, TSR } from '@sofie-automation/blueprints-integration' import { ObjectWithOverrides } from '../settings/objectWithOverrides' import { StudioId, OrganizationId, BlueprintId, ShowStyleBaseId, MappingsHash, PeripheralDeviceId } from './Ids' import { BlueprintHash, LastBlueprintConfig } from './Blueprint' import { MappingsExt, MappingExt } from '@sofie-automation/shared-lib/dist/core/model/Timeline' import { 
ForceQuickLoopAutoNext } from './RundownPlaylist' +import { + ResultingMappingRoute, + RouteMapping, + StudioRouteBehavior, + ResultingMappingRoutes, + StudioRouteSet, + StudioRouteSetExclusivityGroup, + StudioRouteType, + StudioAbPlayerDisabling, +} from '@sofie-automation/shared-lib/dist/core/model/StudioRouteSet' +import { StudioPackageContainer } from '@sofie-automation/shared-lib/dist/core/model/PackageContainer' export { MappingsExt, MappingExt, MappingsHash } +// RouteSet functions have been moved to shared-lib: +// So we need to re-export them here: +export { + StudioRouteSetExclusivityGroup, + ResultingMappingRoute, + RouteMapping, + StudioRouteBehavior, + ResultingMappingRoutes, + StudioRouteSet, + StudioRouteType, + StudioAbPlayerDisabling, + StudioPackageContainer, +} + export interface IStudioSettings { /** The framerate (frames per second) used to convert internal timing information (in milliseconds) * into timecodes and timecode-like strings and interpret timecode user input @@ -63,6 +88,24 @@ export interface IStudioSettings { * Default: 3000 */ fallbackPartDuration?: number + + /** + * Whether to allow hold operations for Rundowns in this Studio + * When disabled, any action-triggers that would normally trigger a hold operation will be silently ignored + * This should only block entering hold, to ensure Sofie doesn't get stuck if it somehow gets into hold + */ + allowHold: boolean + + /** + * Whether to allow direct playing of a piece in the rundown + * This behaviour is usually triggered by double-clicking on a piece in the GUI + */ + allowPieceDirectPlay: boolean + + /** + * Enable buckets - the default behavior is to have buckets. + */ + enableBuckets: boolean } export type StudioLight = Omit @@ -101,13 +144,13 @@ export interface DBStudio { _rundownVersionHash: string - routeSets: Record<string, StudioRouteSet> - routeSetExclusivityGroups: Record<string, StudioRouteSetExclusivityGroup> + routeSetsWithOverrides: ObjectWithOverrides<Record<string, StudioRouteSet>> + routeSetExclusivityGroupsWithOverrides: ObjectWithOverrides<Record<string, StudioRouteSetExclusivityGroup>> /** Contains settings for which Package Containers are present in the studio. * (These are used by the Package Manager and the Expected Packages) */ - packageContainers: Record<string, StudioPackageContainer> + packageContainersWithOverrides: ObjectWithOverrides<Record<string, StudioPackageContainer>> /** Which package containers is used for media previews in GUI */ previewContainerIds: string[] @@ -161,58 +204,3 @@ export interface StudioPlayoutDevice { options: TSR.DeviceOptionsAny } - -export interface StudioPackageContainer { - /** List of which peripheraldevices uses this packageContainer */ - deviceIds: string[] - container: PackageContainer -} -export interface StudioRouteSetExclusivityGroup { - name: string -} - -export interface StudioRouteSet { - /** User-presentable name */ - name: string - /** Whether this group is active or not */ - active: boolean - /** Default state of this group */ - defaultActive?: boolean - /** Only one Route can be active at the same time in the exclusivity-group */ - exclusivityGroup?: string - /** If true, should be displayed and toggleable by user */ - behavior: StudioRouteBehavior - - routes: RouteMapping[] -} -export enum StudioRouteBehavior { - HIDDEN = 0, - TOGGLE = 1, - ACTIVATE_ONLY = 2, -} - -export enum StudioRouteType { - /** Default */ - REROUTE = 0, - /** Replace all properties with a new mapping */ - REMAP = 1, -} - -export interface RouteMapping extends ResultingMappingRoute { - /** Which original layer to route.
If false, a "new" layer will be inserted during routing */ - mappedLayer: string | undefined -} -export interface ResultingMappingRoutes { - /** Routes that route existing layers */ - existing: { - [mappedLayer: string]: ResultingMappingRoute[] - } - /** Routes that create new layers, from nothing */ - inserted: ResultingMappingRoute[] -} -export interface ResultingMappingRoute { - outputMappedLayer: string - deviceType?: TSR.DeviceType - remapping?: Partial - routeType: StudioRouteType -} diff --git a/packages/corelib/src/dataModel/UserEditingDefinitions.ts b/packages/corelib/src/dataModel/UserEditingDefinitions.ts new file mode 100644 index 0000000000..4930fbfbda --- /dev/null +++ b/packages/corelib/src/dataModel/UserEditingDefinitions.ts @@ -0,0 +1,30 @@ +import type { UserEditingType, JSONBlob, JSONSchema } from '@sofie-automation/blueprints-integration' +import type { ITranslatableMessage } from '../TranslatableMessage' + +export type CoreUserEditingDefinition = CoreUserEditingDefinitionAction | CoreUserEditingDefinitionForm + +export interface CoreUserEditingDefinitionAction { + type: UserEditingType.ACTION + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** Icon to show to when this action is 'active' */ + svgIcon?: string + /** Whether this action should be indicated as being active */ + isActive?: boolean +} + +export interface CoreUserEditingDefinitionForm { + type: UserEditingType.FORM + /** Id of this operation */ + id: string + /** Label to show to the user for this operation */ + label: ITranslatableMessage + /** The json schema describing the form to display */ + schema: JSONBlob + /** Current values to populate the form with */ + currentValues: Record + /** Translation namespaces to use when rendering this form */ + translationNamespaces: string[] +} diff --git a/packages/corelib/src/lib.ts b/packages/corelib/src/lib.ts index c32833abe0..399db4fead 100644 --- a/packages/corelib/src/lib.ts +++ b/packages/corelib/src/lib.ts @@ -7,6 +7,7 @@ import { Timecode } from 'timecode' import { iterateDeeply, iterateDeeplyEnum, Time } from '@sofie-automation/blueprints-integration' import { IStudioSettings } from './dataModel/Studio' import { customAlphabet as createNanoid } from 'nanoid' +import type { ITranslatableMessage } from './TranslatableMessage' /** * Limited character set to use for id generation @@ -455,3 +456,16 @@ export function stringifyObjects(objs: unknown): string { return objs + '' } } + +/** Generate the translation for a string, to be applied later when it gets rendered */ +export function generateTranslation( + key: string, + args?: { [k: string]: any }, + namespaces?: string[] +): ITranslatableMessage { + return { + key, + args, + namespaces, + } +} diff --git a/packages/corelib/src/overrideOpHelper.ts b/packages/corelib/src/overrideOpHelper.ts new file mode 100644 index 0000000000..fd04bc3814 --- /dev/null +++ b/packages/corelib/src/overrideOpHelper.ts @@ -0,0 +1,328 @@ +import { clone, literal, objectPathSet } from './lib' +import { + SomeObjectOverrideOp, + ObjectWithOverrides, + ObjectOverrideDeleteOp, + ObjectOverrideSetOp, + applyAndValidateOverrides, + filterOverrideOpsForPrefix, + findParentOpToUpdate, +} from './settings/objectWithOverrides' +import { ReadonlyDeep } from 'type-fest' + +export interface WrappedOverridableItemDeleted { + type: 'deleted' + id: string + computed: undefined + defaults: ReadonlyDeep + overrideOps: ReadonlyDeep +} +export interface 
WrappedOverridableItemNormal { + type: 'normal' + id: string + computed: T + defaults: ReadonlyDeep | undefined + overrideOps: ReadonlyDeep +} + +export type WrappedOverridableItem = + | WrappedOverridableItemDeleted + | WrappedOverridableItemNormal + +/** + * Compile a sorted array of all the items currently in the ObjectWithOverrides, and those that have been deleted + * @param rawObject The ObjectWithOverrides to look at + * @param comparitor Comparitor for sorting the items + * @returns Sorted items, with sorted deleted items at the end + */ +export function getAllCurrentAndDeletedItemsFromOverrides( + rawObject: ReadonlyDeep>>, + comparitor: + | ((a: [id: string, obj: T | ReadonlyDeep], b: [id: string, obj: T | ReadonlyDeep]) => number) + | null +): WrappedOverridableItem[] { + // Sort and wrap in the return type + const sortedItems = getAllCurrentItemsFromOverrides(rawObject, comparitor) + + const removedOutputLayers: WrappedOverridableItemDeleted[] = [] + + // Find the items which have been deleted with an override + const computedOutputLayerIds = new Set(sortedItems.map((l) => l.id)) + for (const [id, output] of Object.entries>(rawObject.defaults)) { + if (!computedOutputLayerIds.has(id) && output) { + removedOutputLayers.push( + literal>({ + type: 'deleted', + id: id, + computed: undefined, + defaults: output, + overrideOps: filterOverrideOpsForPrefix(rawObject.overrides, id).opsForPrefix, + }) + ) + } + } + + if (comparitor) removedOutputLayers.sort((a, b) => comparitor([a.id, a.defaults], [b.id, b.defaults])) + + return [...sortedItems, ...removedOutputLayers] +} + +/** + * Compile a sorted array of all the items currently active in the ObjectWithOverrides + * @param rawObject The ObjectWithOverrides to look at + * @param comparitor Comparitor for sorting the items + * @returns Sorted items + */ +export function getAllCurrentItemsFromOverrides( + rawObject: ReadonlyDeep>>, + comparitor: + | ((a: [id: string, obj: T | ReadonlyDeep], b: [id: string, obj: T | ReadonlyDeep]) => number) + | null +): WrappedOverridableItemNormal[] { + const resolvedObject = applyAndValidateOverrides(rawObject).obj + + // Convert the items into an array + const validItems: Array<[id: string, obj: T]> = [] + for (const [id, obj] of Object.entries(resolvedObject)) { + if (obj) validItems.push([id, obj]) + } + + if (comparitor) validItems.sort((a, b) => comparitor(a, b)) + + // Sort and wrap in the return type + const sortedItems = validItems.map(([id, obj]) => + literal>({ + type: 'normal', + id: id, + computed: obj, + defaults: rawObject.defaults[id], + overrideOps: filterOverrideOpsForPrefix(rawObject.overrides, id).opsForPrefix, + }) + ) + + return sortedItems +} + +type SaveOverridesFunction = (newOps: SomeObjectOverrideOp[]) => void + +export type OverrideOpHelperForItemContents = () => OverrideOpHelperForItemContentsBatcher + +export interface OverrideOpHelperForItemContentsBatcher { + /** + * Clear all of the overrides for an value inside of an item + * This acts as a reset of property of its child properties + * Has no effect if there are no `overrideOps` on the `WrappedOverridableItemNormal` + */ + clearItemOverrides(itemId: string, subPath: string): this + + /** + * Set the value of a property of an item. 
+ * Note: the id cannot be changed in this way + */ + setItemValue(itemId: string, subPath: string, value: unknown): this + + /** + * Finish the batch operation + */ + commit(): void +} + +export interface OverrideOpHelperBatcher extends OverrideOpHelperForItemContentsBatcher { + /** + * Clear all of the overrides for an item + * This acts as a reset to defaults or undelete + * Has no effect if there are no `overrideOps` on the `WrappedOverridableItemNormal` + */ + resetItem(itemId: string): this + + /** + * Delete an item from the object + */ + deleteItem(itemId: string): this + + /** + * Change the id of an item. + * This is only possible for ones which were created by an override, and does not exist in the defaults + * Only possible when the item being renamed does not exist in the defaults + */ + changeItemId(oldItemId: string, newItemId: string): this + + /** + * Replace a whole item with a new object + * Note: the id cannot be changed in this way + */ + replaceItem(itemId: string, value: any): this + + /** + * Finish the batch operation + */ + commit(): void +} + +export type OverrideOpHelper = () => OverrideOpHelperBatcher + +export class OverrideOpHelperImpl implements OverrideOpHelperBatcher { + readonly #saveOverrides: SaveOverridesFunction | null + readonly #object: ObjectWithOverrides + + constructor( + saveOverrides: SaveOverridesFunction | null, + object: ObjectWithOverrides | ReadonlyDeep> + ) { + this.#saveOverrides = saveOverrides + this.#object = { defaults: object.defaults, overrides: [...object.overrides] } + } + + clearItemOverrides = (itemId: string, subPath: string): this => { + const opPath = `${itemId}.${subPath}` + + const newOps = filterOverrideOpsForPrefix(this.#object.overrides, opPath).otherOps + + this.#object.overrides = newOps + + return this + } + + resetItem = (itemId: string): this => { + const newOps = filterOverrideOpsForPrefix(this.#object.overrides, itemId).otherOps + + this.#object.overrides = newOps + + return this + } + + deleteItem = (itemId: string): this => { + const newOps = filterOverrideOpsForPrefix(this.#object.overrides, itemId).otherOps + if (this.#object.defaults[itemId]) { + // If it was from the defaults, we need to mark it deleted + newOps.push( + literal({ + op: 'delete', + path: itemId, + }) + ) + } + + this.#object.overrides = newOps + + return this + } + + changeItemId = (oldItemId: string, newItemId: string): this => { + const { otherOps: newOps, opsForPrefix: opsForId } = filterOverrideOpsForPrefix( + this.#object.overrides, + oldItemId + ) + + if (!newItemId || newOps.find((op) => op.path === newItemId) || this.#object.defaults[newItemId]) { + throw new Error('Id is invalid or already in use') + } + + if (this.#object.defaults[oldItemId]) { + // Future: should we be able to handle this? 
+ throw new Error("Can't change id of object with defaults") + } else { + // Change the id prefix of the ops + for (const op of opsForId) { + const newPath = `${newItemId}${op.path.substring(oldItemId.length)}` + + const newOp = { + ...op, + path: newPath, + } + newOps.push(newOp) + + if (newOp.path === newItemId && newOp.op === 'set') { + newOp.value._id = newItemId + } + } + + this.#object.overrides = newOps + + return this + } + } + + setItemValue = (itemId: string, subPath: string, value: unknown): this => { + if (subPath === '_id') { + throw new Error('Item id cannot be changed through this helper') + } else { + // Set a property + const { otherOps: newOps, opsForPrefix: opsForId } = filterOverrideOpsForPrefix( + this.#object.overrides, + itemId + ) + + const setRootOp = opsForId.find((op) => op.path === itemId) + if (setRootOp && setRootOp.op === 'set') { + // This is as its base an override, so modify that instead + const newOp = clone(setRootOp) + + objectPathSet(newOp.value, subPath, value) + + newOps.push(newOp) + } else { + // Look for a op which encompasses this new value + const parentOp = findParentOpToUpdate(opsForId, subPath) + if (parentOp) { + // Found an op at a higher level that can be modified instead + objectPathSet(parentOp.op.value, parentOp.newSubPath, value) + } else { + // Insert new op + const newOp = literal({ + op: 'set', + path: `${itemId}.${subPath}`, + value: value, + }) + + const newOpAsPrefix = `${newOp.path}.` + + // Preserve any other overrides + for (const op of opsForId) { + if (op.path === newOp.path || op.path.startsWith(newOpAsPrefix)) { + // ignore, as op has been replaced by the one at a higher path + } else { + // Retain unrelated op + newOps.push(op) + } + } + // Add the new override + newOps.push(newOp) + } + } + + this.#object.overrides = newOps + + return this + } + } + + replaceItem = (itemId: string, value: unknown): this => { + // Set a property + const { otherOps: newOps } = filterOverrideOpsForPrefix(this.#object.overrides, itemId) + + // TODO - is this too naive? 
+ + newOps.push( + literal<ObjectOverrideSetOp>({ + op: 'set', + path: `${itemId}`, + value: value, + }) + ) + + this.#object.overrides = newOps + + return this + } + + commit = (): void => { + if (!this.#saveOverrides) throw new Error('Cannot commit changes without a save function') + + this.#saveOverrides(this.#object.overrides) + } + + getPendingOps = (): SomeObjectOverrideOp[] => { + return this.#object.overrides + } +} diff --git a/packages/corelib/src/playout/playlist.ts b/packages/corelib/src/playout/playlist.ts index 8c7368cfbf..7a9cced811 100644 --- a/packages/corelib/src/playout/playlist.ts +++ b/packages/corelib/src/playout/playlist.ts @@ -1,3 +1,4 @@ +import { DBRundown } from '../dataModel/Rundown' import { DBSegment } from '../dataModel/Segment' import { DBPart } from '../dataModel/Part' import { DBPartInstance } from '../dataModel/PartInstance' @@ -105,3 +106,22 @@ export function compareMarkerPositions(a: MarkerPosition, b: MarkerPosition): nu if (a.partRank < b.partRank) return 1 return 0 } + +export function sortRundownsWithinPlaylist( + sortedPossibleIds: ReadonlyDeep<RundownId[]>, + unsortedRundowns: ReadonlyDeep<DBRundown[]> +): ReadonlyDeep<DBRundown[]> { + return unsortedRundowns.slice().sort((a, b) => { + const indexA = sortedPossibleIds.indexOf(a._id) + const indexB = sortedPossibleIds.indexOf(b._id) + if (indexA === -1 && indexB === -1) { + return a._id.toString().localeCompare(b._id.toString()) + } else if (indexA === -1) { + return 1 + } else if (indexB === -1) { + return -1 + } + + return indexA - indexB + }) +}
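For illustration, a minimal sketch of the new sort helper — rundowns whose ids are missing from the ordered list sort to the end, by id (the inputs are assumed here):

import { ReadonlyDeep } from 'type-fest'
import { RundownId } from '../dataModel/Ids'
import { DBRundown } from '../dataModel/Rundown'

declare const orderedIds: ReadonlyDeep<RundownId[]> // e.g. the playlist's rundown order
declare const rundowns: ReadonlyDeep<DBRundown[]>
const sorted = sortRundownsWithinPlaylist(orderedIds, rundowns)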
diff --git a/packages/corelib/src/playout/timings.ts b/packages/corelib/src/playout/timings.ts index e79e60fde9..77209d90e7 100644 --- a/packages/corelib/src/playout/timings.ts +++ b/packages/corelib/src/playout/timings.ts @@ -158,8 +158,8 @@ export function getPartTimingsOrDefaults( } function calculateExpectedDurationWithTransition(rawDuration: number, timings: PartCalculatedTimings): number { - // toPartDelay needs to be subtracted, because it is added to `fromPartRemaining` when the `fromPartRemaining` value is calculated. - return Math.max(0, rawDuration - (timings.fromPartRemaining - timings.toPartDelay)) + // toPartDelay and fromPartPostroll need to be subtracted, because they are added to `fromPartRemaining` when the `fromPartRemaining` value is calculated. + return Math.max(0, rawDuration - (timings.fromPartRemaining - timings.toPartDelay - timings.fromPartPostroll)) } export type CalculateExpectedDurationPart = Pick diff --git a/packages/corelib/src/pubsub.ts b/packages/corelib/src/pubsub.ts index 9f4cbebc39..a8436a1403 100644 --- a/packages/corelib/src/pubsub.ts +++ b/packages/corelib/src/pubsub.ts @@ -12,7 +12,7 @@ import { DBSegment } from './dataModel/Segment' import { DBShowStyleBase } from './dataModel/ShowStyleBase' import { DBShowStyleVariant } from './dataModel/ShowStyleVariant' import { DBStudio } from './dataModel/Studio' -import { IngestDataCacheObj } from './dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from './dataModel/NrcsIngestDataCache' import { DBTimelineDatastoreEntry } from '@sofie-automation/shared-lib/dist/core/model/TimelineDatastore' import { Blueprint } from './dataModel/Blueprint' import { BucketAdLibAction } from './dataModel/BucketAdLibAction' @@ -211,9 +211,9 @@ export interface CorelibPubSubTypes { token?: string ) => CollectionName.RundownBaselineAdLibActions [CorelibPubSub.ingestDataCache]: ( - selector: MongoQuery<IngestDataCacheObj>, + selector: MongoQuery<NrcsIngestDataCacheObj>, token?: string - ) => CollectionName.IngestDataCache + ) => CollectionName.NrcsIngestDataCache [CorelibPubSub.rundownPlaylists]: ( /** RundownPlaylistIds to fetch for, or null to fetch all */ rundownPlaylistIds: RundownPlaylistId[] | null, @@ -329,7 +329,7 @@ export type CorelibPubSubCollections = { [CollectionName.ExpectedPackages]: ExpectedPackageDBBase [CollectionName.ExpectedPackageWorkStatuses]: ExpectedPackageWorkStatus [CollectionName.ExternalMessageQueue]: ExternalMessageQueueObj - [CollectionName.IngestDataCache]: IngestDataCacheObj + [CollectionName.NrcsIngestDataCache]: NrcsIngestDataCacheObj [CollectionName.PartInstances]: DBPartInstance [CollectionName.PackageContainerStatuses]: PackageContainerStatusDB [CollectionName.PackageInfos]: PackageInfoDB diff --git a/packages/corelib/src/settings/objectWithOverrides.ts b/packages/corelib/src/settings/objectWithOverrides.ts index 228234eae7..03783ab565 100--- a/packages/corelib/src/settings/objectWithOverrides.ts +++ b/packages/corelib/src/settings/objectWithOverrides.ts @@ -51,6 +51,10 @@ export function wrapDefaultObject<T extends object>(obj: T): ObjectWithOverrides<T> overrides: [], } } +export function isObjectWithOverrides<T extends object>(o: ObjectWithOverrides<T> | T): o is ObjectWithOverrides<T> { + const oAny = o as any + return typeof oAny.defaults === 'object' && Array.isArray(oAny.overrides) +} /** * In some cases, an ObjectWithOverrides should have no defaults. This is common for when the user owns the object containing the ObjectWithOverrides.
* This helper takes an ObjectWithOverrides, and converts it to have no defaults, and have each contained object as an override diff --git a/packages/corelib/src/snapshots.ts b/packages/corelib/src/snapshots.ts index c75df55d03..b56f89420c 100644 --- a/packages/corelib/src/snapshots.ts +++ b/packages/corelib/src/snapshots.ts @@ -4,7 +4,7 @@ import { ExpectedMediaItem } from './dataModel/ExpectedMediaItem' import { ExpectedPackageDB } from './dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from './dataModel/ExpectedPlayoutItem' import { RundownPlaylistId } from './dataModel/Ids' -import { IngestDataCacheObj } from './dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from './dataModel/NrcsIngestDataCache' import { DBPart } from './dataModel/Part' import { DBPartInstance } from './dataModel/PartInstance' import { Piece } from './dataModel/Piece' @@ -15,13 +15,15 @@ import { RundownBaselineAdLibItem } from './dataModel/RundownBaselineAdLibPiece' import { RundownBaselineObj } from './dataModel/RundownBaselineObj' import { DBRundownPlaylist } from './dataModel/RundownPlaylist' import { DBSegment } from './dataModel/Segment' +import { SofieIngestDataCacheObj } from './dataModel/SofieIngestDataCache' export interface CoreRundownPlaylistSnapshot { version: string playlistId: RundownPlaylistId playlist: DBRundownPlaylist rundowns: Array - ingestData: Array + ingestData: Array + sofieIngestData: Array | undefined // Added in 1.52 baselineObjs: Array baselineAdlibs: Array segments: Array diff --git a/packages/corelib/src/worker/ingest.ts b/packages/corelib/src/worker/ingest.ts index 4e5ba19e0d..ffa7c3013b 100644 --- a/packages/corelib/src/worker/ingest.ts +++ b/packages/corelib/src/worker/ingest.ts @@ -11,7 +11,13 @@ import { StudioId, } from '../dataModel/Ids' import type { MOS } from '@sofie-automation/shared-lib/dist/mos' -import { IngestAdlib, IngestPart, IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' +import { + IngestAdlib, + IngestPart, + IngestRundown, + IngestSegment, + UserOperationTarget, +} from '@sofie-automation/blueprints-integration' import { BucketAdLibAction } from '../dataModel/BucketAdLibAction' import { RundownSource } from '../dataModel/Rundown' @@ -117,6 +123,11 @@ export enum IngestJobs { */ UserUnsyncRundown = 'userUnsyncRundown', + /** + * User executed a change operation + */ + UserExecuteChangeOperation = 'userExecuteChangeOperation', + // For now these are in this queue, but if this gets split up to be per rundown, then a single bucket queue will be needed BucketItemImport = 'bucketItemImport', BucketItemRegenerate = 'bucketItemRegenerate', @@ -234,6 +245,11 @@ export interface UserRemoveRundownProps extends UserRundownPropsBase { } export type UserUnsyncRundownProps = UserRundownPropsBase +export interface UserExecuteChangeOperationProps extends IngestPropsBase { + operationTarget: UserOperationTarget + operation: { id: string; [key: string]: any } +} + export interface BucketItemImportProps { bucketId: BucketId showStyleBaseId: ShowStyleBaseId @@ -275,7 +291,7 @@ export interface CreateAdlibTestingRundownForShowStyleVariantProps { */ export type IngestJobFunc = { [IngestJobs.RemoveRundown]: (data: IngestRemoveRundownProps) => void - [IngestJobs.UpdateRundown]: (data: IngestUpdateRundownProps) => RundownId + [IngestJobs.UpdateRundown]: (data: IngestUpdateRundownProps) => void [IngestJobs.UpdateRundownMetaData]: (data: IngestUpdateRundownMetaDataProps) => void [IngestJobs.RemoveSegment]: (data: IngestRemoveSegmentProps) 
=> void [IngestJobs.UpdateSegment]: (data: IngestUpdateSegmentProps) => void @@ -302,6 +318,7 @@ [IngestJobs.UserRemoveRundown]: (data: UserRemoveRundownProps) => void [IngestJobs.UserUnsyncRundown]: (data: UserUnsyncRundownProps) => void + [IngestJobs.UserExecuteChangeOperation]: (data: UserExecuteChangeOperationProps) => void [IngestJobs.BucketItemImport]: (data: BucketItemImportProps) => void [IngestJobs.BucketItemRegenerate]: (data: BucketItemRegenerateProps) => void diff --git a/packages/corelib/src/worker/studio.ts b/packages/corelib/src/worker/studio.ts index 81dd8fdc9a..9132ae8ba5 100644 --- a/packages/corelib/src/worker/studio.ts +++ b/packages/corelib/src/worker/studio.ts @@ -193,6 +193,17 @@ export enum StudioJobs { /** * Set QuickLoop marker */ SetQuickLoopMarker = 'setQuickLoopMarker', + + /** + * Clear all QuickLoop markers + */ + ClearQuickLoopMarkers = 'clearQuickLoopMarkers', + + /** + * Switch the route of the studio + * for use in adlib actions and other triggers + */ + SwitchRouteSet = 'switchRouteSet', } export interface RundownPlayoutPropsBase { @@ -249,6 +260,7 @@ export interface ExecuteActionProps extends RundownPlayoutPropsBase { actionId: string userData: any triggerMode?: string + actionOptions?: { [key: string]: any } } export interface ExecuteBucketAdLibOrActionProps extends RundownPlayoutPropsBase { bucketId: BucketId @@ -344,6 +356,12 @@ export interface SetQuickLoopMarkerProps extends RundownPlayoutPropsBase { type: 'start' | 'end' marker: QuickLoopMarker | null } +export type ClearQuickLoopMarkersProps = RundownPlayoutPropsBase + +export interface SwitchRouteSetProps { + routeSetId: string + state: boolean | 'toggle' +} /** * Set of valid functions, of form: @@ -398,6 +416,9 @@ export type StudioJobFunc = { [StudioJobs.ActivateAdlibTesting]: (data: ActivateAdlibTestingProps) => void [StudioJobs.SetQuickLoopMarker]: (data: SetQuickLoopMarkerProps) => void + [StudioJobs.ClearQuickLoopMarkers]: (data: ClearQuickLoopMarkersProps) => void + + [StudioJobs.SwitchRouteSet]: (data: SwitchRouteSetProps) => void } export function getStudioQueueName(id: StudioId): string { diff --git a/packages/documentation/docs/for-developers/contribution-guidelines.md b/packages/documentation/docs/for-developers/contribution-guidelines.md index 4e6ffadc97..f97118ca00 100644 --- a/packages/documentation/docs/for-developers/contribution-guidelines.md +++ b/packages/documentation/docs/for-developers/contribution-guidelines.md @@ -7,6 +7,8 @@ sidebar_position: 2 # Contribution Guidelines +_Last updated September 2024_ + ## About the Sofie TV Studio Automation Project The Sofie project includes a number of open source applications and libraries developed and maintained by the Norwegian public service broadcaster, [NRK](https://www.nrk.no/about/). Sofie has been used to produce live shows at NRK since September 2018. @@ -35,8 +37,8 @@ However, Sofie is a big project with many differing users and use cases. **Large 3. (If needed) NRK establishes contact with the RFC author, who will be invited to a workshop where the RFC is discussed. Meeting notes are published publicly on the RFC thread. 4. The contributor references the RFC when a pull request is ready. -### Base contributions on the in-development branch (or the master branch) -In order to facilitate merging, we ask that contributions are based on the latest (at the time of the pull request) _in-development_ branch (often named `release*`), alternatively the stable (eg. `master`) branch.
NRK will take responsibility for rebasing stable contributions to the latest in-development branch if needed. +### Base contributions on the in-development branch +In order to facilitate merging, we ask that contributions are based on the latest (at the time of the pull request) _in-development_ branch (often named `release*`). See **CONTRIBUTING.md** in each official repository for details on which branch to use as a base for contributions. ## Developer Guidelines diff --git a/packages/documentation/docs/user-guide/further-reading.md b/packages/documentation/docs/user-guide/further-reading.md index caea59f4f2..be17d15f67 100644 --- a/packages/documentation/docs/user-guide/further-reading.md +++ b/packages/documentation/docs/user-guide/further-reading.md @@ -10,7 +10,7 @@ description: This guide has a lot of links. Here they are all listed by section. - [Gateways](concepts-and-architecture.md#gateways) - [Blueprints](concepts-and-architecture.md#blueprints) -- Ask questions in the [Sofie Slack Channel](https://join.slack.com/t/sofietv/shared_invite/enQtNTk2Mzc3MTQ1NzAzLTJkZjMyMDg3OGM0YWU3MmU4YzBhZDAyZWI1YmJmNmRiYWQ1OTZjYTkzOTkzMTA2YTE1YjgxMmVkM2U1OGZlNWI) +- Ask questions in the [Sofie Slack Channel](https://sofietv.slack.com/join/shared_invite/zt-2bfz8l9lw-azLeDB55cvN2wvMgqL1alA#/shared-invite/email) ## Installation & Setup diff --git a/packages/documentation/docs/user-guide/installation/installing-sofie-server-core.md b/packages/documentation/docs/user-guide/installation/installing-sofie-server-core.md index 2a07e4c960..9b3c1412d1 100644 --- a/packages/documentation/docs/user-guide/installation/installing-sofie-server-core.md +++ b/packages/documentation/docs/user-guide/installation/installing-sofie-server-core.md @@ -22,12 +22,12 @@ version: '3.3' services: db: hostname: mongo - image: mongo:4.2.18 + image: mongo:6.0 restart: always entrypoint: ['/usr/bin/mongod', '--replSet', 'rs0', '--bind_ip_all'] # the healthcheck avoids the need to initiate the replica set healthcheck: - test: test $$(echo "rs.initiate().ok || rs.status().ok" | mongo --quiet) -eq 1 + test: test $$(mongosh --quiet --eval "try {rs.initiate()} catch(e) {rs.status().ok}") -eq 1 interval: 10s start_period: 30s ports: @@ -39,7 +39,7 @@ services: core: hostname: core - image: sofietv/tv-automation-server-core:release37 + image: sofietv/tv-automation-server-core:release51 restart: always ports: - '3000:3000' # Same port as meteor uses by default @@ -57,7 +57,7 @@ services: - db playout-gateway: - image: sofietv/tv-automation-playout-gateway:release37 + image: sofietv/tv-automation-playout-gateway:release51 restart: always command: yarn start -host core -port 3000 -id playoutGateway0 networks: @@ -80,7 +80,7 @@ services: # - core # mos-gateway: - # image: sofietv/tv-automation-mos-gateway:release37 + # image: sofietv/tv-automation-mos-gateway:release51 # restart: always # ports: # - "10540:10540" # MOS Lower port diff --git a/packages/documentation/versioned_docs/version-1.50.0/about-sofie.md b/packages/documentation/versioned_docs/version-1.50.0/about-sofie.md new file mode 100644 index 0000000000..363475f108 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/about-sofie.md @@ -0,0 +1,22 @@ +--- +title: About Sofie +hide_table_of_contents: true +sidebar_label: About Sofie +sidebar_position: 1 +--- + +# NRK Sofie TV Automation System + +![The producer's view in Sofie](https://raw.githubusercontent.com/nrkno/Sofie-TV-automation/master/images/Sofie_GUI_example.jpg) + +_**Sofie**_ is a web-based TV 
automation system for studios and live shows, used in daily live TV news productions by the Norwegian public service broadcaster [**NRK**](https://www.nrk.no/about/) since September 2018.
+
+## Key Features
+
+- User-friendly, modern web-based GUI
+- State-based device control and playout of video, audio, and graphics
+- Modular device-control architecture with support for several hardware \(and software\) setups
+- Modular data-ingest architecture, supports MOS and Google spreadsheets
+- Plug-in architecture for programming shows
+
+_The NRK logo is a registered trademark of Norsk rikskringkasting AS. The license does not grant any right to use, in any way, any trademarks, service marks or logos of Norsk rikskringkasting AS._
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-documentation.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-documentation.md
new file mode 100644
index 0000000000..9d5c84dee7
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-documentation.md
@@ -0,0 +1,8 @@
+---
+sidebar_position: 6
+---
+
+# API Documentation
+
+The Sofie Blueprints API and the Sofie Peripherals API documentation is automatically generated and available through
+[nrkno.github.io/sofie-core/typedoc](https://nrkno.github.io/sofie-core/typedoc).
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-stability.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-stability.md
new file mode 100644
index 0000000000..253a3bb750
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/api-stability.md
@@ -0,0 +1,26 @@
+---
+title: API Stability
+sidebar_position: 11
+---
+
+Sofie has various APIs for talking between components, and for external systems to interact with.
+
+We classify each API into one of two categories:
+
+## Stable
+
+This is a collection of APIs which we intend to avoid introducing any breaking change to unless necessary. This is so external systems can rely on this API without needing to be updated in lockstep with Sofie, and hopefully will make sense to developers who are not familiar with Sofie's inner workings.
+
+In version 1.50, a new REST API was introduced. This can be found at `/api/v1.0`, and is designed to allow an external system to interact with Sofie using simplified abstractions of Sofie internals.
+
+The _Live Status Gateway_ is also part of this stable API, intended to allow for reactively retrieving data from Sofie. Internally, it translates the internal APIs into a stable form.
+
+:::note
+You can find the _Live Status Gateway_ in the `packages` folder of the [Sofie Core](https://github.com/nrkno/sofie-core) repository.
+:::
+
+## Internal
+
+This covers everything we expose over DDP, the `/api/0` endpoint and any other HTTP endpoints.
+
+These are intended for use between components of Sofie, which should be updated together. The DDP API does have breaking changes in most releases. We use the `server-core-integration` library to manage these typings, and to ensure that compatible versions are used together.
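+
+As a rough illustration of using the stable REST API described above (a hypothetical sketch: take the exact endpoint paths and response shapes from the generated OpenAPI documentation, not from here):
+
+```ts
+// Hypothetical example: ask Sofie for its playlists over the stable REST API.
+// Assumes Sofie Core is reachable on localhost:3000; adjust for your installation.
+const res = await fetch('http://localhost:3000/api/v1.0/playlists')
+if (!res.ok) throw new Error(`Sofie REST API returned HTTP ${res.status}`)
+console.log(await res.json())
+```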
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/contribution-guidelines.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/contribution-guidelines.md new file mode 100644 index 0000000000..4e6ffadc97 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/contribution-guidelines.md @@ -0,0 +1,96 @@ +--- +description: >- + The Sofie team happily encourage contributions to the Sofie project, and + kindly ask you to observe these guidelines when doing so. +sidebar_position: 2 +--- + +# Contribution Guidelines + +## About the Sofie TV Studio Automation Project + +The Sofie project includes a number of open source applications and libraries developed and maintained by the Norwegian public service broadcaster, [NRK](https://www.nrk.no/about/). Sofie has been used to produce live shows at NRK since September 2018. + +A list of the "Sofie repositories" [can be found here](libraries.md). NRK owns the copyright of the contents of the official Sofie repositories, including the source code, related files, as well as the Sofie logo. + +The Sofie team at NRK is responsible for development and maintenance. We also do thorough testing of each release to avoid regressions in functionality and ensure interoperability with the various hardware and software involved. + +The Sofie team welcomes open source contributions and will actively work towards enabling contributions to become mergeable into the Sofie repositories. However, as main stakeholder and maintainer we reserve the right to refuse any contributions. + + +## About Contributions + +Thank you for considering contributing to the Sofie project! + +Before you start, there are a few things you should know: + +### “Discussions Before Pull Requests” + +**Minor changes** (most bug fixes and small features) can be submitted directly as pull requests to the appropriate official repo. + +However, Sofie is a big project with many differing users and use cases. **Larger changes** might be more difficult to merge into an official repository if NRK has not been made aware of their existence beforehand. To facilitate a timely handling of larger contributions, there’s a workflow intended to keep an open dialogue between all interested parties: + +1. Contributor opens an RFC (as a _GitHub issue_) in the appropriate repository. +2. NRK evaluates the RFC, usually within a week. +3. (If needed) NRK establishes contact with the RFC author, who will be invited to a workshop where the RFC is discussed. Meeting notes are published publicly on the RFC thread. +4. The contributor references the RFC when a pull request is ready. + +### Base contributions on the in-development branch (or the master branch) +In order to facilitate merging, we ask that contributions are based on the latest (at the time of the pull request) _in-development_ branch (often named `release*`), alternatively the stable (eg. `master`) branch. NRK will take responsibility for rebasing stable contributions to the latest in-development branch if needed. +See **CONTRIBUTING.md** in each official repository for details on which branch to use as a base for contributions. + +## Developer Guidelines + +### Pull Requests + +We encourage you to open PRs early! If it’s still in development, open the PR as a draft. + +### Types + +All official Sofie repositories use TypeScript. When you contribute code, be sure to keep it as strictly typed as possible. 
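+
+As a small illustration of what we mean (a sketch, not code from any Sofie repository): prefer explicit interfaces, and `unknown` plus narrowing at untyped boundaries, over `any`.
+
+```ts
+// Avoid `any`: it silences the type checker entirely.
+// function getDuration(piece: any) { return piece.duration }
+
+// Prefer explicit shapes.
+interface PieceLike {
+	duration: number
+}
+
+function getDuration(piece: PieceLike): number {
+	return piece.duration
+}
+
+// At untyped boundaries, take `unknown` and narrow it explicitly.
+function parseDuration(raw: unknown): number {
+	if (typeof raw !== 'number') throw new Error('duration must be a number')
+	return raw
+}
+```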
+
+### Code Style & Formatting
+
+Most of the projects use a linter (eslint) and a formatter (prettier). Before submitting a pull request, please make sure it conforms to the linting rules by running `yarn lint`. `yarn lint --fix` can fix most of the issues.
+
+### Documentation
+
+We rely on two types of documentation: the [Sofie documentation](https://nrkno.github.io/sofie-core/) ([source code](https://github.com/nrkno/sofie-core/tree/master/packages/documentation)) and inline code documentation.
+
+We don't aim to have the "absolute perfect documentation possible", BUT we do try to improve and add documentation to have a good-enough-to-be-comprehensible standard. We think that:
+
+* _What_ something does is not as important – we can read the code for that.
+* _Why_ something does something, **is** important. Implied usage, side-effects, descriptions of the context etcetera...
+
+When you contribute, we ask you to also update any documentation where needed.
+
+### Updating Dependencies
+
+When updating dependencies in a library, it is preferred to do so via `yarn upgrade-interactive --latest` whenever possible. This is so that the versions in `package.json` are also updated, as we have no guarantee that the library will work with versions lower than that used in the `yarn.lock` file, even if it is compatible with the semver range in `package.json`. After this, a `yarn upgrade` can be used to update any child dependencies.
+
+Be careful when bumping across major versions.
+
+Also, each of the libraries has a minimum nodejs version specified in its `package.json`. Care must be taken when updating dependencies to ensure that this compatibility is retained.
+
+### Resolutions
+
+We sometimes use the `yarn resolutions` property in `package.json` to fix security vulnerabilities in dependencies of libraries that haven't released a fix yet. If adding a new one, try to make it as specific as possible to ensure it doesn't have unintended side effects.
+
+When updating other dependencies, it is a good idea to make sure that the resolutions defined still apply and are correct.
+
+### Logging
+
+When logging, we try to adhere to the following guidelines:
+
+Usage of `console.log` and `console.error` directly is discouraged (except for quick debugging locally). Instead, use one of the logger libraries (to output JSON logs, which are easier to index).
+When logging, use one of the **log levels** described below:
+
+| Level | Description | Examples |
+| ------- | ----------- | -- |
+| `silly` | For very detailed logs (rarely used). | - |
+| `debug` | Logging of info that could be useful for developers when debugging certain issues in production. | `"payload: {>JSON<} "`<br/>`"Reloading data X from DB"` |
+| `verbose` | Logging of common events. | `"File X updated"` |
+| `info` | Logging of significant / uncommon events.<br/>_Note: If an event happens often or many times, use `verbose` instead._ | `"Initializing TSR..."`<br/>`"Starting nightly cronjob..."`<br/>`"Snapshot X restored"`<br/>`"Not allowing removal of current playing segment 'xyz', making segment unsynced instead"`<br/>`"PeripheralDevice X connected"` |
+| `warn` | Used when something unexpected happened, but not necessarily due to an application bug.<br/>These logs don't have to be acted upon directly, but could be useful to provide context to a dev/sysadmin while troubleshooting an issue. | `"PeripheralDevice X disconnected"`<br/>`"User Error: Cannot activate Rundown (Rundown not found)"`<br/>`"mosRoItemDelete NOT SUPPORTED"` |
+| `error` | Used when something went _wrong_, preventing something from functioning.<br/>A logged `error` should always result in a sysadmin / developer looking into the issue.<br/>_Note: Don't use `error` for things that are out of the app's control, such as user error._ | `"Cannot read property 'length' of undefined"`<br/>`"Failed to save Part 'X' to DB"` |
+| `crit` | Fatal errors (rarely used) | - |
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/data-model.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/data-model.md
new file mode 100644
index 0000000000..8f887c92e8
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/data-model.md
@@ -0,0 +1,132 @@
+---
+title: Data Model
+sidebar_position: 9
+---
+
+Sofie persists the majority of its data in a MongoDB database. This allows us to use TypeScript-friendly documents,
+without needing to worry too much about the strictness of schemas, and allows us to watch for changes happening inside
+the database as a way of ensuring that updates are reactive.
+
+Data is typically pushed to the UI or the gateways through [Publications](./publications) over the DDP connection that Meteor provides.
+
+## Collection Ownership
+
+Each collection in MongoDB is owned by a different area of Sofie. In some cases, changes are also made by another area, but we try to keep this to a minimum.
+In every case, any layout changes and any scheduled cleanup are performed by the Meteor layer for simplicity.
+
+### Meteor
+
+This category of collections is rather loosely defined, as it ends up being everything that doesn't belong somewhere else.
+
+This consists of anything that is configurable from the Sofie UI, anything needed solely for the UI, and some other bits. Additionally, there are some collections which are populated by other portions of a Sofie system, such as by package manager, through an API over DDP.
+Currently, there is not a very clearly defined flow for modifying these documents, with the UI often making changes directly with minimal or no validation.
+
+This includes:
+
+- [Blueprints](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Blueprint.ts)
+- [Buckets](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Buckets.ts)
+- [CoreSystem](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/CoreSystem.ts)
+- [Evaluations](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Evaluations.ts)
+- [ExternalMessageQueue](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ExternalMessageQueue.ts)
+- [ExpectedPackageWorkStatuses](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts)
+- [MediaObjects](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/MediaObjects.ts)
+- [MediaWorkFlows](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/MediaWorkFlows.ts)
+- [MediaWorkFlowSteps](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/MediaWorkFlowSteps.ts)
+- [Organizations](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Organization.ts)
+- [PackageInfos](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PackageInfos.ts)
+- [PackageContainerPackageStatuses](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PackageContainerPackageStatus.ts)
+- [PackageContainerStatuses](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PackageContainerStatus.ts)
+- [PeripheralDeviceCommands](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PeripheralDeviceCommand.ts)
+- [PeripheralDevices](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PeripheralDevice.ts)
+- [RundownLayouts](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/RundownLayouts.ts)
+- [ShowStyleBase](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ShowStyleBase.ts)
+- [ShowStyleVariant](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ShowStyleVariant.ts)
+- [Snapshots](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Snapshots.ts)
+- [Studio](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Studio.ts)
+- [TriggeredActions](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/TriggeredActions.ts)
+- [TranslationsBundles](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/TranslationsBundles.ts)
+- [UserActionsLog](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/UserActionsLog.ts)
+- [Users](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Users.ts)
+- [Workers](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/collections/Workers.ts)
+- [WorkerThreads](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/WorkerThreads.ts)
+
+### Ingest
+
+This category of collections is owned by the ingest [worker threads](./worker-threads-and-locks.md), and models a Rundown based on how it is defined by the NRCS.
+
+These collections are not exposed as writable in Meteor, and are only allowed to be written to by the ingest worker threads.
+There are exceptions to both of these: Meteor is allowed to write to them as part of migrations and when cleaning up old documents, while the playout worker is allowed to modify certain Segments that are labelled as being owned by playout.
+
+The collections which are owned by the ingest workers are:
+
+- [AdLibActions](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/AdLibActions.ts)
+- [AdLibPieces](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/AdLibPieces.ts)
+- [BucketAdLibActions](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/BucketAdLibActions.ts)
+- [BucketAdLibPieces](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/BucketAdLibPieces.ts)
+- [ExpectedMediaItems](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ExpectedMediaItems.ts)
+- [ExpectedPackages](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ExpectedPackages.ts)
+- [ExpectedPlayoutItems](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/ExpectedPlayoutItems.ts)
+- [IngestDataCache](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/IngestDataCache.ts)
+- [Parts](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Parts.ts)
+- [Pieces](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Pieces.ts)
+- [RundownBaselineAdLibActions](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/RundownBaselineAdLibActions.ts)
+- [RundownBaselineAdLibPieces](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/RundownBaselineAdLibPieces.ts)
+- [RundownBaselineObjects](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/RundownBaselineObjects.ts)
+- [Rundowns](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Rundowns.ts)
+- [Segments](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Segments.ts)
+
+These collections model a Rundown from the NRCS in a Sofie form. Almost all of these contain documents which are largely generated by blueprints.
+Some of these collections are used by package manager to initiate work, while others form a view of the Rundown for the users, and are used as part of the model for playout.
+
+### Playout
+
+This category of collections is owned by the playout [worker threads](./worker-threads-and-locks.md), and is used to model the playout of a Rundown or set of Rundowns.
+
+During the final stage of an ingest operation, there is a period where the ingest worker acquires a `PlaylistLock`, so that it can ensure that the RundownPlaylist the Rundown is a part of is updated with any necessary changes following the ingest operation. During this lock, it will also attempt to [sync any ingest changes](./for-blueprint-developers/sync-ingest-changes) to the PartInstances and PieceInstances, if supported by the blueprints.
+
+As before, Meteor is allowed to write to these collections as part of migrations and when cleaning up old documents.
+
+The collections which can only be modified inside of a `PlaylistLock` are:
+
+- [PartInstances](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PartInstances.ts)
+- [PieceInstances](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/PieceInstances.ts)
+- [RundownPlaylists](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/RundownPlaylists.ts)
+- [Timelines](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/Timelines.ts)
+- [TimelineDatastore](https://github.com/nrkno/sofie-core/blob/master/packages/corelib/src/dataModel/TimelineDatastore.ts)
+
+These collections are used in combination with many of the ingest collections, to drive playout.
+
+#### RundownPlaylist
+
+RundownPlaylists are a Sofie invention designed to solve one problem: in some NRCSs it is beneficial to build a show across multiple Rundowns, which should then be concatenated for playout.
+In particular, MOS has no concept of a Playlist, only Rundowns, and it was here that we needed to be able to combine multiple Rundowns.
+
+This functionality can be used either to break down long shows into manageable chunks, or to indicate a different type of show for each portion.
+
+Because of this, RundownPlaylists are largely missing from the ingest side of Sofie. We do not expose them in the ingest APIs, or do anything with them throughout the majority of the blueprints generating a Rundown.
+Instead, we let the blueprints specify that a Rundown should be part of a RundownPlaylist by setting the `playlistExternalId` property, where multiple Rundowns in a Studio with the same id will be grouped into a RundownPlaylist.
+If this property is not used, we automatically generate a RundownPlaylist containing the Rundown by itself.
+
+It is during the final stages of an ingest operation that the RundownPlaylist will be generated (with the help of blueprints), if it is necessary.
+Another benefit to this approach is that it allows for very cheaply and easily moving Rundowns between RundownPlaylists, even safely affecting a RundownPlaylist that is currently on air.
+
+#### Part vs PartInstance and Piece vs PieceInstance
+
+In the early days of Sofie, we had only Parts and Pieces, no PartInstances and PieceInstances.
+
+This quickly became costly and complicated when handling cases where the user used Adlibs in Sofie. Some of the challenges were:
+
+- When a Part is deleted from the NRCS and that part is on air, we don't want to delete it in Sofie immediately
+- When a Part is modified in the NRCS and that part is on air, we may not want to apply all of the changes to playout immediately
+- When a Part has finished playback and is set-as-next again, we need to make sure to discard any changes made by the previous playout, and restore it as if it was freshly ingested (including the changes we ignored while it was on air)
+- When creating an adlib part, we need to be sure that an ingest operation doesn't attempt to delete it, until playout is finished with it
+- After using an adlib in a part, we need to remove the piece it created when we set-as-next again, or reset the rundown
+- When an earlier part is removed, where an infinite piece has spanned into the current part, we may not want to remove that infinite piece
+
+Our solution to some of this early on was to not regenerate certain Parts when receiving ingest operations for them, and to defer it until after that Part was off air.
While this worked, it was not optimal to re-run ingest operations like that while doing a take. This also required the blueprint API to generate a single part in each call, which we were starting to find limiting. This was also problematic when resetting a rundown, as that would often require rerunning ingest for the whole rundown, making it a notably slow operation.
+
+At this point in time, Adlib Actions did not exist in Sofie. They are able to change almost every property of a Part or Piece that ingest is able to define, which makes the resetting process harder.
+
+PartInstances and PieceInstances were added as a way for us to make a copy of each Part and Piece as it was selected for playout, so that we could allow ingest without risking affecting playout, and to simplify the cleanup performed. The PartInstances and PieceInstances are our record of how the Rundown was played, which we can utilise to output metadata such as for chapter markers on a web player. In earlier versions of Sofie this was tracked independently with an `AsRunLog`, which resulted in odd issues such as having `AsRunLog` entries which referred to a Part which no longer existed, or whose content was very different to how it was played.
+
+Later on, this separation has allowed us to more cleanly define operations as ingest or playout, and allows us to run them in parallel with more confidence that they won't accidentally wipe out each other's changes. Previously, both ingest and playout operations would be modifying documents in the Piece and Part collections, making concurrent operations unsafe as they could be modifying the same Part or Piece.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/_category_.json
new file mode 100644
index 0000000000..5f6541c2b5
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Device Integrations",
+  "position": 5
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/intro.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/intro.md
new file mode 100644
index 0000000000..1604538523
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/intro.md
@@ -0,0 +1,18 @@
+# Introduction
+
+Device integrations in Sofie are part of the Timeline State Resolver (TSR) library. A device integration has a couple of responsibilities in the Sofie ecosystem. First and foremost it should establish a connection with a foreign device. It should also be able to convert Sofie's idea of what the device should be doing into commands to control the device. And lastly it should export interfaces to be used by the blueprints developer.
+
+In order to understand all about writing TSR integrations there are some concepts to familiarise yourself with; in this documentation we will attempt to explain these:
+
+ - [Options and mappings](./options-and-mappings.html)
+ - [TSR Integration API](./tsr-api.html)
+ - [TSR Types package](./tsr-types.html)
+ - [TSR Actions](./tsr-actions.html)
+
+But to start off we will explain the general structure of the TSR. Any user of the TSR will interface primarily with the Conductor class.
+Primarily the user will input device configurations, mappings and timelines into the TSR. The timeline describes the entire state of all of the devices over time. It does this by putting objects on timeline layers. Every timeline layer maps to a specific part of the device; this is configured through the mappings.
+
+The timeline is converted into distinct states at different points in time, and these states are fed to the individual integrations. As an integration developer you shouldn't have to worry about keeping track of this. It is most important that you expose \(a\) a method to convert from a Timeline State to a Device State, \(b\) a method for diffing 2 device states and \(c\) a way to send commands to the device. We'll dive deeper into this in [TSR Integration API](./tsr-api.html).
+
+:::info
+The information in this section is not a conclusive guide on writing an integration; it should be used more as a guide while looking at a TSR integration such as the [OSC integration](https://github.com/nrkno/sofie-timeline-state-resolver/tree/master/packages/timeline-state-resolver/src/integrations/osc).
+:::
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/options-and-mappings.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/options-and-mappings.md
new file mode 100644
index 0000000000..1bb182f155
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/options-and-mappings.md
@@ -0,0 +1,11 @@
+# Options and mappings
+
+For an end user to configure the system from the Sofie UI we have to expose options and mappings from the TSR. This is done through [JSON config schemas](../json-config-schema.html) in the `$schemas` folder of your integration.
+
+## Options
+
+Options are for any configuration the user needs to make for your device integration to work well. Things like IP addresses and ports go here.
+
+## Mappings
+
+A mapping is essentially an address into the device you are integrating with. For example, a mapping for CasparCG contains a channel and a layer, and a mapping for an Atem can be a mix effect or a downstream keyer. It is entirely possible for the user to define 2 mappings pointing to the same bit of hardware, so keep that in mind while writing your integration. The granularity of the mappings influences both how you write your device as well as the shape of the timeline objects. If, for example, we had not included the layer number in the CasparCG mapping, we would have had to define this separately on every timeline object.
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-actions.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-actions.md
new file mode 100644
index 0000000000..791c6f5a26
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-actions.md
@@ -0,0 +1,11 @@
+# TSR Actions
+
+Sometimes a state-based model isn't enough and you just need to fire an action. In Sofie we try to be strict about any playout operations needing to be state based, i.e. doing a transition operation on a vision mixer should be a result of a state change, not an action. However, there are things that are easier done with actions, for example cleaning up a playlist on a graphics server or formatting a disk on a recorder. For these scenarios we have added TSR Actions.
+
+TSR Actions can be triggered through the UI by a user, through blueprints when the rundown is activated or deactivated, or through adlib actions.
+
+When implementing the TSR Actions API you should start by defining a JSON schema outlining the action IDs and payload your integration will consume. Once you've done this you're ready to implement the actions as callbacks on the `actions` property of your integration.
+
+:::warning
+Beware that if your action changes the state of the device, you should handle this appropriately by resetting the resolver.
+:::
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-api.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-api.md
new file mode 100644
index 0000000000..e68424455e
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-api.md
@@ -0,0 +1,28 @@
+# TSR Integration API
+
+:::info
+As of version 1.50, there still exists a legacy API for device integrations. In this documentation we will only consider the more modern variant, informally known as the _StateHandler_ format.
+:::
+
+## Setup and status
+
+There are essentially 2 parts to the TSR API. The first thing you need to do is set up a connection with the device you are integrating with. This is done in the `init` method. It takes a parameter with the Device options as specified in the config schema. Additionally, a `terminate` call is to be implemented to tear down the connection and prepare any timers to be garbage collected.
+
+Regarding status, there are 2 important methods to be implemented: one is a getter for the `connected` status of the integration, and the other is `getStatus`, which should inform a TSR user of the status of the device. You can add messages in this status as well.
+
+## State and commands
+
+The second part is where the bulk of the work happens. First your implementation of `convertTimelineStateToDeviceState` will be called with a Timeline State and the mappings for your integration. You should return a "Device State" here, which is an object representing the state of your device as inferred from the Timeline State and mappings. Then the next implementation is of the `diffStates` method, which will be called with 2 Device States as you've generated them earlier. The purpose of this method is to generate commands such that a state change from Device State A to Device State B can be executed. Hence it is called a "diff". The last important method here is `sendCommand`, which will be called with the commands you've generated earlier when the TSR wants to transition from State A to State B.
+
+Another thing to implement is the `actions` property. You can leave it as an empty object initially, or read more about it in [TSR Actions](./tsr-actions.md).
+
+## Logging and emitting events
+
+Logging is done through an event emitter, as is described in the DeviceEvents interface. You should also emit an event any time the connection status changes. There is an event you can emit to rerun the resolving process in TSR as well; this will more or less create new Timeline States from the timeline, diff them and see if they should be executed.
+
+## Best practices
+
+ - The `init` method is asynchronous, but you should not use it to wait for timeouts in your connection to reject it. Instead, the rest of your integration should gracefully deal with an (initially) disconnected device.
+ - The result of the `getStatus` method is displayed in the Sofie UI, so try to put helpful information in the messages and only elevate to a "bad" status if something is really wrong, like being fully disconnected from a device.
+ - Beware of side effects in your implementations of `convertTimelineStateToDeviceState` and `diffStates`: they are _not_ guaranteed to be chronological, and the state changes may never actually be executed.
+ - If you need to do any time-aware commands (such as seeking in a media file), use the time from the Timeline State to do your calculations for these.
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-types.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-types.md
new file mode 100644
index 0000000000..0c9d2e5108
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/device-integrations/tsr-types.md
@@ -0,0 +1,7 @@
+# TSR Types
+
+The TSR monorepo contains a types package called `timeline-state-resolver-types`. The intent behind this package is that you may want to generate a Timeline in a place where you don't want to import the TSR library for performance reasons. Blueprints are a good example of this, since the webpack setup does not deal well with importing everything.
+
+## What you should know about this
+
+When the TSR is built, the types for the Mappings, Options and Actions for your integration will be auto-generated under `src/generated`. In addition to this, you should describe the content property of the timeline objects in a file using interfaces. If you're adding a new integration, also add it to the `DeviceType` enum as described in `index.ts`.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_category_.json
new file mode 100644
index 0000000000..c4c3c8c242
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "For Blueprint Developers",
+  "position": 4
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_part-timings-demo.jsx b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_part-timings-demo.jsx
new file mode 100644
index 0000000000..98cb9f4275
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/_part-timings-demo.jsx
@@ -0,0 +1,173 @@
+import React, { useState } from 'react'
+
+/**
+ * This is a demo showing the interactions between the part and piece groups on the timeline.
+ * The maths should be the same as in `meteor/lib/rundown/timings.ts`, but in a simplified form + */ + +const MS_TO_PIXEL_CONSTANT = 0.1 + +const viewPortStyle = { + width: '100%', + backgroundSize: '40px 40px', + backgroundImage: + 'linear-gradient(to right, grey 1px, transparent 1px), linear-gradient(to bottom, grey 1px, transparent 1px)', + overflowX: 'hidden', + display: 'flex', + flexDirection: 'column', + position: 'relative', +} + +export function PartTimingsDemo() { + const [postrollA1, setPostrollA1] = useState(0) + const [postrollA2, setPostrollA2] = useState(0) + const [prerollB1, setPrerollB1] = useState(0) + const [prerollB2, setPrerollB2] = useState(0) + const [outTransitionDuration, setOutTransitionDuration] = useState(0) + const [inTransitionBlockDuration, setInTransitionBlockDuration] = useState(0) + const [inTransitionContentsDelay, setInTransitionContentsDelay] = useState(0) + const [inTransitionKeepaliveDuration, setInTransitionKeepaliveDuration] = useState(0) + + // Arbitrary point in time for the take to be based around + const takeTime = 2400 + + const outTransitionTime = outTransitionDuration - inTransitionKeepaliveDuration + + // The amount of time needed to preroll Part B before the 'take' point + const partBPreroll = Math.max(prerollB1, prerollB2) + const prerollTime = partBPreroll - inTransitionContentsDelay + + // The amount to delay the part 'switch' to, to ensure the outTransition has time to complete as well as any prerolls for part B + const takeOffset = Math.max(0, outTransitionTime, prerollTime) + const takeDelayed = takeTime + takeOffset + + // Calculate the part A objects + const pieceA1 = { time: 0, duration: takeDelayed + inTransitionKeepaliveDuration + postrollA1 } + const pieceA2 = { time: 0, duration: takeDelayed + inTransitionKeepaliveDuration + postrollA2 } + const partA = { time: 0, duration: Math.max(pieceA1.duration, pieceA2.duration) } // part stretches to contain the piece + + // Calculate the transition objects + const pieceOutTransition = { + time: partA.time + partA.duration - outTransitionDuration - Math.max(postrollA1, postrollA2), + duration: outTransitionDuration, + } + const pieceInTransition = { time: takeDelayed, duration: inTransitionBlockDuration } + + // Calculate the part B objects + const partBBaseDuration = 2600 + const partB = { time: takeTime, duration: partBBaseDuration + takeOffset } + const pieceB1 = { time: takeDelayed + inTransitionContentsDelay - prerollB1, duration: partBBaseDuration + prerollB1 } + const pieceB2 = { time: takeDelayed + inTransitionContentsDelay - prerollB2, duration: partBBaseDuration + prerollB2 } + const pieceB3 = { time: takeDelayed + inTransitionContentsDelay + 300, duration: 200 } + + return ( +
+		/* The rendered JSX (the timeline group bars, markers and the input-controls table) was lost in extraction and is not reproduced here */
+		null
+	)
+}
+
+function TimelineGroup({ duration, time, name, color }) {
+	// A positioned, coloured bar labelled with `name` (markup lost in extraction)
+	return null
+}
+
+function TimelineMarker({ time, title }) {
+	// A thin vertical marker at `time` (markup lost in extraction)
+	return null
+}
+
+function InputRow({ label, max, value, setValue }) {
+	// A labelled range input, wired up via
+	// (e) => setValue(parseInt(e.currentTarget.value)) (markup lost in extraction)
+	return null
+}
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/ab-playback.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/ab-playback.md
new file mode 100644
index 0000000000..1a78316f77
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/ab-playback.md
@@ -0,0 +1,236 @@
+# AB Playback
+
+:::info
+Prior to version 1.50 of Sofie, this was implemented in Blueprints and not natively in Sofie-core
+:::
+
+_AB Playback_ is a common technique for clip playback. The aim is to be able to play multiple clips back to back, alternating which player is used for each clip.
+At first glance it sounds simple to handle, but it quickly becomes complicated when we consider the need to allow users to run adlibs, and that the system needs to seamlessly update pre-programmed clips when this happens.
+
+To avoid this problem, we take an approach of labelling pieces as needing an AB assignment and leaving timeline objects with some unresolved values during the ingest blueprint operations, and we perform the AB resolving when building the timeline for playout.
+
+There are other challenges to the resolving to think about too, which make this a challenging area to tackle, and not something that should be taken on when starting out with blueprints. Some of these challenges are:
+
+- Users get confused if the player of a clip changes without a reason
+- Reloading an already loaded clip can be costly, so should be avoided when possible
+- Adlibbing a clip, or changing what Part is nexted, can result in needing to move what player a clip has assigned
+- Postroll or preroll is often needed
+- Some studios can have fewer players available than ideal (e.g. going back to back between two clips, while a clip is playing on the studio monitor)
+
+## Defining Piece sessions
+
+An AB-session is a request for an AB player for the lifetime of the object or Piece. The resolver operates on these sessions, to identify when players are needed and to identify which objects and Pieces are linked and should use the same Player.
+
+In order for the AB resolver to know what AB sessions there are on the timeline, and how they all relate to each other, we define `abSessions` properties on various objects when defining Pieces and their content during the `getSegment` blueprint method.
+
+The AB resolving operates by looking at all the Pieces on the timeline, and plotting all the requested abSessions out in time. It will then iterate through each of these sessions in time order and assign them in order to the available players.
+Note: The sessions of TimelineObjects are not considered at this point, except for those in lookahead.
+
+Both Pieces and TimelineObjects accept an array of AB sessions, and are capable of using multiple AB pools on the same object. E.g. choosing a clip player and the DVE to play it through.
+
+:::warning
+The sessions of TimelineObjects are not considered during the resolver stage, except for lookahead objects.
+If a TimelineObject has an `abSession` set, its parent Piece must declare the same session.
+:::
+
+For example:
+
+```ts
+const partExternalId = 'id-from-nrcs'
+const piece: Piece = {
+	externalId: partExternalId,
+	name: 'My Piece',
+
+	abSessions: [{
+		sessionName: partExternalId,
+		poolName: 'clip'
+	}],
+
+	...
+}
+```
+
+This declares that this Piece requires a player from the 'clip' pool, with a unique sessionName.
+
+:::info
+The `sessionName` property is an identifier for a session within the Segment.
+Any other Pieces or TimelineObjects that want to share the session should use the same sessionName. Unrelated sessions must use a different name.
+:::
+
+## Enabling AB playback resolving
+
+To enable AB playback for your blueprints, the `getAbResolverConfiguration` method of a ShowStyle blueprint must be implemented. This informs Sofie that you want the AB playback logic to run, and configures the behaviour.
+
+A minimal implementation of this is:
+
+```ts
+getAbResolverConfiguration: (context: IShowStyleContext): ABResolverConfiguration => {
+	return {
+		resolverOptions: {
+			idealGapBefore: 1000,
+			nowWindow: 2000,
+		},
+		pools: {
+			clip: [1, 2],
+		},
+	}
+}
+```
+
+The `resolverOptions` property defines various configuration options that affect how sessions are assigned to players.
+The `pools` property defines the AB pools in your system, along with the IDs of the players in the pools. These do not have to be sequential starting from 1, and can be any numbers you wish. The order used here defines the order in which the resolver will assign them.
+
+## Updating the timeline from the assignments
+
+There are 3 possible strategies for applying the assignments to timeline objects. The applying and ab-resolving are done just before `onTimelineGenerate` from your blueprints is called.
+
+### TimelineObject Keyframes
+
+The simplest approach is to use timeline keyframes, which can be labelled as belonging to an abSession. These keyframes must be generated during ingest.
+
+This strategy works best for changing inputs on a video-mixer or other scenarios where a property inside of a timeline object needs changing.
+
+```ts
+let obj = {
+	id: '',
+	enable: { start: 0 },
+	layer: 'atem_me_program',
+	content: {
+		deviceType: TSR.DeviceType.ATEM,
+		type: TSR.TimelineContentTypeAtem.ME,
+		me: {
+			input: 0, // placeholder
+			transition: TSR.AtemTransitionStyle.CUT,
+		},
+	},
+	keyframes: [
+		{
+			id: `mp_1`,
+			enable: { while: '1' },
+			disabled: true,
+			content: {
+				input: 10,
+			},
+			preserveForLookahead: true,
+			abSession: {
+				pool: 'clip',
+				index: 1,
+			},
+		},
+		{
+			id: `mp_2`,
+			enable: { while: '1' },
+			disabled: true,
+			content: {
+				input: 11,
+			},
+			preserveForLookahead: true,
+			abSession: {
+				pool: 'clip',
+				index: 2,
+			},
+		},
+	],
+	abSessions: [
+		{
+			pool: 'clip',
+			name: 'abcdef',
+		},
+	],
+}
+```
+
+This object demonstrates how keyframes can be used to perform changes based on an assigned ab player session. The object itself must be labelled with the `abSession`, in the same way as the Piece is.
+Each keyframe can be labelled with an `abSession`, with only one from the pool being left active. If `disabled` is set on the keyframe, that will be unset, and the other keyframes for the pool will be removed.
+
+Setting `disabled: true` is not strictly necessary, but ensures that the keyframe will be inactive in case that ab-pool is not processed.
+In this example we are setting `preserveForLookahead` so that the keyframes are present on lookahead objects. If not set, then the keyframes will be removed by lookahead.
+
+### TimelineObject layer changing
+
+Another approach is to move objects between timeline layers. For example, player 1 is on CasparCG channel 1, with player 2 on CasparCG channel 2. This requires a different mapping for each layer, as sketched below.
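+
+For instance, a minimal sketch of such a pair of mappings (the exact mapping shapes are assumptions here; check the TSR typings for your version):
+
+```ts
+// Hypothetical studio mappings: one CasparCG layer per AB player.
+const mappings = {
+	casparcg_player_clip_1: {
+		device: TSR.DeviceType.CASPARCG, // assumed enum, as used elsewhere on this page
+		deviceId: 'caspar0',
+		channel: 1, // player 1 plays out on channel 1
+		layer: 110,
+	},
+	casparcg_player_clip_2: {
+		device: TSR.DeviceType.CASPARCG,
+		deviceId: 'caspar0',
+		channel: 2, // player 2 plays out on channel 2
+		layer: 110,
+	},
+}
+```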
+ +This strategy works best for playing a clip, where the whole object needs to move to different mappings. + +To enable this, the `ABResolverConfiguration` object returned from `getAbResolverConfiguration` can have a set of rules defined with the `timelineObjectLayerChangeRules` property. + +For example: + +```ts +getAbResolverConfiguration: (context: IShowStyleContext): ABResolverConfiguration => { + return { + resolverOptions: { + idealGapBefore: 1000, + nowWindow: 2000, + }, + pools: { + clip: [1, 2], + }, + timelineObjectLayerChangeRules: { + ['casparcg_player_clip_pending']: { + acceptedPoolNames: [AbSessionPool.CLIP], + newLayerName: (playerId: number) => `casparcg_player_clip_${playerId}`, + allowsLookahead: true, + }, + }, + } +} +``` + +And a timeline object: + +```ts +const clipObject: TimelineObjectCoreExt<> = { + id: '', + enable: { start: 0 }, + layer: 'casparcg_player_clip_pending', + content: { + deviceType: TSR.DeviceType.CASPARCG, + type: TSR.TimelineContentTypeCasparCg.MEDIA, + file: 'AMB', + }, + abSessions: [ + { + pool: 'clip', + name: 'abcdef', + }, + ], +} +``` + +This will result in the timeline object being moved to `casparcg_player_clip_1` if the clip is assigned to player 1, or `casparcg_player_clip_2` if the clip is assigned to player 2. + +This is also compatible with lookahead. To do this, the `casparcg_player_clip_pending` mapping should be created with the lookahead configuration set there, this should be of type `ABSTRACT`. The AB resolver will detect this lookahead object and it will get an assignment when a player is available. Lookahead should not be enabled for the `casparcg_player_clip_1` and other final mappings, as lookahead is run before AB so it will not find any objects on those layers. + +### Custom behaviour + +Sometimes, something more complex is needed than what the other options allow for. To support this, the `ABResolverConfiguration` object has an optional property `customApplyToObject`. It is advised to use the other two approaches when possible. + +```ts +getAbResolverConfiguration: (context: IShowStyleContext): ABResolverConfiguration => { + return { + resolverOptions: { + idealGapBefore: 1000, + nowWindow: 2000, + }, + pools: { + clip: [1, 2], + }, + customApplyToObject: ( + context: ICommonContext, + poolName: string, + playerId: number, + timelineObject: OnGenerateTimelineObj + ) => { + // Your own logic here + + return false + }, + } +} +``` + +Inside this function you are able to make any changes you like to the timeline object. +Return true if the object was changed, or false if it is unchanged. This allows for logging whether Sofie failed to modify an object for an ab assignment. + +For example, we use this to remap audio channels deep inside of some Sisyfos timeline objects. It is not possible for us to do this with keyframes due to the keyframes being applied with a shallow merge for the Sisyfos TSR device. diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/hold.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/hold.md new file mode 100644 index 0000000000..35d8dde7e4 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/hold.md @@ -0,0 +1,52 @@ +# Hold + +_Hold_ is a feature in Sofie to allow for a special form of take between two parts. 
It allows for the new part to start with some portions of the old part being retained, with the next 'take' stopping the remaining portions of the old part and not performing a true take. + +For example, it could be setup to hold back the video when going between two clips, creating what is known in film editing as a [split edit](https://en.wikipedia.org/wiki/Split_edit) or [J-cut](https://en.wikipedia.org/wiki/J_cut). The first _Take_ would start the audio from an _A-Roll_ (second clip), but keep the video playing from a _B-Roll_ (first clip). The second _Take_ would stop the first clip entirely, and join the audio and video for the second clip. + +![A timeline of a J-Cut in a Non-Linear Video Editor](/img/docs/video_edit_hold_j-cut.png) + +## Flow + +While _Hold_ is active or in progress, an indicator is shown in the header of the UI. +![_Hold_ in Rundown View header](/img/docs/rundown-header-hold.png) + +It is not possible to run any adlibs while a hold is active, or to change the nexted part. Once it is in progress, it is not possible to abort or cancel the _Hold_ and it must be run to completion. If the second part has an autonext and that gets reached before the _Hold_ is completed, the _Hold_ will be treated as completed and the autonext will execute as normal. + +When the part to be held is playing, with the correct part as next, the flow for the users is: + +- Before + - Part A is playing + - Part B is nexted +- Activate _Hold_ (By hotkey or other user action) + - Part A is playing + - Part B is nexted +- Perform a take into the _Hold_ + - Part B is playing + - Portions of Part A remain playing +- Perform a take to complete the _Hold_ + - Part B is playing + +Before the take into the _Hold_, it can be cancelled in the same way it was activated. + +## Supporting Hold in blueprints + +:::note +The functionality here is a bit limited, as it was originally written for one particular use-case and has not been expanded to support more complex scenarios. +Some unanswered questions we have are: + +- Should _Hold_ be rewritten to be done with adlib-actions instead to allow for more complex scenarios? +- Should there be a way to more intelligently check if _Hold_ can be done between two Parts? (perhaps a new blueprint method?) +::: + +The blueprints have to label parts as supporting _Hold_. +You can do this with the [`holdMode`](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.IBlueprintPart.html#holdMode) property, and labelling it possible to _Hold_ from or to the part. + +Note: If the user manipulates what part is set as next, they will be able to do a _Hold_ between parts that are not sequential in the Rundown. + +You also have to label Pieces as something to extend into the _Hold_. Not every piece will be wanted, so it is opt-in. +You can do this with the [`extendOnHold`](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.IBlueprintPiece.html#extendOnHold) property. The pieces will get extended in the same way as infinite pieces, but limited to only be extended into the one part. The usual piece collision and priority logic applies. + +Finally, you may find that there are some timeline objects that you don't want to use inside of the extended pieces, or there are some objects in the part that you don't want active while the _Hold_ is. 
+You can mark an object with the [`holdMode`](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.TimelineObjectCoreExt.html#holdMode) property to specify its presence during a _Hold_. +The `HoldMode.ONLY` mode tells the object to only be used when in a _Hold_, which allows for doing some overrides in more complex scenarios. diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/intro.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/intro.md new file mode 100644 index 0000000000..ba84522629 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/intro.md @@ -0,0 +1,20 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +:::caution +Documentation for this page is yet to be written. +::: + +[Blueprints](../../user-guide/concepts-and-architecture.md#blueprints) are programs that run inside Sofie Core and interpret +data coming in from the Rundowns and transform that into playable elements. They use an API published in [@sofie-automation/blueprints-integration](https://nrkno.github.io/sofie-core/typedoc/modules/_sofie_automation_blueprints_integration.html) library to expose their functionality and communicate with Sofie Core. + +Technically, a Blueprint is a JavaScript object, implementing one of the `BlueprintManifestBase` interfaces. + +Currently, there are three types of Blueprints: + +- [Show Style Blueprints](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.ShowStyleBlueprintManifest.html) - handling converting NRCS Rundown data into Sofie Rundowns and content. +- [Studio Blueprints](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.StudioBlueprintManifest.html) - handling selecting ShowStyles for a given NRCS Rundown and assigning NRCS Rundowns to Sofie Playlists +- [System Blueprints](https://nrkno.github.io/sofie-core/typedoc/interfaces/_sofie_automation_blueprints_integration.SystemBlueprintManifest.html) - handling system provisioning and global configuration diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/lookahead.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/lookahead.md new file mode 100644 index 0000000000..7c2d644969 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/lookahead.md @@ -0,0 +1,96 @@ +# Lookahead + +Lookahead allows Sofie to look into future Parts and Pieces, in order to preload or preview what is coming up. The aim is to fill in the gaps between your TimelineObjects with lookahead versions of these objects. +In this way, it can be used to provide functionality such as an AUX on your vision mixer showing the next cut, or to load the next clip into the media player. + +## Defining + +Lookahead can be enabled by configuring a few properties on a mapping: + +```ts +/** What method core should use to create lookahead objects for this layer */ +lookahead: LookaheadMode +/** The minimum number lookahead objects to create from future parts for this layer. Default = 1 */ +lookaheadDepth?: number +/** Maximum distance to search for lookahead. 
Default = undefined */
+lookaheadMaxSearchDistance?: number
+```
+
+With `LookaheadMode` defined as:
+
+```ts
+export enum LookaheadMode {
+	/**
+	 * Disable lookahead for this layer
+	 */
+	NONE = 0,
+	/**
+	 * Preload content with a secondary layer.
+	 * This requires support from the TSR device, to allow for preloading on a resource at the same time as it being on air.
+	 * For example, this allows for your TimelineObjects to control the foreground of a CasparCG layer, with lookahead controlling the background of the same layer.
+	 */
+	PRELOAD = 1,
+	/**
+	 * Fill the gaps between the planned objects on a layer.
+	 * This is the primary lookahead mode, and appears to TSR devices as a single layer of simple objects.
+	 */
+	WHEN_CLEAR = 3,
+}
+```
+
+If undefined, `lookaheadMaxSearchDistance` currently has a default distance of 10 parts. This number was chosen arbitrarily, and could change in the future. Be careful not to set the distance too high: all the Pieces from the parts being searched have to be loaded from the database, which can come at a noticeable cost.
+
+If you are doing [AB Playback](./ab-playback.md), or performing some other processing of the timeline in `onTimelineGenerate`, you may benefit from increasing the value of `lookaheadDepth`. In the case of AB Playback, you will likely want to set it to the number of players available in your pool.
+
+Typically, TimelineObjects do not need anything special to support lookahead, other than a sensible `priority` value. Lookahead objects are given a priority between `0` and `0.1`. Generally, your baseline objects should have a priority of `0` so that they are overridden by lookahead, and any objects from your Parts and Pieces should have a priority of `1` or higher, so that they override lookahead objects.
+
+If there are any keyframes on TimelineObjects that should be preserved when being converted to a lookahead object, they will need the `preserveForLookahead` property set.
+
+## How it works
+
+Lookahead is calculated while the timeline is being built, and searches based on the playhead, rather than looking at the planned Parts.
+
+The search operates per layer, first looking at the current PartInstance, then the next PartInstance, and then any Parts after the next PartInstance in the rundown. Any Parts marked as `invalid` or `floated` are ignored. This is what allows lookahead to be dynamic based on what the User is doing and intending to play.
+
+It searches Parts in that order until it has either searched through `lookaheadMaxSearchDistance` Parts, or has found at least `lookaheadDepth` future timeline objects.
+
+Any pieces marked as `pieceType: IBlueprintPieceType.InTransition` will be considered only if playout intends to use the transition.
+If an object is found in both a normal piece with `{ start: 0 }` and in an InTransition piece, then the objects from the normal piece will be ignored.
+
+These objects are then processed and added to the timeline. This is done in one of two ways:
+
+1. As timed objects.
+   If the object selected for lookahead is already on the timeline (it is in the current part, or the next part and autonext is enabled), then timed lookahead objects are generated. These objects fill in the gaps, and get their `enable` set to reference the objects on the timeline that they are filling between.
+   The `lookaheadDepth` setting of the mapping is ignored for these objects.
+
+2. As future objects.
+   If the object selected for lookahead is not on the timeline, then simpler objects are generated. These get an enable of either `{ while: '1' }`, or one set to start after the last timed object on that layer. This lets them fill all the time after any other known objects.
+   The `lookaheadDepth` setting of the mapping is respected for these objects, with this number defining the **minimum** number of future objects that will be produced. These future objects are inserted with a decreasing `priority`, starting at 0.1 and decreasing towards, but never reaching, 0.
+   When using the `WHEN_CLEAR` lookahead mode, all but the first will be set as `disabled`, to ensure they aren't considered for being played out. These `disabled` objects can be used by `onTimelineGenerate`, or they will be dropped from the timeline if left `disabled`.
+   When there are multiple future objects on a layer, only the first is useful for playout directly, but the others are often utilised for [AB Playback](./ab-playback.md).
+
+Some additional changes are made when processing each lookahead timeline object:
+
+- The `id` is processed to be unique
+- The `isLookahead` property is set as true
+- If the object has any keyframes, any not marked with `preserveForLookahead` are removed
+- The object is removed from any group it was contained within
+- If the lookahead mode used is `PRELOAD`, then the layer property is changed, with the `lookaheadForLayer` property set to indicate the layer it is for.
+
+The resulting objects are appended to the timeline and included in the call to `onTimelineGenerate` and the [AB Playback](./ab-playback.md) resolving.
+
+## Advanced Scenarios
+
+Because the lookahead objects are included in the timeline passed to `onTimelineGenerate`, you have the ability to make changes to the lookahead output.
+
+[AB Playback](./ab-playback.md) started out as being implemented inside of `onTimelineGenerate`, and relies on lookahead objects being produced before reassigning them to other mappings.
+
+If any objects found by lookahead have a class `_lookahead_start_delay`, they will be given a short delay in their start time. This is a hack introduced to work around a timing issue. At some point this will be removed once a proper solution is found.
+
+Sometimes it can be useful to have keyframes which are only applied when in lookahead. That can be achieved by setting `preserveForLookahead`, disabling the keyframe, and then re-enabling it inside `onTimelineGenerate` at the correct time.
+
+It is possible to implement a 'next' AUX on your vision mixer (see the sketch below) by:
+
+- Setting up the mapping with `lookaheadDepth: 1` and `lookahead: LookaheadMode.WHEN_CLEAR`.
+- Having each Part create a TimelineObject on this mapping. Crucially, these have a priority of 0.
+- Letting lookahead run: it will insert its objects, overriding your predefined ones (because of their higher priority), resulting in the AUX always showing the lookahead object.
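+
+As a hedged illustration of this recipe, a minimal sketch follows. The `lookahead*` properties are the ones documented on this page; the import path, the `device`/`deviceId` fields, the layer name and the content object are assumptions, standing in for whatever your device type actually requires.
+
+```ts
+import { LookaheadMode } from '@sofie-automation/blueprints-integration' // assumed import path
+
+// Hypothetical mapping for a vision mixer AUX used as a 'next' preview
+const auxNextMapping = {
+	device: 'ATEM', // placeholder device type
+	deviceId: 'atem0', // placeholder device id
+	lookahead: LookaheadMode.WHEN_CLEAR, // fill the gaps on this layer with lookahead objects
+	lookaheadDepth: 1, // a preview only needs one future object
+	lookaheadMaxSearchDistance: 10, // the current default; higher values load more Pieces
+}
+
+// A baseline object on the same layer, with priority 0 so that lookahead
+// objects (priority between 0 and 0.1) always override it.
+const auxDefaultObject = {
+	id: 'auxNextDefault',
+	enable: { while: '1' },
+	layer: 'atem_aux_next', // hypothetical layer name, matching the mapping above
+	priority: 0,
+	content: {}, // device-specific content omitted
+}
+```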
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/part-and-piece-timings.mdx b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/part-and-piece-timings.mdx
new file mode 100644
index 0000000000..2b21205a3c
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/part-and-piece-timings.mdx
@@ -0,0 +1,141 @@
+import { PartTimingsDemo } from './_part-timings-demo'
+
+# Part and Piece Timings
+
+Parts and pieces are the core groups that form the timeline, and define start and end caps for the other timeline objects.
+
+When referring to the timeline in this page, we mean the built timeline objects that are sent to the playout-gateway.
+It is made of the previous PartInstance, the current PartInstance and sometimes the next PartInstance.
+
+### The properties
+
+These are stripped down interfaces, containing only the properties that are relevant for the timeline generation:
+
+```ts
+export interface IBlueprintPart {
+	/** Should this item progress to the next automatically */
+	autoNext?: boolean
+	/** How much to overlap when doing autonext */
+	autoNextOverlap?: number
+
+	/** Timings for the inTransition, when supported and allowed */
+	inTransition?: IBlueprintPartInTransition
+
+	/** Should we block the inTransition when starting the next Part */
+	disableNextInTransition?: boolean
+
+	/** Timings for the outTransition, when supported and allowed */
+	outTransition?: IBlueprintPartOutTransition
+
+	/** Expected duration of the line, in milliseconds */
+	expectedDuration?: number
+}
+
+/** Timings for the inTransition, when supported and allowed */
+export interface IBlueprintPartInTransition {
+	/** Duration this transition blocks a take for. After this time, another take is allowed, which may cut this transition off early */
+	blockTakeDuration: number
+	/** Duration the previous part should be kept playing once the transition is started. Typically the duration of it remaining in-vision */
+	previousPartKeepaliveDuration: number
+	/** Duration the pieces of the part should be delayed for once the transition starts. Typically the duration until the new part is in-vision */
+	partContentDelayDuration: number
+}
+
+/** Timings for the outTransition, when supported and allowed */
+export interface IBlueprintPartOutTransition {
+	/** How long to keep this part alive after taken out */
+	duration: number
+}
+
+export interface IBlueprintPiece {
+	/** Timeline enabler. When the piece should be active on the timeline. */
+	enable: {
+		start: number | 'now' // 'now' is only valid from adlib-actions when inserting into the current part
+		duration?: number
+	}
+
+	/** Whether this piece is a special piece */
+	pieceType: IBlueprintPieceType
+
+	/// from IBlueprintPieceGeneric:
+
+	/** Whether and how the piece is infinite */
+	lifespan: PieceLifespan
+
+	/**
+	 * How long this piece needs to prepare its content before it will have an effect on the output.
+	 * This allows for flows such as starting a clip playing, then cutting to it after some ms once the player is outputting frames.
+	 */
+	prerollDuration?: number
+}
+
+/** Special types of pieces.
Some are only used in certain circumstances */
+export enum IBlueprintPieceType {
+	Normal = 'normal',
+	InTransition = 'in-transition',
+	OutTransition = 'out-transition',
+}
+```
+
+### Concepts
+
+#### Piece Preroll
+
+Often, a Piece will need some time to do some preparation steps on a device before it should be considered as active. A common example is playing a video, as it often takes the player a couple of frames before the first frame is output to SDI.
+This preparation time can be declared with the `prerollDuration` property on the Piece. A general rule to follow is that the piece should not have any visible or audible effect on the output until `prerollDuration` has elapsed into the piece.
+
+When the timeline is built, the Pieces get their start times adjusted to allow for every Piece in the part to have its preroll time. If you look at the auto-generated pieceGroup timeline objects, their times will rarely match the times specified by the blueprints. Additionally, the previous Part will overlap into the Part long enough for the preroll to complete.
+
+Try the interactive demo below to see how the `prerollDuration` properties interact.
+
+#### In Transition
+
+The in transition is a special Piece that can be played when taking into a Part. It is represented as a Piece, partly to show the user the transition type and duration, and partly to allow for timeline changes to be applied when the timeline generation thinks appropriate.
+
+When the `inTransition` is set on a Part, it will be applied when taking into that Part. During this time, any Pieces with `pieceType: IBlueprintPieceType.InTransition` will be added to the timeline, and the `IBlueprintPieceType.Normal` Pieces in the Part will be delayed based on the numbers from `inTransition`.
+
+Try the interactive demo below to see how an inTransition affects the Piece and Part layout.
+
+#### Out Transition
+
+The out transition is a special Piece that gets played when taking out of the Part. It is intended to allow for some 'visual cleanup' before the take occurs.
+
+In effect, when `outTransition` is set on a Part, the take out of the Part will be delayed by the duration defined. During this time, any pieces with `pieceType: IBlueprintPieceType.OutTransition` will be added to the timeline and will run until the end of the Part.
+
+Try the interactive demo below to see how this affects the Parts.
+
+#### Piece Postroll
+
+Sometimes, rather than extending all the pieces and playing an out transition piece on top, we want all pieces to stop except for one. This has the same goal of 'visual cleanup' as the out transition, but works slightly differently: an out transition delays the take slightly, while with postroll the take executes normally and the pieces with postroll keep playing for a bit after the take.
+
+When `postrollDuration` is set on a piece, the part group will be extended slightly, allowing those pieces to play a little longer; any pieces that do not have postroll will end at their regular time.
+
+#### Autonext
+
+Autonext is a way for a Part to be made a fixed length. After playing for its `expectedDuration`, core will automatically perform a take into the next part. This is commonly used for fullscreen videos, to exit back to a camera before the video freezes on the last frame. It is enabled by setting `autoNext: true` on a Part, and requires `expectedDuration` to be set to a duration higher than `1000` milliseconds.
+
+In other situations, it can be desirable for a Part to overlap the next one for a few seconds.
This is common for Parts such as a title sequence or bumpers, where the sequence ends with a keyer effect which should reveal the next Part.
+To achieve this, you can set `autoNextOverlap: 1000 // ms` to make the parts overlap on the timeline. In doing so, the in transition for the next Part will be ignored.
+
+The `autoNextOverlap` property can be thought of as an override for the inTransition on the next part, defined as:
+
+```ts
+const inTransition = {
+	blockTakeDuration: 1000,
+	partContentDelayDuration: 0,
+	previousPartKeepaliveDuration: 1000,
+}
+```
+
+#### Infinites
+
+Pieces with an infinite lifespan (i.e. not `lifespan: PieceLifespan.WithinPart`) are handled differently from other pieces.
+
+Only one pieceGroup is created for an infinite Piece that is present in more than one of the current, next and previous Parts.
+The Piece calculates and tracks its own playback start times, which are preserved and reused in future takes. On the timeline it lives outside of the partGroups, but still gets the same caps applied when appropriate.
+
+### Interactive timings demo
+
+Use the sliders below to see how various Preroll and In & Out Transition timing properties interact with each other.
+
+<PartTimingsDemo />
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/sync-ingest-changes.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/sync-ingest-changes.md
new file mode 100644
index 0000000000..0d34a7c935
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/sync-ingest-changes.md
@@ -0,0 +1,23 @@
+---
+title: Sync Ingest Changes
+---
+
+Since PartInstances and PieceInstances were added to Sofie, the default behaviour in Sofie is to not propagate any ingest changes from a Part onto its PartInstances.
+
+This is a safety net: without a detailed understanding of the Part and the change, we can't know whether it is safe to apply on air. Without it, the user could change a clip name in the NRCS, and Sofie happily propagating that could result in a sudden change of clip mid-sentence, or black if the clip still needed to be copied to the playout server. This gets even more complicated when we consider that an adlib-action could have already modified a PartInstance, with changes that should likely not be overwritten with the newly ingested Part.
+
+Instead, this propagation can be implemented by a ShowStyle blueprint in the `syncIngestUpdateToPartInstance` method; this way the implementation can be tailored to understand the change and its potential impact. This method is able to update the previous, current and next PartInstances. Any PartInstance older than the previous one is no longer used on the timeline, so it is now simply a record of how it was played, and updating it would have no benefit. Sofie never generates anything further than the next PartInstance, so for any Part after that only the Part itself exists, and any changes will be used when it becomes the next.
+
+In this blueprint method, you are able to update almost any of the properties that are available to you both during ingest and during adlib actions. It is possible to leave the Part in a broken state after this, so care must be taken to ensure it is not. If the call to your method throws an uncaught error, the changes you have made so far will be discarded, but the rest of the ingest operation will continue as normal.
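+
+As a rough illustration of the shape such an implementation can take, here is a minimal sketch. The argument and property names follow the tips below, but the types, the context shape and the `metaData` fields are assumptions; the real signature should be taken from the `@sofie-automation/blueprints-integration` typings.
+
+```ts
+// Illustrative metaData shape; real blueprints define their own
+interface MyPartMetaData {
+	modifiedByAction?: boolean // set by adlib-actions that make syncing unsafe
+	primaryHash?: string // hash of the on-air-critical ingest data
+}
+
+export function syncIngestUpdateToPartInstance(
+	context: { updatePartInstance: (part: unknown) => void }, // assumed context shape
+	existingPartInstance: { partInstance: { part: { metaData?: MyPartMetaData } } },
+	newData: { part?: { metaData?: MyPartMetaData } },
+	playStatus: 'previous' | 'current' | 'next'
+): void {
+	// An orphaned PartInstance has no backing Part, so there is nothing to sync from
+	const newPart = newData.part
+	if (!newPart) return
+
+	const oldMeta = existingPartInstance.partInstance.part.metaData
+	const newMeta = newPart.metaData
+
+	// Skip if an adlib-action has modified this PartInstance in a way that makes sync unsafe
+	if (oldMeta?.modifiedByAction) return
+
+	// A change is considered safe on air if the on-air-critical content is untouched;
+	// the next PartInstance can always be updated.
+	const safeOnAir = oldMeta?.primaryHash === newMeta?.primaryHash
+	if (playStatus === 'next' || safeOnAir) {
+		context.updatePartInstance(newPart) // hypothetical helper applying the new Part data
+	}
+}
+```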
+
+### Tips
+
+- You should make use of the `metaData` fields on each Part and Piece to help work out what has changed. At NRK, we store the parsed ingest data (after converting the MOS to an intermediary json format) for the Part here, so that we can do a detailed diff to figure out whether a change is safe to accept.
+
+- You should track in `metaData` whether a part has been modified by an adlib-action in a way that makes this sync unsafe.
+
+- At NRK, we differentiate the Pieces into `primary`, `secondary` and `adlib`. This allows us to control the updates more granularly.
+
+- `newData.part` will be `undefined` when the PartInstance is orphaned. Generally, it's useful to differentiate the behaviour of the implementation of this function based on the `existingPartInstance.partInstance.orphaned` state.
+
+- `playStatus: previous` means that the currentPartInstance is `orphaned: adlib-part` and thus possibly depends on an already past PartInstance for some of its properties. Therefore the blueprint is allowed to modify the most recently played non-adlibbed PartInstance using ingested data.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/timeline-datastore.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/timeline-datastore.md
new file mode 100644
index 0000000000..e739ee0add
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/for-blueprint-developers/timeline-datastore.md
@@ -0,0 +1,85 @@
+# Timeline Datastore
+
+The timeline datastore is a key-value store that can be used in conjunction with the timeline. The benefit of modifying values in the datastore is that the timings in the timeline are not modified, so we can skip a lot of complicated calculations, which reduces the system response time. An example use case of the datastore feature is a fast path for cutting cameras.
+
+## API
+
+To use the timeline datastore feature, two APIs are involved: the timeline object has to contain a reference to a key in the datastore, and the blueprints have to add a value for that key to the datastore. These references are added on the content field.
+
+### Timeline API
+
+```ts
+/**
+ * An object containing references to the datastore
+ */
+export interface TimelineDatastoreReferences {
+	/**
+	 * localPath is the path to the property in the content object to override
+	 */
+	[localPath: string]: {
+		/** Reference to the Datastore key where to fetch the value */
+		datastoreKey: string
+		/**
+		 * If true, the referenced value in the Datastore is only applied after the timeline-object has started (ie a later-started timeline-object will not be affected)
+		 */
+		overwrite: boolean
+	}
+}
+```
+
+### Timeline API example
+
+```ts
+const tlObj = {
+	id: 'obj0',
+	enable: { start: 1000 },
+	layer: 'layer0',
+	content: {
+		deviceType: DeviceType.Atem,
+		type: TimelineObjectAtem.MixEffect,
+
+		$references: {
+			'me.input': {
+				datastoreKey: 'camInput',
+				overwrite: true,
+			},
+		},
+
+		me: {
+			input: 1,
+			transition: TransitionType.Cut,
+		},
+	},
+}
+```
+
+### Blueprints API
+
+Values can be added and removed from the datastore through the adlib actions API.
+
+```ts
+interface DatastoreActionExecutionContext {
+	setTimelineDatastoreValue(key: string, value: unknown, mode: DatastorePersistenceMode): Promise<void>
+	removeTimelineDatastoreValue(key: string): Promise<void>
+}
+
+enum DatastorePersistenceMode {
+	Temporary = 'temporary',
+	Indefinite = 'indefinite',
+}
+```
+
+The data persistence modes work as follows:
+
+- Temporary: this key-value pair may be cleaned up if it is no longer referenced from the timeline; in practice this will currently only happen during deactivation of a rundown
+- Indefinite: this key-value pair may _not_ be automatically removed (it can still be removed by the blueprints)
+
+The above context methods may be used from the usual adlib actions context, but there is also a special path where none of the usual cached data is available, as loading the caches may take some time: the `executeDataStoreAction` method is executed just before the `executeAction` method.
+
+## Example use case: camera cutting fast path
+
+Assuming a set of blueprints where we can cut cameras on a vision mixer's mix effect by using adlib pieces, we want to add a fast path where the camera input is changed through the datastore first, and the piece is added afterwards for correctness.
+
+1. If you haven't yet, convert the current camera adlibs to adlib actions by exporting the `IBlueprintActionManifest` as part of your `getRundown` implementation and implementing an adlib action in your `executeAction` handler that adds your camera piece.
+2. Modify any camera pieces (including the one from your adlib action) to contain a reference to the datastore (see the Timeline API example).
+3. Implement an `executeDataStoreAction` handler as part of your blueprints. When this handler receives the action for your camera adlib, it should call the `setTimelineDatastoreValue` method with the key you used in the timeline object (in the example it's `camInput`), the new input for the vision mixer, and the `DatastorePersistenceMode.Temporary` persistence mode.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/intro.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/intro.md
new file mode 100644
index 0000000000..6b5caa33ca
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/intro.md
@@ -0,0 +1,15 @@
+---
+sidebar_label: Introduction
+sidebar_position: 1
+---
+
+# For Developers
+
+The pages below are intended for developers of any of the Sofie-related repos and/or blueprints.
+
+A read-through of the [Concepts & Architectures](../user-guide/concepts-and-architecture.md) is recommended before diving too deep into development.
+
+- [Libraries](libraries.md)
+- [Contribution Guidelines](contribution-guidelines.md)
+- [For Blueprint Developers](for-blueprint-developers/intro.md)
+- [API Documentation](api-documentation.md)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/json-config-schema.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/json-config-schema.md
new file mode 100644
index 0000000000..1d6df1db25
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/json-config-schema.md
@@ -0,0 +1,209 @@
+---
+sidebar_label: JSON Config Schema
+sidebar_position: 7
+---
+
+# JSON Config Schema
+
+So that Sofie does not have to be aware of every type of gateway that may connect to it, each gateway provides a manifest describing itself and the configuration fields that it has.
+
+Since version 1.50, this is done using [JSON Schemas](https://json-schema.org/). This allows schemas to be written once, with typescript interfaces generated from the schema, and the same schema used to render a flexible UI.
+We recommend using [json-schema-to-typescript](https://github.com/bcherny/json-schema-to-typescript) to generate typescript interfaces.
+
+Only a subset of the JSON Schema specification is supported, and some additional properties are used for the UI.
+
+We expect this subset to grow over time as more sections are found to be useful to us, but we will proceed cautiously to avoid constantly breaking other applications that use TSR and these schemas.
+
+## Non-standard properties
+
+We use some non-standard properties to help the UI render with friendly names.
+
+### `ui:category`
+
+Note: Only valid for blueprint configuration.
+
+Category of the property.
+
+### `ui:title`
+
+Title of the property.
+
+### `ui:description`
+
+Description/hint for the property.
+
+### `ui:summaryTitle`
+
+If set, when in a table, this property will be used as part of the summary with this label.
+
+### `ui:zeroBased`
+
+If an integer property, whether to treat it as zero-based.
+
+### `ui:displayType`
+
+Override the presentation with a special mode.
+Currently only valid for string properties. The only valid value is 'json'.
+
+### `tsEnumNames`
+
+This is primarily for `json-schema-to-typescript`.
+
+Names of the enum values as generated for the typescript enum, which we display in the UI instead of the raw values.
+
+### `ui:sofie-enum` & `ui:sofie-enum:filter`
+
+Note: Only valid for blueprint configuration.
+
+Sometimes it can be useful to reference other values. This property can be used on string fields, to let Sofie generate a dropdown populated with values valid in the current context.
+
+#### `mappings`
+
+Valid for both show-style and studio blueprint configuration.
+
+This will provide a dropdown of all mappings in the studio, or studios where the show-style can be used.
+
+Setting `ui:sofie-enum:filter` to an array of strings will filter the dropdown by the specified DeviceType.
+
+#### `source-layers`
+
+Valid only for show-style blueprint configuration.
+
+This will provide a dropdown of all source-layers in the show-style.
+
+Setting `ui:sofie-enum:filter` to an array of numbers will filter the dropdown by the specified SourceLayerType.
+
+## Supported types
+
+Any JSON Schema property or type is allowed, but will be ignored if it is not supported.
+
+In general, if a `default` is provided, we will use that as a placeholder in the input field.
+
+### `object`
+
+This should be used as the root of your schema, and can be used anywhere inside it. The properties inside any object will be shown if they are supported.
+
+You may want to set the `title` property to generate a typescript interface for it.
+
+See the examples to see how to create a table for an object.
+
+`ui:displayType` can be set to `json` to allow for manual editing of an arbitrary json object.
+
+### `integer`
+
+`enum` can be set with an array of values to turn it into a dropdown.
+
+### `number`
+
+### `boolean`
+
+### `string`
+
+`enum` can be set with an array of values to turn it into a dropdown.
+
+`ui:sofie-enum` can be used to make a special dropdown.
+
+### `array`
+
+The behaviour of this depends on the type of the `items`.
+
+#### `string`
+
+`enum` can be set with an array of values to turn it into a dropdown.
+
+`ui:sofie-enum` can be used to make a special dropdown.
+
+Otherwise it is treated as a multi-line string, stored as an array of strings.
+
+#### `object`
+
+This is not available in all places we use this schema. For example, Mappings cannot use this, but device configuration can. Additionally, using it inside of another object-array is not allowed.
+
+## Examples
+
+Below is an example of a simple schema for a gateway configuration. The subdevices are handled separately, with their own schema.
+
+```json
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/product.schema.json",
+  "title": "Mos Gateway Config",
+  "type": "object",
+  "properties": {
+    "mosId": {
+      "type": "string",
+      "ui:title": "MOS ID of Mos-Gateway (Sofie MOS ID)",
+      "ui:description": "MOS ID of the Sofie MOS device (ie our ID). Example: sofie.mos",
+      "default": ""
+    },
+    "debugLogging": {
+      "type": "boolean",
+      "ui:title": "Activate Debug Logging",
+      "default": false
+    }
+  },
+  "required": ["mosId"],
+  "additionalProperties": false
+}
```
+
+### Defining a table as an object
+
+In the generated typescript interface, this will produce a property `"TestTable": { [id: string]: TestConfig }`.
+
+The key part here is that it is an object with no `properties` defined, and a single `patternProperties` value performing a catch-all.
+
+An `object` table is better than an `array` in blueprint-configuration, as it allows the UI to override individual values, instead of the table as a whole.
+
+```json
+"TestTable": {
+  "type": "object",
+  "ui:category": "Test",
+  "ui:title": "Test table",
+  "ui:description": "",
+  "patternProperties": {
+    "": {
+      "type": "object",
+      "title": "TestConfig",
+      "properties": {
+        "number": {
+          "type": "integer",
+          "ui:title": "Number",
+          "ui:description": "Camera number",
+          "ui:summaryTitle": "Number",
+          "default": 1,
+          "min": 0
+        },
+        "port": {
+          "type": "integer",
+          "ui:title": "Port",
+          "ui:description": "ATEM Port",
+          "default": 1,
+          "min": 0
+        }
+      },
+      "required": ["number", "port"],
+      "additionalProperties": false
+    }
+  },
+  "additionalProperties": false
+},
```
+
+### Select multiple ATEM device mappings
+
+```json
+"mappingId": {
+  "type": "array",
+  "ui:title": "Mapping",
+  "ui:description": "",
+  "ui:summaryTitle": "Mapping",
+  "items": {
+    "type": "string",
+    "ui:sofie-enum": "mappings",
+    "ui:sofie-enum:filter": ["ATEM"]
+  },
+  "uniqueItems": true
+},
```
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/libraries.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/libraries.md
new file mode 100644
index 0000000000..2352cc0ed8
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/libraries.md
@@ -0,0 +1,62 @@
+---
+description: List of all repositories related to Sofie
+sidebar_position: 5
+---
+
+# Applications & Libraries
+
+## Main Application
+
+[**Sofie Core**](https://github.com/nrkno/sofie-core) is the main application that serves the web GUI and handles the core logic.
+
+## Gateways and Services
+
+Together with the _Sofie Core_ there are several _gateways_, which are separate applications that connect to _Sofie Core_ and are managed from within the Core's web UI.
+
+* [**Playout Gateway**](https://github.com/nrkno/sofie-core/tree/master/packages/playout-gateway) Handles the playout from _Sofie_. Connects to and controls a multitude of devices, such as vision mixers, graphics, light controllers, audio mixers etc.
+* [**MOS Gateway**](https://github.com/nrkno/sofie-core/tree/master/packages/mos-gateway) Connects _Sofie_ to a newsroom system \(NRCS\) and ingests rundowns via the [MOS protocol](http://mosprotocol.com/).
+* [**Live Status Gateway**](https://github.com/nrkno/sofie-core/tree/master/packages/live-status-gateway) Allows external systems to subscribe to state changes in Sofie.
+* [**iNEWS Gateway**](https://github.com/tv2/inews-ftp-gateway) Connects _Sofie_ to an Avid iNEWS newsroom system.
+* [**Spreadsheet Gateway**](https://github.com/SuperFlyTV/spreadsheet-gateway) Connects _Sofie_ to a _Google Drive_ folder and ingests rundowns from _Google Sheets_.
+* [**Input Gateway**](https://github.com/nrkno/sofie-input-gateway) Connects _Sofie_ to various input devices, allowing _User-Actions_ to be triggered from these devices.
+* [**Package Manager**](https://github.com/nrkno/sofie-package-manager) Handles media asset transfer and media file management for pulling new files, deleting expired files on playout devices and generating additional metadata (previews, thumbnails, automated QA checks) in a more performant, and possibly distributed, way. Can smartly figure out how to get a file from storage A to playout server B.
+
+
+## Libraries
+
+There are a number of libraries used in the Sofie ecosystem:
+
+* [**ATEM Connection**](https://github.com/nrkno/sofie-atem-connection) Library for communicating with Blackmagic Design's ATEM mixers.
+* [**ATEM State**](https://github.com/nrkno/sofie-atem-state) Used in TSR to track the state of ATEMs and generate commands to control them.
+* [**CasparCG Server Connection**](https://github.com/SuperFlyTV/casparcg-connection) developed by **[_SuperFly.tv_](https://github.com/SuperFlyTV)** Library to connect and interact with CasparCG Servers.
+* [**CasparCG State**](https://github.com/superflytv/casparcg-state) developed by **[_SuperFly.tv_](https://github.com/SuperFlyTV)** Used in TSR to track the state of CasparCG Servers and generate commands to control them.
+* [**Ember+ Connection**](https://github.com/nrkno/sofie-emberplus-connection) Library to communicate with the _Ember+_ control protocol.
+* [**HyperDeck Connection**](https://github.com/nrkno/sofie-hyperdeck-connection) Library for connecting to Blackmagic Design's HyperDeck recorders.
+* [**MOS Connection**](https://github.com/nrkno/sofie-mos-connection/) A [_MOS protocol_](http://mosprotocol.com/) library for acting as a MOS device and connecting to a newsroom control system.
+* [**Quantel Gateway Client**](https://github.com/nrkno/sofie-quantel-gateway-client) An interface that talks to the Quantel-Gateway application.
+* [**Sofie Core Integration**](https://github.com/nrkno/sofie-core-integration) Used by the Gateways to connect to [Sofie Core](https://github.com/nrkno/sofie-core).
+* [**Sofie Blueprints Integration**](https://github.com/nrkno/sofie-sofie-blueprints-integration) Common types and interfaces used by both Sofie Core and the user-defined blueprints.
+* [**SuperFly-Timeline**](https://github.com/SuperFlyTV/supertimeline) developed by **[_SuperFly.tv_](https://github.com/SuperFlyTV)** Resolver and rules for placing objects on a virtual timeline.
+* [**ThreadedClass**](https://github.com/nytamin/threadedClass) developed by **[_Nytamin_](https://github.com/nytamin)** Used in TSR to spawn device controllers in separate processes.
+* [**Timeline State Resolver**](https://github.com/nrkno/sofie-timeline-state-resolver) \(TSR\) The main driver in the **Playout Gateway**; handles connections to playout devices and sends commands based on a **Timeline** received from **Core**.
+
+
+There are also a few typings-only libraries that define interfaces between applications:
+
+* [**Blueprints Integration**](https://www.npmjs.com/package/@sofie-automation/blueprints-integration) Defines the interface between [**Blueprints**](../user-guide/concepts-and-architecture.md#blueprints) and **Sofie Core**.
+* [**Timeline State Resolver types**](https://www.npmjs.com/package/timeline-state-resolver-types) Defines the interface between [**Blueprints**](../user-guide/concepts-and-architecture.md#blueprints) and the timeline that will be fed into **TSR** for playout.
+
+## Other Sofie-related Repositories
+
+* [**CasparCG Server** \(NRK fork\)](https://github.com/nrkno/sofie-casparcg-server) Sofie-specific fork of CasparCG Server.
+* [**CasparCG Launcher**](https://github.com/nrkno/sofie-casparcg-launcher) Launcher, controller, and logger for CasparCG Server.
+* [**CasparCG Media Scanner** \(NRK fork\)](https://github.com/nrkno/sofie-casparcg-server) Sofie-specific fork of CasparCG Server 2.2 Media Scanner.
+* [**Sofie Chef**](https://github.com/nrkno/sofie-chef) A simple Chromium-based renderer, used for kiosk mode rendering of web pages.
+* [**Media Manager**](https://github.com/nrkno/sofie-media-management) *(deprecated)* Handles media transfer and media file management for pulling new files and deleting expired files on playout devices.
+* [**Quantel Browser Plugin**](https://github.com/nrkno/sofie-quantel-browser-plugin) MOS-compatible Quantel video clip browser for use with Sofie.
+* [**Sisyfos Audio Controller**](https://github.com/nrkno/sofie-sisyfos-audio-controller) *developed by [_olzzon_](https://github.com/olzzon/)*
+* [**Quantel Gateway**](https://github.com/nrkno/sofie-quantel-gateway) CORBA to REST gateway for _Quantel/ISA_ playback.
+
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/publications.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/publications.md
new file mode 100644
index 0000000000..ed3b377532
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/publications.md
@@ -0,0 +1,43 @@
+---
+title: Publications
+sidebar_position: 12
+---
+
+To ensure that the UI of Sofie is reactive, we are leveraging publications over the DDP connection that Meteor provides.
+In its most basic form, this allows MongoDB document updates to be streamed to the UI as they happen. There is also a structure in place for 'Custom Publications', which appear like a MongoDB collection to the client, but are in-memory collections of data generated on the server, allowing us to do some processing of the data before publishing it to the client.
+
+It is possible to subscribe to these publications outside of Meteor, but we have not found any maintained DDP clients, except for the one we are using in `server-core-integration`. The protocol is simple and stable, is documented on the [Meteor GitHub](https://github.com/meteor/meteor/blob/devel/packages/ddp/DDP.md), and should be easy to implement in another language if desired.
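+
+As a hedged illustration of that simplicity, the subscription handshake amounts to a few JSON messages over a websocket. The message shapes follow the DDP specification linked above; the endpoint URL and the publication name are hypothetical.
+
+```ts
+import WebSocket from 'ws'
+
+// Connect to a Meteor server's DDP endpoint (URL is an assumption)
+const ws = new WebSocket('ws://localhost:3000/websocket')
+
+ws.on('open', () => {
+	// DDP handshake, then subscribe to a (hypothetical) publication
+	ws.send(JSON.stringify({ msg: 'connect', version: '1', support: ['1'] }))
+	ws.send(JSON.stringify({ msg: 'sub', id: 'sub0', name: 'somePublication', params: [] }))
+})
+
+ws.on('message', (data) => {
+	const msg = JSON.parse(data.toString())
+	// Document updates stream in as added/changed/removed messages
+	if (msg.msg === 'added') console.log('added', msg.collection, msg.id, msg.fields)
+	if (msg.msg === 'ready') console.log('subscription ready')
+})
+```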
+
+All of the publication implementations reside in the [`meteor/server/publications` folder](https://github.com/nrkno/sofie-core/tree/master/meteor/server/publications), and are typically pretty well isolated from the rest of the code we have in Meteor.
+
+We prefer using publications in Sofie over polling because:
+
+- there are not enough DDP clients connected to a single Sofie installation for the number of connected clients to be problematic
+- polling can be costly for many of these publications without some form of caching or tracking changes (which starts to get to a similar level of complexity)
+- we can be more confident that all the clients have the same data, as the database is our point of truth
+- the system can be more reactive, as changes are pushed to interested parties with minimal intervention
+
+## MongoDB Publications
+
+The majority of data is sent to the client utilising Meteor's ability to publish a MongoDB cursor. This allows us to run a MongoDB query on the backend, and let Meteor handle the publishing of individual changes.
+
+In some (typically older) publications, we let the client specify the MongoDB query to use for the subscription, where we perform some basic validation and authentication before executing the query.
+
+In the (typically newer) publications, we are formalising things a bit better by requiring some simpler parameters to the publication, with the query then generated on the backend. This helps us ensure that the queries are made with suitable indices, and that subscriptions are deduplicated where possible.
+
+## Custom Publications
+
+There has been a recent push towards using more 'custom' publications for streaming data to the UI. While we are unsure if this will be beneficial for every publication, it is really beneficial for some, as it allows us to do some pre-computation of data before sending it to the client.
+
+To achieve this, we have an `optimisedObserver` flow which is designed to help manage a custom publication, with a few methods to fill in to set up the reactivity and the data transformation.
+
+One such publication is `PieceContentStatus`; prior to version 1.50, this was computed inside the UI.
+In brief, this publication looks at each Piece in a Rundown, and reports whether the Piece is 'OK'. This check is primarily focussed on Pieces containing clips, where it will check the metadata generated by either package manager or media manager to ensure that the clip is marked as being ready for playout, that it has the correct format, and that it passes some other quality checks.
+
+Doing this on the client meant subscribing to the whole contents of a couple of MongoDB collections, as it is not easy to determine which documents will be needed until the check is run. This caused some issues, as these collections could get rather large. We also did not always have every Piece loaded in the UI, so we had to defer some of the computation to the backend via polling.
+
+This makes it more suitable for a custom publication, where we can do this computation more easily and cheaply, without being concerned about causing UI lockups and with less concern about memory pressure. Performing very granular MongoDB queries is also cheaper. The result is that we build a graph of which other documents are used for the status of each Piece, so we can cheaply react to changes to any of those documents, while also watching for changes to the pieces.
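+
+For reference, the cursor-based flavour described under MongoDB Publications above is plain Meteor. A minimal sketch, where the collection name, publication name and field names are hypothetical:
+
+```ts
+import { Meteor } from 'meteor/meteor'
+import { Mongo } from 'meteor/mongo'
+import { check } from 'meteor/check'
+
+const Parts = new Mongo.Collection('parts') // hypothetical collection
+
+// The query is built on the backend from a simple, validated parameter,
+// rather than letting the client supply a raw MongoDB query.
+Meteor.publish('parts.inRundown', function (rundownId: string) {
+	check(rundownId, String)
+	// Returning a cursor lets Meteor stream added/changed/removed updates to subscribers
+	return Parts.find({ rundownId }, { fields: { privateData: 0 } })
+})
+```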
+
+## Live Status Gateway
+
+The Live Status Gateway was introduced to Sofie in version 1.50. This gateway serves as a way for an external system to subscribe to publications which are designed to be simpler than the ones we publish over DDP. These publications are intended to be used by external systems which need a 'stable' API and should not have too much knowledge about the inner workings of Sofie. See [API Stability](./api-stability.md) for more details.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/for-developers/worker-threads-and-locks.md b/packages/documentation/versioned_docs/version-1.50.0/for-developers/worker-threads-and-locks.md
new file mode 100644
index 0000000000..f20a47a567
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/for-developers/worker-threads-and-locks.md
@@ -0,0 +1,61 @@
+---
+title: Worker Threads & Locks
+sidebar_position: 9
+---
+
+Starting with v1.40.0 (Release 40), the core logic of Sofie is split across
+multiple threads. This has been done to minimise performance bottlenecks, such as ingest changes delaying takes. In its
+current state, it should not impact deployment of Sofie.
+
+In the initial implementation, these threads are run through [threadedClass](https://github.com/nytamin/threadedclass)
+inside of Meteor. As Meteor does not support the use of `worker_threads`, and to allow for future separation, the
+`worker_threads` are treated and implemented as if they are outside of the Meteor ecosystem. The code is isolated from
+Meteor inside of `packages/job-worker`, with some shared code placed in `packages/corelib`.
+
+Prior to v1.40.0, there was already a work-queue of sorts in Meteor. As such, the functions were already defined well enough
+to translate across to being on a true work queue. For now this work queue is still in-memory in the Meteor process, but we
+intend to investigate relocating this in a future release. This will be necessary as part of a larger task of allowing
+us to scale Meteor for better resiliency. Many parts of the worker system have been designed with this in mind, and so
+have sufficient abstraction in place already.
+
+### The Worker
+
+The worker process is designed to run the work for one or more studios. The initial implementation will run for all
+studios in the database, and monitors for studios being added or removed.
+
+For each studio, the worker runs 3 threads:
+
+1. The Studio/Playout thread. This is where all the playout operations are executed, as well as other operations that
+   require 'ownership' of the Studio.
+2. The Ingest thread. This is where all the MOS/Ingest updates are handled and fed through the blueprints.
+3. The events thread. Some low-priority tasks are pushed here, such as notifying ENPS about _the yellow line_, or running the
+   Blueprint methods used to generate External-Messages for the As-Run Log.
+
+In the future it is expected that there will be multiple ingest threads. How the work will be split across them is yet to be
+determined.
+
+### Locks
+
+At times, the playout and ingest threads both need to take ownership of `RundownPlaylists` and `Rundowns`.
+
+To facilitate this, there are a couple of lock types in Sofie. These are coordinated by the parent thread in the worker
+process.
+
+#### PlaylistLock
+
+This lock gives ownership of a specific `RundownPlaylist`. It is required to be able to load a `CacheForPlayout`, and
+must be held during other times where the `RundownPlaylist` is modified or is expected to not change.
+
+This lock must be held while writing any changes to either a `RundownPlaylist` or any `Rundown` that belongs to the
+`RundownPlaylist`. This ensures that any writes to MongoDB are atomic, and that Sofie doesn't start performing a
+playout operation halfway through an ingest operation being saved.
+
+#### RundownLock
+
+This lock gives ownership of a specific `Rundown`. It is required to be able to load a `CacheForIngest`, and must be held
+during other times where the `Rundown` is modified or is expected to not change.
+
+:::caution
+It is not allowed to acquire a `RundownLock` while inside of a `PlaylistLock`. This is to avoid deadlocks, as it is very
+common to acquire a `PlaylistLock` inside of a `RundownLock`.
+:::
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/concepts-and-architecture.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/concepts-and-architecture.md
new file mode 100644
index 0000000000..bce331a9f7
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/concepts-and-architecture.md
@@ -0,0 +1,192 @@
+---
+sidebar_position: 1
+---
+
+# Concepts & Architecture
+
+## System Architecture
+
+![Example of a Sofie setup with a Playout Gateway and a Spreadsheet Gateway](/img/docs/main/features/playout-and-spreadsheet-example.png)
+
+### Sofie Core
+
+**Sofie Core** is a web server which handles business logic and serves the web GUI.
+It is a [NodeJS](https://nodejs.org/) process backed by a [MongoDB](https://www.mongodb.com/) database and based on the framework [Meteor](http://meteor.com/).
+
+### Gateways
+
+Gateways are applications that connect to Sofie Core and exchange data, such as rundown data from an NRCS or the [Timeline](#timeline) for playout.
+
+An example of a gateway is the [Spreadsheet Gateway](https://github.com/SuperFlyTV/spreadsheet-gateway).
+All gateways use the [Core Integration Library](https://github.com/nrkno/sofie-core/tree/master/packages/server-core-integration) to communicate with Core.
+
+## System, \(Organization\), Studio & Show Style
+
+To be able to facilitate various workflows, Sofie distinguishes between a few different scopes. Here's a short explanation of the differences between the "System", "Organization", "Studio" and "Show Style":
+
+- The **System** defines the whole of the Sofie Core.
+- The **Organization** \(only available if user accounts are enabled\) defines things that are common for an organization. An organization consists of: **Users, Studios** and **ShowStyles**.
+- The **Studio** contains things that are related to the "hardware" or "rig". Technically, a Studio is defined as an entity that can have one \(or no\) rundown active at any given time. In most cases, this will be a representation of your gallery, with cameras, video playback and graphics systems, external inputs, sound mixers, lighting controls and so on. A single System can easily control multiple Studios.
+- The **Show Style** contains settings for the "show", for example if there's a "Morning Show" and an "Afternoon Show" - produced in the same gallery - they might be two different Show Styles \(played in the same Studio\). Most importantly, the Show Style decides the "look and feel" of the Show towards the producer/director, dictating how data ingested from the NRCS will be interpreted and how the user will interact with the system during playback (see: [Show Style](../configuration/settings-view#show-style) in Settings).
+  * A **Show Style Variant** is a set of Show Style _Blueprint_ configuration values that allows the same interaction model to be used across multiple Shows with potentially different assets, changing the outward look of the Show: for example, news programs with different hosts produced from the same Studio, but with different light setups, backscreen and overlay graphics.
+
+![Sofie Architecture Venn Diagram](/img/docs/main/features/sofie-venn-diagram.png)
+
+## Playlists, Rundowns, Segments, Parts, Pieces
+
+![Playlists, Rundowns, Segments, Parts, Pieces](/img/docs/main/features/playlist-rundown-segment-part-piece.png)
+
+### Playlist
+
+A Playlist \(or "Rundown Playlist"\) is the entity that "goes on air" and controls the playhead/Take Point.
+
+It contains one or several Rundowns inside, which are played out in order.
+
+:::info
+In many studios, there is only ever one rundown in a playlist. In those cases, we sometimes lazily refer to playlists and rundowns as "being the same thing".
+:::
+
+A Playlist is played out in the context of its [Studio](#studio); thus only a single Playlist can be active at a time within each Studio.
+
+A playlist is normally played through and then ends, but it is also possible to make looping playlists, in which case the playlist will start over from the top after the last part has been played.
+
+### Rundown
+
+The Rundown contains the content for a show. It contains Segments and Parts, which can be selected by the user to be played out.
+A Rundown always has a [showstyle](#showstyle) and is played out in the context of the [Studio](#studio) of its Playlist.
+
+### Segment
+
+The Segment is the horizontal line in the GUI. It is intended to be used as a "chapter" or "subject" in a rundown, where each individual playable element in the Segment is called a [Part](#part).
+
+### Part
+
+The Part is the playable element inside of a [Segment](#segment). This is the thing that starts playing when the user does a [TAKE](#take-point). A playing part is _On Air_ or _current_, while the part "cued" to be played is _Next_.
+The Part in itself doesn't determine what's going to happen; that's handled by the [Pieces](#piece) in it.
+
+### Piece
+
+The Pieces inside of a Part determine what's going to happen; they could indicate things like VTs, cuts to cameras, graphics, or what script the host is going to read.
+
+Inside of the pieces are the [timeline-objects](#what-is-the-timeline) which control the playout on a technical level.
+
+:::tip
+Tip! If you want to manually play a certain piece \(for example a graphics overlay\), you can at any time double-click it in the GUI, and it will be copied and played at your play head, just like an [AdLib](#adlib-pieces) would!
+:::
+
+See also: [Showstyle](#system-organization-studio--show-style)
+
+### AdLib Piece
+
+The AdLib pieces are Pieces that aren't programmed to fire at a specific time, but are instead intended to be manually triggered by the user.
+
+The AdLib pieces can either come from the currently playing Part, or they can be _global AdLibs_ that are available throughout the show.
+
+An AdLib isn't added to the Part in the GUI until it starts playing; instead, you find it in the [Shelf](features/sofie-views.mdx#shelf).
+
+## Buckets
+
+A Bucket is a container for AdLib Pieces created by the producer/operator during production. They exist independently of the Rundowns and associated content created by ingesting data from the NRCS. Users can freely create, modify and remove Buckets.
+
+The primary use-case of these elements is for breaking news formats, where quick turnaround video editing may require circumvention of the regular flow of show assets and programming via the NRCS. Currently, one way of creating AdLibs inside Buckets is using a MOS Plugin integration inside the Shelf, where MOS [ncsItem](https://mosprotocol.com/wp-content/MOS-Protocol-Documents/MOS-Protocol-2.8.4-Current.htm#ncsItem) elements can be dragged from the MOS Plugin onto a bucket and ingested.
+
+The ingest happens via the `getAdlibItem` method: [https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L215](https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L215)
+
+## Views
+
+Being a web-based system, Sofie has a number of customisable, user-facing web [views](features/sofie-views.mdx) used for control and monitoring.
+
+## Blueprints
+
+Blueprints are plug-ins that run in Sofie Core. They interpret the data coming in from the rundowns and transform it into a rich set of playable elements \(Segments, Parts, AdLibs etc\).
+
+The blueprints are webpacked javascript bundles which are uploaded into Sofie via the GUI. They are custom-made and change depending on the show style, the type of input data \(NRCS\) and the types of controlled devices. A generic [blueprint that works with spreadsheets is available here](https://github.com/SuperFlyTV/sofie-demo-blueprints).
+
+When [Sofie Core](#sofie-core) calls upon a Blueprint, it returns a JavaScript object containing methods callable by Sofie Core. These methods will be called by Sofie Core in different situations, depending on the method.
+Documentation on these interfaces is available in the [Blueprints integration](https://www.npmjs.com/package/@sofie-automation/blueprints-integration) library.
+
+There are 3 types of blueprints, and all 3 must be uploaded into Sofie before the system will work correctly.
+
+### System Blueprints
+
+Handle things on the _System level_.
+Documentation on the interface to be exposed by the Blueprint:
+[https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L75](https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L75)
+
+### Studio Blueprints
+
+Handle things on the _Studio level_, like "which showstyle to use for this rundown".
+Documentation on the interface to be exposed by the Blueprint:
+[https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L85](https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L85)
+
+### Showstyle Blueprints
+
+Handle things on the _Showstyle level_, like generating [_Baseline_](#baseline), _Segments_, _Parts, Pieces_ and _Timelines_ in a rundown.
+Documentation on the interface to be exposed by the Blueprint:
+[https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L117](https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L117)
+
+## `PartInstances` and `PieceInstances`
+
+In order to be able to facilitate ingesting changes from the NRCS while continuing to provide a stable and predictable playback of the Rundowns, Sofie internally uses a concept of ["instantiation"](https://en.wikipedia.org/wiki/Instance_(computer_science)) of key Rundown elements. Before playback of a Part can begin, the Part and its Pieces are copied into an Instance of a Part: a `PartInstance`.
This protects the contents of the _Next_ and _On Air_ part, preventing accidental changes that could surprise the producer/director. This also makes it possible to inspect the "as played" state of the Rundown, independently of the "as planned" state ingested from the NRCS.
+
+The blueprints can optionally allow some changes to the Parts and Pieces to be forwarded onto these `PartInstances`: [https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L190](https://github.com/nrkno/sofie-core/blob/master/packages/blueprints-integration/src/api.ts#L190)
+
+## Timeline
+
+### What is the timeline?
+
+The Timeline is a collection of timeline-objects which together form a "target state", i.e. an intent of what is to be played and at what times.
+
+The timeline-objects can be programmed to contain relative references to each other, so programming things like _"play this thing right after this other thing"_ is as easy as `{ start: '#otherThing.end' }`.
+
+The [Playout Gateway](../for-developers/libraries.md) picks up the timeline from Sofie Core and \(using the [TSR timeline-state-resolver](https://github.com/nrkno/sofie-timeline-state-resolver)\) controls the playout devices to make sure that they actually play what is intended.
+
+![Example of 2 objects in a timeline: The #video object, destined to play at a certain time, and #gfx0, destined to start 15 seconds into the video.](/img/docs/main/features/timeline.png)
+
+### Why a timeline?
+
+The Sofie system is made to work with a modern web- and IT-based approach in mind. Therefore, the Sofie Core can be run either on-site, or in an off-site cloud.
+
+![Sofie Core can run in the cloud](/img/docs/main/features/sofie-web-architecture.png)
+
+One drawback of running in a cloud over the public internet is the sometimes unpredictable latency. The Timeline overcomes this by moving all the immediate control of the playout devices to the Playout Gateway, which is intended to run on a local network, close to the hardware it controls.
+This also gives the system a simple way of load-balancing: the number of web clients or the load on Sofie Core won't affect the playout.
+
+Another benefit of basing the playout on a timeline is that when programming the show \(the blueprints\), you only have to care about "what you want to be on screen"; you don't have to care about cleaning up previously played things, or what was actually played out before. Those are things that are handled by the Playout Gateway automatically. This also allows the user to jump around in a rundown freely, without the risk of things going wrong on air.
+
+### How does it work?
+
+:::tip
+Fun tip! The timeline in itself is a [separate library available on github](https://github.com/SuperFlyTV/supertimeline).
+
+You can play around with the timeline in the browser using [JSFiddle and the timeline-visualizer](https://jsfiddle.net/nytamin/rztp517u/)!
+:::
+
+The Timeline is stored by Sofie Core in a MongoDB collection. It is generated whenever a user does a [Take](#take-point), changes the [Next-point](#next-point-and-lookahead) or anything else that might affect the playout.
+
+_Sofie Core_ generates the timeline using:
+
+- The [Studio Baseline](#baseline) \(only if no rundown is currently active\)
+- The [Showstyle Baseline](#baseline) of the currently active rundown.
+
+The [**Playout Gateway**](../for-developers/libraries.md#gateways) then picks up the new timeline, and pipes it into the [\(TSR\) timeline-state-resolver](https://github.com/nrkno/sofie-timeline-state-resolver) library.
+
+The TSR then:
+
+- Resolves the timeline, using the [timeline-library](https://github.com/SuperFlyTV/supertimeline)
+- Calculates new target-states for each relevant point in time
+- Maps the target-state to each playout device
+- Compares the target-states for each device with the currently tracked state, and
+- Generates commands to send to each device to account for the change
+- The commands are then put in a queue and sent to the devices at the correct time
+
+:::info
+For more information about what playout devices _TSR_ supports, and examples of the timeline-objects, see the [README of TSR](https://github.com/nrkno/sofie-timeline-state-resolver#timeline-state-resolver)
+:::
+
+:::info
+For more information about how to program timeline-objects, see the [README of the timeline-library](https://github.com/SuperFlyTV/supertimeline#superfly-timeline)
+:::
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/_category_.json
new file mode 100644
index 0000000000..d2aee9ef5b
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Configuration",
+  "position": 4
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/settings-view.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/settings-view.md
new file mode 100644
index 0000000000..2a4a3e6a25
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/settings-view.md
@@ -0,0 +1,173 @@
+---
+sidebar_position: 2
+---
+# Settings View
+
+:::caution
+The settings views are only visible to users with the correct [access level](../features/access-levels.md)!
+:::
+
+Recommended read before diving into the settings: [System, \(Organization\), Studio & Show Style](../concepts-and-architecture.md#system-organization-studio-and-show-style).
+
+## System
+
+The _System_ settings are the settings for this installation of Sofie. This is where the settings that apply system-wide go.
+
+:::caution
+Documentation for this section is yet to be written.
+:::
+
+### Name and logo
+
+Sofie has an option to change the name of the installation. This is useful for identifying different studios or regions.
+
+We have also provided some seasonal logos just for fun.
+
+### System-wide notification message
+
+This option will show a notification to the user containing some custom text. It can be used to inform users about ongoing problems or planned maintenance.
+
+### Support panel
+
+The support panel is shown in the rundown view when the user clicks the "?" button in the bottom-right corner. It can contain some custom HTML which can be used to refer your users to custom information specific to your organisation.
+
+### Action triggers
+
+The action triggers section lets you set custom keybindings for system-level actions, such as doing a take or resetting a rundown.
+
+### Monitoring
+
+Sofie can be configured to send information to Elastic APM. This can provide developers with useful information about the system's performance. Note that this can reduce the overall performance of Sofie, so it is recommended to disable it in production.
+
+Sofie can also monitor for blocked threads, and will log a message if it discovers any. Disabling this in production is recommended as well.
+
+### CRON jobs
+
+Sofie contains cron jobs for restarting any CasparCG servers through the CasparCG Launcher, as well as a job to create rundown snapshots periodically.
+
+### Clean up
+
+The clean up process in Sofie searches the database for unused data and indexes and removes them. If you have had an installation running across many versions, this may improve database performance. It is generally safe to run at any time.
+
+## Studio
+
+A _Studio_ in Sofie terms is a physical location, with a specific set of devices and equipment. Only one show can be on air in a studio at the same time.
+The _studio_ settings are settings for that specific studio, and contain settings related to hardware and playout, such as:
+
+* **Attached devices** - the Gateways related to this studio
+* **Blueprint configuration** - custom config options defined by the blueprints
+* **Layer Mappings** - Maps the logical _timeline layers_ to physical devices and outputs
+
+The Studio uses a studio-blueprint, which handles things like mapping an incoming rundown to a Showstyle.
+
+### Attached Devices
+
+This section allows you to add and remove Gateways that are related to this _Studio_. When a Gateway is attached to a Studio, it will react to the changes happening within it, as well as feed the necessary data into it.
+
+### Blueprint Configuration
+
+Sofie allows the Blueprints to expose custom configuration fields that allow the System Administrator to reconfigure how these Blueprints work through the Sofie UI. Here you can change the configuration of the [Studio Blueprint](../concepts-and-architecture.md#studio-blueprints).
+
+### Layer Mappings
+
+This section allows you to add, remove and configure how logical device-control will be translated to physical automation control. [Blueprints](../concepts-and-architecture.md#blueprints) control devices through objects placed on a [Timeline](../concepts-and-architecture.md#timeline) using logical device identifiers called _Layers_. A layer represents a single aspect of a device that can be controlled at a given time: a video switcher's M/E bus, an audio mixer's fader, an OSC control node, a video server's output channel. Layer Mappings translate these logical identifiers into physical device aspects, for example:
+
+![A sample configuration of a Layer Mapping for the M/E1 Bus of an ATEM switcher](/img/docs/main/features/atem-layer-mapping-example.png)
+
+This _Layer Mapping_ configures the `atem_me_program` Timeline-layer to control the `atem0` device of the `ATEM` type. No Lookahead will be enabled for this layer. This layer will control a `MixEffect` aspect with the Index of `0` \(so the M/E 1 Bus\).
+
+These mappings allow the System Administrator to reconfigure what devices the Blueprints will control, without the need to change the Blueprint code.
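+
+To see how the two halves meet, the sketch below shows the kind of timeline object a Blueprint might place on the `atem_me_program` layer from the example above - "put input 2 on the ATEM's M/E program bus". The enum names come from the `timeline-state-resolver-types` package, but the exact fields depend on the TSR version in use, so treat this as an illustration rather than a reference. Note how the object only names the logical layer; which physical device and M/E index it ends up controlling is decided by the Layer Mapping:
+
+```ts
+import {
+	DeviceType,
+	TimelineContentTypeAtem,
+	AtemTransitionStyle,
+} from 'timeline-state-resolver-types'
+
+// A timeline object as a Blueprint could produce it:
+const cutToCamera2 = {
+	id: 'cam2_pgm',
+	enable: { start: 0 },
+	layer: 'atem_me_program', // logical layer - resolved via the Layer Mappings
+	content: {
+		deviceType: DeviceType.ATEM,
+		type: TimelineContentTypeAtem.ME,
+		me: {
+			input: 2,
+			transition: AtemTransitionStyle.CUT,
+		},
+	},
+}
+```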
+
+#### Route Sets
+
+In order to allow the Producer to reconfigure the automation from the Switchboard in the [Rundown View](../concepts-and-architecture.md#rundown-view), as well as have some pre-set automation control available for the System Administrator, Sofie has a concept of Route Sets. Route Sets work on top of the Layer Mappings, by configuring sets of [Layer Mappings](settings-view.md#layer-mappings) that will re-route the control from one device to another, or disable the automation altogether. These Route Sets are presented to the Producer in the [Switchboard](../concepts-and-architecture.md#switchboard) panel.
+
+A Route Set is essentially a distinct set of Layer Mappings that can modify the settings already configured by the base Layer Mappings, and that can be turned On and Off. Its entries, called Routes, can:
+
+* change the Layer ID to a new Layer ID
+* change the Device being controlled by the Layer
+* change the aspect of the Device that's being controlled.
+
+Route Sets can be grouped into Exclusivity Groups, in which only a single Route Set can be enabled at a time. When activating a Route Set within an Exclusivity Group, all other Route Sets in that group will be deactivated. This, in turn, allows the System Administrator to create entire sections of exclusive automation control within the Studio that the Producer can then switch between. One such example could be switching between Primary and Backup playout servers, or between Primary and Backup talent microphones.
+
+![The Exclusivity Group Name will be displayed as a header in the Switchboard panel](/img/docs/main/features/route-sets-exclusivity-groups.png)
+
+A Route Set has a Behavior property which dictates how it operates:
+
+| Type | Behavior |
+| :-------------- | :------------------------------------------------------------------------------------------------------------------------------ |
+| `ACTIVATE_ONLY` | This RouteSet cannot be deactivated, only a different RouteSet in the same Exclusivity Group can cause it to deactivate |
+| `TOGGLE` | The RouteSet can be activated and deactivated. As a result, it's possible for the Exclusivity Group to have no Route Set active |
+| `HIDDEN` | The RouteSet can be activated and deactivated, but it will not be presented to the user in the Switchboard panel |
+
+![An active RouteSet with a single Layer Mapping being re-configured](/img/docs/main/features/route-set-remap.png)
+
+Route Sets can also be configured with a _Default State_. This can be used to contrast a normal, day-to-day configuration with an exceptional one \(like using a backup device\) in the [Switchboard](../concepts-and-architecture#switchboard) panel.
+
+| Default State | Behavior |
+| :------------ | :------------------------------------------------------------ |
+| Active | If the Route Set is not active, an indicator will be shown |
+| Not Active | If the Route Set is active, an indicator will be shown |
+| Not defined | No indicator will be shown, regardless of the Route Set state |
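+
+As an illustration of the concept, a Route Set for switching playout to a backup video server could conceptually look like the sketch below. This is hypothetical data written out for clarity - the real schema is edited through the Studio settings UI and its exact field names vary between Sofie versions:
+
+```ts
+// Hypothetical Route Set, for illustration only.
+const backupPlayoutRouteSet = {
+	name: 'Backup playout server',
+	behavior: 'TOGGLE', // can be freely activated and deactivated
+	exclusivityGroup: 'playout-servers', // only one active Route Set in this group
+	active: false,
+	defaultActive: false, // an indicator is shown when the state differs from this
+	routes: [
+		{
+			// Take control aimed at the primary video server layer...
+			mappedLayer: 'casparcg_player_clip',
+			// ...and point it at the backup device instead.
+			remapping: { deviceId: 'casparcg-backup' },
+		},
+	],
+}
+```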
+
+## Show style
+
+A _Showstyle_ is related to the looks and logic of a _show_, which in contrast to the _studio_ is not directly related to the hardware.
+The Showstyle contains settings like:
+
+* **Source Layers** - Groups different types of content in the GUI
+* **Output Channels** - Indicates different output targets \(such as the _Program_ or _back-screen in the studio_\)
+* **Action Triggers** - Select how actions can be started on a per-show basis, outside of the on-screen controls
+* **Blueprint configuration** - custom config options defined by the blueprints
+
+:::caution
+Please note the difference between _Source Layers_ and _timeline-layers_:
+
+[Pieces](../concepts-and-architecture.md#piece) are put onto _Source layers_, to group different types of content \(such as a VT or Camera\). They are therefore intended only as something to indicate to the user what is going to be played, not what is actually going to happen on the technical level.
+
+[Timeline-objects](../concepts-and-architecture.md#timeline-object) \(inside of the [Pieces](../concepts-and-architecture.md#piece)\) are put onto timeline-layers, which are \(through the Mappings in the studio\) mapped to physical devices and outputs.
+The exact timeline-layer is never exposed to the user, but instead used on the technical level to control playout.
+
+An example of the difference could be when playing a VT \(that's a Source Layer\), which could involve all of the timeline-layers _video\_player0_, _audio\_fader\_video_, _audio\_fader\_host_ and _mixer\_pgm._
+:::
+
+### Action Triggers
+
+This is a way to set up how actions can be performed in the User Interface outside of the point-and-click GUI. Commonly, these are the *hotkey combinations* that can be used to either trigger AdLib content or other actions in the larger system. This is done by creating sets of Triggers and the Actions to be triggered by them. These pairs can be set at the Show Style level or at the _Sofie Core_ (System) level, for common actions such as doing a Take or activating a Rundown, where you want a shared method of operation. _Sofie Core_ migrations will set up a base set of basic, system-wide Action Triggers for interacting with rundowns, but they can be changed by the System blueprint.
+
+![Action triggers define modes of interacting with a Rundown](/img/docs/main/features/action_triggers_3.png)
+
+#### Triggers
+
+The triggers are designed to be either client-specific or issued by a peripheral device module.
+
+Currently, the Action Triggers system supports setting up two types of triggers: Hotkeys and Device Triggers.
+
+Hotkeys are valid in the scope of a browser window and can be either a single key, a combination of keys (*combo*) or a *chord* - a sequence of key combinations pressed in a particular order. *Chords* are popular in some text editing applications and vastly expand the number of actions that can be triggered from a keyboard, at the expense of the time needed to execute them. Currently, the Hotkey editor in Sofie does not support creating *Chords*, but they can be specified by Blueprints during migrations.
+
+To edit a given trigger, click on the trigger pill on the left of the Trigger-Action set. When hovering, a **+** sign will appear, allowing you to add a new trigger to the set.
+
+Device Triggers are valid in the scope of a Studio and will be evaluated on the currently active Rundown in a given Studio. To use Device Triggers, you need to have at least a single [Input Gateway](../installation/installing-input-gateway) attached to a Studio and a Device configured in the Input Gateway. Once that's done, when selecting a **Device** trigger type in the pop-up, you can invoke triggers on your Input Device and you will see a preview of the input events at the bottom of the pop-up. You can select which of these events should be the trigger by clicking on one of the previews. Note that some devices differentiate between _Up_ and _Down_ triggers, while others don't. Some may also have other activities that can be done _to_ a trigger. What they are and how they are identified is device-specific and is best discovered through interaction with the device.
+
+#### Actions
+
+The actions are built using a base *action* (such as *Activate a Rundown* or *AdLib*) and a set of *filters*, limiting the scope of the *action*. Optionally, some of these *actions* can take additional *parameters*. These filters can operate on various types of objects, depending on the action in question. All actions currently require that the chain of filters starts with scoping out the Rundown the action is supposed to affect. Currently, there is only one type of Rundown-level filter supported: "The Rundown currently in view".
+
+The Action Triggers user interface guides the user in a wizard-like fashion through the available *filter* options on a given *action*.
+
+![Actions can take additional parameters](/img/docs/main/features/action_triggers_2.png)
+
+If the action provides a preview of the triggered items and there is an available matching Rundown, a preview will be displayed for the matching objects in that Rundown. The system will select the currently active rundown if it is of the currently-edited ShowStyle; if not, it will select the first available Rundown of the currently-edited ShowStyle.
+
+![A preview of the action, as scoped by the filters](/img/docs/main/features/action_triggers_4.png)
+
+Clicking on the action and filter pills allows you to edit the action parameters and filter parameters. *Limit* limits the matched set to only the first *N* objects - this can significantly improve performance on large data sets. *Pick* and *Pick last* filters end the chain of the filters by selecting a single item from the filtered set of objects (the *N-th* object from the beginning or the end, respectively). *Pick* implicitly contains a *Limit* for better performance; this is not true for *Pick last*, though.
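+
+Written out as data, a Trigger-Action set pairs one or more triggers with one or more filtered actions. The sketch below is purely illustrative - the real objects are created through the UI or by blueprint migrations, and their exact field names differ between Sofie versions:
+
+```ts
+// Hypothetical Trigger-Action set: Ctrl+Shift+KeyA plays the first camera AdLib
+// of the Rundown currently in view.
+const triggeredAction = {
+	name: 'Cut to camera AdLib',
+	triggers: [{ type: 'hotkey', keys: 'Control+Shift+KeyA', up: true }],
+	actions: [
+		{
+			action: 'adlib',
+			filterChain: [
+				{ object: 'view' }, // scope: the Rundown currently in view
+				{ object: 'adLib', field: 'sourceLayerId', value: ['camera0'] },
+				{ object: 'adLib', field: 'pick', value: 0 }, // Pick: the first match
+			],
+		},
+	],
+}
+```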
+
+## Migrations
+
+The migrations are automatic setup scripts that help you during initial setup and system upgrades.
+
+There are system migrations that come directly from the version of _Sofie Core_ you're running, and there are also migrations added by the different blueprints.
+
+It is mandatory to run the migrations when you've upgraded _Sofie Core_ to a new version, or upgraded your blueprints.
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/sofie-core-settings.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/sofie-core-settings.md
new file mode 100644
index 0000000000..2c3599a7fe
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/configuration/sofie-core-settings.md
@@ -0,0 +1,111 @@
+---
+sidebar_position: 1
+---
+
+# Sofie Core: System Configuration
+
+_Sofie Core_ is configured at its most basic level using a settings file and environment variables.
+
+### Environment Variables
+
+| Setting | Use | Default value | Example |
+| :--- | :--- | :--- | :--- |
+| `METEOR_SETTINGS` | Contents of the settings file \(see below\) | | `$(cat settings.json)` |
+| `TZ` | The default time zone of the server \(used in logging\) | | `Europe/Amsterdam` |
+| `MAIL_URL` | Email server to use. See [https://docs.meteor.com/api/email.html](https://docs.meteor.com/api/email.html) | | `smtps://USERNAME:PASSWORD@HOST:PORT` |
+| `LOG_TO_FILE` | File path to log to file | | `/logs/core/` |
+
+### Settings File
+
+The settings file is an optional JSON file that contains some configuration settings for how _Sofie Core_ works and behaves.
+
+To use a settings file:
+
+- During development: `meteor --settings settings.json`
+- During prod: environment variable \(see above\)
+
+The structure of the file allows for public and private fields. At the moment, Sofie only uses public fields. Below is an example settings file:
+
+```json
+{
+  "public": {
+    "frameRate": 25
+  }
+}
+```
+
+There are various settings you can set for an installation. See the list below:
+
+| **Field name** | Use | Default value |
+| :---------------------------- | :---------------------------------------------------------------------------------------------------------------------------- | :------------------------------------- |
+| `defaultToCollapsedSegments` | Should all segments be collapsed by default, until the user expands them | `false` |
+| `autoRewindLeavingSegment` | Should segments be automatically rewound after they stop playing | `false` |
+| `disableBlurBorder` | Should a border be displayed around the Rundown View when it's not in focus and studio mode is enabled | `false` |
+| `defaultTimeScale` | An arbitrary number, defining the default zoom factor of the Timelines | `1` |
+| `allowGrabbingTimeline` | Can Segment Timelines be grabbed to scroll them? | `true` |
+| `enableUserAccounts` | Enables User Accounts and Authentication. If disabled, all user stations will be treated as a single, anonymous user | `false` |
+| `defaultDisplayDuration` | The fallback duration of a Part, when its expectedDuration is 0. In milliseconds | `3000` |
+| `allowMultiplePlaylistsInGUI` | If true, allows creation of new playlists in the Lobby GUI (rundown list). If false, only pre-existing playlists are allowed. | `false` |
+| `followOnAirSegmentsHistory` | How many segments of history to show when scrolling back in time (0 = show current segment only) | `0` |
+| `maximumDataAge` | Clean up data that is older than this [ms] | 100 days |
+| `poisonKey` | Enable the use of the poison key if present, using the key specified. | `'Escape'` |
+| `enableNTPTimeChecker` | If set, enables a check to ensure that the system time doesn't differ too much from the specified NTP server's time. | `null` |
+| `defaultShelfDisplayOptions` | Default value used to toggle Shelf options when the 'display' URL argument is not provided. | `buckets,layout,shelfLayout,inspector` |
+| `enableKeyboardPreview` | The KeyboardPreview is a feature that is not implemented in the main Fork, and is kept here for compatibility | `false` |
+| `keyboardMapLayout` | Keyboard map layout (what physical layout to use for the keyboard) | STANDARD_102_TKL |
+| `customizationClassName` | CSS class applied to the body of the page. Used to include custom implementations that differ from the main Fork. | `undefined` |
+| `useCountdownToFreezeFrame` | If true, countdowns of videos will count down to the last freeze-frame of the video instead of to the end of the video | `true` |
+| `confirmKeyCode` | Which keyboard key is used as "Confirm" in modal dialogs etc. | `'Enter'` |
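+
+For orientation, the public part of this file surfaces inside the \(Meteor-based\) Core through Meteor's standard settings mechanism. The snippet below is a minimal sketch of reading one value - the exact typing used in Sofie's code differs:
+
+```ts
+import { Meteor } from 'meteor/meteor'
+
+// Public settings from settings.json are exposed by Meteor to server and
+// client alike; a missing file simply yields no value, hence the fallback.
+const frameRate: number = Meteor.settings?.public?.frameRate ?? 25
+```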
+
+:::info
+The exact definition of the settings can be found [in the code here](https://github.com/nrkno/sofie-core/blob/master/meteor/lib/Settings.ts#L12).
+:::
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/faq.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/faq.md
new file mode 100644
index 0000000000..e832a36f5c
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/faq.md
@@ -0,0 +1,17 @@
+# FAQ
+
+## What software license does the system use?
+
+All main components are using the [MIT license](https://opensource.org/licenses/MIT).
+
+## Is there anything missing in the public repositories?
+
+Everything needed to install and configure a fully functioning Sofie system is publicly available, with the following exceptions:
+
+* A rundown data set describing the actual TV show, and its media assets.
+* Blueprints for your specific show.
+
+## When will feature _y_ become available?
+
+Check out the [issues page](https://github.com/nrkno/Sofie-TV-automation/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3ARelease), where there are notes on current and upcoming releases.
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/_category_.json
new file mode 100644
index 0000000000..0dd70d8b0e
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Features",
+  "position": 2
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/access-levels.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/access-levels.md
new file mode 100644
index 0000000000..50307f970e
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/access-levels.md
@@ -0,0 +1,46 @@
+---
+sidebar_position: 3
+---
+# Access Levels
+
+A variety of access levels can be set via the URL. By default, a user cannot edit settings, nor play out anything. Some of the access levels provide additional administrative pages or helpful tool tips for new users. These modes are persistent between sessions and will need to be manually disabled by replacing the _1_ with a _0_ in the URL. Below is a quick reference to the modes and what they have access to.
+
+If user accounts are enabled \(`enableUserAccounts` in [_Sofie Core_ settings](../configuration/sofie-core-settings#settings-file)\), the access levels are set under the user settings. If no user accounts are set, the access level for a browser is set by adding `?theaccessmode=1` to the URL as described below.
+
+The access level is persisted in the browser's Local Storage. To disable it, visit `?theaccessmode=0`.
+
+| Access area | Basic Mode | Configuration Mode | Studio Mode | Admin Mode |
+| :--- | :--- | :--- | :--- | :--- |
+| **Rundowns** | View Only | View Only | Yes, playout | Yes, playout |
+| **Settings** | No | Yes | No | Yes |
+
+### Basic mode
+
+Without enabling any additional modes in Sofie, the browser will have minimal access to the system. It will be able to view a rundown, but will not have the ability to manipulate it. This includes activating, deactivating, or resetting the rundown, as well as taking the next part, playing adlibs, etc.
+
+### Studio mode
+
+Studio Mode gives the current browser full control of the studio and all information associated with it. This includes allowing actions like activating and deactivating rundowns, taking parts, adlibbing, etc. This mode is accessed by adding `?studio=1` to the end of the URL.
+
+### Configuration mode
+
+Configuration mode gives the user full control over the Settings pages and allows full access to the system, including the ability to modify _Blueprints_, _Studios_, or _Show Styles_, create and restore _Snapshots_, as well as modify attached devices.
+
+### Help Mode
+
+Enables some tooltips that might be useful to new users. This mode is accessed by adding `?help=1` to the end of the URL.
+
+### Admin Mode
+
+This mode will give the user the same access as the _Configuration_ and _Studio_ modes, as well as access to a set of _Test Tools_ and a _Manual Control_ section on the Rundown page.
+
+This mode is enabled when `?admin=1` is added to the end of the URL.
+
+### Testing Mode
+
+Enables the Test Tools page, which contains various tools useful for testing the system during development. This mode is enabled when `?testing=1` is added to the end of the URL.
+
+### Developer Mode
+
+This mode will enable the browser's default right-click menu to appear, and can be accessed by adding `?develop=1` to the URL. It will also reveal the Manual Control section on the Rundown page.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/api.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/api.md
new file mode 100644
index 0000000000..30d939cfba
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/api.md
@@ -0,0 +1,19 @@
+---
+sidebar_position: 10
+---
+
+# API
+
+## Sofie User Actions REST API
+
+Starting with version 1.50.0, there is a semantically-versioned HTTP REST API defined using the [OpenAPI specification](https://spec.openapis.org/oas/v3.0.3) that exposes some of the functionality available through the GUI in a machine-readable fashion. The API specification can be found in the `packages/openapi` folder. The latest version of this API is available in _Sofie Core_ using the endpoint: `/api/1.0`. There should be no assumption of backwards-compatibility for this API, but it will be semantically versioned, with redirects set up for minor-version changes for compatibility.
+
+There is also a legacy REST API available that can be used to fetch data and trigger actions. The documentation for this API is minimal, but the API endpoints are listed by _Sofie Core_ using the endpoint: `/api/0`
+
+## Sofie Live Status Gateway
+
+Starting with version 1.50.0, there is also a separate service available, called _Sofie Live Status Gateway_, running as a separate process, which will connect to _Sofie Core_ as a Peripheral Device, listen to the changes of its state and provide a PubSub service offering a machine-readable view into the system. The WebSocket API is defined using the [AsyncAPI specification](https://v2.asyncapi.com/docs/reference/specification/v2.5.0) and the specification can be found in the `packages/live-status-gateway/api` folder.
+
+## DDP – Core Integration
+
+If you're planning to build NodeJS applications that talk to _Sofie Core_, we recommend using the [core-integration](https://github.com/nrkno/sofie-core/tree/master/packages/server-core-integration.md) library, which exposes a number of callable methods and allows for subscribing to data the same way the [Gateways](../concepts-and-architecture.md#gateways) do.
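+
+As a quick way of exploring the HTTP APIs described above, the sketch below queries a local Core instance. The host and port are assumptions \(a default local installation\), and the exact response shapes are defined by the OpenAPI specification in `packages/openapi`:
+
+```ts
+// Node 18+ or a browser: fetch is built in.
+const base = 'http://localhost:3000' // assumed local Sofie Core instance
+
+// The legacy API lists its available endpoints at /api/0:
+const legacyIndex = await fetch(`${base}/api/0`)
+console.log(await legacyIndex.json())
+
+// The semantically-versioned User Actions API lives under /api/1.0:
+const userActionsIndex = await fetch(`${base}/api/1.0`)
+console.log(userActionsIndex.status, await userActionsIndex.json())
+```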
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/language.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/language.md
new file mode 100644
index 0000000000..9fe03d816e
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/language.md
@@ -0,0 +1,23 @@
+---
+sidebar_position: 7
+---
+# Language
+
+_Sofie_ uses the [i18n internationalisation framework](https://www.i18next.com/) that allows you to present user-facing views in multiple languages.
+
+## Language selection
+
+The UI will automatically detect the browser's default language and select the best match, falling back to English. You can also force the UI language by navigating to a page with the `?lng=xx` query string, for example:
+
+`http://localhost:3000/?lng=en`
+
+This choice is persisted in the browser's local storage, and the same language will be used until a new forced language is chosen using this method.
+
+_Sofie_ currently supports three languages:
+* English _(default)_ `en`
+* Norwegian bokmål `nb`
+* Norwegian nynorsk `nn`
+
+## Further Reading
+
+* [List of language tags](https://en.wikipedia.org/wiki/IETF_language_tag)
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/prompter.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/prompter.md
new file mode 100644
index 0000000000..893e43f7af
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/prompter.md
@@ -0,0 +1,189 @@
+---
+sidebar_position: 3
+---
+
+# Prompter
+
+See [Sofie views](sofie-views.mdx#prompter-view) for how to access the prompter page.
+
+![Prompter screen before the first Part is taken](/img/docs/main/features/prompter-view.png)
+
+The prompter will display the script for the Rundown currently active in the Studio. The On Air and Next parts and segments are highlighted - in red and green, respectively - to aid in navigation. In the top-right corner of the screen, a Diff clock is shown, displaying the difference between the planned playback and what has actually been produced. This allows the host to know how far behind/ahead they are with regard to the planned execution.
+
+![Indicators for the On Air and Next part shown underneath the Diff clock](/img/docs/main/features/prompter-view-indicators.png)
+
+If the user scrolls the prompter ahead of or behind the On Air part, helpful indicators will be shown on the right-hand side of the screen. If the On Air or Next part's script is above the current viewport, arrows pointing up will be shown. If the On Air part's script is below the current viewport, a single arrow pointing down will be shown.
+
+## Customize looks
+
+The prompter UI can be configured using query parameters:
+
+| Query parameter | Type | Description | Default |
+| :-------------- | :----- | :------------------------------------------------------------------------------------------ | :------ |
+| `mirror` | 0 / 1 | Mirror the display horizontally | `0` |
+| `mirrorv` | 0 / 1 | Mirror the display vertically | `0` |
+| `fontsize` | number | Set a custom font size of the text. 20 will fit 5 lines of text, 14 will fit 7 lines, etc. | `14` |
+| `marker` | string | Set the position of the read-marker. Possible values: "center", "top", "bottom", "hide" | `hide` |
+| `margin` | number | Set the margin of the screen \(used on monitors with overscan\), in %. | `0` |
+| `showmarker` | 0 / 1 | If the marker is not set to "hide", control if the marker is shown or not | `1` |
+| `showscroll` | 0 / 1 | Whether the scroll bar should be shown | `1` |
+| `followtake` | 0 / 1 | Whether the prompter should automatically scroll to the current segment when the operator TAKEs it | `1` |
+| `showoverunder` | 0 / 1 | The timer in the top-right of the prompter, showing the overtime/undertime of the current show | `1` |
+| `debug` | 0 / 1 | Whether to display a debug box showing controller input values and the calculated speed the prompter is currently scrolling at. Used to tweak speedMaps and ranges. | `0` |
+
+Example: [http://127.0.0.1/prompter/studio0/?mode=mouse&followtake=0&fontsize=20](http://127.0.0.1/prompter/studio0/?mode=mouse&followtake=0&fontsize=20)
+
+## Controlling the prompter
+
+The prompter can be controlled by different types of controllers. The control mode is set by a query parameter, like so: `?mode=mouse`.
+
+| Query parameter | Description |
+| :---------------------- | :--- |
+| Default | Controlled by both mouse and keyboard |
+| `?mode=mouse` | Controlled by mouse only. [See configuration details](prompter.md#control-using-mouse-scroll-wheel) |
+| `?mode=keyboard` | Controlled by keyboard only. [See configuration details](prompter.md#control-using-keyboard) |
+| `?mode=shuttlekeyboard` | Controlled by a Contour Design ShuttleXpress, X-keys Jog and Shuttle, or any compatible device configured as a keyboard-like device. [See configuration details](prompter.md#control-using-contour-shuttlexpress-or-x-keys) |
+| `?mode=pedal` | Controlled by any MIDI device outputting CC note values between 0 and 127 on channel 8. Analogue expression pedals work well with TRS-USB MIDI converters. [See configuration details](prompter.md#control-using-midi-input-mode-pedal) |
+| `?mode=joycon` | Controlled by Nintendo Switch Joycons, using the HTML5 GamePad API. [See configuration details](prompter.md#control-using-nintendo-joycon-gamepad) |
+
+#### Control using mouse \(scroll wheel\)
+
+The prompter can be controlled in multiple ways when using the scroll wheel:
+
+| Query parameter | Description |
+| :-------------------------- | :--- |
+| `?controlmode=normal` | Scrolling of the mouse works as "normal scrolling" |
+| `?controlmode=speed` | Scrolling of the mouse changes the speed of scrolling. Left-click to toggle, right-click to rewind |
+| `?controlmode=smoothscroll` | Scrolling the mouse wheel starts continuous scrolling. Small speed adjustments can then be made by nudging the scroll wheel. Stop the scrolling by making a "larger scroll" on the wheel. |
+
+The mouse control thus has several operating modes, described in the table above. All of them are intended to be used with a computer mouse or a similar device, such as a presenter tool.
+
+#### Control using keyboard
+
+Keyboard control is intended for use with keyboard-like devices, such as presenter tools.
+
+| Scroll up | Scroll down |
+| :----------- | :------------ |
+| `Arrow Up` | `Arrow Down` |
+| `Arrow Left` | `Arrow Right` |
+| `Page Up` | `Page Down` |
+| | `Space` |
+
+#### Control using Contour ShuttleXpress or X-keys \(_?mode=shuttlekeyboard_\)
+
+This mode is intended to be used with a Contour ShuttleXpress or X-keys device, configured to work as a keyboard device. These devices have jog/shuttle wheels, and their software/firmware allows them to map scroll movement to keystrokes from any key combination. Since we only listen for key combinations, this effectively means that any device outputting keystrokes will work in this mode.
+
+| Query parameter | Type | Description | Default |
+| :----------------- | :--------------- | :--- | :-------------------------- |
+| `shuttle_speedMap` | Array of numbers | Speeds to scroll by \(px. pr. frame - approx 60fps\) when scrolling forwards. The beginning of the forwards-range maps to the first number in this array, and the end of the forwards-range maps to the end of this array. All values in between are interpolated using a spline curve. | `[0, 1, 2, 3, 5, 7, 9, 30]` |
+
+| Key combination | Function |
+| :--------------------------------------------------------- | :------------------------------------- |
+| `Ctrl` `Alt` `F1` ... `Ctrl` `Alt` `F7` | Set speed to +1 ... +7 \(Scroll down\) |
+| `Ctrl` `Shift` `Alt` `F1` ... `Ctrl` `Shift` `Alt` `F7` | Set speed to -1 ... -7 \(Scroll up\) |
+| `Ctrl` `Alt` `+` | Increase speed |
+| `Ctrl` `Alt` `-` | Decrease speed |
+| `Ctrl` `Alt` `Shift` `F8`, `Ctrl` `Alt` `Shift` `PageDown` | Jump to next Segment and stop |
+| `Ctrl` `Alt` `Shift` `F9`, `Ctrl` `Alt` `Shift` `PageUp` | Jump to previous Segment and stop |
+| `Ctrl` `Alt` `Shift` `F10` | Jump to top of Script and stop |
+| `Ctrl` `Alt` `Shift` `F11` | Jump to Live and stop |
+| `Ctrl` `Alt` `Shift` `F12` | Jump to next Segment and stop |
+
+Configuration files that can be used in their respective driver software:
+
+- [Contour ShuttleXpress](https://github.com/nrkno/sofie-core/blob/release26/resources/prompter_layout_shuttlexpress.pref)
+- [X-keys](https://github.com/nrkno/sofie-core/blob/release26/resources/prompter_layout_xkeys.mw3)
+
+#### Control using MIDI input \(_?mode=pedal_\)
+
+This mode listens to MIDI CC notes on channel 8, expecting a linear range, e.g. 0-127. It is suitable for use with expression pedals, but any MIDI controller can be used. The mode picks the first connected MIDI device, and supports hot-swapping \(you can remove and add the device without refreshing the browser\).
+
+Web-MIDI requires the web page to be served over HTTPS, or that the Chrome flag `unsafely-treat-insecure-origin-as-secure` is set.
+
+If you want to use traditional analogue pedals with a 5-volt TRS connection, a converter such as the _Beat Bars EX2M_ will work well.
+
+| Query parameter | Type | Description | Default |
+| :---------------------- | :--------------- | :--- | :-------------------------------------- |
+| `pedal_speedMap` | Array of numbers | Speeds to scroll by \(px. pr. frame - approx 60fps\) when scrolling forwards. The beginning of the forwards-range maps to the first number in this array, and the end of the forwards-range maps to the end of this array. All values in between are interpolated using a spline curve. | `[1, 2, 3, 4, 5, 7, 9, 12, 17, 19, 30]` |
+| `pedal_reverseSpeedMap` | Array of numbers | Same as `pedal_speedMap`, but for the backwards range. | `[10, 30, 50]` |
+| `pedal_rangeRevMin` | number | The end of the backwards-range, full speed backwards. | `0` |
+| `pedal_rangeNeutralMin` | number | The beginning of the backwards-range. | `35` |
+| `pedal_rangeNeutralMax` | number | The minimum input to run forward, the start of the forward-range \(min speed\). This is also the end of any "deadband" you want to filter out before starting to move forwards. | `80` |
+| `pedal_rangeFwdMax` | number | The maximum input, the end of the forward-range \(max speed\). | `127` |
+
+- `pedal_rangeNeutralMin` has to be greater than `pedal_rangeRevMin`
+- `pedal_rangeNeutralMax` has to be greater than `pedal_rangeNeutralMin`
+- `pedal_rangeFwdMax` has to be greater than `pedal_rangeNeutralMax`
+
+![Yamaha FC7 mapped for both a forward \(80-127\) and backwards \(0-35\) range.](/img/docs/main/features/yamaha-fc7.jpg)
+
+The default values allow for both going forwards and backwards. This matches the _Yamaha FC7_ expression pedal. The default values create a forward-range from 80-127, a neutral zone from 35-80 and a reverse-range from 0-35.
+
+Any movement within the forward range will map to the `pedal_speedMap`, with interpolation between any numbers in the `pedal_speedMap`. You can turn on `?debug=1` to see how your input maps to an output; this helps during calibration. Similarly, any movement within the backwards range maps to the `pedal_reverseSpeedMap`.
+
+**Calibration guide:**
+
+| **Symptom** | Adjustment |
+| :--- | :--- |
+| _"I can't rest my foot without it starting to run"_ | Increase `pedal_rangeNeutralMax` |
+| _"I have to push too far before it starts moving"_ | Decrease `pedal_rangeNeutralMax` |
+| _"It starts out fine, but runs too fast if I push too hard"_ | Add more weight to the lower part of the `pedal_speedMap` by adding more low values early in the map, compared to the large numbers at the end. |
+| _"I have to go too far back to reverse"_ | Increase `pedal_rangeNeutralMin` |
+| _"As I find a good speed, it varies a bit in speed up/down even if I hold my foot still"_ | Use `?debug=1` to see what speed is calculated in the position the presenter wants to rest their foot in. Add more of that number in a sequence in the `pedal_speedMap` to flatten out the speed curve, e.g. `[1, 2, 3, 4, 4, 4, 4, 5, ...]` |
+
+**Note:** The default values are set up to work with the _Yamaha FC7_ expression pedal, and will probably not be good for pedals with one continuous linear range from fully released to fully depressed. A suggested configuration for such pedals \(e.g. the _Mission Engineering EP-1_\) could look like this:
+
+| Query parameter | Suggestion |
+| :---------------------- | :-------------------------------------- |
+| `pedal_speedMap` | `[1, 2, 3, 4, 5, 7, 9, 12, 17, 19, 30]` |
+| `pedal_reverseSpeedMap` | `-2` |
+| `pedal_rangeRevMin` | `-1` |
+| `pedal_rangeNeutralMin` | `0` |
+| `pedal_rangeNeutralMax` | `1` |
+| `pedal_rangeFwdMax` | `127` |
+
+#### Control using Nintendo Joycon \(_?mode=joycon_\)
+
+This mode uses the browser's Gamepad API and polls connected Joycons for their states on button presses and joystick inputs.
+
+The Joycons can operate in 3 modes: the L-stick, the R-stick, or both L+R sticks together. Reconnections and jumping between modes work, with one known limitation: **transitioning from L+R to a single stick blocks all input, and requires a reconnect of the sticks you want to use.** This seems to be a bug in either the Joycons themselves or in the Gamepad API in general.
+
+| Query parameter | Type | Description | Default |
+| :----------------------- | :--------------- | :--- | :--------------------------- |
+| `joycon_speedMap` | Array of numbers | Speeds to scroll by \(px. pr. frame - approx 60fps\) when scrolling forwards. The beginning of the forwards-range maps to the first number in this array, and the end of the forwards-range maps to the end of this array. All values in between are interpolated using a spline curve. | `[1, 2, 3, 4, 5, 8, 12, 30]` |
+| `joycon_reverseSpeedMap` | Array of numbers | Same as `joycon_speedMap`, but for the backwards range. | `[1, 2, 3, 4, 5, 8, 12, 30]` |
+| `joycon_rangeRevMin` | number | The end of the backwards-range, full speed backwards. | `-1` |
+| `joycon_rangeNeutralMin` | number | The beginning of the backwards-range. | `-0.25` |
+| `joycon_rangeNeutralMax` | number | The minimum input to run forward, the start of the forward-range \(min speed\). This is also the end of any "deadband" you want to filter out before starting to move forwards. | `0.25` |
+| `joycon_rangeFwdMax` | number | The maximum input, the end of the forward-range \(max speed\). | `1` |
+
+- `joycon_rangeNeutralMin` has to be greater than `joycon_rangeRevMin`
+- `joycon_rangeNeutralMax` has to be greater than `joycon_rangeNeutralMin`
+- `joycon_rangeFwdMax` has to be greater than `joycon_rangeNeutralMax`
+ +**Button map:** + +| **Button** | Acton | +| :--------- | :------------------------ | +| L2 / R2 | Go to the "On-air" story | +| L / R | Go to the "Next" story | +| Up / X | Go top the top | +| Left / Y | Go to the previous story | +| Right / A | Go to the following story | + +**Calibration guide:** + +| **Symptom** | Adjustment | +| :------------------------------------------------------------------------------------------ | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| _"The prompter drifts upwards when I'm not doing anything"_ | Decrease `joycon_rangeNeutralMin` | +| _"The prompter drifts downwards when I'm not doing anything"_ | Increase `joycon_rangeNeutralMax` | +| _"It starts out fine, but runs too fast if I move too far"_ | Add more weight to the lower part of the `joycon_speedMap / joycon_reverseSpeedMap` by adding more low values early in the map, compared to the large numbers in the end. | +| _"I can't reach max speed backwards"_ | Increase `joycon_rangeRevMin` | +| _"I can't reach max speed forwards"_ | Decrease `joycon_rangeFwdMax` | +| _"As I find a good speed, it varies a bit in speed up/down even if I hold my finger still"_ | Use `?debug=1` to see what speed is calculated in the position the presenter wants to rest their finger in. Add more of that number in a sequence in the `joycon_speedMap` to flatten out the speed curve, i.e. `[1, 2, 3, 4, 4, 4, 4, 5, ...]` | diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/sofie-views.mdx b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/sofie-views.mdx new file mode 100644 index 0000000000..e3d21dc9be --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/sofie-views.mdx @@ -0,0 +1,313 @@ +--- +sidebar_position: 2 +--- + +import Tabs from '@theme/Tabs' +import TabItem from '@theme/TabItem' + +# Sofie Views + +## Lobby View + +![Rundown View](/img/docs/lobby-view.png) + +All existing rundowns are listed in the _Lobby View_. + +## Rundown View + +![Rundown View](/img/docs/main/features/active-rundown-example.png) + +The _Rundown View_ is the main view that the producer is working in. + +![The Rundown view and naming conventions of components](/img/docs/main/sofie-naming-conventions.png) + +![Take Next](/img/docs/main/take-next.png) + +#### Take Point + +The Take point is currently playing [Part](#part) in the rundown, indicated by the "On Air" line in the GUI. +What's played on air is calculated from the timeline objects in the Pieces in the currently playing part. + +The Pieces inside of a Part determines what's going to happen, the could be indicating things like VT:s, cut to cameras, graphics, or what script the host is going to read. + +:::info +You can TAKE the next part by pressing _F12_ or the _Numpad Enter_ key. +::: + +#### Next Point + +The Next point is the next queued Part in the rundown. When the user clicks _Take_, the Next Part becomes the currently playing part, and the Next point is also moved. + +:::info +Change the Next point by right-clicking in the GUI, or by pressing \(Shift +\) F9 & F10. 
+::: + +#### Freeze-frame Countdown + +![Part is 1 second heavy, LiveSpeak piece has 7 seconds of playback until it freezes](/img/docs/main/freeze-frame-countdown.png) + +If a Piece has more or less content than the Part's expected duration allows, an additional counter with a Snowflake icon will be displayed, attached to the On Air line, counting down to the moment when content from that Piece will freeze-frame at the last frame. The time span in which the content from the Piece will be visible on the output, but will be frozen, is displayed with an overlay of icicles. + +#### Lookahead + +Elements in the [Next point](#next-point) \(or beyond\) might be pre-loaded or "put on preview", depending on the blueprints and playout devices used. This feature is called "Lookahead". + +### Storyboard Mode + +In the top-right corner of the Segment, there's a button controlling the display style of a given Segment. The default display style of a Segment can be indicated by the [Blueprints](../concepts-and-architecture.md#blueprints), but the User can switch to a different mode at any time. You can also change the display mode of all Segments at once, using a button in the bottom-right corner of the Rundown View. + +![Storyboard Mode](/img/docs/main/storyboard.png) + +The **_Storyboard_** mode is an alternative to the default **_Timeline_** mode. In Storyboard mode, the accurate placement in time of each Piece is not visualized, so that more Parts can be visualized at once in a single row. This can be particularly useful in Shows without very strict timing planning or where timing is not driven by the User, but rather some external factor; or in Shows where very long Parts are joined with very short ones: sports, events and debates. This mode also does not visualize the history of the playback: rather, it only shows what is currently On Air or is planned to go On Air. + +Storyboard mode selects a "main" Piece of the Part, using the same logic as the [Presenter View](#presenter-view), and presents it with a larger, hover-scrub-enabled Piece for easy preview. The countdown to freeze-frame is displayed in the top-right hand corner of the Thumbnail, once less than 10 seconds remain to freeze-frame. The Transition Piece is displayed on top of the thumbnail. Other Pieces are placed below the thumbnail, stacked in order of playback. After a Piece goes off-air, it will dissapear from the view. + +If no more Parts can be displayed in a given Segment, they are stacked in order on the right side of the Segment. The User can scroll through thse Parts by click-and-dragging the Storyboard area, or using the mouse wheel - `Alt`+Wheel, if only a vertical wheel is present in the mouse. + +### List View Mode + +Another mode available to display a Segment is the List View. In this mode, each _Part_ and it's contents are being displayed as a mini-timeline and it's width is normalized to fit the screen, unless it's shorter than 30 seconds, in which case it will be scaled down accordingly. + +![List View Mode](/img/docs/main/list_view.png) + +In this mode, the focus is on the "main" Piece of the Part. Additional _Lower-Third_ content that is not spanning the entire Part (is not infinite) will be displayed on top of the main Piece. All other content can be displayed to the right of the mini-timeline as a set of indicators, one per every Layer. Clicking on those indicators will show a pop-up with the Pieces so that they can be investigated using _hover-scrub_. 
Indicators can be also shown for Ad-Libs assigned to a Part, for easier discovery by the User. Which Layers should be shown in the columns can be decided in the [Settings ● Layers](../configuration/settings-view.md#show-style) area. A special, larger indicator is reserved for the Script piece, which can be useful to display so-called _out-words_. + +If a Part has an _in-transition_ Piece, it will be displayed to the left of the Part's Take Point. + +This view is designed to be used in productions that are mixing pre-planned and timed segments with more free-flowing production or mixing short live in-camera links with longer pre-produced clips, while trying to keep as much of the show in the viewport as possible, at the expense of hiding some of the content from the User and the _duration_ of the Part on screen having no bearing on it's _width_. This mode also allows Sofie to visualize content _beyond_ the planned duration of a Part. + +:::info +The Segment header area also shows the expected (planned) durations for all the Parts and will also show which Parts are sharing timing in a timing group using a *⌊* symbol in the place of a counter. +::: + +All user interactions work in the Storyboard and List View mode the same as in Timeline mode: Takes, AdLibs, Holds and moving the [Next Point](#next-point) around the Rundown. + +### Segment Header Countdowns + +![Each Segment has two clocks - the Segment Time Budget and a Segment Countdown](/img/docs/main/segment-budget-and-countdown.png) + + + +Clock on the left is an indicator of how much time has been spent playing Parts from that Segment in relation to how much time was planned for Parts in that Segment. If more time was spent playing than was planned for, this clock will turn red, there will be a **+** sign in front of it and will begin counting upwards. + + + +Clock on the right is a countdown to the beginning of a given segment. This takes into account unplayed time in the On Air Part and all unplayed Parts between the On Air Part and a given Segment. If there are no unplayed Parts between the On Air Part and the Segment, this counter will disappear. + + + +In the illustration above, the first Segment \(_Ny Sak_\) has been playing for 4 minutes and 25 seconds longer than it was planned for. The second segment \(_Direkte Strømstad\)_ is planned to play for 4 minutes and 40 seconds. There are 5 minutes and 46 seconds worth of content between the current On Air line \(which is in the first Segment\) and the second Segment. + +If you click on the Segment header countdowns, you can switch the _Segment Countdown_ to a _Segment OnAir Clock_ where this will show the time-of-day when a given Segment is expected to air. + +![Each Segment has two clocks - the Segment Time Budget and a Segment Countdown](/img/docs/main/features/segment-header-2.png) + +### Rundown Dividers + +When using a workflow and blueprints that combine multiple NRCS Rundowns into a single Sofie Rundown \(such as when using the "Ready To Air" functionality in AP ENPS\), information about these individual NRCS Rundowns will be inserted into the Rundown View at the point where each of these incoming Rundowns start. + +![Rundown divider between two NRCS Rundowns in a "Ready To Air" Rundown](/img/docs/main/rundown-divider.png) + +For reference, these headers show the Name, Planned Start and Planned Duration of the individual NRCS Rundown. + +### Shelf + +The shelf contains lists of AdLibs that can be played out. 
+ +![Shelf](/img/docs/main/shelf.png) + +:::info +The Shelf can be opened by clicking the handle at the bottom of the screen, or by pressing the TAB key +::: + +### Shelf Layouts + +The _Rundown View_ and the _Detached Shelf View_ UI can have multiple concurrent layouts for any given Show Style. The automatic selection mechanism works as follows: + +1. select the first layout of the `RUNDOWN_LAYOUT` type, +2. select the first layout of any type, +3. use the default layout \(no additional filters\), in the style of `RUNDOWN_LAYOUT`. + +To use a specific layout in these views, you can use the `?layout=...` query string, providing either the ID of the layout or a part of the name. This string will then be mached against all available layouts for the Show Style, and the first matching will be selected. For example, for a layout called `Stream Deck layout`, to open the currently active rundown's Detached Shelf use: + +`http://localhost:3000/activeRundown/studio0/shelf?layout=Stream` + +The Detached Shelf view with a custom `DASHBOARD_LAYOUT` allows displaying the Shelf on an auxiliary touch screen, tablet or a Stream Deck device. A specialized Stream Deck view will be used if the view is opened on a device with hardware characteristics matching a Stream Deck device. + +The shelf also contains additional elements, not controlled by the Rundown View Layout. These include Buckets and the Inspector. If needed, these components can be displayed or hidden using additional url arguments: + +| Query parameter | Description | +| :---------------------------------- | :------------------------------------------------------------------------ | +| Default | Display the rundown layout \(as selected\), all buckets and the inspector | +| `?display=layout,buckets,inspector` | A comma-separated list of features to be displayed in the shelf | +| `?buckets=0,1,...` | A comma-separated list of buckets to be displayed | + +- `display`: Available values are: `layout` \(for displaying the Rundown Layout\), `buckets` \(for displaying the Buckets\) and `inspector` \(for displaying the Inspector\). +- `buckets`: The buckets can be specified as base-0 indices of the buckets as seen by the user. This means that `?buckets=1` will display the second bucket as seen by the user when not filtering the buckets. This allows the user to decide which bucket is displayed on a secondary attached screen simply by reordering the buckets on their main view. + +_Note: the Inspector is limited in scope to a particular browser window/screen, so do not expect the contents of the inspector to sync across multiple screens._ + +For the purpose of running the system in a studio environment, there are some additional views that can be used for various purposes: + +### Sidebar Panel + +#### Switchboard + +![Switchboard](/img/docs/main/switchboard.png) + +The Switchboard allows the producer to turn automation _On_ and _Off_ for sets of devices, as well as re-route automation control between devices - both with an active rundown and when no rundown is active in a [Studio](../concepts-and-architecture.md#system-organization-studio-and-show-style). + +The Switchboard panel can be accessed from the Rundown View's right-hand Toolbar, by clicking on the Switchboard button, next to the Support panel button. + +:::info +Technically, the switchboard activates and deactivates Route Sets. The Route Sets are grouped by Exclusivity Group. 
+If an Exclusivity Group contains exactly two elements with the `ACTIVATE_ONLY` mode, the Route Sets will be displayed on either side of the switch. Otherwise, they will be displayed separately in a list next to an _Off_ position. See also [Settings ● Route sets](../configuration/settings-view#route-sets).
+:::
+
+## Prompter View
+
+`/prompter/:studioId`
+
+![Prompter View](/img/docs/main/features/prompter-example.png)
+
+A fullscreen page which displays the prompter text for the currently active rundown. The prompter can be controlled and configured in various ways, see more at the [Prompter](prompter.md) documentation. If no Rundown is active in a given studio, the [Screensaver](sofie-views.mdx#screensaver) will be displayed.
+
+## Presenter View
+
+`/countdowns/:studioId/presenter`
+
+![Presenter View](/img/docs/main/features/presenter-screen-example.png)
+
+A fullscreen page, intended to be shown to the studio presenter. It displays countdown timers for the current and next items in the rundown. If no Rundown is active in a given studio, the [Screensaver](sofie-views.mdx#screensaver) will be shown.
+
+### Presenter View Overlay
+
+`/countdowns/:studioId/overlay`
+
+![Presenter View Overlay](/img/docs/main/features/presenter-screen-overlay-example.png)
+
+A fullscreen view with a transparent background, intended to be shown to the studio presenter as an overlay on top of the produced PGM signal. It displays a reduced amount of the information from the regular [Presenter screen](sofie-views.mdx#presenter-view): the countdown to the end of the current Part, a summary preview \(type and name\) of the next item in the Rundown and the current time of day. If no Rundown is active it will show the name of the Studio.
+
+## Camera Position View
+
+`/countdowns/:studioId/camera`
+
+![Camera Position View](/img/docs/main/features/camera-view.jpg)
+
+A fullscreen view designed specifically for use on mobile devices or extra screens, displaying a summary of the currently active Rundown, filtered for Parts containing Pieces matching particular Source Layers and Studio Labels.
+
+The Pieces are displayed as a Timeline, with the Pieces moving right-to-left as time progresses, and Parts being displayed from the one currently playing up to the end of the Rundown. The closest \(not necessarily _Next_\) Part has a countdown timer in the top-right corner showing when it's expected to be Live. Each Part also has a Duration counter in the bottom-right.
+
+This view can be configured using query parameters:
+
+| Query parameter  | Type   | Description                                                                                                  | Default      |
+| :--------------- | :----- | :----------------------------------------------------------------------------------------------------------- | :----------- |
+| `sourceLayerIds` | string | A comma-separated list of Source Layer IDs to be considered for display                                      | _(show all)_ |
+| `studioLabels`   | string | A comma-separated list of Studio Labels \(Piece `.content.studioLabel` values\) to be considered for display | _(show all)_ |
+| `fullscreen`     | 0 / 1  | Should the view become fullscreen on the device on first user interaction                                    | 0            |
+
+Example: [http://127.0.0.1/countdowns/studio0/camera?sourceLayerIds=camera0,dve0&studioLabels=1,KAM%201,K1,KAM1&fullscreen=1](http://127.0.0.1/countdowns/studio0/camera?sourceLayerIds=camera0,dve0&studioLabels=1,KAM%201,K1,KAM1&fullscreen=1)
+
+## Active Rundown View
+
+`/activeRundown/:studioId`
+
+![Active Rundown View](/img/docs/main/features/active-rundown-example.png)
+
+A page which automatically displays the currently active rundown. Can be useful for the producer to have on a secondary screen.
+
+## Active Rundown – Shelf
+
+`/activeRundown/:studioId/shelf`
+
+![Active Rundown Shelf](/img/docs/main/features/active-rundown-shelf-example.png)
+
+A view which automatically displays the currently active rundown, and shows the Shelf in full screen. Can be useful for the producer to have on a secondary screen.
+
+A shelf layout can be selected by modifying the query string, see [Shelf Layouts](#shelf-layouts).
+
+## Specific Rundown – Shelf
+
+`/rundown/:rundownId/shelf`
+
+Displays the Shelf in fullscreen for a given rundown.
+
+## Screensaver
+
+When big screen displays \(like the Prompter and the Presenter screen\) do not have any meaningful content to show, an animated screensaver showing the current time and the next planned show will be displayed. If no Rundown is upcoming, the Studio name will be displayed.
+
+![A screensaver showing the next scheduled show](/img/docs/main/features/next-scheduled-show-example.png)
+
+## System Status
+
+:::caution
+Documentation for this feature is yet to be written.
+:::
+
+System and device statuses are displayed here.
+
+:::info
+An API endpoint for the system status is also available under the URL `/health`
+:::
+
+## Media Status View
+
+:::caution
+Documentation for this feature is yet to be written.
+:::
+
+This page displays media transfer statuses.
+
+## Message Queue View
+
+:::caution
+Documentation for this feature is yet to be written.
+:::
+
+_Sofie Core_ can send messages to external systems \(such as metadata or as-run logs\) while on air.
+
+These messages are retained for a period of time, and can be reviewed in this list.
+
+Messages that were not successfully sent can be inspected and re-sent here.
+
+## User Log View
+
+The user activity log contains a list of the actions that users have performed. This is used when troubleshooting issues on-air.
+
+![User Log](/img/docs/main/features/user-log.png)
+
+### Columns, explained
+
+#### Execution time
+
+The execution time column displays **coreDuration** + **gatewayDuration** \(**timelineResolveDuration**\):
+
+- **coreDuration**: The time it took for Core to execute the command \(i.e. start-of-command 🠺 stored-result-into-database\)
+- **gatewayDuration**: The time it took for Playout Gateway to execute the timeline \(i.e. stored-result-into-database 🠺 timeline-resolved 🠺 callback-to-core\)
+- **timelineResolveDuration**: The duration it took in TSR \(in Playout Gateway\) to resolve the timeline
+
+It is important to note that **gatewayDuration** begins at the exact moment **coreDuration** ends.
+So **coreDuration + gatewayDuration** is the full time it took from the beginning of the user action to the timeline being resolved \(plus a little extra for the final callback that reports the measurement\).
+
+#### Action
+
+Describes what action the user took; e.g. pressed a key, clicked a button, or selected a menu item.
+
+#### Method
+
+The internal name in _Sofie Core_ of the function that was called.
+
+#### Status
+
+The result of the operation: "Success" or an error message.
+
+## Evaluations
+
+When a broadcast is done, users can input feedback about how the show went in an evaluation form.
+
+:::info
+Evaluations can be configured to be sent to Slack, by setting the "Slack Webhook URL" in the [Settings View](../configuration/settings-view.md) under _Studio_.
+:::
+
+## Settings View
+
+The [Settings View](../configuration/settings-view.md) is only available to users with the [Access Level](access-levels.md) set correctly.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/system-health.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/system-health.md
new file mode 100644
index 0000000000..11ab7046b4
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/features/system-health.md
@@ -0,0 +1,27 @@
+---
+sidebar_position: 11
+---
+
+# System Health
+
+## Legacy healthcheck
+
+There is a legacy `/health` endpoint used by NRK systems. Its use is being phased out, and it will eventually be replaced by the new Prometheus endpoint.
+
+## Prometheus
+
+From version 1.49, there is a Prometheus `/metrics` endpoint exposed from Sofie. The metrics exposed from here will increase over time as we find more data to collect.
+
+Because Sofie is made up of multiple worker threads, each metric has a `threadName` label indicating which thread it came from. In many cases this field will not matter, but it is useful for the default process metrics, and if your installation has multiple studios defined.
+
+Each thread exposes some default nodejs process metrics. These are defined by the [`prom-client`](https://github.com/siimon/prom-client#default-metrics) library we are using, and are best described there.
+
+The current Sofie metrics exposed are:
+
+| name                                       | type    | description                                                         |
+| ------------------------------------------ | ------- | ------------------------------------------------------------------- |
+| sofie_meteor_ddp_connections_total         | Gauge   | Number of open DDP connections                                      |
+| sofie_meteor_publication_subscribers_total | Gauge   | Number of subscribers on a Meteor publication (ignoring arguments)  |
+| sofie_meteor_jobqueue_queue_total          | Counter | Number of jobs put into each worker's job queue                     |
+| sofie_meteor_jobqueue_success              | Counter | Number of successful jobs from each worker                          |
+| sofie_meteor_jobqueue_queue_errors         | Counter | Number of failed jobs from each worker                              |
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/further-reading.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/further-reading.md
new file mode 100644
index 0000000000..caea59f4f2
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/further-reading.md
@@ -0,0 +1,59 @@
+---
+description: This guide has a lot of links. Here they are all listed by section.
+---
+
+# Further Reading
+
+## Getting Started
+
+- [Sofie's Concepts & Architecture](concepts-and-architecture.md)
+- [Gateways](concepts-and-architecture.md#gateways)
+- [Blueprints](concepts-and-architecture.md#blueprints)
+
+- Ask questions in the [Sofie Slack Channel](https://join.slack.com/t/sofietv/shared_invite/enQtNTk2Mzc3MTQ1NzAzLTJkZjMyMDg3OGM0YWU3MmU4YzBhZDAyZWI1YmJmNmRiYWQ1OTZjYTkzOTkzMTA2YTE1YjgxMmVkM2U1OGZlNWI)
+
+## Installation & Setup
+
+### Installing Sofie Core
+
+- [Windows install for Docker](https://hub.docker.com/editions/community/docker-ce-desktop-windows)
+- [Linux install instructions for Docker](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
+- [Linux install instructions for Docker Compose](https://www.digitalocean.com/community/tutorials/how-to-install-docker-compose-on-ubuntu-18-04)
+- [Sofie Core Docker File Download](https://firebasestorage.googleapis.com/v0/b/gitbook-28427.appspot.com/o/assets%2F-LWRCgfY_-kYo9iX6UNy%2F-Lo5eWjgoVlRRDeFzLuO%2F-Lo5fLSSyM1eO6OXScew%2Fdocker-compose.yaml?alt=media&token=fc2fbe79-365c-4817-b270-e507c6a6e3c6)
+
+### Installing a Gateway
+
+#### Ingest Gateways and NRCS
+
+- [MOS Protocol Overview & Documentation](http://mosprotocol.com/)
+- Information about ENPS on [The Associated Press' Website](https://www.ap.org/enps/support)
+- Information about iNews on [Avid's Website](https://www.avid.com/products/inews/how-to-buy)
+
+**Google Spreadsheet Gateway**
+
+- [Demo Blueprints](https://github.com/SuperFlyTV/sofie-demo-blueprints/releases) on GitHub's website.
+- [Example Rundown](https://docs.google.com/spreadsheets/d/1iyegRv5MxYYtlVu8uEEMkBYXsLL-71PAMrNW0ZfWRUw/edit?usp=sharing) provided by Sofie.
+- [Google Sheets API](https://console.developers.google.com/apis/library/sheets.googleapis.com?) on the Google Developer website.
+
+### Additional Software & Hardware
+
+#### Installing CasparCG Server for Sofie
+
+- NRK's version of [CasparCG Server](https://github.com/nrkno/sofie-casparcg-server/releases) on GitHub.
+- [Media Scanner](https://github.com/nrkno/sofie-media-scanner/releases) on GitHub.
+- [CasparCG Launcher](https://github.com/nrkno/sofie-casparcg-launcher) on GitHub.
+- [Microsoft Visual C++ 2015 Redistributable](https://www.microsoft.com/en-us/download/details.aspx?id=52685) on Microsoft's website.
+- [Blackmagic Design's DeckLink Cards](https://www.blackmagicdesign.com/products/decklink/models) on Blackmagic Design's website. Check the [DeckLink cards](installation/installing-connections-and-additional-hardware/casparcg-server-installation.md#decklink-cards) section for compatibility.
+- [Installing a DeckLink Card](https://documents.blackmagicdesign.com/UserManuals/DesktopVideoManual.pdf) as a PDF.
+- [Blackmagic Design 'Desktop Video' Driver Download](https://www.blackmagicdesign.com/support/family/capture-and-playback) on Blackmagic Design's website.
+- [CasparCG Server Configuration Validator](https://casparcg.net/validator/)
+
+**Additional Resources**
+
+- Viz graphics through MSE, info on the [Vizrt](https://www.vizrt.com/) website.
+- Information about the [Blackmagic Design's HyperDeck](https://www.blackmagicdesign.com/products/hyperdeckstudio)
+
+## FAQ, Progress, and Issues
+
+- [MIT Licence](https://opensource.org/licenses/MIT)
+- [Releases and Issues on GitHub](https://github.com/nrkno/Sofie-TV-automation/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3ARelease)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/_category_.json
new file mode 100644
index 0000000000..2f3c7f2a9f
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Installation",
+  "position": 3
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/initial-sofie-core-setup.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/initial-sofie-core-setup.md
new file mode 100644
index 0000000000..c0672b3e55
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/initial-sofie-core-setup.md
@@ -0,0 +1,23 @@
+---
+sidebar_position: 3
+---
+
+# Initial Sofie Core Setup
+
+#### Prerequisites
+
+* [Installed and running _Sofie Core_](installing-sofie-server-core.md)
+
+Once _Sofie Core_ has been installed and is running, you can begin setting it up. The first step is to navigate to the _Settings page_. Please review the [Sofie Access Level](../features/access-levels.md) page for assistance getting there.
+
+To upgrade to a newer version or install new blueprints, Sofie needs to run its "Upgrade database" procedure to migrate data and pre-fill various settings. You can do this by clicking the _Upgrade Database_ button in the menu.
+
+![Update Database Section of the Settings Page](/img/docs/getting-started/settings-page-full-update-db-r47.png)
+
+Fill in the form as prompted and continue by clicking _Run Migrations Procedure_. Sometimes you will need to go through multiple steps before the upgrade is finished.
+
+Next, you will need to add some [Blueprints](installing-blueprints.md) and add [Gateways](installing-a-gateway/intro.md) to allow _Sofie_ to interpret rundown data and then play things out.
+
+![Initial Studio Settings Page](/img/docs/getting-started/settings-page-initial-studio.png)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/_category_.json
new file mode 100644
index 0000000000..7fa55d484d
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Installing a Gateway",
+  "position": 5
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/intro.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/intro.md
new file mode 100644
index 0000000000..03bc8a5339
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/intro.md
@@ -0,0 +1,25 @@
+---
+sidebar_label: Introduction
+sidebar_position: 1
+---
+# Introduction: Installing a Gateway
+
+#### Prerequisites
+
+* [Installed and running Sofie Core](../installing-sofie-server-core.md)
+
+The _Sofie Core_ is the primary application for managing the broadcast, but it doesn't play anything out on its own. A Gateway establishes the connection from _Sofie Core_ to other pieces of hardware or remote software. A basic setup may include the [Spreadsheet Gateway](rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support.md), which will ingest a rundown from Google Sheets, and the [Playout Gateway](playout-gateway.md), which sends commands to a CasparCG Server for graphics playout, an ATEM vision mixer, and / or the [Sisyfos audio controller](https://github.com/olzzon/sisyfos-audio-controller).
+
+Installing a gateway is a two-part process. To begin, you will [add the required Blueprints](../installing-blueprints.md), or mini plug-in programs, to _Sofie Core_ so it can manipulate the data from the Gateway. Then you will install the Gateway itself. Each Gateway follows a similar installation pattern, but each one differs slightly. The links below will help you navigate to the correct Gateway for the piece of hardware / software you are using.
+
+### Rundown & Newsroom Gateways
+
+* [Google Spreadsheet Gateway](rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support.md)
+* [iNEWS Gateway](rundown-or-newsroom-system-connection/inews-gateway.md)
+* [MOS Gateway](rundown-or-newsroom-system-connection/mos-gateway.md)
+
+### Playout & Media Manager Gateways
+
+* [Playout Gateway](playout-gateway.md)
+* [Media Manager](../media-manager.md)
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/playout-gateway.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/playout-gateway.md
new file mode 100644
index 0000000000..0fd5f47626
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/playout-gateway.md
@@ -0,0 +1,6 @@
+---
+sidebar_position: 3
+---
+# Playout Gateway
+
+The _Playout Gateway_ handles interaction with external pieces of hardware or software by sending commands that play out rundown content. This gateway used to be a separate installation, but it has since been moved into the main _Sofie Core_ component.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/_category_.json
new file mode 100644
index 0000000000..b4c4ffc34d
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Rundown or Newsroom System Connection",
+  "position": 4
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/inews-gateway.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/inews-gateway.md
new file mode 100644
index 0000000000..48659251a6
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/inews-gateway.md
@@ -0,0 +1,12 @@
+# iNEWS Gateway
+
+The iNEWS Gateway communicates with an iNEWS system to ingest and remain in sync with a rundown.
+
+### Installing iNEWS for Sofie
+
+The iNEWS Gateway allows you to create rundowns from within iNEWS and sync them with the _Sofie Core_. The rundowns will update in real time, and any changes made will be seen from within your Playout Timeline.
+
+The setup for the iNEWS Gateway is already in the Docker Compose file you downloaded earlier. Remove the _\#_ symbol from the start of the line labeled `image: tv2/inews-ftp-gateway:develop` and add a _\#_ to the other ingest gateway that was being used.
+
+Although the iNEWS Gateway is available free of charge, an iNEWS license is not. Visit [Avid's website](https://www.avid.com/products/inews/how-to-buy) to find an iNEWS reseller that handles your geographic area.
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support.md
new file mode 100644
index 0000000000..8cdd2ed637
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support.md
@@ -0,0 +1,46 @@
+# Google Spreadsheet Gateway
+
+The Spreadsheet Gateway is an application for piping data between Sofie Core and Spreadsheets on Google Drive.
+
+### Example Blueprints for Spreadsheet Gateway
+
+To begin with, you will need to install a set of Blueprints that can handle the data being sent from the _Gateway_ to _Sofie Core_. Download the `demo-blueprints-r*.zip` file containing the blueprints you need from the [Demo Blueprints GitHub Repository](https://github.com/SuperFlyTV/sofie-demo-blueprints/releases). It is recommended to choose the newest release, but an older _Sofie Core_ version may require a different Blueprint version. The _Rundown page_ will warn you about any issue and display the desired versions.
+
+Instructions on how to install any Blueprint can be found in the [Installing Blueprints](../../installing-blueprints.md) section from earlier.
+
+### Spreadsheet Gateway Configuration
+
+If you are using the Docker version of Sofie, then the Spreadsheet Gateway will come preinstalled. For those who are not, please follow the [instructions listed on the GitHub page](https://github.com/SuperFlyTV/spreadsheet-gateway) labeled _Installation \(for developers\)._
+
+Once the Gateway has been installed, you can navigate to the _Settings page_ and check that the newly added Gateway is listed as _Spreadsheet Gateway_ under the _Devices section_.
+
+Before you select the Device, you want to add it to the current _Studio_ you are using. Select your current Studio from the menu and navigate to the _Attached Devices_ option. Click the _+_ icon and select the Spreadsheet Gateway.
+
+Now you can select the _Device_ from the _Devices menu_ and click the link provided to enable your Google Drive API to send files to the _Sofie Core_. The page that opens will look similar to the image below.
+
+![Nodejs Quickstart page](/img/docs/installation/installing-a-gateway/rundown-or-newsroom-system-connection/nodejs-quickstart.png)
+
+Make sure to follow the steps in **Create a project and enable the API** and enable the **Google Drive API** as well as the **Google Sheets API**. Your "APIs and services" Dashboard should now look as follows:
+
+![APIs and Services Dashboard](/img/docs/installation/installing-a-gateway/rundown-or-newsroom-system-connection/apis-and-services-dashboard.png)
+
+Now follow the steps in **Create credentials** and make sure to create an **OAuth Client ID** for a **Desktop App** and download the credentials file.
+
+![Create Credentials page](/img/docs/installation/installing-a-gateway/rundown-or-newsroom-system-connection/create-credentials.png)
+
+Use the button to download the configuration to a file and navigate back to _Sofie Core's Settings page_. Select the Spreadsheet Gateway, then click the _Browse_ button and upload the configuration file you just downloaded. A new link will appear to confirm access to your Google Drive account. Select the link and, in the new window, select the Google account you would like to use. Currently, the Sofie Core application is not verified with Google, so you will need to acknowledge this and proceed past the unverified page. Click the _Advanced_ button and then click _Go to QuickStart \( Unsafe \)_.
+
+After navigating through the prompts you are presented with your verification code. Copy this code into the input field on the _Settings page_ and the field should be removed. A message confirming that the access token was saved will appear.
+
+You can now navigate to your Google Drive account and create a new folder for your rundowns. It is important that this folder has a unique name. Next, navigate back to _Sofie Core's Settings page_ and add the folder name to the appropriate input.
+
+The indicator should now read _Good, Watching folder 'Folder Name Here'_. Now you just need an example rundown. [Navigate to this Google Sheets file](https://docs.google.com/spreadsheets/d/1iyegRv5MxYYtlVu8uEEMkBYXsLL-71PAMrNW0ZfWRUw/edit?usp=sharing), select the _File_ menu and then select _Make a copy_. In the popup window, select _My Drive_ and then navigate to and select the rundowns folder you created earlier.
+
+At this point, one of two things will happen. If you have the Google Sheets API enabled \(this is different from the Google Drive API you enabled earlier\), then the Rundown you just copied will appear in the _Rundown page_ and be accessible. The other outcome is that the Spreadsheet Gateway status reads _Unknown, Initializing..._, which most likely means you need to enable the Google Sheets API. Navigate to the [Google Sheets API Dashboard with this link](https://console.developers.google.com/apis/library/sheets.googleapis.com?) and click the _Enable_ button. Navigate back to _Sofie's Settings page_ and restart the Spreadsheet Gateway. The status should now read _Good, Watching folder 'Folder Name Here'_, and the rundown will appear in the _Rundown page_.
+
+### Further Reading
+
+- [Demo Blueprints](https://github.com/SuperFlyTV/sofie-demo-blueprints/) GitHub Page for Developers
+- [Example Rundown](https://docs.google.com/spreadsheets/d/1iyegRv5MxYYtlVu8uEEMkBYXsLL-71PAMrNW0ZfWRUw/edit?usp=sharing) provided by Sofie.
+- [Google Sheets API](https://console.developers.google.com/apis/library/sheets.googleapis.com?) on the Google Developer website.
+- [Spreadsheet Gateway](https://github.com/SuperFlyTV/spreadsheet-gateway) GitHub Page for Developers
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/intro.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/intro.md
new file mode 100644
index 0000000000..cf2c62a508
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/intro.md
@@ -0,0 +1,14 @@
+# Rundown & Newsroom Systems
+
+Sofie Core doesn't talk directly to the newsroom systems, but instead via one of the Gateways: the Google Spreadsheet Gateway, the iNEWS Gateway, or the MOS \([Media Object Server Communications Protocol](http://mosprotocol.com/)\) Gateway, which can handle interacting with any system that communicates via MOS.
+
+### Further Reading
+
+* [MOS Protocol Overview & Documentation](http://mosprotocol.com/)
+* [iNEWS on Avid's Website](https://www.avid.com/products/inews/how-to-buy)
+* [ENPS on The Associated Press' Website](https://www.ap.org/enps/support)
+
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/mos-gateway.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/mos-gateway.md
new file mode 100644
index 0000000000..8a2a60145c
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-a-gateway/rundown-or-newsroom-system-connection/mos-gateway.md
@@ -0,0 +1,9 @@
+# MOS Gateway
+
+The MOS Gateway communicates with a device that supports the [MOS protocol](http://mosprotocol.com/wp-content/MOS-Protocol-Documents/MOS-Protocol-2.8.4-Current.htm) to ingest and remain in sync with a rundown. It can connect to any editorial system \(NRCS\) that uses version 2.8.4 of the MOS protocol, such as ENPS, and sync their rundowns with the _Sofie Core_. The rundowns are kept updated in real time, and any changes made will be seen in the Sofie GUI.
+
+The setup for the MOS Gateway is handled in the Docker Compose in the [Quick Install](../../installing-sofie-server-core.md) page.
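+
+For reference, here is a minimal sketch of what that service looks like in the docker-compose file \(the image tag, ports and command are taken from the Quick Install example; adjust them to match your deployment\):
+
+```yaml
+# Sketch of the mos-gateway service from the Quick Install docker-compose file.
+# Uncomment it there (and comment out any other ingest gateway) to use MOS ingest.
+mos-gateway:
+  image: sofietv/tv-automation-mos-gateway:release37
+  restart: always
+  ports:
+    - "10540:10540" # MOS Lower port
+    - "10541:10541" # MOS Upper port
+  command: yarn start -host core -port 3000 -id mosGateway0
+  networks:
+    - sofie
+  depends_on:
+    - core
+```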
+
+One thing to note if managing the mos-gateway manually: it needs a few ports open \(10540, 10541\) for MOS messages to be pushed to it from the NCS.
+
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-blueprints.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-blueprints.md
new file mode 100644
index 0000000000..34796bbb1d
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-blueprints.md
@@ -0,0 +1,46 @@
+---
+sidebar_position: 4
+---
+
+# Installing Blueprints
+
+#### Prerequisites
+
+- [Installed and running Sofie Core](installing-sofie-server-core.md)
+- [Initial Sofie Core Setup](initial-sofie-core-setup.md)
+
+Blueprints are little plug-in programs that run inside _Sofie_. They are the logic that determines how _Sofie_ interacts with rundowns, hardware, and media.
+
+Blueprints are custom scripts that you create yourself \(or download an existing one\). There is a set of example Blueprints for the Spreadsheet Gateway available for use here: [https://github.com/SuperFlyTV/sofie-demo-blueprints](https://github.com/SuperFlyTV/sofie-demo-blueprints).
+
+To begin installing any Blueprint, navigate to the _Settings page_. Getting there is covered in the [Access Levels](../features/access-levels.md) page.
+
+![The Settings Page](/img/docs/getting-started/settings-page.jpg)
+
+To upload a new blueprint, click the _+_ icon next to the Blueprints menu option. Select the newly created Blueprint and upload the local blueprint JS file. You will get a confirmation if the installation was successful.
+
+There are 3 types of blueprints: System, Studio and Show Style:
+
+### System Blueprint
+
+_System Blueprints handle some basic functionality of how the Sofie system will operate._
+
+After you've uploaded your system-blueprint JS file, click _Assign_ on the blueprint page to assign it as the system blueprint.
+
+### Studio Blueprint
+
+_Studio Blueprints determine how Sofie will interact with the hardware in your studio._
+
+After you've uploaded your studio-blueprint JS file, navigate to a Studio in the settings and assign the new Blueprint to it \(under the label _Blueprint_ \).
+
+After having installed the Blueprint, the Studio's baseline will need to be reloaded. On the Studio page, click the button _Reload Baseline_. This will also be needed whenever you have changed any settings.
+
+### Show Style Blueprint
+
+_Show Style Blueprints determine how your show will look / feel._
+
+After you've uploaded your show-style-blueprint JS file, navigate to a Show Style in the settings and assign the new Blueprint to it \(under the label _Blueprint_ \).
+
+### Further Reading
+
+- [Blueprints Supporting the Spreadsheet Gateway](https://github.com/SuperFlyTV/sofie-demo-blueprints)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/README.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/README.md
new file mode 100644
index 0000000000..4d35fb277d
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/README.md
@@ -0,0 +1,35 @@
+# Additional Software & Hardware
+
+#### Prerequisites
+
+* [Installed and running Sofie Core](../installing-sofie-server-core.md)
+* [Installed Playout Gateway](../installing-a-gateway/playout-gateway.md)
+* [Installed and configured Studio Blueprints](../installing-blueprints.md#installing-a-studio-blueprint)
+
+The following pages are broken up by the equipment types that are supported by Sofie's Gateways.
+
+## Playout & Recording
+* [CasparCG Graphics and Video Server](casparcg-server-installation.md) - _Graphics / Playout / Recording_
+* [Blackmagic Design's HyperDeck](https://www.blackmagicdesign.com/products/hyperdeckstudio) - _Recording_
+* [Quantel](http://www.quantel.com) Solutions - _Playout_
+* [Vizrt](https://www.vizrt.com/) Graphics Solutions - _Graphics / Playout_
+
+## Vision Mixers
+* [Blackmagic's ATEM](https://www.blackmagicdesign.com/products/atem) hardware vision mixers
+* [vMix](https://www.vmix.com/) software vision mixer \(coming soon\)
+
+## Audio Mixers
+* [Sisyfos](https://github.com/olzzon/sisyfos-audio-controller) audio controller
+* [Lawo sound mixers](https://www.lawo.com/applications/broadcast-production/audio-consoles.html), _using the emberplus protocol_
+* Generic OSC \(open sound control\)
+
+## PTZ Cameras
+* [Panasonic PTZ](https://pro-av.panasonic.net/en/products/ptz_camera_systems.html) cameras
+
+## Lights
+* [Pharos](https://www.pharoscontrols.com/) light control
+
+## Other
+* Generic OSC \(open sound control\)
+* Generic HTTP requests \(to control http-REST interfaces\)
+* Generic TCP-socket
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/_category_.json b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/_category_.json
new file mode 100644
index 0000000000..d3e1e8979e
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Installing Connections and Additional Hardware",
+  "position": 6
+}
\ No newline at end of file
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/casparcg-server-installation.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/casparcg-server-installation.md
new file mode 100644
index 0000000000..7ee0d5496d
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/casparcg-server-installation.md
@@ -0,0 +1,233 @@
+---
+title: Installing CasparCG Server for Sofie
+description: Sofie specific fork of CasparCG Server 2.1
+---
+
+# Installing CasparCG Server for Sofie
+
+Although CasparCG Server is an open source program that is free to use for both personal and corporate applications, the hardware needed to create and execute high quality graphics is not. You can get a preview running without any additional hardware, but it is not recommended to use CasparCG Server for production in this manner. To begin, you will install the CasparCG Server on your machine, then add the additional configuration needed for your setup of choice.
+
+## Installing the CasparCG Server
+
+To begin, download the latest release of [CasparCG Server from GitHub](https://github.com/nrkno/sofie-casparcg-server/releases). There are multiple versions of CasparCG Server available to the public for download, but you specifically want the latest NRK version.
+
+Once downloaded, extract the files and navigate down the folders, _CasparCG Server_ then _Server_. This folder contains your CasparCG Server configuration file, `casparcg.config`, and your CasparCG Server executable, `casparcg.exe`.
+
+How you configure the CasparCG Server will depend on the number of DeckLink cards your machine contains. The first subsection for each CasparCG Server setup, labeled _Channels_, will contain the unique portion of the configuration. The following is the majority of the configuration file, which is consistent between setups.
+
+```markup
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+  <log-level>debug</log-level>
+  <force-deinterlace>false</force-deinterlace>
+  <paths>
+    <media-path>media/</media-path>
+    <log-path>log/</log-path>
+    <data-path>data/</data-path>
+    <template-path>template/</template-path>
+    <thumbnail-path>thumbnail/</thumbnail-path>
+    <font-path>font/</font-path>
+  </paths>
+  <lock-clear-phrase>secret</lock-clear-phrase>
+
+  <controllers>
+    <tcp>
+      <port>5250</port>
+      <protocol>AMCP</protocol>
+    </tcp>
+    <tcp>
+      <port>3250</port>
+      <protocol>LOG</protocol>
+    </tcp>
+  </controllers>
+</configuration>
+```
+
+One additional note: the Server does require the configuration file to be named `casparcg.config`.
+
+### Installing CasparCG Media Scanner
+
+You can use the CasparCG Media Scanner to locate and add all of your media to the _Sofie Core_. To install the Media Scanner, go to the [project's Release page](https://github.com/nrkno/sofie-media-scanner/releases) and download the `.zip` file under the latest release. Similar to the CasparCG Server, you want to use the NRK version.
+
+Once downloaded and extracted, move the `scanner.exe` file to the same folder as your `casparcg.exe` file.
+
+### Installing the CasparCG Launcher
+
+You can launch both of your CasparCG applications with the [CasparCG Launcher](https://github.com/nrkno/sofie-casparcg-launcher). Download the `.exe` file in the latest release and, once complete, move the file to the same folder as your `casparcg.exe` file.
+
+## Configuring Windows
+
+### Required Software
+
+Windows will require you to install [Microsoft's Visual C++ 2015 Redistributable](https://www.microsoft.com/en-us/download/details.aspx?id=52685) to run the CasparCG Server properly. Before downloading the redistributable, please ensure it is not already installed on your system. Open your programs list and, in the popup window, search for _C++_ in the search field. If _Visual C++ 2015_ appears, you do not need to install the redistributable.
+
+If you do need to install the redistributable, navigate to [Microsoft's website](https://www.microsoft.com/en-us/download/details.aspx?id=52685) and download it from there. Once downloaded, you can run the `.exe` file and follow the prompts.
+
+## Hardware Recommendations
+
+Although CasparCG Server can be run on some lower end hardware, it is only recommended to do so for non-production uses. Below is a table of the minimum and preferred specs depending on what type of system you are using.
+
+| System Type   | Min CPU  | Pref CPU                  | Min GPU  | Pref GPU     | Min Storage    | Pref Storage   |
+| :------------ | :------- | :------------------------ | :------- | :----------- | :------------- | :------------- |
+| Development   | i5 Gen 6 | i7 Gen 6                  | GTX 1050 | GTX 1060     | NVMe SSD 500gb | NVMe SSD 500gb |
+| Prod, 1 Card  | i7 Gen 6 | i7 Gen 7                  | GTX 1060 | GTX 1070     | NVMe SSD 500gb | NVMe SSD 500gb |
+| Prod, 2 Cards | i9 Gen 8 | i9 Gen 10 Extreme Edition | RTX 2070 | Quadro P4000 | Dual Drives    | Dual Drives    |
+
+For _dual drives_, it is recommended to use a smaller 250gb NVMe SSD for the operating system, and a faster 1tb NVMe SSD for the CasparCG Server and media. It is also recommended to buy a drive with about 40% storage overhead. This is for SSD performance reasons, and Sofie will warn you about this if your drive usage exceeds 60%.
+
+### DeckLink Cards
+
+There are a few SDI cards made by Blackmagic Design that are supported by CasparCG. The base model, with four bi-directional inputs and outputs, is the [Duo 2](https://www.blackmagicdesign.com/products/decklink/techspecs/W-DLK-31). If you need additional channels, use the [Quad 4](https://www.blackmagicdesign.com/products/decklink/techspecs/W-DLK-30), which supports eight bi-directional inputs and outputs. Be aware that the BNC connections are not the standard BNC type; B&H offers [Mini BNC to BNC connectors](https://www.bhphotovideo.com/c/product/1462647-REG/canare_cal33mb018_mini_rg59_12g_sdi_4k.html). Finally, for 4k support, use the [8K Pro](https://www.blackmagicdesign.com/products/decklink/techspecs/W-DLK-34), which has four bi-directional BNC connections and one reference connection.
+
+Here is the Blackmagic Design PDF for [installing your DeckLink card \(Desktop Video Device\)](https://documents.blackmagicdesign.com/UserManuals/DesktopVideoManual.pdf).
+
+Once the card is installed in your machine, you will need to download the driver from Blackmagic's website. Navigate to [this support page](https://www.blackmagicdesign.com/support/family/capture-and-playback) \(it will only display Desktop Video support\) and, in the _Latest Downloads_ column, download the most recent version of _Desktop Video_. Before installing, save your work, because Blackmagic's installers will force you to restart your machine.
+
+Once booted back up, you should be able to launch the Desktop Video application and see your DeckLink card.
+
+![Blackmagic Design's Desktop Video Application](/img/docs/installation/installing-connections-and-additional-hardware/desktop-video.png)
+
+Click the icon in the center of the screen to open the setup window. Each production situation will vary in frame rate and resolution, so go through the settings and set what you know. Most things are set to standards based on your region, so the default option will most likely be correct.
+
+![Desktop Video Settings](/img/docs/installation/installing-connections-and-additional-hardware/desktop-video-settings.png)
+
+If you chose a DeckLink Duo, then you will also need to set SDI connectors one and two to be your outputs.
+
+![DeckLink Duo SDI Output Settings](/img/docs/installation/installing-connections-and-additional-hardware/decklink_duo_card.png)
+
+## Hardware-specific Configurations
+
+### Preview Only \(Basic\)
+
+A preview-only version of CasparCG Server does not lack any of the features of a production version.
+It is called a _preview only_ version because the standard outputs on a computer, without a DeckLink card, do not meet the requirements of a high quality broadcast graphics machine. It is perfectly suitable for development, though.
+
+#### Required Hardware
+
+No additional hardware is required, just the computer you have been using to follow this guide.
+
+#### Configuration
+
+The default configuration will give you one preview window. No additional changes need to be made.
+
+### Single DeckLink Card \(Production Minimum\)
+
+#### Required Hardware
+
+To be production ready, you will need to output an SDI or HDMI signal from your production machine. CasparCG Server supports Blackmagic Design's DeckLink cards because they provide a key generator, which will aid in keeping the alpha and fill channels of your graphics in sync. Please review the [DeckLink Cards](casparcg-server-installation.md#decklink-cards) section of this page to choose which card will best fit your production needs.
+
+#### Configuration
+
+You will need to add an additional consumer to your `casparcg.config` file to output from your DeckLink card. After the screen consumer, add your new DeckLink consumer like so.
+
+```markup
+<channels>
+  <channel>
+    <video-mode>1080i5000</video-mode>
+    <channel-layout>stereo</channel-layout>
+    <consumers>
+      <screen>
+        <device>1</device>
+        <windowed>true</windowed>
+      </screen>
+      <decklink>
+        <device>1</device>
+        <key-device>1</key-device>
+        <embedded-audio>true</embedded-audio>
+        <channel-layout>stereo</channel-layout>
+        <latency>normal</latency>
+        <keyer>external_separate_device</keyer>
+        <key-only>false</key-only>
+        <buffer-depth>3</buffer-depth>
+      </decklink>
+    </consumers>
+  </channel>
+</channels>
+```
+
+You may no longer need the screen consumer. If so, you can remove it and all of its contents. This will dramatically improve overall performance.
+
+### Multiple DeckLink Cards \(Recommended Production Setup\)
+
+#### Required Hardware
+
+For a preferred production setup you want a minimum of two DeckLink Duo 2 cards. This is so you can use one card to preview your media, while your second card supports the program video and audio feeds. For CasparCG Server to recognize both cards, you need to add a second DeckLink consumer to the channel in the `casparcg.config` file.
+
+```markup
+<channels>
+  <channel>
+    <video-mode>1080i5000</video-mode>
+    <channel-layout>stereo</channel-layout>
+    <consumers>
+      <screen>
+        <device>1</device>
+        <windowed>true</windowed>
+      </screen>
+      <decklink>
+        <device>1</device>
+        <key-device>1</key-device>
+        <embedded-audio>true</embedded-audio>
+        <channel-layout>stereo</channel-layout>
+        <latency>normal</latency>
+        <keyer>external_separate_device</keyer>
+        <key-only>false</key-only>
+        <buffer-depth>3</buffer-depth>
+      </decklink>
+      <decklink>
+        <device>2</device>
+        <key-device>2</key-device>
+        <embedded-audio>true</embedded-audio>
+        <channel-layout>stereo</channel-layout>
+        <latency>normal</latency>
+        <keyer>external_separate_device</keyer>
+        <key-only>false</key-only>
+        <buffer-depth>3</buffer-depth>
+      </decklink>
+    </consumers>
+  </channel>
+</channels>
+```
+
+### Validating the Configuration File
+
+Once you have set up the configuration file, you can use an online validator to check and make sure it is set up correctly. Navigate to the [CasparCG Server Config Validator](https://casparcg.net/validator/) and paste in your entire configuration file. If there are any errors, they will be displayed at the bottom of the page.
+
+### Launching the Server
+
+Launching the Server is the same for each hardware setup. This means you can run `casparcg-launcher.exe` and the server and media scanner will start. There will be two additional warnings from Windows. The first is about the EXE file, and can be bypassed by selecting _Advanced_ and then _Run Anyway_. The second will be about CasparCG Server attempting to access your firewall. You will need to allow access.
+
+A window will open and display the status of the server and scanner. You can start, stop, and/or restart the server from here if needed. An additional window should have opened as well. This is the main output of your CasparCG Server and will contain nothing but a black background for now. If you have a DeckLink card installed, its output will also be black.
+
+## Connecting Sofie to the CasparCG Server
+
+Now that your CasparCG Server software is running, you can connect it to the _Sofie Core_. Navigate back to the _Settings page_ and in the menu, select the _Playout Gateway_.
+If the _Playout Gateway's_ status does not read _Good_, then please review the [Installing and Setting up the Playout Gateway](../installing-a-gateway/playout-gateway.md) section of this guide.
+
+Under the Sub Devices section, you can add a new device with the _+_ button. Then select the pencil \(edit\) icon on the new device to open the sub device's settings. Select the _Device Type_ option and choose _CasparCG_ from the drop down menu. Some additional fields will be added to the form.
+
+The _Host_ and _Launcher Host_ fields will be _localhost_. The _Port_ will be CasparCG's TCP port responsible for handling the AMCP commands. It defaults to 5250 in the `casparcg.config` file. The _Launcher Port_ will be the CasparCG Launcher's port for handling HTTP requests. It defaults to 8005 and can be changed in the _Launcher's settings page_. Once all four fields are filled out, you can click the check mark to save the device.
+
+In the _Attached Sub Devices_ section, you should now see the status of the CasparCG Server. You may need to restart the Playout Gateway if the status is _Bad_.
+
+## Further Reading
+
+- [CasparCG Server Releases](https://github.com/nrkno/sofie-casparcg-server/releases) on GitHub.
+- [Media Scanner Releases](https://github.com/nrkno/sofie-media-scanner/releases) on GitHub.
+- [CasparCG Launcher](https://github.com/nrkno/sofie-casparcg-launcher) on GitHub.
- [Microsoft Visual C++ 2015 Redistributable](https://www.microsoft.com/en-us/download/details.aspx?id=52685) on Microsoft's website.
+- [Blackmagic Design's DeckLink Cards](https://www.blackmagicdesign.com/products/decklink/models) on Blackmagic's website. Check the [DeckLink cards](casparcg-server-installation.md#decklink-cards) section for compatibility.
+- [Installing a DeckLink Card](https://documents.blackmagicdesign.com/UserManuals/DesktopVideoManual.pdf) as a PDF.
+- [Desktop Video Download Page](https://www.blackmagicdesign.com/support/family/capture-and-playback) on Blackmagic's website.
+- [CasparCG Configuration Validator](https://casparcg.net/validator/)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/ffmpeg-installation.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/ffmpeg-installation.md
new file mode 100644
index 0000000000..0843c76312
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/ffmpeg-installation.md
@@ -0,0 +1,35 @@
+# Adding FFmpeg and FFprobe to your PATH on Windows
+
+Some parts of Sofie (specifically the Package Manager) require that [`FFmpeg`](https://www.ffmpeg.org/) and [`FFprobe`](https://ffmpeg.org/ffprobe.html) be available in your `PATH` environment variable. This guide will go over how to download these executables and add them to your `PATH`.
+
+### Installation
+
+1. `FFmpeg` and `FFprobe` can be downloaded from the [FFmpeg Downloads page](https://ffmpeg.org/download.html) under the "Get packages & executable files" heading. At the time of writing, there are two sources of Windows builds: `gyan.dev` and `BtbN` -- either one will work.
+2. Once downloaded, extract the archive to some place permanent such as `C:\Program Files\FFmpeg`.
+   - You should end up with a `bin` folder inside of `C:\Program Files\FFmpeg`, and in that `bin` folder should be three executables: `ffmpeg.exe`, `ffprobe.exe`, and `ffplay.exe`.
+3. Open your Start Menu and type `path`. An option named "Edit the system environment variables" should come up. Click on that option to open the System Properties menu.
+
+   ![Start Menu screenshot](/img/docs/edit_system_environment_variables.jpg)
+
+4. In the System Properties menu, click the "Environment Variables..." button at the bottom of the "Advanced" tab.
+
+   ![System Properties screenshot](/img/docs/system_properties.png)
+
+5. If you installed `FFmpeg` and `FFprobe` to a system-wide location such as `C:\Program Files\FFmpeg`, select and edit the `Path` variable under the "System variables" heading. Else, if you installed them to some place specific to your user account, edit the `Path` variable under the "User variables for \<username\>" heading.
+
+   ![Environment Variables screenshot](/img/docs/environment_variables.png)
+
+6. In the window that pops up when you click "Edit...", click "New" and enter the path to the `bin` folder you extracted earlier. Then, click OK to add it.
+
+   ![Edit environment variable screenshot](/img/docs/edit_path_environment_variable.png)
+
+7. Click "OK" to close the Environment Variables window, and then click "OK" again to close the System Properties window.
+8. Verify that it worked by opening a Command Prompt and executing the following commands:
+
+   ```cmd
+   ffmpeg -version
+   ffprobe -version
+   ```
+
+   If you see version output from both of those commands, then you are all set! If not, double check the paths you entered and try restarting your computer.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/vision-mixers.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/vision-mixers.md
new file mode 100644
index 0000000000..1515b08840
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-connections-and-additional-hardware/vision-mixers.md
@@ -0,0 +1,14 @@
+# Configuring Vision Mixers
+
+## ATEM – Blackmagic Design
+
+The [Playout Gateway](../installing-a-gateway/playout-gateway.md) supports communicating with the entire lineup of Blackmagic Design's ATEM vision mixers.
+
+### Connecting Sofie
+
+Once your ATEM is properly configured on the network, you can add it as a device to the Sofie Core. To begin, navigate to the _Settings page_ and select the _Playout Gateway_ under _Devices_. Under the _Sub Devices_ section, you can add a new device with the _+_ button. Edit the new device with the pencil \(edit\) icon and add the host IP and port for your ATEM. Once complete, you should see your ATEM in the _Attached Sub Devices_ section with a _Good_ status indicator.
+
+### Additional Information
+
+Sofie does not support connecting to vision mixer hardware panels. All interaction with the vision mixer must be handled within a Rundown.
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-input-gateway.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-input-gateway.md
new file mode 100644
index 0000000000..5d809d74af
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-input-gateway.md
@@ -0,0 +1,34 @@
+# Input Gateway
+
+The Input Gateway handles control devices that are not capable of running a Web Browser.
+This allows Sofie to integrate directly with devices such as hardware panels, GPI inputs, MIDI devices and external systems that can send an HTTP request.
+
+To install it, begin by downloading the latest release of [Input Gateway from GitHub](https://github.com/nrkno/sofie-input-gateway/releases). You can now run the `input-gateway.exe` file inside the extracted folder. A warning window may pop up about the app being unrecognized. You can get around this by selecting _More Info_ and clicking _Run Anyway_.
+
+Much like [Package Manager](./installing-package-manager), the Sofie instance that Input Gateway needs to connect to is configured through command line arguments. A minimal configuration could look something like this \(the angle-bracket placeholders stand in for your own values\):
+
+```bash
+input-gateway.exe --host <core host> --port <core port> --https --id <device id> --token <device token>
+```
+
+If not connecting over HTTPS, remove the `--https` flag.
+
+Input Gateway can be launched from [CasparCG Launcher](./installing-connections-and-additional-hardware/casparcg-server-installation#installing-the-casparcg-launcher). This will make management and log collection easier on a production system.
+
+You can now open the _Sofie Core_, `http://localhost:3000`, and navigate to the _Settings page_. You will see your _Input Gateway_ under the _Devices_ section of the menu. In _Input Devices_ you can add devices that this instance of Input Gateway should handle. Some of the device integrations will allow you to customize the Feedback behavior. The *Device ID* property identifies a given Input Device in the Studio, so this property can be used for fail-over purposes.
+
+## Supported devices and protocols
+
+Currently, Input Gateway supports:
+
+* Stream Deck panels
+* Skaarhoj panels - _TCP Raw Panel_ mode
+* X-Keys panels
+* MIDI controllers
+* OSC
+* HTTP
+
+### Further Reading
+
+* [Input Gateway Releases on GitHub](https://github.com/nrkno/sofie-input-gateway/releases)
+* [Input Gateway GitHub Page for Developers](https://github.com/nrkno/sofie-input-gateway)
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-package-manager.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-package-manager.md
new file mode 100644
index 0000000000..bd6cbf3a15
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-package-manager.md
@@ -0,0 +1,93 @@
+---
+sidebar_position: 7
+---
+
+# Installing Package Manager
+
+### Prerequisites
+
+- [Installed and running Sofie Core](installing-sofie-server-core.md)
+- [Initial Sofie Core Setup](initial-sofie-core-setup.md)
+- [Installed and configured Demo Blueprints](https://github.com/SuperFlyTV/sofie-demo-blueprints)
+- [Installed, configured, and running CasparCG Server](installing-connections-and-additional-hardware/casparcg-server-installation.md)
+- [`FFmpeg` and `FFprobe` available in `PATH`](installing-connections-and-additional-hardware/ffmpeg-installation.md)
+
+Package Manager is used by Sofie to copy, analyze, and process media files. It is what powers Sofie's ability to copy media files to playout devices, to know when a media file is ready for playout, and to display details about media files in the rundown view, such as scene changes, black frames, freeze frames, and more.
+
+Although Package Manager can be used to copy any kind of file to/from a wide array of devices, we'll be focusing on a basic CasparCG Server setup for this guide.
+
+:::caution
+
+At this time, the Package Manager worker process is Windows-only. Therefore, these instructions as a whole will only work on Windows. The worker will not work on WSL2.
+
+:::
+
+## Installation For Development (Quick Start)
+
+Package Manager is a suite of standalone applications, separate from _Sofie Core_. This guide assumes that Package Manager will be running on the same computer as _CasparCG Server_ and _Sofie Core_, as that is the fastest way to set up a demo. To get all parts of _Package Manager_ up and running quickly, execute these commands:
+
+```bash
+git clone https://github.com/nrkno/sofie-package-manager.git
+cd sofie-package-manager
+yarn install
+yarn build
+yarn start:single-app -- -- --basePath "C:\your\path\to\casparcg-server\media-folder (i.e. sofie-demo-media)"
+```
+
+Note: if PowerShell throws an `Unknown argument: basePath` error, add one more pair of dashes (`--`) before the basePath argument:
+
+```bash
+yarn start:single-app -- -- -- --basePath "C:\your\path\to\casparcg-server\media-folder (i.e. sofie-demo-media)"
+```
+
+On first startup, Package Manager will exit with the following message:
+
+```
+Not setup yet, exiting process!
+To setup, go into Core and add this device to a Studio
+```
+
+This first run is necessary to get the Package Manager device registered with _Sofie Core_. We'll restart Package Manager later on in the [Configuration](#configuration) instructions.
+
+
+## Installation In Production
+
+We provide pre-built executables for Windows (x64) systems that can be used in production environments. These can be found on the [Releases](https://github.com/nrkno/sofie-package-manager/releases) GitHub repository page for Package Manager. For a minimal installation, you'll need the `package-manager-single-app.exe` and `worker.exe`. Put them in a folder of your choice. You can also place `ffmpeg.exe` and `ffprobe.exe` alongside them, if you don't want to make them available in `PATH`. The angle-bracket placeholders below stand in for your own values:
+
+```bash
+package-manager-single-app.exe --coreHost=<core host> --corePort=<core port> --deviceId=<device id> --deviceToken=<device token>
+```
+
+Package Manager can be launched from [CasparCG Launcher](./installing-connections-and-additional-hardware/casparcg-server-installation.md#installing-the-casparcg-launcher) alongside CasparCG. This will make management and log collection easier on a production Video Server.
+
+You can see a list of available options by running `package-manager-single-app.exe --help`.
+
+## Configuration
+
+1. Open the _Sofie Core_ Settings page ([http://localhost:3000/settings?admin=1](http://localhost:3000/settings?admin=1)), click on your Studio, and scroll down to the Attached Devices section.
+1. Click the plus button (`+`) and select Package Manager to add the Package Manager device to your Studio.
+1. On this same settings page, scroll down to the Package Manager section.
+1. Click the plus button under the Package Containers heading, then click the edit icon (pencil) to the right of the newly-created package container.
+1. Give this package container an ID of `casparcgContainer0` and a label of `CasparCG Package Container`.
+1. Click on the dropdown under "Playout devices which use this package container" and select `casparcg0`.
+   - If you don't have a `casparcg0` device, add it to the Playout Gateway under the Devices heading, then restart the Playout Gateway.
+1. Click the plus button under "Accessors", then click the edit icon to the right of the newly-created accessor.
+1. Give this accessor an ID of `casparcgHttpProxy0`, a Label of `CasparCG HTTP Proxy Accessor`, an Accessor Type of `HTTP_PROXY`, and a Base URL of `http://localhost:8080/package`. Then, ensure that both the "Allow Read access" and "Allow Write access" boxes are checked. Finally, click the done button (checkmark icon) in the bottom right.
Give this accessor an ID of `casparcgHttpProxy0`, a Label of `CasparCG HTTP Proxy Accessor`, an Accessor Type of `HTTP_PROXY`, and a Base URL of `http://localhost:8080/package`. Then, ensure that both the "Allow Read access" and "Allow Write access" boxes are checked. Finally, click the done button (checkmark icon) in the bottom right. +1. Scroll back up a bit to the "Studio Settings" subsection (still in the Package Manager section) and select "CasparCG Package Container" for both "Package Containers to use for previews" and "Package Containers to use for thumbnails". +1. Your settings should look like this once all the above steps have been completed: + ![Package Manager demo settings](/img/docs/Package_Manager_demo_settings.png) +1. If Package Manager `start:single-app` is running, restart it. If not, start it (see the above [Installation instructions](#installation-quick-start) for the relevant command line). + +## Usage + +In this basic configuration, Package Manager won't be copying any packages into your CasparCG Server media folder. Instead, it will simply check that the files in the rundown are present in your CasparCG Server media folder, and you'll have to manually place those files in the correct directory. However, thumbnail and preview generation will still function, as will status reporting. + +If you're using the demo rundown provided by the [Rundown Editor](rundown-editor.md), you should already see work statuses on the Package Status page ([Status > Packages](http://localhost:3000/status/expected-packages)). + +![Example Package Manager status display](/img/docs/Package_Manager_status_example.jpg) + +If all is good, head to the [Rundowns page](http://localhost:3000/rundowns) and open the demo rundown. + +### Further Reading + +- [Package Manager](https://github.com/nrkno/sofie-package-manager) on GitHub. diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-sofie-server-core.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-sofie-server-core.md new file mode 100644 index 0000000000..2a07e4c960 --- /dev/null +++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/installing-sofie-server-core.md @@ -0,0 +1,139 @@ +--- +sidebar_position: 2 +--- + +# Quick install + +## Installing for testing \(or production\) + +### **Prerequisites** + +**\(Linux\)** Install [Docker](https://docs.docker.com/install/linux/docker-ce/ubuntu/) and [docker-compose](https://www.digitalocean.com/community/tutorials/how-to-install-docker-compose-on-ubuntu-18-04). +**\(Windows\)** Install [Docker for Windows](https://hub.docker.com/editions/community/docker-ce-desktop-windows). + +### Installation + +This docker-compose file automates the basic setup of the [Sofie-Core application](../../for-developers/libraries.md#main-application), the backend database and different Gateway options. + +```yaml +# This is NOT recommended to be used for a production deployment. +# It aims to quickly get an evaluation version of Sofie running and serve as a basis for how to set up a production deployment. 
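+# NOTE: newer Docker Compose releases ignore the top-level `version` key below;
+# it is kept here for compatibility with older docker-compose binaries.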
+version: '3.3'
+services:
+  db:
+    hostname: mongo
+    image: mongo:4.2.18
+    restart: always
+    entrypoint: ['/usr/bin/mongod', '--replSet', 'rs0', '--bind_ip_all']
+    # the healthcheck avoids the need to initiate the replica set
+    healthcheck:
+      test: test $$(echo "rs.initiate().ok || rs.status().ok" | mongo --quiet) -eq 1
+      interval: 10s
+      start_period: 30s
+    ports:
+      - '27017:27017'
+    volumes:
+      - db-data:/data/db
+    networks:
+      - sofie
+
+  core:
+    hostname: core
+    image: sofietv/tv-automation-server-core:release37
+    restart: always
+    ports:
+      - '3000:3000' # Same port as meteor uses by default
+    environment:
+      PORT: '3000'
+      MONGO_URL: 'mongodb://db:27017/meteor'
+      MONGO_OPLOG_URL: 'mongodb://db:27017/local'
+      ROOT_URL: 'http://localhost:3000'
+      SOFIE_STORE_PATH: '/mnt/sofie-store'
+    networks:
+      - sofie
+    volumes:
+      - sofie-store:/mnt/sofie-store
+    depends_on:
+      - db
+
+  playout-gateway:
+    image: sofietv/tv-automation-playout-gateway:release37
+    restart: always
+    command: yarn start -host core -port 3000 -id playoutGateway0
+    networks:
+      - sofie
+      - lan_access
+    depends_on:
+      - core
+
+  # Choose one of the following images, depending on which type of ingest gateway is wanted.
+  # If using the Rundown Editor, then none of the below images are needed.
+  # The Rundown Editor can be found here: https://github.com/SuperFlyTV/sofie-automation-rundown-editor
+
+  # spreadsheet-gateway:
+  #   image: superflytv/sofie-spreadsheet-gateway:latest
+  #   restart: always
+  #   command: yarn start -host core -port 3000 -id spreadsheetGateway0
+  #   networks:
+  #     - sofie
+  #   depends_on:
+  #     - core
+
+  # mos-gateway:
+  #   image: sofietv/tv-automation-mos-gateway:release37
+  #   restart: always
+  #   ports:
+  #     - "10540:10540" # MOS Lower port
+  #     - "10541:10541" # MOS Upper port
+  #     # - "10542:10542" # MOS query port - not used
+  #   command: yarn start -host core -port 3000 -id mosGateway0
+  #   networks:
+  #     - sofie
+  #   depends_on:
+  #     - core
+
+  # inews-gateway:
+  #   image: tv2media/inews-ftp-gateway:1.37.0-in-testing.20
+  #   restart: always
+  #   command: yarn start -host core -port 3000 -id inewsGateway0
+  #   networks:
+  #     - sofie
+  #   depends_on:
+  #     - core
+
+networks:
+  sofie:
+  lan_access:
+    driver: bridge
+
+volumes:
+  db-data:
+  sofie-store:
+```
+
+Create a `Sofie` folder, copy the above content, and save it as `docker-compose.yaml` within the `Sofie` folder.
+
+Navigate to the _ingest-gateway_ section of `docker-compose.yaml` and select which type of _ingest-gateway_ you'd like installed by uncommenting it. Save your changes. If you are using the [Rundown Editor](rundown-editor.md), then no ingest gateways need to be uncommented.
+
+Then open a terminal, `cd your-sofie-folder` and `sudo docker-compose up` \(just `docker-compose up` on Windows\).
+
+Once the installation is done, Sofie should be running on [http://localhost:3000](http://localhost:3000).
+
+Next, you will need to install a Rundown Gateway. Visit [Rundowns & Newsroom Systems](installing-a-gateway/rundown-or-newsroom-system-connection/intro.md) to see which _Rundown Gateway_ is best suited for _your_ production environment.
+
+### Tips for running in production
+
+There are some things not covered in this guide needed to run _Sofie_ in a production environment:
+
+- Logging: Collect, store, and track error messages. [Kibana](https://www.elastic.co/kibana) and [logstash](https://www.elastic.co/logstash) are one way to do it.
+- NGINX: It is customary to put a load-balancer in front of _Sofie Core_; a minimal sketch follows this list.
+- Memory and CPU usage monitoring.
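+
+One way to approach the NGINX point above is to run the proxy as another service in the same compose file. The following is a minimal sketch, assuming a hand-written `nginx.conf` (hypothetical here) that proxies WebSocket upgrades through to `core:3000`, since the Sofie GUI relies on WebSockets:
+
+```yaml
+# Sketch only: an additional entry under the existing `services:` key.
+# The ./nginx.conf file is hypothetical and must proxy WebSockets through to core:3000.
+  nginx:
+    image: nginx:stable
+    restart: always
+    ports:
+      - '80:80'
+    volumes:
+      - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
+    networks:
+      - sofie
+    depends_on:
+      - core
+```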
+
+## Installing for Development
+
+Instructions for installing _Sofie Core_ and the various gateways are available in the README files of their respective GitHub repos.
+
+Common prerequisites are [Node.js](https://nodejs.org/) and [Yarn](https://yarnpkg.com/).
+Links to the repos are listed at [Applications & Libraries](../../for-developers/libraries.md).
+
+[_Sofie Core_ GitHub Page for Developers](https://github.com/nrkno/sofie-core)
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/intro.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/intro.md
new file mode 100644
index 0000000000..c3a14c218b
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/intro.md
@@ -0,0 +1,37 @@
+---
+sidebar_position: 1
+---
+# Getting Started
+
+_Sofie_ can be installed in many different ways, depending on which platforms, needs, and features you desire. The _Sofie_ system consists of several applications that work together to provide a complete broadcast automation system. The installation of each of these components is covered in this guide. Additional information about the products and services mentioned alongside the Sofie installation can be found on the [Further Reading](../further-reading.md) page.
+
+There are four components required, at minimum, to get a Sofie system up and running. First you need the [_Sofie Core_](installing-sofie-server-core.md), which is the brains of the operation. Then a set of [_Blueprints_](installing-blueprints.md) to handle and interpret incoming and outgoing data. Next, an [_Ingest Gateway_](installing-a-gateway/rundown-or-newsroom-system-connection/intro.md) to fetch the data for the Blueprints. Then finally, a [_Playout Gateway_](installing-a-gateway/playout-gateway.md) to send the data to your playout device of choice.
+
+## Sofie Core View
+
+The _Rundowns_ view will display all the active rundowns that the _Sofie Core_ has access to.
+
+![Rundown View](/img/docs/getting-started/rundowns-in-sofie.png)
+
+The _Status_ view displays the current status of the attached devices and gateways.
+
+![Status View – Describes the state of _Sofie Core_](/img/docs/getting-started/status-page.jpg)
+
+The _Settings_ view contains various settings for the studio, show styles, blueprints, etc. If the link to the settings view is not visible in your application, check your [Access Levels](../features/access-levels.md). More info on specific parts of the _Settings_ view can be found in their corresponding guide sections.
+
+![Settings View – Describes how the _Sofie Core_ is configured](/img/docs/getting-started/settings-page.jpg)
+
+## Sofie Core Overview
+
+The _Sofie Core_ is the primary application for managing the broadcast, but it doesn't play anything out on its own. You need to use Gateways to establish the connection from the _Sofie Core_ to other pieces of hardware or remote software.
+
+### Gateways
+
+Gateways are separate applications that bridge the gap between the _Sofie Core_ and other pieces of hardware or services. At minimum, you will need a _Playout Gateway_ so your timeline can interact with your playout system of choice. To install the _Playout Gateway_, visit the [Installing a Gateway](installing-a-gateway/intro.md) section of this guide, and for a more in-depth look, please see [Gateways](../concepts-and-architecture.md#gateways).
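+
+To give a concrete sense of what running a gateway looks like, here is a sketch of starting a Playout Gateway from a source checkout. The repository layout and scripts are assumptions based on the _Sofie Core_ monorepo; the `-host`/`-port`/`-id` flags mirror the docker-compose example in the Quick install page:
+
+```bash
+# Sketch, assuming the gateway lives at packages/playout-gateway in a sofie-core checkout.
+git clone https://github.com/nrkno/sofie-core.git
+cd sofie-core/packages
+yarn install && yarn build
+cd playout-gateway
+# Point the gateway at a locally running Sofie Core instance:
+yarn start -host localhost -port 3000 -id playoutGateway0
+```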
+
+### Blueprints
+
+Blueprints can be described as the logic that determines how a studio and show should interact with one another. They interpret the data coming in from the rundowns and transform it into a rich set of playable elements \(_Segments_, _Parts_, _AdLibs,_ etcetera\). The _Sofie Core_ has three main blueprint types: _System Blueprints_, _Studio Blueprints_, and _Showstyle Blueprints_. Installing _Sofie_ does not require you to understand what these blueprints do, just that they are required for the _Sofie Core_ to work. If you would like to gain a deeper understanding of how _Blueprints_ work, please visit the [Blueprints](../concepts-and-architecture.md#blueprints) section.
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/media-manager.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/media-manager.md
new file mode 100644
index 0000000000..c286df52c3
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/media-manager.md
@@ -0,0 +1,21 @@
+---
+sidebar_position: 100
+---
+
+# Media Manager
+
+:::caution
+
+Media Manager is deprecated and is not recommended for new deployments. There are known issues that won't be fixed, and the APIs it uses to interface with Sofie will be removed.
+
+:::
+
+The Media Manager handles the media, or files, that make up the rundown content. To install it, begin by downloading the latest release of [Media Manager from GitHub](https://github.com/nrkno/sofie-media-management/releases). You can now run the `media-manager.exe` file inside the extracted folder. A warning window may pop up about the app being unrecognized. You can get around this by selecting _More Info_ and clicking _Run Anyway_. A terminal window will open and begin running the application.
+
+You can now open the _Sofie Core_, `http://localhost:3000`, and navigate to the _Settings page_. You will see your _Media Manager_ under the _Devices_ section of the menu. The four main sections, general properties, attached storage, media flows, and monitors, as well as any attached sub-devices, all contribute to how the media is handled within the Sofie Core.
+
+### Further Reading
+
+* [Media Manager Releases on GitHub](https://github.com/nrkno/sofie-media-management/releases)
+* [Media Manager GitHub Page for Developers](https://github.com/nrkno/sofie-media-management)
+
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/rundown-editor.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/rundown-editor.md
new file mode 100644
index 0000000000..b46e173d08
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/installation/rundown-editor.md
@@ -0,0 +1,11 @@
+---
+sidebar_position: 8
+---
+
+# Sofie Rundown Editor
+
+Sofie Rundown Editor is a tool for creating and editing rundowns in a _demo_ environment of Sofie, without the use of an iNEWS, Spreadsheet, or MOS gateway.
+
+### Installing Sofie Rundown Editor
+
+Check the installation instructions on the [project repository](https://github.com/SuperFlyTV/sofie-automation-rundown-editor).
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/intro.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/intro.md
new file mode 100644
index 0000000000..4bf6b039a9
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/intro.md
@@ -0,0 +1,41 @@
+---
+sidebar_label: Introduction
+sidebar_position: 0
+---
+
+# Sofie User Guide
+
+## Key Features
+
+### Web-based GUI
+
+![Producer's / Director's View](/img/docs/Sofie_GUI_example.jpg)
+
+![Warnings and notifications are displayed to the user in the GUI](/img/docs/warnings-and-notifications.png)
+
+![The Host view, displaying time information and countdowns](/img/docs/host-view.png)
+
+![The prompter view](/img/docs/prompter-view.png)
+
+:::info
+Tip: The different web views \(such as the host view and the prompter\) can easily be transmitted over an SDI signal using the HTML producer in [CasparCG](installation/installing-connections-and-additional-hardware/casparcg-server-installation.md).
+:::
+
+### Modular Device Control
+
+Sofie controls playout devices \(such as vision and audio mixers, graphics and video playback\) via the Playout Gateway, using the [Timeline](concepts-and-architecture.md#timeline).
+The Playout Gateway controls the devices, keeps track of their state and statuses, and lets the user know via the GUI if something is wrong that could affect the show.
+
+### _State-based Playout_
+
+Sofie uses a state-based architecture to control playout. This means that each element in the show can be programmed independently - there's no need to take into account what has happened previously in the show; Sofie will make sure that the video is loaded and that the audio fader is tuned to the correct position, no matter what was played out previously.
+This allows the producer to skip ahead or move backwards in a show, without the fear of things going wrong on air.
+
+### Modular Data Ingest
+
+Sofie features a modular ingest data-flow, allowing rundowns to be based on multiple types of input data. Currently there is support for [MOS-based](http://mosprotocol.com) systems such as ENPS and iNEWS, as well as [Google Spreadsheets](installation/installing-a-gateway/rundown-or-newsroom-system-connection/installing-sofie-with-google-spreadsheet-support), and more is in development.
+
+### Blueprints
+
+The [Blueprints](concepts-and-architecture.md#blueprints) are plugins to _Sofie_ that allow for customization and tailor-made show designs.
+The blueprints are written differently depending on what the input data \(rundowns\) looks like, what the show design looks like, and which devices are to be controlled.
diff --git a/packages/documentation/versioned_docs/version-1.50.0/user-guide/supported-devices.md b/packages/documentation/versioned_docs/version-1.50.0/user-guide/supported-devices.md
new file mode 100644
index 0000000000..55f27cd5ab
--- /dev/null
+++ b/packages/documentation/versioned_docs/version-1.50.0/user-guide/supported-devices.md
@@ -0,0 +1,118 @@
+---
+sidebar_position: 1.5
+---
+# Supported Playout Devices
+
+All playout devices are essentially driven through the _timeline_, which passes through _Sofie Core_ into the Playout Gateway, where it is processed by the timeline-state-resolver. This page details which devices, and which parts of those devices, can be controlled through the timeline-state-resolver library.
+In general, a blueprint developer can use the [timeline-state-resolver-types package](https://www.npmjs.com/package/timeline-state-resolver-types) to see the interfaces for the timeline objects used to control the devices.
+
+## Blackmagic Design's ATEM Vision Mixers
+
+We support almost all features of these devices except Fairlight audio, camera controls, and streaming capabilities. A non-exhaustive list:
+
+* Control of camera inputs
+* Transitions
+* Full control of keyers
+* Full control of DVEs
+* Control of media pools
+* Control of auxiliaries
+
+## CasparCG Server
+
+Tested and developed against [a fork of version 2.1](https://github.com/nrkno/sofie-casparcg-server), with more support for version 2.3 being added in the future.
+
+* Video playback
+* Graphics playback
+* Recording / streaming
+* Mixer parameters
+* Transitions
+
+## HTTP Protocol
+
+* GET/POST/PUT/DELETE methods
+* Interval-based watcher for status monitoring
+
+## Blackmagic Design HyperDeck
+
+* Recording
+
+## Lawo Powercore & MC2 Series
+
+* Control over faders
+  * Using the ramp function on the Powercore
+* Control of parameters in the Ember tree
+
+## OSC Protocol
+
+* Sending of integers, floats, strings, blobs
+* Tweening \(transitioning between\) values
+
+Can be configured in TCP or UDP mode.
+
+## Panasonic PTZ Cameras
+
+* Recalling presets
+* Setting zoom, zoom speed, and recall speed
+
+## Pharos Lighting Control
+
+* Recalling scenes
+* Recalling timelines
+
+## Grass Valley SQ Media Servers
+
+* Control of playback
+* Looping
+* Cloning
+
+_Note: some features are controlled through the Package Manager_
+
+## Shotoku Camera Robotics
+
+* Cutting to shots
+* Fading to shots
+
+## Singular Live
+
+* Control nodes
+
+_Note: this is not currently used in production by anyone we know of_
+
+## Sisyfos
+
+* On-air controls
+* Fader levels
+* Labels
+* Hide / show channels
+
+## TCP Protocol
+
+* Sending messages
+
+## VizRT Viz MSE
+
+* Pilot elements
+* Continue commands
+* Loading all elements
+* Clearing all elements
+
+## vMix
+
+* Full M/E control
+* Audio control
+* Streaming / recording control
+* Fade to black
+* Overlays
+* Transforms
+* Transitions
+
+## OBS
+
+*Through the OBS WebSocket v4 RPC API*
+
+* Current / Preview Scene
+* Current Transition
+* Recording
+* Streaming
+* Scene Item visibility
+* Source Settings (FFmpeg source)
+* Source Mute
diff --git a/packages/documentation/versioned_sidebars/version-1.50.0-sidebars.json b/packages/documentation/versioned_sidebars/version-1.50.0-sidebars.json
new file mode 100644
index 0000000000..d7c19231b4
--- /dev/null
+++ b/packages/documentation/versioned_sidebars/version-1.50.0-sidebars.json
@@ -0,0 +1,14 @@
+{
+  "userGuide": [
+    {
+      "type": "autogenerated",
+      "dirName": "user-guide"
+    }
+  ],
+  "forDevelopers": [
+    {
+      "type": "autogenerated",
+      "dirName": "for-developers"
+    }
+  ]
+}
diff --git a/packages/documentation/versions.json b/packages/documentation/versions.json
index f9c6d229be..c75ea5eab1 100644
--- a/packages/documentation/versions.json
+++ b/packages/documentation/versions.json
@@ -1,8 +1,9 @@
 [
-  "1.49.0",
-  "1.47.0",
-  "1.46.0",
-  "1.41.0",
-  "1.38.0",
-  "1.37.0"
-]
+  "1.50.0",
+  "1.49.0",
+  "1.47.0",
+  "1.46.0",
+  "1.41.0",
+  "1.38.0",
+  "1.37.0"
+]
\ No newline at end of file
diff --git a/packages/job-worker/package.json b/packages/job-worker/package.json
index 1014ab295a..4f42b539d2 100644
--- a/packages/job-worker/package.json
+++ b/packages/job-worker/package.json
@@ -52,7 +52,7 @@
   "node-fetch": "^2.7.0",
   "p-lazy": "^3.1.0",
"p-timeout": "^4.1.0", - "superfly-timeline": "9.0.1", + "superfly-timeline": "9.0.2", "threadedclass": "^1.2.2", "tslib": "^2.6.2", "type-fest": "^3.13.1", diff --git a/packages/job-worker/src/__mocks__/collection.ts b/packages/job-worker/src/__mocks__/collection.ts index 932e06386e..4b2a71b25c 100644 --- a/packages/job-worker/src/__mocks__/collection.ts +++ b/packages/job-worker/src/__mocks__/collection.ts @@ -6,7 +6,8 @@ import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLi import { CollectionName } from '@sofie-automation/corelib/dist/dataModel/Collections' import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' @@ -283,7 +284,8 @@ export function getMockCollections(): { BucketAdLibPieces: new MockMongoCollection(CollectionName.BucketAdLibPieces), ExpectedMediaItems: new MockMongoCollection(CollectionName.ExpectedMediaItems), ExpectedPlayoutItems: new MockMongoCollection(CollectionName.ExpectedPlayoutItems), - IngestDataCache: new MockMongoCollection(CollectionName.IngestDataCache), + SofieIngestDataCache: new MockMongoCollection(CollectionName.SofieIngestDataCache), + NrcsIngestDataCache: new MockMongoCollection(CollectionName.NrcsIngestDataCache), Parts: new MockMongoCollection(CollectionName.Parts), PartInstances: new MockMongoCollection(CollectionName.PartInstances), PeripheralDevices: new MockMongoCollection(CollectionName.PeripheralDevices), @@ -339,7 +341,8 @@ export interface IMockCollections { BucketAdLibPieces: MockMongoCollection ExpectedMediaItems: MockMongoCollection ExpectedPlayoutItems: MockMongoCollection - IngestDataCache: MockMongoCollection + SofieIngestDataCache: MockMongoCollection + NrcsIngestDataCache: MockMongoCollection Parts: MockMongoCollection PartInstances: MockMongoCollection PeripheralDevices: MockMongoCollection diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 9e2751de4e..57a861bb9f 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -226,6 +226,18 @@ export class MockJobContext implements JobContext { // throw new Error('Method not implemented.') } + setRouteSetActive(_routeSetId: string, _isActive: boolean | 'toggle'): boolean { + throw new Error('Method not implemented.') + } + + async saveRouteSetChanges(): Promise { + // throw new Error('Method not implemented.') + } + + discardRouteSetChanges(): void { + // throw new Error('Method not implemented.') + } + /** * Mock methods */ @@ -312,7 +324,10 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ getShowStyleVariantId: (_context, variants): string | null => { return variants[0]._id }, - getRundown: (_context: IShowStyleContext, ingestRundown: ExtendedIngestRundown): BlueprintResultRundown => { + getRundown: ( + _context: IShowStyleContext, + 
ingestRundown: ExtendedIngestRundown + ): BlueprintResultRundown => { const rundown: IBlueprintRundown = { externalId: ingestRundown.externalId, name: ingestRundown.name, @@ -338,7 +353,7 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ baseline: { timelineObjects: [] }, } }, - getSegment: (_context: ISegmentUserContext, ingestSegment: IngestSegment): BlueprintResultSegment => { + getSegment: (_context: ISegmentUserContext, ingestSegment: IngestSegment): BlueprintResultSegment => { const segment: IBlueprintSegment = { name: ingestSegment.name ? ingestSegment.name : ingestSegment.externalId, privateData: ingestSegment.payload, diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 819ac3aff1..adef4895e2 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -107,10 +107,13 @@ export function defaultStudio(_id: StudioId): DBStudio { mediaPreviewsUrl: '', minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, allowAdlibTestingSegment: true, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, - routeSets: {}, - routeSetExclusivityGroups: {}, - packageContainers: {}, + routeSetsWithOverrides: wrapDefaultObject({}), + routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}), + packageContainersWithOverrides: wrapDefaultObject({}), previewContainerIds: [], thumbnailContainerIds: [], peripheralDeviceSettings: { @@ -131,7 +134,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment externalId: unprotectString(_id), rundownId: rundownId, name: 'Default Segment', - externalModified: 1, } } diff --git a/packages/job-worker/src/__mocks__/helpers/snapshot.ts b/packages/job-worker/src/__mocks__/helpers/snapshot.ts index 09b0f0e27d..4b1400f506 100644 --- a/packages/job-worker/src/__mocks__/helpers/snapshot.ts +++ b/packages/job-worker/src/__mocks__/helpers/snapshot.ts @@ -76,7 +76,6 @@ export function fixSnapshot(data: Data | Array, sortData?: boolean): Data // } else if (isPiece(o)) { // } else if (isPart(o)) { } else if (isSegment(o)) { - if (o.externalModified) o.externalModified = 0 // } else if (isPieceInstance(o)) { } return o diff --git a/packages/job-worker/src/__mocks__/presetCollections.ts b/packages/job-worker/src/__mocks__/presetCollections.ts index 68feee6b7a..7c1cf9e991 100644 --- a/packages/job-worker/src/__mocks__/presetCollections.ts +++ b/packages/job-worker/src/__mocks__/presetCollections.ts @@ -224,7 +224,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_0', rundownId: rundownId, name: 'Segment 0', - externalModified: 1, }) const part00: DBPart = { @@ -332,7 +331,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundownId, name: 'Segment 1', - externalModified: 1, }) const part10: DBPart = { @@ -374,7 +372,6 @@ export async function setupDefaultRundown( externalId: 'MOCK_SEGMENT_2', rundownId: rundownId, name: 'Segment 2', - externalModified: 1, }) const globalAdLib0: RundownBaselineAdLibItem = { diff --git a/packages/job-worker/src/blueprints/__tests__/config.test.ts b/packages/job-worker/src/blueprints/__tests__/config.test.ts index 2e77bd5dd8..c46b7a2fba 100644 --- a/packages/job-worker/src/blueprints/__tests__/config.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/config.test.ts @@ -15,6 +15,9 @@ describe('Test blueprint config', () => { mediaPreviewsUrl: '', 
frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) @@ -38,6 +41,9 @@ describe('Test blueprint config', () => { mediaPreviewsUrl: '', frameRate: 25, minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN, + allowHold: true, + allowPieceDirectPlay: true, + enableBuckets: true, }, blueprintConfigWithOverrides: wrapDefaultObject({ sdfsdf: 'one', another: 5 }), }) diff --git a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts index 624e0a68d4..403fc2fbf3 100644 --- a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts +++ b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts @@ -22,11 +22,16 @@ import { PlayoutModel } from '../../playout/model/PlayoutModel' import { ReadonlyDeep } from 'type-fest' import { getCurrentTime } from '../../lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { selectNewPartWithOffsets } from '../../playout/moveNextPart' export class OnSetAsNextContext extends ShowStyleUserContext implements IOnSetAsNextContext, IEventContext, IPartAndPieceInstanceActionContext { + public pendingMoveNextPart: { selectedPart: ReadonlyDeep | null } | undefined = undefined + constructor( contextInfo: UserContextInfo, context: JobContext, @@ -38,6 +43,10 @@ export class OnSetAsNextContext super(contextInfo, context, showStyle, watchedPackages) } + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } + public get nextPartState(): ActionPartChange { return this.partAndPieceInstanceService.nextPartState } @@ -112,6 +121,23 @@ export class OnSetAsNextContext return this.partAndPieceInstanceService.removePieceInstances('next', pieceInstanceIds) } + async moveNextPart(partDelta: number, segmentDelta: number): Promise { + if (typeof partDelta !== 'number') throw new Error('partDelta must be a number') + if (typeof segmentDelta !== 'number') throw new Error('segmentDelta must be a number') + + // Values of 0 mean discard the pending change + if (partDelta === 0 && segmentDelta === 0) { + this.pendingMoveNextPart = undefined + return true + } + + this.pendingMoveNextPart = { + selectedPart: selectNewPartWithOffsets(this.jobContext, this.playoutModel, partDelta, segmentDelta), + } + + return !!this.pendingMoveNextPart.selectedPart + } + getCurrentTime(): number { return getCurrentTime() } diff --git a/packages/job-worker/src/blueprints/context/OnTakeContext.ts b/packages/job-worker/src/blueprints/context/OnTakeContext.ts index ce0e31979d..8fef14c753 100644 --- a/packages/job-worker/src/blueprints/context/OnTakeContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTakeContext.ts @@ -23,10 +23,15 @@ import { getCurrentTime } from '../../lib' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' import { executePeripheralDeviceAction, listPlayoutDevices } from '../../peripheralDevice' import { ActionPartChange, PartAndPieceInstanceActionService } from './services/PartAndPieceInstanceActionService' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export class OnTakeContext extends 
ShowStyleUserContext implements IOnTakeContext, IEventContext { public isTakeAborted: boolean + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } + public get currentPartState(): ActionPartChange { return this.partAndPieceInstanceService.currentPartState } diff --git a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts index 9b7af560d3..6c3f8cd30d 100644 --- a/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTimelineGenerateContext.ts @@ -15,17 +15,20 @@ import { PieceInstance, ResolvedPieceInstance } from '@sofie-automation/corelib/ import { ProcessedStudioConfig, ProcessedShowStyleConfig } from '../config' import _ = require('underscore') import { ProcessedShowStyleCompound } from '../../jobs' -import { convertPartInstanceToBlueprints } from './lib' +import { convertPartInstanceToBlueprints, createBlueprintQuickLoopInfo } from './lib' import { RundownContext } from './RundownContext' import { AbSessionHelper } from '../../playout/abPlayback/abSessionHelper' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export class OnTimelineGenerateContext extends RundownContext implements ITimelineEventContext { readonly currentPartInstance: Readonly | undefined readonly nextPartInstance: Readonly | undefined readonly previousPartInstance: Readonly | undefined + readonly quickLoopInfo: BlueprintQuickLookInfo | null + readonly abSessionsHelper: AbSessionHelper readonly #pieceInstanceCache = new Map>() @@ -57,6 +60,8 @@ export class OnTimelineGenerateContext extends RundownContext implements ITimeli this.nextPartInstance = nextPartInstance && convertPartInstanceToBlueprints(nextPartInstance) this.previousPartInstance = previousPartInstance && convertPartInstanceToBlueprints(previousPartInstance) + this.quickLoopInfo = createBlueprintQuickLoopInfo(playlist) + const partInstances = _.compact([previousPartInstance, currentPartInstance, nextPartInstance]) for (const pieceInstance of pieceInstances) { diff --git a/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts b/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts new file mode 100644 index 0000000000..d42e18738e --- /dev/null +++ b/packages/job-worker/src/blueprints/context/ProcessIngestDataContext.ts @@ -0,0 +1,60 @@ +import type { + GroupPartsInMosRundownAndChangesResult, + IProcessIngestDataContext, + IngestDefaultChangesOptions, + IngestRundown, + IngestSegment, + MutableIngestRundown, + NrcsIngestChangeDetails, +} from '@sofie-automation/blueprints-integration' +import { StudioContext } from './StudioContext' +import { defaultApplyIngestChanges } from '../ingest/defaultApplyIngestChanges' +import { groupMosPartsIntoIngestSegments, groupPartsInRundownAndChanges } from '../ingest/groupPartsInRundownAndChanges' + +/** + * Provides a context for blueprints while running the blueprints.processIngestData() method. + * Note: This provides some common helpers for doing mutations of the IngestRundown. + * Custom updates of the IngestRundown are done by calling methods on the mutableIngestRundown itself. 
+ */ +export class ProcessIngestDataContext extends StudioContext implements IProcessIngestDataContext { + defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + nrcsIngestRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options?: IngestDefaultChangesOptions + ): void { + defaultApplyIngestChanges(mutableIngestRundown, nrcsIngestRundown, ingestChanges, { + transformRundownPayload: (payload) => payload as TRundownPayload, + transformSegmentPayload: (payload) => payload as TSegmentPayload, + transformPartPayload: (payload) => payload as TPartPayload, + ...options, + }) + } + + groupMosPartsInRundownAndChangesWithSeparator( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + partNameSeparator: string + ): GroupPartsInMosRundownAndChangesResult { + if (ingestRundown.type !== 'mos') throw new Error('Only supported for mos rundowns') + + return groupPartsInRundownAndChanges(ingestRundown, previousIngestRundown, ingestChanges, (segments) => + groupMosPartsIntoIngestSegments(ingestRundown.externalId, segments, partNameSeparator) + ) + } + + groupPartsInRundownAndChanges( + ingestRundown: IngestRundown, + previousIngestRundown: IngestRundown | undefined, + ingestChanges: NrcsIngestChangeDetails, + groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[] + ): GroupPartsInMosRundownAndChangesResult { + return groupPartsInRundownAndChanges( + ingestRundown, + previousIngestRundown, + ingestChanges, + groupPartsIntoSegments + ) + } +} diff --git a/packages/job-worker/src/blueprints/context/StudioUserContext.ts b/packages/job-worker/src/blueprints/context/StudioUserContext.ts index 1ed5a483cb..be2c471dc4 100644 --- a/packages/job-worker/src/blueprints/context/StudioUserContext.ts +++ b/packages/job-worker/src/blueprints/context/StudioUserContext.ts @@ -3,20 +3,18 @@ import { ReadonlyDeep } from 'type-fest' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { ProcessedStudioConfig } from '../config' import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' -import { UserContextInfo } from './CommonContext' +import { ContextInfo } from './CommonContext' import { StudioContext } from './StudioContext' export class StudioUserContext extends StudioContext implements IStudioUserContext { public readonly notes: INoteBase[] = [] - private readonly tempSendNotesIntoBlackHole: boolean constructor( - contextInfo: UserContextInfo, + contextInfo: ContextInfo, studio: ReadonlyDeep, studioBlueprintConfig: ProcessedStudioConfig ) { super(contextInfo, studio, studioBlueprintConfig) - this.tempSendNotesIntoBlackHole = contextInfo.tempSendUserNotesIntoBlackHole ?? 
false } notifyUserError(message: string, params?: { [key: string]: any }): void { @@ -30,16 +28,12 @@ export class StudioUserContext extends StudioContext implements IStudioUserConte this.addNote(NoteSeverity.INFO, message, params) } private addNote(type: NoteSeverity, message: string, params?: { [key: string]: any }) { - if (this.tempSendNotesIntoBlackHole) { - this.logNote(`UserNotes: "${message}", ${JSON.stringify(params)}`, type) - } else { - this.notes.push({ - type: type, - message: { - key: message, - args: params, - }, - }) - } + this.notes.push({ + type: type, + message: { + key: message, + args: params, + }, + }) } } diff --git a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts index 6b4d4a137a..1515ae71ec 100644 --- a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts +++ b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts @@ -22,6 +22,7 @@ import { IBlueprintPieceObjectsSampleKeys, convertPieceInstanceToBlueprints, convertPartInstanceToBlueprints, + convertPartialBlueprintMutablePartToCore, } from './lib' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' @@ -175,7 +176,12 @@ export class SyncIngestUpdateToPartInstanceContext } } - if (!this.partInstance.updatePartProps(updatePart)) { + const playoutUpdatePart = convertPartialBlueprintMutablePartToCore( + updatePart, + this.showStyleCompound.blueprintId + ) + + if (!this.partInstance.updatePartProps(playoutUpdatePart)) { throw new Error(`Cannot update PartInstance. Some valid properties must be defined`) } diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index bd2f66db7e..444c8edc37 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -13,6 +13,7 @@ import { Time, TSR, IBlueprintPlayoutDevice, + StudioRouteSet, } from '@sofie-automation/blueprints-integration' import { PartInstanceId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' @@ -22,12 +23,15 @@ import { ShowStyleUserContext } from './ShowStyleUserContext' import { WatchedPackagesHelper } from './watchedPackages' import { getCurrentTime } from '../../lib' import { JobContext, ProcessedShowStyleCompound } from '../../jobs' -import { moveNextPart } from '../../playout/moveNextPart' +import { selectNewPartWithOffsets } from '../../playout/moveNextPart' import { ProcessedShowStyleConfig } from '../config' import { DatastorePersistenceMode } from '@sofie-automation/shared-lib/dist/core/model/TimelineDatastore' import { removeTimelineDatastoreValue, setTimelineDatastoreValue } from '../../playout/datastore' import { executePeripheralDeviceAction, listPlayoutDevices } from '../../peripheralDevice' import { ActionPartChange, PartAndPieceInstanceActionService } from './services/PartAndPieceInstanceActionService' +import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { setNextPartFromPart } from '../../playout/setNext' export class DatastoreActionExecutionContext extends ShowStyleUserContext @@ -60,7 
+64,19 @@ export class DatastoreActionExecutionContext /** Actions */ export class ActionExecutionContext extends ShowStyleUserContext implements IActionExecutionContext, IEventContext { - public takeAfterExecute: boolean + /** + * Whether the blueprints requested a take to be performed at the end of this action + * */ + public takeAfterExecute = false + /** + * Whether the blueprints performed an action that explicitly requires the timeline to be regenerated + * This isn't the only indicator that it should be regenerated + */ + public forceRegenerateTimeline = false + + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return this.partAndPieceInstanceService.quickLoopInfo + } public get currentPartState(): ActionPartChange { return this.partAndPieceInstanceService.currentPartState @@ -84,7 +100,6 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct private readonly partAndPieceInstanceService: PartAndPieceInstanceActionService ) { super(contextInfo, _context, showStyle, watchedPackages) - this.takeAfterExecute = false } async getPartInstance(part: 'current' | 'next'): Promise { @@ -144,7 +159,8 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct } async moveNextPart(partDelta: number, segmentDelta: number): Promise { - await moveNextPart(this._context, this._playoutModel, partDelta, segmentDelta) + const selectedPart = selectNewPartWithOffsets(this._context, this._playoutModel, partDelta, segmentDelta) + if (selectedPart) await setNextPartFromPart(this._context, this._playoutModel, selectedPart, true) } async updatePartInstance( @@ -184,6 +200,15 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct partInstance.blockTakeUntil(time) } + async listRouteSets(): Promise> { + return applyAndValidateOverrides(this._context.studio.routeSetsWithOverrides).obj + } + + async switchRouteSet(routeSetId: string, state: boolean | 'toggle'): Promise { + const affectsTimeline = this._playoutModel.switchRouteSet(routeSetId, state) + this.forceRegenerateTimeline = this.forceRegenerateTimeline || affectsTimeline + } + async hackGetMediaObjectDuration(mediaId: string): Promise { return this.partAndPieceInstanceService.hackGetMediaObjectDuration(mediaId) } diff --git a/packages/job-worker/src/blueprints/context/index.ts b/packages/job-worker/src/blueprints/context/index.ts index 647dc57b5c..c13d880bc6 100644 --- a/packages/job-worker/src/blueprints/context/index.ts +++ b/packages/job-worker/src/blueprints/context/index.ts @@ -5,6 +5,7 @@ export * from './OnSetAsNextContext' export * from './OnTakeContext' export * from './OnTimelineGenerateContext' export * from './PartEventContext' +export * from './ProcessIngestDataContext' export * from './RundownContext' export * from './RundownDataChangedEventContext' export * from './RundownEventContext' diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index a3463616d5..4ef12532eb 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -9,9 +9,14 @@ import { ResolvedPieceInstance, } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundown, Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { + CoreUserEditingDefinition, + CoreUserEditingDefinitionAction, + CoreUserEditingDefinitionForm, +} from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { DBSegment } from 
'@sofie-automation/corelib/dist/dataModel/Segment' -import { clone, Complete, literal } from '@sofie-automation/corelib/dist/lib' -import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { assertNever, clone, Complete, literal, omit } from '@sofie-automation/corelib/dist/lib' +import { unprotectString, unprotectStringArray } from '@sofie-automation/corelib/dist/protectedString' import { ReadonlyDeep } from 'type-fest' import { ExpectedPackage, @@ -44,6 +49,17 @@ import { } from '@sofie-automation/blueprints-integration' import { JobContext, ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs' import { DBRundownPlaylist, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import _ = require('underscore') +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' +import { + UserEditingDefinition, + UserEditingDefinitionAction, + UserEditingDefinitionForm, + UserEditingType, +} from '@sofie-automation/blueprints-integration/dist/userEditing' +import type { PlayoutMutatablePart } from '../../playout/model/PlayoutPartInstanceModel' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' /** * Convert an object to have all the values of all keys (including optionals) be 'true' @@ -80,10 +96,11 @@ export const IBlueprintPieceObjectsSampleKeys = allKeysOfObject allowDirectPlay: true, notInVision: true, abSessions: true, + userEditOperations: true, }) // Compile a list of the keys which are allowed to be set -export const IBlueprintMutatablePartSampleKeys = allKeysOfObject({ +export const PlayoutMutatablePartSampleKeys = allKeysOfObject({ title: true, prompterTitle: true, privateData: true, @@ -94,7 +111,6 @@ export const IBlueprintMutatablePartSampleKeys = allKeysOfObject pieceType: piece.pieceType, extendOnHold: piece.extendOnHold, notInVision: piece.notInVision, + userEditOperations: translateUserEditsToBlueprint(piece.userEditOperations), } return obj @@ -251,7 +269,6 @@ export function convertPartToBlueprints(part: ReadonlyDeep): IBlueprintP disableNextInTransition: part.disableNextInTransition, outTransition: clone(part.outTransition), expectedDuration: part.expectedDuration, - budgetDuration: part.budgetDuration, holdMode: part.holdMode, shouldNotifyCurrentPlayingPart: part.shouldNotifyCurrentPlayingPart, classes: clone(part.classes), @@ -262,6 +279,7 @@ export function convertPartToBlueprints(part: ReadonlyDeep): IBlueprintP hackListenToMediaObjectUpdates: clone( part.hackListenToMediaObjectUpdates ), + userEditOperations: translateUserEditsToBlueprint(part.userEditOperations), } return obj @@ -284,6 +302,7 @@ export function convertAdLibPieceToBlueprints(adLib: ReadonlyDeep): nextPieceTags: clone(adLib.nextPieceTags), uniquenessId: adLib.uniquenessId, invertOnAirState: adLib.invertOnAirState, + hidden: adLib.hidden, } return obj @@ -329,6 +348,7 @@ export function convertSegmentToBlueprints(segment: ReadonlyDeep): IB displayAs: segment.displayAs, showShelf: segment.showShelf, segmentTiming: segment.segmentTiming, + userEditOperations: translateUserEditsToBlueprint(segment.userEditOperations), } return obj @@ -353,6 +373,7 @@ export function convertRundownToBlueprints(rundown: ReadonlyDeep): IB showStyleVariantId: unprotectString(rundown.showStyleVariantId), playlistId: unprotectString(rundown.playlistId), airStatus: 
rundown.airStatus, + userEditOperations: translateUserEditsToBlueprint(rundown.userEditOperations), } return obj @@ -476,3 +497,101 @@ export async function getMediaObjectDuration(context: JobContext, mediaId: strin return durations.length > 0 ? durations[0] : undefined } + +function translateUserEditsToBlueprint( + userEdits: ReadonlyDeep | undefined +): UserEditingDefinition[] | undefined { + if (!userEdits) return undefined + + return _.compact( + userEdits.map((userEdit) => { + switch (userEdit.type) { + case UserEditingType.ACTION: + return { + type: UserEditingType.ACTION, + id: userEdit.id, + label: omit(userEdit.label, 'namespaces'), + svgIcon: userEdit.svgIcon, + isActive: userEdit.isActive, + } satisfies Complete + case UserEditingType.FORM: + return { + type: UserEditingType.FORM, + id: userEdit.id, + label: omit(userEdit.label, 'namespaces'), + schema: clone(userEdit.schema), + currentValues: clone(userEdit.currentValues), + } satisfies Complete + default: + assertNever(userEdit) + return undefined + } + }) + ) +} + +export function translateUserEditsFromBlueprint( + userEdits: UserEditingDefinition[] | undefined, + blueprintIds: BlueprintId[] +): CoreUserEditingDefinition[] | undefined { + if (!userEdits) return undefined + + return _.compact( + userEdits.map((userEdit) => { + switch (userEdit.type) { + case UserEditingType.ACTION: + return { + type: UserEditingType.ACTION, + id: userEdit.id, + label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), + svgIcon: userEdit.svgIcon, + isActive: userEdit.isActive, + } satisfies Complete + case UserEditingType.FORM: + return { + type: UserEditingType.FORM, + id: userEdit.id, + label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), + schema: clone(userEdit.schema), + currentValues: clone(userEdit.currentValues), + translationNamespaces: unprotectStringArray(blueprintIds), + } satisfies Complete + default: + assertNever(userEdit) + return undefined + } + }) + ) +} + +/** + * Converts a BlueprintMutatablePart into a PlayoutMutatablePart + */ +export function convertPartialBlueprintMutablePartToCore( + updatePart: Partial, + blueprintId: BlueprintId +): Partial { + const playoutUpdatePart: Partial = { + ...updatePart, + userEditOperations: undefined, + } + + if ('userEditOperations' in updatePart) { + playoutUpdatePart.userEditOperations = translateUserEditsFromBlueprint(updatePart.userEditOperations, [ + blueprintId, + ]) + } else { + delete playoutUpdatePart.userEditOperations + } + + return playoutUpdatePart +} +export function createBlueprintQuickLoopInfo(playlist: ReadonlyDeep): BlueprintQuickLookInfo | null { + const playlistLoopProps = playlist.quickLoop + if (!playlistLoopProps) return null + + return { + running: playlistLoopProps.running, + locked: playlistLoopProps.locked, + } +} diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index 095fdaa114..849b95c2b0 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -18,9 +18,11 @@ import { IBlueprintPieceObjectsSampleKeys, convertPartInstanceToBlueprints, convertPartToBlueprints, + convertPartialBlueprintMutablePartToCore, convertPieceInstanceToBlueprints, convertPieceToBlueprints, convertResolvedPieceInstanceToBlueprints, + createBlueprintQuickLoopInfo, 
getMediaObjectDuration, } from '../lib' import { getResolvedPiecesForCurrentPartInstance } from '../../../playout/resolvedPieces' @@ -54,6 +56,7 @@ import { syncPlayheadInfinitesForNextPartInstance } from '../../../playout/infin import { validateAdlibTestingPartInstanceProperties } from '../../../playout/adlibTesting' import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { PlayoutRundownModel } from '../../../playout/model/PlayoutRundownModel' +import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' export enum ActionPartChange { NONE = 0, @@ -70,6 +73,10 @@ export class PartAndPieceInstanceActionService { private readonly _playoutModel: PlayoutModel readonly showStyleCompound: ReadonlyDeep + public get quickLoopInfo(): BlueprintQuickLookInfo | null { + return createBlueprintQuickLoopInfo(this._playoutModel.playlist) + } + /** To be set by any mutation methods on this context. Indicates to core how extensive the changes are to the current partInstance */ public currentPartState: ActionPartChange = ActionPartChange.NONE /** To be set by any mutation methods on this context. Indicates to core how extensive the changes are to the next partInstance */ @@ -338,7 +345,9 @@ export class PartAndPieceInstanceActionService { throw new Error('PartInstance could not be found') } - if (!partInstance.updatePartProps(props)) { + const playoutUpdatePart = convertPartialBlueprintMutablePartToCore(props, this.showStyleCompound.blueprintId) + + if (!partInstance.updatePartProps(playoutUpdatePart)) { throw new Error('Some valid properties must be defined') } @@ -384,6 +393,7 @@ export class PartAndPieceInstanceActionService { invalidReason: undefined, floated: false, expectedDurationWithTransition: undefined, // Filled in later + userEditOperations: [], // Adlibbed parts can't be edited by ingest } const pieces = postProcessPieces( diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts new file mode 100644 index 0000000000..e35c397f94 --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/MutableIngestPartImpl.ts @@ -0,0 +1,79 @@ +import type { SofieIngestPart, MutableIngestPart } from '@sofie-automation/blueprints-integration' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' +import _ = require('underscore') + +export class MutableIngestPartImpl implements MutableIngestPart { + readonly #ingestPart: Omit, 'rank'> + #hasChanges = false + + constructor(ingestPart: Omit, 'rank'>, hasChanges = false) { + this.#ingestPart = ingestPart + this.#hasChanges = hasChanges + } + + get externalId(): string { + return this.#ingestPart.externalId + } + + get name(): string { + return this.#ingestPart.name + } + + get payload(): ReadonlyDeep | undefined { + return this.#ingestPart.payload as ReadonlyDeep + } + + get userEditStates(): Record { + return this.#ingestPart.userEditStates ?? 
{} + } + + setName(name: string): void { + if (this.#ingestPart.name !== name) { + this.#ingestPart.name = name + this.#hasChanges = true + } + } + + replacePayload(payload: ReadonlyDeep | TPartPayload): void { + if (this.#hasChanges || !_.isEqual(this.#ingestPart.payload, payload)) { + this.#ingestPart.payload = clone(payload) + this.#hasChanges = true + } + } + + setPayloadProperty( + key: TKey, + value: ReadonlyDeep | TPartPayload[TKey] + ): void { + if (!this.#ingestPart.payload) { + throw new Error('Part payload is not set') + } + + if (this.#hasChanges || !_.isEqual(this.#ingestPart.payload[key], value)) { + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + ;(this.#ingestPart.payload as any)[key] = clone(value) + this.#hasChanges = true + } + } + + setUserEditState(key: string, value: boolean): void { + if (!this.#ingestPart.userEditStates) this.#ingestPart.userEditStates = {} + if (this.#hasChanges || this.#ingestPart.userEditStates[key] !== value) { + this.#ingestPart.userEditStates[key] = value + this.#hasChanges = true + } + } + + /** + * Check if the part has changes and clear any changes flags + * Note: this is not visible to blueprints + */ + checkAndClearChangesFlags(): boolean { + const hasChanges = this.#hasChanges + + this.#hasChanges = false + + return hasChanges + } +} diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts new file mode 100644 index 0000000000..8752db4f9a --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts @@ -0,0 +1,401 @@ +import type { + MutableIngestRundown, + MutableIngestSegment, + MutableIngestPart, + IngestSegment, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import { Complete, clone, omit } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' +import _ = require('underscore') +import { MutableIngestSegmentImpl } from './MutableIngestSegmentImpl' +import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { SofieIngestRundownDataCacheGenerator } from '../../ingest/sofieIngestCache' +import { + SofieIngestDataCacheObj, + SofieIngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import type { ComputedIngestChangeObject } from '../../ingest/runOperation' +import { RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' + +export interface MutableIngestRundownChanges { + // define what needs regenerating + computedChanges: ComputedIngestChangeObject + + // define what portions of the ingestRundown need saving + changedCacheObjects: SofieIngestDataCacheObj[] + allCacheObjectIds: SofieIngestDataCacheObjId[] +} + +export class MutableIngestRundownImpl + implements MutableIngestRundown +{ + readonly ingestRundown: Omit< + SofieIngestRundownWithSource, + 'segments' + > + #hasChangesToRundown = false + + readonly #segments: MutableIngestSegmentImpl[] + + readonly #originalSegmentRanks = new Map() + + constructor( + ingestRundown: SofieIngestRundownWithSource, + isExistingRundown: boolean + ) { + this.ingestRundown = omit(ingestRundown, 'segments') + this.#segments = ingestRundown.segments + .slice() // shallow copy + .sort((a, b) => a.rank - b.rank) + .map((segment) => new MutableIngestSegmentImpl(segment, !isExistingRundown)) + this.#hasChangesToRundown = !isExistingRundown + + for (const segment of ingestRundown.segments) { + 
diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts
new file mode 100644
index 0000000000..8752db4f9a
--- /dev/null
+++ b/packages/job-worker/src/blueprints/ingest/MutableIngestRundownImpl.ts
@@ -0,0 +1,401 @@
+import type {
+	MutableIngestRundown,
+	MutableIngestSegment,
+	MutableIngestPart,
+	IngestSegment,
+	SofieIngestSegment,
+} from '@sofie-automation/blueprints-integration'
+import { Complete, clone, omit } from '@sofie-automation/corelib/dist/lib'
+import { ReadonlyDeep } from 'type-fest'
+import _ = require('underscore')
+import { MutableIngestSegmentImpl } from './MutableIngestSegmentImpl'
+import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { SofieIngestRundownDataCacheGenerator } from '../../ingest/sofieIngestCache'
+import {
+	SofieIngestDataCacheObj,
+	SofieIngestRundownWithSource,
+} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+import type { ComputedIngestChangeObject } from '../../ingest/runOperation'
+import { RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown'
+
+export interface MutableIngestRundownChanges {
+	// define what needs regenerating
+	computedChanges: ComputedIngestChangeObject
+
+	// define what portions of the ingestRundown need saving
+	changedCacheObjects: SofieIngestDataCacheObj[]
+	allCacheObjectIds: SofieIngestDataCacheObjId[]
+}
+
+export class MutableIngestRundownImpl<TRundownPayload = unknown, TSegmentPayload = unknown, TPartPayload = unknown>
+	implements MutableIngestRundown<TRundownPayload, TSegmentPayload, TPartPayload>
+{
+	readonly ingestRundown: Omit<
+		SofieIngestRundownWithSource<TRundownPayload, TSegmentPayload, TPartPayload>,
+		'segments'
+	>
+	#hasChangesToRundown = false
+
+	readonly #segments: MutableIngestSegmentImpl<TSegmentPayload, TPartPayload>[]
+
+	readonly #originalSegmentRanks = new Map<string, number>()
+
+	constructor(
+		ingestRundown: SofieIngestRundownWithSource<TRundownPayload, TSegmentPayload, TPartPayload>,
+		isExistingRundown: boolean
+	) {
+		this.ingestRundown = omit(ingestRundown, 'segments')
+		this.#segments = ingestRundown.segments
+			.slice() // shallow copy
+			.sort((a, b) => a.rank - b.rank)
+			.map((segment) => new MutableIngestSegmentImpl(segment, !isExistingRundown))
+		this.#hasChangesToRundown = !isExistingRundown
+
+		for (const segment of ingestRundown.segments) {
+			this.#originalSegmentRanks.set(segment.externalId, segment.rank)
+		}
+	}
+
+	get segments(): MutableIngestSegmentImpl<TSegmentPayload, TPartPayload>[] {
+		return this.#segments.slice() // shallow copy
+	}
+
+	get externalId(): string {
+		return this.ingestRundown.externalId
+	}
+
+	get type(): string {
+		return this.ingestRundown.type
+	}
+
+	get name(): string {
+		return this.ingestRundown.name
+	}
+
+	get payload(): ReadonlyDeep<TRundownPayload> | undefined {
+		return this.ingestRundown.payload as ReadonlyDeep<TRundownPayload>
+	}
+
+	get userEditStates(): Record<string, boolean> {
+		return this.ingestRundown.userEditStates ?? {}
+	}
+
+	/**
+	 * Internal method to propagate the rundown source
+	 */
+	updateRundownSource(source: RundownSource): void {
+		if (!_.isEqual(source, this.ingestRundown.rundownSource)) {
+			this.ingestRundown.rundownSource = source
+			this.#hasChangesToRundown = true
+		}
+	}
+
+	setName(name: string): void {
+		if (this.ingestRundown.name !== name) {
+			this.ingestRundown.name = name
+			this.#hasChangesToRundown = true
+		}
+	}
+
+	forceFullRegenerate(): void {
+		this.#hasChangesToRundown = true
+	}
+
+	replacePayload(payload: ReadonlyDeep<TRundownPayload> | TRundownPayload): void {
+		if (this.#hasChangesToRundown || !_.isEqual(this.ingestRundown.payload, payload)) {
+			this.ingestRundown.payload = clone(payload)
+			this.#hasChangesToRundown = true
+		}
+	}
+
+	setPayloadProperty<TKey extends keyof TRundownPayload>(
+		key: TKey,
+		value: ReadonlyDeep<TRundownPayload[TKey]> | TRundownPayload[TKey]
+	): void {
+		if (!this.ingestRundown.payload) {
+			throw new Error('Rundown payload is not set')
+		}
+
+		if (this.#hasChangesToRundown || !_.isEqual(this.ingestRundown.payload[key], value)) {
+			// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+			;(this.ingestRundown.payload as any)[key] = clone(value)
+			this.#hasChangesToRundown = true
+		}
+	}
+
+	findPart(partExternalId: string): MutableIngestPart<TPartPayload> | undefined {
+		for (const segment of this.#segments) {
+			const part = segment.getPart(partExternalId)
+			if (part) return part
+		}
+
+		return undefined
+	}
+
+	findPartAndSegment(partExternalId: string):
+		| {
+				part: MutableIngestPart<TPartPayload>
+				segment: MutableIngestSegment<TSegmentPayload, TPartPayload>
+		  }
+		| undefined {
+		for (const segment of this.#segments) {
+			const part = segment.getPart(partExternalId)
+			if (part) return { part, segment }
+		}
+		return undefined
+	}
+
+	getSegment(segmentExternalId: string): MutableIngestSegment<TSegmentPayload, TPartPayload> | undefined {
+		return this.#segments.find((s) => s.externalId === segmentExternalId)
+	}
+
+	moveSegmentBefore(segmentExternalId: string, beforeSegmentExternalId: string | null): void {
+		if (segmentExternalId === beforeSegmentExternalId) throw new Error('Cannot move Segment before itself')
+
+		const segment = this.#segments.find((s) => s.externalId === segmentExternalId)
+		if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`)
+
+		this.#removeSegment(segmentExternalId)
+
+		if (beforeSegmentExternalId) {
+			const beforeIndex = this.#segments.findIndex((s) => s.externalId === beforeSegmentExternalId)
+			if (beforeIndex === -1) throw new Error(`Segment "${beforeSegmentExternalId}" not found`)
+
+			this.#segments.splice(beforeIndex, 0, segment)
+		} else {
+			this.#segments.push(segment)
+		}
+	}
+
+	moveSegmentAfter(segmentExternalId: string, afterSegmentExternalId: string | null): void {
+		if (segmentExternalId === afterSegmentExternalId) throw new Error('Cannot move Segment after itself')
+
+		const segment = this.#segments.find((s) => s.externalId === segmentExternalId)
+		if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`)
+
+		this.#removeSegment(segmentExternalId)
+
+		if (afterSegmentExternalId) {
+			const beforeIndex = this.#segments.findIndex((s) => s.externalId === afterSegmentExternalId)
+			if (beforeIndex === -1) throw new Error(`Segment "${afterSegmentExternalId}" not found`)
+
+			this.#segments.splice(beforeIndex + 1, 0, segment)
+		} else {
+			this.#segments.unshift(segment)
+		}
+	}
+
+	replaceSegment(
+		segment: Omit<IngestSegment<TSegmentPayload, TPartPayload>, 'rank'>,
+		beforeSegmentExternalId: string | null
+	): MutableIngestSegment<TSegmentPayload, TPartPayload> {
+		if (segment.externalId === beforeSegmentExternalId) throw new Error('Cannot insert Segment before itself')
+
+		const newSegment = new MutableIngestSegmentImpl(
+			{ ...segment, userEditStates: {}, parts: segment.parts.map((p) => ({ ...p, userEditStates: {} })) },
+			true
+		)
+
+		const oldSegment = this.#segments.find((s) => s.externalId === segment.externalId)
+		if (oldSegment?.originalExternalId) {
+			newSegment.setOriginalExternalId(oldSegment.originalExternalId)
+		}
+
+		this.#removeSegment(segment.externalId)
+
+		if (beforeSegmentExternalId) {
+			const beforeIndex = this.#segments.findIndex((s) => s.externalId === beforeSegmentExternalId)
+			if (beforeIndex === -1) throw new Error(`Segment "${beforeSegmentExternalId}" not found`)
+
+			this.#segments.splice(beforeIndex, 0, newSegment)
+		} else {
+			this.#segments.push(newSegment)
+		}
+
+		return newSegment
+	}
+
+	changeSegmentExternalId(
+		oldSegmentExternalId: string,
+		newSegmentExternalId: string
+	): MutableIngestSegment<TSegmentPayload, TPartPayload> {
+		const segment = this.#segments.find((s) => s.externalId === oldSegmentExternalId)
+		if (!segment) throw new Error(`Segment "${oldSegmentExternalId}" not found`)
+
+		const targetSegment = this.#segments.find((s) => s.externalId === newSegmentExternalId)
+		if (targetSegment) throw new Error(`Segment "${newSegmentExternalId}" already exists`)
+
+		segment.setExternalId(newSegmentExternalId)
+
+		return segment
+	}
+
+	changeSegmentOriginalExternalId(
+		segmentExternalId: string,
+		originalSegmentExternalId: string
+	): MutableIngestSegment<TSegmentPayload, TPartPayload> {
+		const segment = this.#segments.find((s) => s.externalId === segmentExternalId)
+		if (!segment) throw new Error(`Segment "${segmentExternalId}" not found`)
+
+		const targetSegment = this.#segments.find((s) => s.externalId === originalSegmentExternalId)
+		if (targetSegment) throw new Error(`Segment "${originalSegmentExternalId}" exists`)
+
+		segment.setOriginalExternalId(originalSegmentExternalId)
+
+		return segment
+	}
+
+	/**
+	 * Remove a segment
+	 * Note: this is separate from the removeSegment method to allow for internal use when methods are overridden in tests
+	 */
+	#removeSegment(segmentExternalId: string): boolean {
+		const existingIndex = this.#segments.findIndex((s) => s.externalId === segmentExternalId)
+		if (existingIndex !== -1) {
+			this.#segments.splice(existingIndex, 1)
+
+			return true
+		} else {
+			return false
+		}
+	}
+
+	removeSegment(segmentExternalId: string): boolean {
+		return this.#removeSegment(segmentExternalId)
+	}
+
+	removeAllSegments(): void {
+		this.#segments.length = 0
+	}
+
+	setUserEditState(key: string, value: boolean): void {
+		if (!this.ingestRundown.userEditStates) this.ingestRundown.userEditStates = {}
+		if (this.#hasChangesToRundown || this.ingestRundown.userEditStates[key] !== value) {
+			this.ingestRundown.userEditStates[key] = value
+			this.#hasChangesToRundown = true
+		}
+	}
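+
+	// A sketch (illustrative only) of the reordering and renaming API above,
+	// assuming `rundown` is an instance whose segments are seg0, seg1, seg2:
+	//
+	//   rundown.moveSegmentBefore('seg2', 'seg0')       // order: seg2, seg0, seg1
+	//   rundown.moveSegmentAfter('seg0', 'seg1')        // order: seg2, seg1, seg0
+	//   rundown.changeSegmentExternalId('seg0', 'segX') // contents kept, recorded as a rename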
+
+	/**
+	 * Converts the state contained within this MutableIngestRundown
+	 * into a structure of the computed changes and the cache objects (the SofieIngestDataCacheObj).
+	 * The MutableIngestRundownChanges are then used to update the SofieIngestDataCache and keep
+	 * track of what portions of the Rundown need regenerating/updating.
+	 *
+	 * Note: This is NOT exposed to blueprints
+	 */
+	intoIngestRundown(ingestObjectGenerator: SofieIngestRundownDataCacheGenerator): MutableIngestRundownChanges {
+		const ingestSegments: SofieIngestSegment[] = []
+		const changedCacheObjects: SofieIngestDataCacheObj[] = []
+		const allCacheObjectIds: SofieIngestDataCacheObjId[] = []
+
+		const segmentsToRegenerate: SofieIngestSegment[] = []
+		const segmentExternalIdChanges: Record<string, string> = {}
+		const segmentsUpdatedRanks: Record<string, number> = {}
+
+		const usedSegmentIds = new Set<string>()
+		const usedPartIds = new Set<string>()
+
+		this.#segments.forEach((segment, rank) => {
+			if (usedSegmentIds.has(segment.externalId)) {
+				throw new Error(`Segment "${segment.externalId}" is used more than once`)
+			}
+			usedSegmentIds.add(segment.externalId)
+
+			const segmentInfo = segment.intoChangesInfo(ingestObjectGenerator)
+
+			for (const part of segmentInfo.ingestParts) {
+				if (usedPartIds.has(part.externalId)) {
+					throw new Error(`Part "${part.externalId}" is used more than once`)
+				}
+				usedPartIds.add(part.externalId)
+			}
+
+			const ingestSegment: Complete<SofieIngestSegment> = {
+				externalId: segment.externalId,
+				rank,
+				name: segment.name,
+				payload: segment.payload,
+				parts: segmentInfo.ingestParts,
+				userEditStates: { ...segment.userEditStates },
+			}
+
+			ingestSegments.push(ingestSegment)
+			allCacheObjectIds.push(ingestObjectGenerator.getSegmentObjectId(ingestSegment.externalId))
+
+			changedCacheObjects.push(...segmentInfo.changedCacheObjects)
+			allCacheObjectIds.push(...segmentInfo.allCacheObjectIds)
+
+			// Check for any changes to the rank
+			const oldRank =
+				(segment.originalExternalId ? this.#originalSegmentRanks.get(segment.originalExternalId) : null) ??
+				this.#originalSegmentRanks.get(segment.externalId)
+			const rankChanged = ingestSegment.rank !== oldRank
+			if (rankChanged) {
+				segmentsUpdatedRanks[segment.externalId] = ingestSegment.rank
+			}
+
+			// Check for any changes to the externalId
+			const externalIdChanged = segmentInfo.originalExternalId !== segment.externalId
+			if (externalIdChanged) {
+				segmentExternalIdChanges[segmentInfo.originalExternalId] = segment.externalId
+			}
+
+			// Update ingest cache if there are changes
+			if (segmentInfo.segmentHasChanges || rankChanged || externalIdChanged) {
+				changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment))
+			}
+
+			// Regenerate the segment if there are substantial changes
+			if (
+				segmentInfo.segmentHasChanges ||
+				segmentInfo.partOrderHasChanged ||
+				segmentInfo.partIdsWithChanges.length > 0
+			) {
+				segmentsToRegenerate.push(ingestSegment)
+			}
+		})
+
+		// Find any removed segments
+		const newSegmentIds = new Set(ingestSegments.map((s) => s.externalId))
+		const removedSegmentIds = Array.from(this.#originalSegmentRanks.keys()).filter(
+			(id) => !newSegmentIds.has(id) && !segmentExternalIdChanges[id]
+		)
+
+		// Check if this rundown object has changed
+		if (this.#hasChangesToRundown) {
+			changedCacheObjects.push(ingestObjectGenerator.generateRundownObject(this.ingestRundown))
+		}
+		allCacheObjectIds.push(ingestObjectGenerator.getRundownObjectId())
+
+		const regenerateRundown = this.#hasChangesToRundown
+
+		this.#hasChangesToRundown = false
+
+		// Reset this.#originalSegmentRanks
+		this.#originalSegmentRanks.clear()
+		this.#segments.forEach((segment, rank) => {
+			this.#originalSegmentRanks.set(segment.externalId, rank)
+		})
+
+		const result: MutableIngestRundownChanges = {
+			computedChanges: {
+				ingestRundown: {
+					...this.ingestRundown,
+					segments: ingestSegments,
+				},
+
+				segmentsToRemove: removedSegmentIds,
+				segmentsUpdatedRanks,
+				segmentsToRegenerate,
+				regenerateRundown,
+				segmentExternalIdChanges: segmentExternalIdChanges,
+			},
+
+			changedCacheObjects,
+			allCacheObjectIds,
+		}
+
+		return result
+	}
+}
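+
+// A rough sketch (illustrative only) of how the job-worker side consumes this
+// class: an ingest operation mutates the rundown, then intoIngestRundown()
+// distils the result into cache writes and regeneration hints. `cacheGenerator`
+// is a hypothetical SofieIngestRundownDataCacheGenerator for this rundown.
+//
+//   const mutableRundown = new MutableIngestRundownImpl(sofieIngestRundown, true)
+//   mutableRundown.setName('Evening bulletin')
+//   const changes = mutableRundown.intoIngestRundown(cacheGenerator)
+//   // changes.changedCacheObjects                  -> documents to write to the ingest cache
+//   // changes.computedChanges.segmentsToRegenerate -> segments needing regeneration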
diff --git a/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts b/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts
new file mode 100644
index 0000000000..a11561848a
--- /dev/null
+++ b/packages/job-worker/src/blueprints/ingest/MutableIngestSegmentImpl.ts
@@ -0,0 +1,262 @@
+import type {
+	IngestPart,
+	MutableIngestPart,
+	MutableIngestSegment,
+	SofieIngestPart,
+	SofieIngestSegment,
+} from '@sofie-automation/blueprints-integration'
+import { Complete, clone, omit } from '@sofie-automation/corelib/dist/lib'
+import { ReadonlyDeep } from 'type-fest'
+import _ = require('underscore')
+import { MutableIngestPartImpl } from './MutableIngestPartImpl'
+import { SofieIngestRundownDataCacheGenerator } from '../../ingest/sofieIngestCache'
+import { getSegmentId } from '../../ingest/lib'
+import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+
+export interface MutableIngestSegmentChanges {
+	ingestParts: SofieIngestPart[]
+	changedCacheObjects: SofieIngestDataCacheObj[]
+	allCacheObjectIds: SofieIngestDataCacheObjId[]
+	segmentHasChanges: boolean
+	partIdsWithChanges: string[]
+	partOrderHasChanged: boolean
+	originalExternalId: string
+}
+
+export class MutableIngestSegmentImpl<TSegmentPayload = unknown, TPartPayload = unknown>
+	implements MutableIngestSegment<TSegmentPayload, TPartPayload>
+{
+	readonly #ingestSegment: Omit<SofieIngestSegment<TSegmentPayload, TPartPayload>, 'rank' | 'parts'>
+	#originalExternalId: string
+	#segmentHasChanges = false
+	#partOrderHasChanged = false
+
+	readonly #parts: MutableIngestPartImpl<TPartPayload>[]
+
+	get originalExternalId(): string | undefined {
+		if (this.#originalExternalId !== this.externalId) {
+			return this.#originalExternalId
+		} else {
+			return undefined
+		}
+	}
+
+	constructor(ingestSegment: Omit<SofieIngestSegment<TSegmentPayload, TPartPayload>, 'rank'>, hasChanges = false) {
+		this.#originalExternalId = ingestSegment.externalId
+		this.#ingestSegment = omit(ingestSegment, 'parts')
+		this.#parts = ingestSegment.parts
+			.slice() // shallow copy
+			.sort((a, b) => a.rank - b.rank)
+			.map((part) => new MutableIngestPartImpl(part, hasChanges))
+		this.#segmentHasChanges = hasChanges
+	}
+
+	get parts(): MutableIngestPart<TPartPayload>[] {
+		return this.#parts.slice() // shallow copy
+	}
+
+	get externalId(): string {
+		return this.#ingestSegment.externalId
+	}
+
+	get name(): string {
+		return this.#ingestSegment.name
+	}
+
+	get payload(): ReadonlyDeep<TSegmentPayload> | undefined {
+		return this.#ingestSegment.payload as ReadonlyDeep<TSegmentPayload>
+	}
+
+	get userEditStates(): Record<string, boolean> {
+		return this.#ingestSegment.userEditStates ?? {}
+	}
+
+	getPart(partExternalId: string): MutableIngestPart<TPartPayload> | undefined {
+		return this.#parts.find((part) => part.externalId === partExternalId)
+	}
+
+	movePartBefore(partExternalId: string, beforePartExternalId: string | null): void {
+		if (partExternalId === beforePartExternalId) throw new Error('Cannot move Part before itself')
+
+		const part = this.#parts.find((p) => p.externalId === partExternalId)
+		if (!part) throw new Error(`Part "${partExternalId}" not found`)
+
+		this.#removePart(partExternalId)
+
+		if (beforePartExternalId) {
+			const beforeIndex = this.#parts.findIndex((p) => p.externalId === beforePartExternalId)
+			if (beforeIndex === -1) throw new Error(`Part "${beforePartExternalId}" not found`)
+
+			this.#parts.splice(beforeIndex, 0, part)
+		} else {
+			this.#parts.push(part)
+		}
+
+		this.#partOrderHasChanged = true
+	}
+
+	movePartAfter(partExternalId: string, afterPartExternalId: string | null): void {
+		if (partExternalId === afterPartExternalId) throw new Error('Cannot move Part after itself')
+
+		const part = this.#parts.find((p) => p.externalId === partExternalId)
+		if (!part) throw new Error(`Part "${partExternalId}" not found`)
+
+		this.#removePart(partExternalId)
+
+		if (afterPartExternalId) {
+			const beforeIndex = this.#parts.findIndex((p) => p.externalId === afterPartExternalId)
+			if (beforeIndex === -1) throw new Error(`Part "${afterPartExternalId}" not found`)
+
+			this.#parts.splice(beforeIndex + 1, 0, part)
+		} else {
+			this.#parts.unshift(part)
+		}
+
+		this.#partOrderHasChanged = true
+	}
+
+	replacePart(
+		ingestPart: Omit<IngestPart<TPartPayload>, 'rank'>,
+		beforePartExternalId: string | null
+	): MutableIngestPart<TPartPayload> {
+		if (ingestPart.externalId === beforePartExternalId) throw new Error('Cannot insert Part before itself')
+
+		this.#removePart(ingestPart.externalId)
+
+		const newPart = new MutableIngestPartImpl({ ...ingestPart, userEditStates: {} }, true)
+
+		if (beforePartExternalId) {
+			const beforeIndex = this.#parts.findIndex((s) => s.externalId === beforePartExternalId)
+			if (beforeIndex === -1) throw new Error(`Part "${beforePartExternalId}" not found`)
+
+			this.#parts.splice(beforeIndex, 0, newPart)
+		} else {
+			this.#parts.push(newPart)
+		}
+
+		this.#partOrderHasChanged = true
+
+		return newPart
+	}
+
+	/**
+	 * Remove a part
+	 * Note: this is separate from the removePart method to allow for internal use when methods are overridden in tests
+	 */
+	#removePart(partExternalId: string): boolean {
+		const index = this.#parts.findIndex((part) => part.externalId === partExternalId)
+		if (index === -1) {
+			return false
+		}
+
+		this.#parts.splice(index, 1)
+		this.#partOrderHasChanged = true
+
+		return true
+	}
+
+	removePart(partExternalId: string): boolean {
+		return this.#removePart(partExternalId)
+	}
+
+	forceRegenerate(): void {
+		this.#segmentHasChanges = true
+	}
+
+	/**
+	 * Note: This is not exposed to blueprints
+	 */
+	setExternalId(newSegmentExternalId: string): void {
+		this.#ingestSegment.externalId = newSegmentExternalId
+	}
+	/**
+	 * Note: This is not exposed to blueprints
+	 */
+	setOriginalExternalId(oldSegmentExternalId: string): void {
+		this.#originalExternalId = oldSegmentExternalId
+	}
+
+	setName(name: string): void {
+		if (this.#ingestSegment.name !== name) {
+			this.#ingestSegment.name = name
+			this.#segmentHasChanges = true
+		}
+	}
+
+	replacePayload(payload: ReadonlyDeep<TSegmentPayload> | TSegmentPayload): void {
+		if (this.#segmentHasChanges || !_.isEqual(this.#ingestSegment.payload, payload)) {
+			this.#ingestSegment.payload = clone(payload)
+			this.#segmentHasChanges = true
+		}
+	}
+
+	setPayloadProperty<TKey extends keyof TSegmentPayload>(
+		key: TKey,
+		value: ReadonlyDeep<TSegmentPayload[TKey]> | TSegmentPayload[TKey]
+	): void {
+		if (!this.#ingestSegment.payload) {
+			throw new Error('Segment payload is not set')
+		}
+
+		if (this.#segmentHasChanges || !_.isEqual(this.#ingestSegment.payload[key], value)) {
+			// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+			;(this.#ingestSegment.payload as any)[key] = clone(value)
+			this.#segmentHasChanges = true
+		}
+	}
+
+	setUserEditState(key: string, value: boolean): void {
+		if (!this.#ingestSegment.userEditStates) this.#ingestSegment.userEditStates = {}
+		if (this.#segmentHasChanges || this.#ingestSegment.userEditStates[key] !== value) {
+			this.#ingestSegment.userEditStates[key] = value
+			this.#segmentHasChanges = true
+		}
+	}
+
+	intoChangesInfo(generator: SofieIngestRundownDataCacheGenerator): MutableIngestSegmentChanges {
+		const ingestParts: SofieIngestPart[] = []
+		const changedCacheObjects: SofieIngestDataCacheObj[] = []
+		const allCacheObjectIds: SofieIngestDataCacheObjId[] = []
+		const partIdsWithChanges: string[] = []
+
+		const segmentId = getSegmentId(generator.rundownId, this.#ingestSegment.externalId)
+
+		this.#parts.forEach((part, rank) => {
+			const ingestPart: Complete<SofieIngestPart> = {
+				externalId: part.externalId,
+				rank,
+				name: part.name,
+				payload: part.payload,
+				userEditStates: part.userEditStates,
+			}
+
+			allCacheObjectIds.push(generator.getPartObjectId(ingestPart.externalId))
+			ingestParts.push(ingestPart)
+
+			if (part.checkAndClearChangesFlags()) {
+				changedCacheObjects.push(generator.generatePartObject(segmentId, ingestPart))
+				partIdsWithChanges.push(ingestPart.externalId)
+			}
+		})
+
+		const segmentHasChanges = this.#segmentHasChanges
+		const partOrderHasChanged = this.#partOrderHasChanged
+		const originalExternalId = this.#originalExternalId
+
+		// clear flags
+		this.#segmentHasChanges = false
+		this.#partOrderHasChanged = false
+		this.#originalExternalId = this.#ingestSegment.externalId
+
+		return {
+			ingestParts,
+			changedCacheObjects,
+			allCacheObjectIds,
+			segmentHasChanges,
+			partIdsWithChanges,
+			partOrderHasChanged,
+			originalExternalId,
+		}
+	}
+}
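+
+// A usage sketch (illustrative only), assuming `segment` holds parts
+// part0..part3 in order:
+//
+//   segment.movePartBefore('part3', 'part0') // order: part3, part0, part1, part2
+//   segment.replacePart({ externalId: 'part1', name: 'renamed', payload: {} }, null)
+//   // with beforePartExternalId === null the replacement is appended at the end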
diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts
new file mode 100644
index 0000000000..e36cac887b
--- /dev/null
+++ b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestPartImpl.spec.ts
@@ -0,0 +1,162 @@
+import { SofieIngestPart } from '@sofie-automation/blueprints-integration'
+import { MutableIngestPartImpl } from '../MutableIngestPartImpl'
+import { clone } from '@sofie-automation/corelib/dist/lib'
+
+describe('MutableIngestPartImpl', () => {
+	function getBasicIngestPart(): SofieIngestPart {
+		return {
+			externalId: 'externalId',
+			name: 'name',
+			rank: 0,
+			payload: {
+				val: 'some-val',
+				second: 5,
+			},
+			userEditStates: {
+				one: true,
+				two: false,
+			},
+		}
+	}
+
+	test('create basic', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.externalId).toBe(ingestPart.externalId)
+		expect(mutablePart.name).toBe(ingestPart.name)
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+
+		// check it has no changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+	})
+
+	test('create basic with changes', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart), true)
+
+		// compare properties
+		expect(mutablePart.externalId).toBe(ingestPart.externalId)
+		expect(mutablePart.name).toBe(ingestPart.name)
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+
+		// check it has changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(true)
+
+		// check flag has been cleared
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+	})
+
+	test('set name', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.name).toBe(ingestPart.name)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		mutablePart.setName('new-name')
+		expect(mutablePart.name).toBe('new-name')
+
+		// check it has changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(true)
+	})
+
+	test('replace payload with change', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		const newPayload = { val: 'new-val' }
+		mutablePart.replacePayload(newPayload)
+		expect(mutablePart.payload).toEqual(newPayload)
+
+		// check it has changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(true)
+	})
+
+	test('replace payload with no change', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		mutablePart.replacePayload(ingestPart.payload)
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+
+		// check it has no changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+	})
+
+	test('set payload property change', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		const newPayload = { ...ingestPart.payload, test: 123, second: undefined }
+		mutablePart.setPayloadProperty('test', 123)
+		mutablePart.setPayloadProperty('second', undefined)
+		expect(mutablePart.payload).toEqual(newPayload)
+
+		// check it has changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(true)
+	})
+
+	test('set payload property unchanged', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		mutablePart.setPayloadProperty('val', ingestPart.payload.val)
+		mutablePart.setPayloadProperty('another', undefined)
+		expect(mutablePart.payload).toEqual(ingestPart.payload)
+
+		// check it has no changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+	})
+
+	test('set user edit state change', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		const newUserEditStates = { ...ingestPart.userEditStates, two: true, another: false }
+		mutablePart.setUserEditState('two', true)
+		mutablePart.setUserEditState('another', false)
+		expect(mutablePart.userEditStates).toEqual(newUserEditStates)
+
+		// check it has changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(true)
+	})
+
+	test('set user edit state unchanged', () => {
+		const ingestPart = getBasicIngestPart()
+		const mutablePart = new MutableIngestPartImpl(clone(ingestPart))
+
+		// compare properties
+		expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates)
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+
+		mutablePart.setUserEditState('one', true)
+		mutablePart.setUserEditState('two', false)
+		expect(mutablePart.userEditStates).toEqual(ingestPart.userEditStates)
+
+		// check it has no changes
+		expect(mutablePart.checkAndClearChangesFlags()).toBe(false)
+	})
+})
diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts
new file mode 100644
index 0000000000..a8030891b1
--- /dev/null
+++ b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestRundownImpl.spec.ts
@@ -0,0 +1,901 @@
+import { clone } from '@sofie-automation/corelib/dist/lib'
+import { MutableIngestRundownChanges, MutableIngestRundownImpl } from '../MutableIngestRundownImpl'
+import { SofieIngestRundownDataCacheGenerator } from '../../../ingest/sofieIngestCache'
+import { protectString } from '@sofie-automation/corelib/dist/protectedString'
+import { getSegmentId } from '../../../ingest/lib'
+import { SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { MutableIngestSegmentImpl } from '../MutableIngestSegmentImpl'
+import { IngestRundown, IngestSegment, SofieIngestSegment } from '@sofie-automation/blueprints-integration'
+import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+
+describe('MutableIngestRundownImpl', () => {
+	function getBasicIngestRundown(): SofieIngestRundownWithSource {
+		return {
+			externalId: 'rundown0',
+			type: 'mock',
+			name: 'rundown-name',
+			payload: {
+				val: 'some-val',
+				second: 5,
+			},
+			userEditStates: {
+				one: true,
+				two: false,
+			},
+			rundownSource: { type: 'http' },
+			segments: [
+				{
+					externalId: 'seg0',
+					name: 'name',
+					rank: 0,
+					payload: {
+						val: 'first-val',
+						second: 5,
+					},
+					userEditStates: {},
+					parts: [
+						{
+							externalId: 'part0',
+							name: 'my first part',
+							rank: 0,
+							payload: {
+								val: 'some-val',
+							},
+							userEditStates: {},
+						},
+					],
+				},
+				{
+					externalId: 'seg1',
+					name: 'name 2',
+					rank: 1,
+					payload: {
+						val: 'next-val',
+					},
+					userEditStates: {},
+					parts: [
+						{
+							externalId: 'part1',
+							name: 'my second part',
+							rank: 0,
+							payload: {
+								val: 'some-val',
+							},
+							userEditStates: {},
+						},
+					],
+				},
+				{
+					externalId: 'seg2',
+					name: 'name 3',
+					rank: 2,
+					payload: {
+						val: 'last-val',
+					},
+					userEditStates: {},
+					parts: [
+						{
+							externalId: 'part2',
+							name: 'my third part',
+							rank: 0,
+							payload: {
+								val: 'some-val',
+							},
+							userEditStates: {},
+						},
+					],
+				},
+			],
+		}
+	}
+
+	const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(protectString('rundownId'))
+
+	function createNoChangesObject(ingestRundown: SofieIngestRundownWithSource): MutableIngestRundownChanges {
+		const allCacheObjectIds: SofieIngestDataCacheObjId[] = []
+		for (const segment of ingestRundown.segments) {
+			allCacheObjectIds.push(ingestObjectGenerator.getSegmentObjectId(segment.externalId))
+			for (const part of segment.parts) {
+				allCacheObjectIds.push(ingestObjectGenerator.getPartObjectId(part.externalId))
+			}
+		}
+		allCacheObjectIds.push(ingestObjectGenerator.getRundownObjectId())
+
+		return {
+			computedChanges: {
+				ingestRundown,
+
+				segmentsToRemove: [],
+				segmentsUpdatedRanks: {},
+				segmentsToRegenerate: [],
+				regenerateRundown: false,
+
+				segmentExternalIdChanges: {},
+			},
+			changedCacheObjects: [],
+			allCacheObjectIds: allCacheObjectIds,
+		}
+	}
+
+	function addChangedSegments(
+		changes: MutableIngestRundownChanges,
+		_ingestRundown: IngestRundown,
+		...ingestSegments: SofieIngestSegment[]
+	): void {
+		for (const ingestSegment of ingestSegments) {
+			const segmentId = getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId)
+
+			changes.computedChanges.segmentsToRegenerate.push(ingestSegment)
+
+			for (const part of ingestSegment.parts) {
+				changes.changedCacheObjects.push(ingestObjectGenerator.generatePartObject(segmentId, part))
+			}
+
+			changes.changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment))
+		}
+	}
+	function addChangedRankSegments(
+		changes: MutableIngestRundownChanges,
+		_ingestRundown: IngestRundown,
+		...ingestSegments: SofieIngestSegment[]
+	): void {
+		for (const ingestSegment of ingestSegments) {
+			changes.changedCacheObjects.push(ingestObjectGenerator.generateSegmentObject(ingestSegment))
+		}
+	}
+	function addChangedRundown(changes: MutableIngestRundownChanges): void {
+		changes.computedChanges.regenerateRundown = true
+		changes.changedCacheObjects.push(
+			ingestObjectGenerator.generateRundownObject(changes.computedChanges.ingestRundown)
+		)
+	}
+	function removeSegmentFromIngestRundown(ingestRundown: IngestRundown, segmentId: string): void {
+		const ingestSegment = ingestRundown.segments.find((p) => p.externalId === segmentId)
+		ingestRundown.segments = ingestRundown.segments.filter((p) => p.externalId !== segmentId)
+		if (ingestSegment) {
+			for (const part of ingestRundown.segments) {
+				if (part.rank > ingestSegment.rank) part.rank--
+			}
+		}
+	}
+	function getSegmentIdOrder(mutableRundown: MutableIngestRundownImpl): string[] {
+		return mutableRundown.segments.map((p) => p.externalId)
+	}
+	function getSegmentOriginalIdOrder(mutableRundown: MutableIngestRundownImpl): Array<string | undefined> {
+		return mutableRundown.segments.map((p) => p.originalExternalId)
+	}
+
+	test('create basic', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.externalId).toBe(ingestRundown.externalId)
+		expect(mutableRundown.name).toBe(ingestRundown.name)
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length)
+
+		// check it has no changes
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('create basic with changes', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), false)
+
+		// compare properties
+		expect(mutableRundown.externalId).toBe(ingestRundown.externalId)
+		expect(mutableRundown.name).toBe(ingestRundown.name)
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length)
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(ingestRundown)
+		addChangedSegments(expectedChanges, ingestRundown, ...ingestRundown.segments)
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+
+		// check changes have been cleared
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('set name', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.name).toBe(ingestRundown.name)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		mutableRundown.setName('new-name')
+		expect(mutableRundown.name).toBe('new-name')
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(clone(ingestRundown))
+		expectedChanges.computedChanges.ingestRundown.name = 'new-name'
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	test('replace payload with change', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		const newPayload = { val: 'new-val' }
+		mutableRundown.replacePayload(newPayload)
+		expect(mutableRundown.payload).toEqual(newPayload)
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(clone(ingestRundown))
+		expectedChanges.computedChanges.ingestRundown.payload = newPayload
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	test('replace payload with no change', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		mutableRundown.replacePayload(ingestRundown.payload)
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+
+		// check it has no changes
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('set payload property change', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		const newPayload = { ...ingestRundown.payload, test: 123, second: undefined }
+		mutableRundown.setPayloadProperty('test', 123)
+		mutableRundown.setPayloadProperty('second', undefined)
+		expect(mutableRundown.payload).toEqual(newPayload)
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(clone(ingestRundown))
+		expectedChanges.computedChanges.ingestRundown.payload = newPayload
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	test('set payload property unchanged', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		mutableRundown.setPayloadProperty('val', ingestRundown.payload.val)
+		mutableRundown.setPayloadProperty('another', undefined)
+		expect(mutableRundown.payload).toEqual(ingestRundown.payload)
+
+		// check it has no changes
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('set user edit state change', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		const newUserEditStates = { ...ingestRundown.userEditStates, two: true, another: false }
+		mutableRundown.setUserEditState('two', true)
+		mutableRundown.setUserEditState('another', false)
+		expect(mutableRundown.userEditStates).toEqual(newUserEditStates)
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(ingestRundown)
+		ingestRundown.userEditStates = newUserEditStates
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	test('set user edit state unchanged', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		mutableRundown.setUserEditState('one', true)
+		mutableRundown.setUserEditState('two', false)
+		expect(mutableRundown.userEditStates).toEqual(ingestRundown.userEditStates)
+
+		// check it has no changes
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('get segments', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length)
+		expect(mutableRundown.getSegment('seg0')).toStrictEqual(mutableRundown.segments[0])
+		expect(mutableRundown.getSegment('seg0') instanceof MutableIngestSegmentImpl).toBe(true)
+		expect(mutableRundown.getSegment('seg1')).toStrictEqual(mutableRundown.segments[1])
+		expect(mutableRundown.getSegment('seg1') instanceof MutableIngestSegmentImpl).toBe(true)
+		expect(mutableRundown.getSegment('seg2')).toStrictEqual(mutableRundown.segments[2])
+		expect(mutableRundown.getSegment('seg2') instanceof MutableIngestSegmentImpl).toBe(true)
+		expect(mutableRundown.getSegment('seg3')).toBeUndefined()
+
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	test('findPart & findPartAndSegment', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// compare properties
+		expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length)
+		expect(mutableRundown.findPart('part1')).toStrictEqual(mutableRundown.segments[1].parts[0])
+		expect(mutableRundown.findPart('part1')).toStrictEqual(mutableRundown.findPartAndSegment('part1')?.part)
+		expect(mutableRundown.getSegment('seg1')).toStrictEqual(mutableRundown.findPartAndSegment('part1')?.segment)
+
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+	})
+
+	describe('removeSegment', () => {
+		test('good', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.removeSegment('seg1')).toBeTruthy()
+
+			// compare properties
+			expect(mutableRundown.segments.length).toBe(2)
+			expect(mutableRundown.getSegment('seg1')).toBeUndefined()
+
+			// check it has changes
+			const expectedIngestRundown = clone(ingestRundown)
+			removeSegmentFromIngestRundown(expectedIngestRundown, 'seg1')
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			expectedChanges.computedChanges.segmentsToRemove.push('seg1')
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1 }
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+
+			// try removing a second time
+			expect(mutableRundown.removeSegment('seg1')).toBeFalsy()
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(
+				createNoChangesObject(expectedIngestRundown)
+			)
+		})
+
+		test('unknown id', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.removeSegment('segX')).toBeFalsy()
+
+			// compare properties
+			expect(mutableRundown.segments.length).toBe(ingestRundown.segments.length)
+
+			// ensure no changes
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(
+				createNoChangesObject(ingestRundown)
+			)
+		})
+	})
+
+	test('removeAllSegments', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		mutableRundown.removeAllSegments()
+
+		// compare properties
+		expect(mutableRundown.segments.length).toBe(0)
+
+		// Check the reported changes
+		const expectedIngestRundown = clone(ingestRundown)
+		expectedIngestRundown.segments = []
+		const expectedChanges = createNoChangesObject(expectedIngestRundown)
+		for (const segment of ingestRundown.segments) {
+			expectedChanges.computedChanges.segmentsToRemove.push(segment.externalId)
+		}
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	test('forceFullRegenerate', () => {
+		const ingestRundown = getBasicIngestRundown()
+		const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+		// ensure no changes
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestRundown))
+
+		mutableRundown.forceFullRegenerate()
+
+		// check it has changes
+		const expectedChanges = createNoChangesObject(ingestRundown)
+		addChangedRundown(expectedChanges)
+		expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+	})
+
+	describe('replaceSegment', () => {
+		test('replace existing with a move', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const segmentBefore = mutableRundown.getSegment('seg1')
+			expect(segmentBefore).toBeDefined()
+			for (const part of segmentBefore?.parts || []) {
+				expect(mutableRundown.findPart(part.externalId)).toStrictEqual(part)
+			}
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			const newSegment: Omit<SofieIngestSegment, 'rank'> = {
+				externalId: 'seg1',
+				name: 'new name',
+				payload: {
+					val: 'new-val',
+				},
+				userEditStates: {},
+				parts: [
+					{
+						externalId: 'part1',
+						name: 'new part name',
+						rank: 0,
+						payload: {
+							val: 'new-part-val',
+						},
+						userEditStates: {},
+					},
+				],
+			}
+			const replacedPart = mutableRundown.replaceSegment(newSegment, null)
+			expect(replacedPart).toBeDefined()
+			// ensure the inserted segment looks correct
+			expect(replacedPart?.externalId).toBe(newSegment.externalId)
+			expect(replacedPart?.name).toBe(newSegment.name)
+			expect(replacedPart?.payload).toEqual(newSegment.payload)
+
+			// check it has changes
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1'])
+			const expectedIngestRundown = clone(ingestRundown)
+			removeSegmentFromIngestRundown(expectedIngestRundown, 'seg1')
+			expectedIngestRundown.segments.push({ ...newSegment, rank: 2 })
+
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			addChangedSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 }
+
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+
+			// ensure the MutableSegment is a new object
+			expect(mutableRundown.getSegment('seg1')).not.toBe(segmentBefore)
+			for (const part of segmentBefore?.parts || []) {
+				expect(mutableRundown.findPart(part.externalId)).not.toBe(part)
+			}
+		})
+
+		test('insert new', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('partX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			const newSegment: Omit<SofieIngestSegment, 'rank'> = {
+				externalId: 'segX',
+				name: 'new name',
+				payload: {
+					val: 'new-val',
+				},
+				userEditStates: {},
+				parts: [
+					{
+						externalId: 'partX',
+						name: 'new part name',
+						rank: 0,
+						payload: {
+							val: 'new-part-val',
+						},
+						userEditStates: {},
+					},
+				],
+			}
+			const replacedPart = mutableRundown.replaceSegment(newSegment, null)
+			expect(replacedPart).toBeDefined()
+			// ensure the inserted segment looks correct
+			expect(replacedPart?.externalId).toBe(newSegment.externalId)
+			expect(replacedPart?.name).toBe(newSegment.name)
+			expect(replacedPart?.payload).toEqual(newSegment.payload)
+
+			// check it has changes
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2', 'segX'])
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments.push({ ...newSegment, rank: 3 })
+
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[3])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { segX: 3 }
+
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+
+		test('insert at position', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('partX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			const newSegment: Omit<IngestSegment, 'rank'> = {
+				externalId: 'segX',
+				name: 'new name',
+				payload: {
+					val: 'new-val',
+				},
+				parts: [
+					{
+						externalId: 'partX',
+						name: 'new part name',
+						rank: 0,
+						payload: {
+							val: 'new-part-val',
+						},
+					},
+				],
+			}
+
+			// insert at the end
+			expect(mutableRundown.replaceSegment(newSegment, null)).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2', 'segX'])
+
+			// insert at the beginning
+			expect(mutableRundown.replaceSegment(newSegment, 'seg0')).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['segX', 'seg0', 'seg1', 'seg2'])
+
+			// insert in the middle
+			expect(mutableRundown.replaceSegment(newSegment, 'seg2')).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'segX', 'seg2'])
+
+			// Only the one should have changes
+			expect(
+				mutableRundown
+					.intoIngestRundown(ingestObjectGenerator)
+					.computedChanges.segmentsToRegenerate.map((s) => s.externalId)
+			).toEqual(['segX'])
+
+			// Try inserting before itself
+			expect(() => mutableRundown.replaceSegment(newSegment, newSegment.externalId)).toThrow(
+				/Cannot insert Segment before itself/
+			)
+
+			// Try inserting before an unknown segment
+			expect(() => mutableRundown.replaceSegment(newSegment, 'segY')).toThrow(/Segment(.*)not found/)
+		})
+	})
+
+	describe('moveSegmentBefore', () => {
+		test('move unknown', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('segX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			expect(() => mutableRundown.moveSegmentBefore('segX', null)).toThrow(/Segment(.*)not found/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+		})
+
+		test('move to position', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			// move to the end
+			mutableRundown.moveSegmentBefore('seg1', null)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1'])
+
+			// move to the middle
+			mutableRundown.moveSegmentBefore('seg1', 'seg2')
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			// move to the beginning
+			mutableRundown.moveSegmentBefore('seg1', 'seg0')
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg1', 'seg0', 'seg2'])
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments.splice(0, 0, expectedIngestRundown.segments.splice(1, 1)[0])
+			expectedIngestRundown.segments[0].rank = 0
+			expectedIngestRundown.segments[1].rank = 1
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[0])
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { seg1: 0, seg0: 1 }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+
+			// Try moving before itself
+			expect(() => mutableRundown.moveSegmentBefore('seg1', 'seg1')).toThrow(/Cannot move Segment before itself/)
+
+			// Try moving before an unknown segment
+			expect(() => mutableRundown.moveSegmentBefore('seg1', 'segY')).toThrow(/Segment(.*)not found/)
+		})
+	})
+
+	describe('moveSegmentAfter', () => {
+		test('move unknown', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('segX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			expect(() => mutableRundown.moveSegmentAfter('segX', null)).toThrow(/Segment(.*)not found/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+		})
+
+		test('move to position', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			// move to the beginning
+			mutableRundown.moveSegmentAfter('seg1', null)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg1', 'seg0', 'seg2'])
+
+			// move to the middle
+			mutableRundown.moveSegmentAfter('seg1', 'seg0')
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+
+			// move to the end
+			mutableRundown.moveSegmentAfter('seg1', 'seg2')
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1'])
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments.splice(1, 0, expectedIngestRundown.segments.splice(2, 1)[0])
+			expectedIngestRundown.segments[1].rank = 1
+			expectedIngestRundown.segments[2].rank = 2
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+
+			// Try moving after itself
+			expect(() => mutableRundown.moveSegmentAfter('seg1', 'seg1')).toThrow(/Cannot move Segment after itself/)
+
+			// Try moving after an unknown segment
+			expect(() => mutableRundown.moveSegmentAfter('seg1', 'segY')).toThrow(/Segment(.*)not found/)
+		})
+	})
+
+	describe('changeSegmentExternalId', () => {
+		test('rename unknown', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('segX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			expect(() => mutableRundown.changeSegmentExternalId('segX', 'segY')).toThrow(/Segment(.*)not found/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+		})
+
+		test('rename to duplicate', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('seg1')).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			expect(() => mutableRundown.changeSegmentExternalId('seg1', 'seg2')).toThrow(/Segment(.*)already exists/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+		})
+
+		test('good', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl
+			expect(beforeSegment).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			// rename and check
+			expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined])
+			expect(beforeSegment.originalExternalId).toBe('seg1')
+			expect(beforeSegment.externalId).toBe('segX')
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments[1].externalId = 'segX'
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { segX: 1 }
+			expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'segX' }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+
+		test('rename twice', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl
+			expect(beforeSegment).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			// rename and check
+			expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined])
+			expect(beforeSegment.originalExternalId).toBe('seg1')
+			expect(beforeSegment.externalId).toBe('segX')
+
+			// rename again
+			expect(mutableRundown.changeSegmentExternalId('segX', 'segY')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segY', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined])
+			expect(beforeSegment.originalExternalId).toBe('seg1')
+			expect(beforeSegment.externalId).toBe('segY')
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments[1].externalId = 'segY'
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { segY: 1 }
+			expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'segY' }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+
+		test('rename circle', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const beforeSegment1 = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl
+			expect(beforeSegment1).toBeDefined()
+			const beforeSegment2 = mutableRundown.getSegment('seg2') as MutableIngestSegmentImpl
+			expect(beforeSegment2).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			// rename seg1 to segX
+			expect(mutableRundown.changeSegmentExternalId('seg1', 'segX')).toStrictEqual(beforeSegment1)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', undefined])
+			expect(beforeSegment1.originalExternalId).toBe('seg1')
+			expect(beforeSegment1.externalId).toBe('segX')
+
+			// rename seg2 to seg1
+			expect(mutableRundown.changeSegmentExternalId('seg2', 'seg1')).toStrictEqual(beforeSegment2)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'segX', 'seg1'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', 'seg2'])
+			expect(beforeSegment2.originalExternalId).toBe('seg2')
+			expect(beforeSegment2.externalId).toBe('seg1')
+
+			// rename segX to seg2
+			expect(mutableRundown.changeSegmentExternalId('segX', 'seg2')).toStrictEqual(beforeSegment1)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg2', 'seg1'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'seg1', 'seg2'])
+			expect(beforeSegment1.originalExternalId).toBe('seg1')
+			expect(beforeSegment1.externalId).toBe('seg2')
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			expectedIngestRundown.segments[1].externalId = 'seg2'
+			expectedIngestRundown.segments[2].externalId = 'seg1'
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[2])
+			expectedChanges.computedChanges.segmentsUpdatedRanks = { seg2: 1, seg1: 2 }
+			expectedChanges.computedChanges.segmentExternalIdChanges = { seg1: 'seg2', seg2: 'seg1' }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+	})
+
+	describe('changeSegmentOriginalExternalId', () => {
+		test('rename unknown', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('segX')).toBeUndefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			expect(() => mutableRundown.changeSegmentOriginalExternalId('segX', 'segY')).toThrow(/Segment(.*)not found/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+		})
+
+		test('rename to duplicate', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			expect(mutableRundown.getSegment('seg1')).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			expect(() => mutableRundown.changeSegmentOriginalExternalId('seg1', 'seg2')).toThrow(/Segment(.*)exists/)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+		})
+
+		test('good', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl
+			expect(beforeSegment).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			// rename and check
+			expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segX')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segX', undefined])
+			expect(beforeSegment.originalExternalId).toBe('segX')
+			expect(beforeSegment.externalId).toBe('seg1')
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expectedChanges.computedChanges.segmentExternalIdChanges = { segX: 'seg1' }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+
+		test('rename twice', () => {
+			const ingestRundown = getBasicIngestRundown()
+			const mutableRundown = new MutableIngestRundownImpl(clone(ingestRundown), true)
+
+			const beforeSegment = mutableRundown.getSegment('seg1') as MutableIngestSegmentImpl
+			expect(beforeSegment).toBeDefined()
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, undefined, undefined])
+
+			// rename and check
+			expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segX')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segX', undefined])
+			expect(beforeSegment.originalExternalId).toBe('segX')
+			expect(beforeSegment.externalId).toBe('seg1')
+
+			// rename again
+			expect(mutableRundown.changeSegmentOriginalExternalId('seg1', 'segY')).toStrictEqual(beforeSegment)
+			expect(getSegmentIdOrder(mutableRundown)).toEqual(['seg0', 'seg1', 'seg2'])
+			expect(getSegmentOriginalIdOrder(mutableRundown)).toEqual([undefined, 'segY', undefined])
+			expect(beforeSegment.originalExternalId).toBe('segY')
+			expect(beforeSegment.externalId).toBe('seg1')
+
+			// Check the reported changes
+			const expectedIngestRundown = clone(ingestRundown)
+			const expectedChanges = createNoChangesObject(expectedIngestRundown)
+			addChangedRankSegments(expectedChanges, ingestRundown, expectedIngestRundown.segments[1])
+			expectedChanges.computedChanges.segmentExternalIdChanges = { segY: 'seg1' }
+			expect(mutableRundown.intoIngestRundown(ingestObjectGenerator)).toEqual(expectedChanges)
+		})
+	})
+})
diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts
new file mode 100644
index 0000000000..3d53c7849d
--- /dev/null
+++ b/packages/job-worker/src/blueprints/ingest/__tests__/MutableIngestSegmentImpl.spec.ts
@@ -0,0 +1,526 @@
+import { clone } from '@sofie-automation/corelib/dist/lib'
+import { MutableIngestSegmentChanges, MutableIngestSegmentImpl } from '../MutableIngestSegmentImpl'
+import { SofieIngestRundownDataCacheGenerator } from '../../../ingest/sofieIngestCache'
+import { protectString } from '@sofie-automation/corelib/dist/protectedString'
+import { getSegmentId } from '../../../ingest/lib'
+import { MutableIngestPartImpl } from '../MutableIngestPartImpl'
+import { IngestPart, IngestSegment, SofieIngestSegment } from '@sofie-automation/blueprints-integration'
+
+describe('MutableIngestSegmentImpl', () => {
+	function getBasicIngestSegment(): SofieIngestSegment {
+		return {
+			externalId: 'externalId',
+			name: 'name',
+			rank: 0,
+			payload: {
+				val: 'some-val',
+				second: 5,
+			},
+			userEditStates: {
+				one: true,
+				two: false,
+			},
+			parts: [
+				{
+					externalId: 'part0',
+					name: 'my first part',
+					rank: 0,
+					payload: {
+						val: 'some-val',
+					},
+					userEditStates: {},
+				},
+				{
+					externalId: 'part1',
+					name: 'another part',
+					rank: 1,
+					payload: {
+						val: 'second-val',
+					},
+					userEditStates: {},
+				},
+				{
+					externalId: 'part2',
+					name: 'third part',
+					rank: 2,
+					payload: {
+						val: 'third-val',
+					},
+					userEditStates: {},
+				},
+				{
+					externalId: 'part3',
+					name: 'last part',
+					rank: 3,
+					payload: {
+						val: 'last-val',
+					},
+					userEditStates: {},
+				},
+			],
+		}
+	}
+
+	const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(protectString('rundownId'))
+
+	function createNoChangesObject(ingestSegment: SofieIngestSegment): MutableIngestSegmentChanges {
+		return {
+			ingestParts: ingestSegment.parts,
+			changedCacheObjects: [],
+			allCacheObjectIds: ingestSegment.parts.map((p) => ingestObjectGenerator.getPartObjectId(p.externalId)),
+			segmentHasChanges: false,
+			partIdsWithChanges: [],
+			partOrderHasChanged: false,
+			originalExternalId: ingestSegment.externalId,
+		}
+	}
+	function removePartFromIngestSegment(ingestSegment: IngestSegment, partId: string): void {
+		const ingestPart = ingestSegment.parts.find((p) => p.externalId === partId)
+		ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== partId)
+		if (ingestPart) {
+			for (const part of ingestSegment.parts) {
+				if (part.rank > ingestPart.rank) part.rank--
+			}
+		}
+	}
+	function getPartIdOrder(mutableSegment: MutableIngestSegmentImpl): string[] {
+		return mutableSegment.parts.map((p) => p.externalId)
+	}
+
+	test('create basic', () => {
+		const ingestSegment = getBasicIngestSegment()
+		const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment))
+
+		// compare properties
+		expect(mutableSegment.externalId).toBe(ingestSegment.externalId)
+		expect(mutableSegment.name).toBe(ingestSegment.name)
+		expect(mutableSegment.payload).toEqual(ingestSegment.payload)
+		expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length)
+
+		// check it has no changes
+		expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment))
+	})
+
+	test('create basic with changes', () => {
+		const ingestSegment = getBasicIngestSegment()
+		const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment), true)
+
+		// compare properties
+		expect(mutableSegment.externalId).toBe(ingestSegment.externalId)
+		expect(mutableSegment.name).toBe(ingestSegment.name)
+		expect(mutableSegment.payload).toEqual(ingestSegment.payload)
+		expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length)
+
+		//
check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + const segmentId = getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId) + for (const ingestPart of ingestSegment.parts) { + expectedChanges.partIdsWithChanges.push(ingestPart.externalId) + expectedChanges.changedCacheObjects.push(ingestObjectGenerator.generatePartObject(segmentId, ingestPart)) + } + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // check changes have been cleared + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set name', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.name).toBe(ingestSegment.name) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setName('new-name') + expect(mutableSegment.name).toBe('new-name') + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newPayload = { val: 'new-val' } + mutableSegment.replacePayload(newPayload) + expect(mutableSegment.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('replace payload with no change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.replacePayload(ingestSegment.payload) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + + // check it has no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set payload property change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newPayload = { ...ingestSegment.payload, test: 123, second: undefined } + mutableSegment.setPayloadProperty('test', 123) + mutableSegment.setPayloadProperty('second', undefined) + expect(mutableSegment.payload).toEqual(newPayload) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true +
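// setPayloadProperty edits are reported as a segment-level payload change +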
expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set payload property unchanged', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setPayloadProperty('val', ingestSegment.payload.val) + mutableSegment.setPayloadProperty('another', undefined) + expect(mutableSegment.payload).toEqual(ingestSegment.payload) + + // check it has no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('set user edit state change', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + const newUserEditStates = { ...ingestSegment.userEditStates, two: true, another: false } + mutableSegment.setUserEditState('two', true) + mutableSegment.setUserEditState('another', false) + expect(mutableSegment.userEditStates).toEqual(newUserEditStates) + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('set user edit state unchanged', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.setUserEditState('one', true) + mutableSegment.setUserEditState('two', false) + expect(mutableSegment.userEditStates).toEqual(ingestSegment.userEditStates) + + // check it has no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + test('get parts', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // compare properties + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + expect(mutableSegment.getPart('part0')).toStrictEqual(mutableSegment.parts[0]) + expect(mutableSegment.getPart('part0') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part1')).toStrictEqual(mutableSegment.parts[1]) + expect(mutableSegment.getPart('part1') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part2')).toStrictEqual(mutableSegment.parts[2]) + expect(mutableSegment.getPart('part2') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part3')).toStrictEqual(mutableSegment.parts[3]) + expect(mutableSegment.getPart('part3') instanceof MutableIngestPartImpl).toBe(true) + expect(mutableSegment.getPart('part4')).toBeUndefined() + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + + describe('removePart', () => { + test('good', () => { + const
ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.removePart('part1')).toBeTruthy() + + // compare properties + expect(mutableSegment.parts.length).toBe(3) + expect(mutableSegment.getPart('part1')).toBeUndefined() + + // check it has changes + const expectedIngestSegment = clone(ingestSegment) + removePartFromIngestSegment(expectedIngestSegment, 'part1') + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // try removing a second time + expect(mutableSegment.removePart('part1')).toBeFalsy() + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual( + createNoChangesObject(expectedIngestSegment) + ) + }) + + test('unknown id', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.removePart('partX')).toBeFalsy() + + // compare properties + expect(mutableSegment.parts.length).toBe(ingestSegment.parts.length) + + // ensure no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + }) + }) + + test('forceRegenerate', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + // ensure no changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(createNoChangesObject(ingestSegment)) + + mutableSegment.forceRegenerate() + + // check it has changes + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.segmentHasChanges = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + describe('replacePart', () => { + test('replace existing with a move', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('part1')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit<IngestPart, 'rank'> = { + externalId: 'part1', + name: 'new name', + payload: { + val: 'new-val', + }, + } + const replacedPart = mutableSegment.replacePart(newPart, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newPart.externalId) + expect(replacedPart?.name).toBe(newPart.name) + expect(replacedPart?.payload).toEqual(newPart.payload) + + // check it has changes + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + const expectedIngestSegment = clone(ingestSegment) + removePartFromIngestSegment(expectedIngestSegment, 'part1') + expectedIngestSegment.parts.push({ ...newPart, rank: 3, userEditStates: {} }) + + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true + expectedChanges.partIdsWithChanges.push('part1') + expectedChanges.changedCacheObjects.push( + ingestObjectGenerator.generatePartObject( + getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId), + { ...newPart, rank: 3, userEditStates: {} } + ) + ) + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('insert new', () => { + const ingestSegment = getBasicIngestSegment() + const
mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit<IngestPart, 'rank'> = { + externalId: 'partX', + name: 'new name', + payload: { + val: 'new-val', + }, + } + const replacedPart = mutableSegment.replacePart(newPart, null) + expect(replacedPart).toBeDefined() + // ensure the inserted part looks correct + expect(replacedPart?.externalId).toBe(newPart.externalId) + expect(replacedPart?.name).toBe(newPart.name) + expect(replacedPart?.payload).toEqual(newPart.payload) + + // check it has changes + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3', 'partX']) + const expectedIngestSegment = clone(ingestSegment) + expectedIngestSegment.parts.push({ ...newPart, rank: 4, userEditStates: {} }) + + const expectedChanges = createNoChangesObject(expectedIngestSegment) + expectedChanges.partOrderHasChanged = true + expectedChanges.partIdsWithChanges.push('partX') + expectedChanges.changedCacheObjects.push( + ingestObjectGenerator.generatePartObject( + getSegmentId(ingestObjectGenerator.rundownId, ingestSegment.externalId), + { ...newPart, rank: 4, userEditStates: {} } + ) + ) + + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + }) + + test('insert at position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + const newPart: Omit<IngestPart, 'rank'> = { + externalId: 'partX', + name: 'new name', + payload: { + val: 'new-val', + }, + } + + // insert at the end + expect(mutableSegment.replacePart(newPart, null)).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3', 'partX']) + + // insert at the beginning + expect(mutableSegment.replacePart(newPart, 'part0')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['partX', 'part0', 'part1', 'part2', 'part3']) + + // insert in the middle + expect(mutableSegment.replacePart(newPart, 'part2')).toBeDefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'partX', 'part2', 'part3']) + + // Only the new part should have changes + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator).partIdsWithChanges).toEqual(['partX']) + + // Try inserting before itself + expect(() => mutableSegment.replacePart(newPart, newPart.externalId)).toThrow( + /Cannot insert Part before itself/ + ) + + // Try inserting before an unknown part + expect(() => mutableSegment.replacePart(newPart, 'partY')).toThrow(/Part(.*)not found/) + }) + }) + + describe('movePartBefore', () => { + test('move unknown', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + expect(() => mutableSegment.movePartBefore('partX', null)).toThrow(/Part(.*)not found/) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + }) + + test('move to position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + +
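// baseline order before exercising the moves +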
expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // move to the end + mutableSegment.movePartBefore('part1', null) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + + // move to the beginning + mutableSegment.movePartBefore('part1', 'part0') + expect(getPartIdOrder(mutableSegment)).toEqual(['part1', 'part0', 'part2', 'part3']) + + // move to the middle + mutableSegment.movePartBefore('part1', 'part2') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // Only the part order should have changed + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try moving before itself + expect(() => mutableSegment.movePartBefore('part1', 'part1')).toThrow(/Cannot move Part before itself/) + + // Try moving before an unknown part + expect(() => mutableSegment.movePartBefore('part1', 'partY')).toThrow(/Part(.*)not found/) + }) + }) + + describe('movePartAfter', () => { + test('move unknown', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(mutableSegment.getPart('partX')).toBeUndefined() + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + expect(() => mutableSegment.movePartAfter('partX', null)).toThrow(/Part(.*)not found/) + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + }) + + test('move to position', () => { + const ingestSegment = getBasicIngestSegment() + const mutableSegment = new MutableIngestSegmentImpl(clone(ingestSegment)) + + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // move to the beginning + mutableSegment.movePartAfter('part1', null) + expect(getPartIdOrder(mutableSegment)).toEqual(['part1', 'part0', 'part2', 'part3']) + + // move to the end + mutableSegment.movePartAfter('part1', 'part3') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part2', 'part3', 'part1']) + + // move to the middle + mutableSegment.movePartAfter('part1', 'part0') + expect(getPartIdOrder(mutableSegment)).toEqual(['part0', 'part1', 'part2', 'part3']) + + // Only the part order should have changed + const expectedChanges = createNoChangesObject(ingestSegment) + expectedChanges.partOrderHasChanged = true + expect(mutableSegment.intoChangesInfo(ingestObjectGenerator)).toEqual(expectedChanges) + + // Try moving after itself + expect(() => mutableSegment.movePartAfter('part1', 'part1')).toThrow(/Cannot move Part after itself/) + + // Try moving after an unknown part + expect(() => mutableSegment.movePartAfter('part1', 'partY')).toThrow(/Part(.*)not found/) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts new file mode 100644 index 0000000000..4d5d91440c --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/defaultApplyIngestChanges.spec.ts @@ -0,0 +1,960 @@ +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { MutableIngestRundownImpl } from '../MutableIngestRundownImpl' +import { defaultApplyIngestChanges } from '../defaultApplyIngestChanges' +import { + NrcsIngestChangeDetails, +
NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, + IngestDefaultChangesOptions, + IngestRundown, + MutableIngestPart, + MutableIngestRundown, + MutableIngestSegment, + IngestChangeType, +} from '@sofie-automation/blueprints-integration' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { toSofieIngestRundown } from './util' + +describe('defaultApplyIngestChanges', () => { + function createBasicIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part0', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + ], + } + } + function createMediumIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part0', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part1', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + { + externalId: 'seg1', + rank: 1, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part2', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + { + externalId: 'seg2', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [ + { + externalId: 'part3', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part4', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + { + externalId: 'part5', + rank: 1, + name: 'my part', + payload: { + partData: 'data', + }, + }, + ], + }, + ], + } + } + function createIngestRundownWithManySegments(): IngestRundownWithSource { + return { + externalId: 'rd0', + name: 'my rundown', + type: 'mock', + rundownSource: { type: 'http' }, + payload: { + myData: 'data', + }, + segments: [ + { + externalId: 'seg0', + rank: 0, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg1', + rank: 1, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg2', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg3', + rank: 3, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + { + externalId: 'seg4', + rank: 4, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }, + ], + } + } + + /** + * This creates a MutableIngestRundownImpl from an IngestRundown, and wraps all methods to record the mutation calls made to the rundown and its contents + */ + function createMutableIngestRundown(nrcsRundown: IngestRundownWithSource) { + const mutableIngestRundown = new MutableIngestRundownImpl(toSofieIngestRundown(nrcsRundown), true) + + const mockCalls: Array<{ target: string; name: string; args: any[] }> = [] + + const defaultOptions: IngestDefaultChangesOptions = { + transformRundownPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformRundownPayload', args: 
[!!oldPayload] }) + return payload + }), + transformSegmentPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformSegmentPayload', args: [!!oldPayload] }) + return payload + }), + transformPartPayload: jest.fn((payload, oldPayload) => { + mockCalls.push({ target: 'options', name: 'transformPartPayload', args: [!!oldPayload] }) + return payload + }), + } + + // Replace obj[name] with a jest.fn that records each call in mockCalls before delegating to the original implementation + function wrapMethod<TObj extends object, TName extends keyof TObj>( + target: string, + name: TName, + obj: TObj, + interceptReturn?: (val: ReturnType<any>) => ReturnType<any> + ) { + const rawMethod = obj[name] + if (typeof rawMethod !== 'function') throw new Error(`Can't wrap non-method ${name}`) + const origMethod = rawMethod.bind(obj) + + const mockMethod = jest.fn((...args) => { + mockCalls.push({ target, name, args }) + const returnVal = origMethod(...args) + if (interceptReturn) { + return interceptReturn(returnVal) + } else { + return returnVal + } + }) + obj[name] = mockMethod as any + + return mockMethod + } + + function wrapPart(part: MutableIngestPart) { + const target = `part ${part.externalId}` + wrapMethod(target, 'setName', part) + wrapMethod(target, 'replacePayload', part) + wrapMethod(target, 'setPayloadProperty', part) + } + + function wrapSegment(segment: MutableIngestSegment) { + const target = `segment ${segment.externalId}` + wrapMethod(target, 'movePartBefore', segment) + wrapMethod(target, 'movePartAfter', segment) + wrapMethod(target, 'replacePart', segment, (part: MutableIngestPart) => { + wrapPart(part) + return part + }) + wrapMethod(target, 'removePart', segment) + wrapMethod(target, 'setName', segment) + wrapMethod(target, 'replacePayload', segment) + wrapMethod(target, 'setPayloadProperty', segment) + + segment.parts.forEach(wrapPart) + } + + wrapMethod('rundown', 'moveSegmentAfter', mutableIngestRundown) + wrapMethod('rundown', 'moveSegmentBefore', mutableIngestRundown) + wrapMethod('rundown', 'removeAllSegments', mutableIngestRundown) + wrapMethod('rundown', 'replaceSegment', mutableIngestRundown, (segment: MutableIngestSegment) => { + wrapSegment(segment) + return segment + }) + wrapMethod('rundown', 'changeSegmentExternalId', mutableIngestRundown) + wrapMethod('rundown', 'changeSegmentOriginalExternalId', mutableIngestRundown) + wrapMethod('rundown', 'removeSegment', mutableIngestRundown) + wrapMethod('rundown', 'forceFullRegenerate', mutableIngestRundown) + wrapMethod('rundown', 'setName', mutableIngestRundown) + wrapMethod('rundown', 'replacePayload', mutableIngestRundown) + wrapMethod('rundown', 'setPayloadProperty', mutableIngestRundown) + + mutableIngestRundown.segments.forEach(wrapSegment) + + return { + mutableIngestRundown: mutableIngestRundown as MutableIngestRundown, + defaultOptions, + mockCalls, + } + } + + describe('rundown changes', () => { + it('no changes', async () => { + const nrcsRundown = createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(0) + }) + it('rundown name and payload change', async () => { + const nrcsRundown = createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + } + +
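// a payload-only change should replace the rundown payload and name without touching any segments +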
defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'options', name: 'transformRundownPayload', args: [true] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'replacePayload', args: [nrcsRundown.payload] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'setName', args: [nrcsRundown.name] }) + }) + it('rundown regenerate', async () => { + const nrcsRundown = createBasicIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + segmentOrderChanged: true, // will be ignored + segmentChanges: {}, // will be ignored + } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // Ensure the segments were regenerated + expect(mockCalls).toHaveLength(8) + expect(mockCalls[0]).toEqual({ target: 'options', name: 'transformRundownPayload', args: [true] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'replacePayload', args: [nrcsRundown.payload] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'setName', args: [nrcsRundown.name] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'removeAllSegments', args: [] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'forceFullRegenerate', args: [] }) + expect(mockCalls[5]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[6]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[7]).toMatchObject({ target: 'rundown', name: 'replaceSegment' }) + expect(mutableIngestRundown.segments).toHaveLength(1) + }) + }) + + describe('segment order changes', () => { + function createIngestRundownWithManySegmentsAlternateOrder(): IngestRundown { + const ingestRundown = createIngestRundownWithManySegments() + + // reorder segments + ingestRundown.segments = [ + ingestRundown.segments[3], + ingestRundown.segments[1], + ingestRundown.segments[4], + ingestRundown.segments[0], + ingestRundown.segments[2], + ] + + return ingestRundown + } + + it('no changes', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(nrcsRundown) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // always ensures the order is sane + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg4'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', 'seg3'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg2'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg1'] }) + }) + + it('good changes', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes, which should be ignored + const modifiedRundown = 
createIngestRundownWithManySegmentsAlternateOrder() + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg4'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + }) + + it('missing segment in new order', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes + const modifiedRundown = createIngestRundownWithManySegmentsAlternateOrder() + modifiedRundown.segments.splice(2, 1) // remove seg4 + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentAfter', args: ['seg4', 'seg3'] }) // follows original order + }) + + it('extra segment in new order', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // include some changes + const modifiedRundown = createIngestRundownWithManySegmentsAlternateOrder() + modifiedRundown.segments.splice(2, 0, { + externalId: 'segX', + rank: 2, + name: 'my segment', + payload: { + segmentData: 'data', + }, + parts: [], + }) + + const changes: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest, segmentOrderChanged: true } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs reorder, ignoring segX + expect(mockCalls).toHaveLength(5) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg2', null] }) + expect(mockCalls[1]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg0', 'seg2'] }) + expect(mockCalls[2]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg4', 'seg0'] }) + expect(mockCalls[3]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg1', 'seg4'] }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'moveSegmentBefore', args: ['seg3', 'seg1'] }) + }) + }) + + describe('segment changes', () => { + it('mix of operations', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createIngestRundownWithManySegments() + ) + + // make a mix of changes to the rundown, for the changes object below to describe + const modifiedRundown =
createIngestRundownWithManySegments() + modifiedRundown.segments[1].externalId = 'segX' // replace seg1 + modifiedRundown.segments[2].externalId = 'segY' // replace seg2 + modifiedRundown.segments.splice(4, 1) // remove seg4 + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg1: NrcsIngestSegmentChangeDetailsEnum.Deleted, + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + seg3: { + payloadChanged: true, + }, + seg4: NrcsIngestSegmentChangeDetailsEnum.Deleted, + segY: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + seg2: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + } + + defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs deletes and inserts + expect(mockCalls).toHaveLength(10) + + // Note: this happens in the order of the changes object, but that is not guaranteed in the future + + // remove and update first + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg1'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ target: 'segment seg3', name: 'replacePayload' }) + expect(mockCalls[3]).toMatchObject({ target: 'segment seg3', name: 'setName' }) + expect(mockCalls[4]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg4'] }) + expect(mockCalls[5]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['seg2'] }) + + // insert new ones in order starting at the end + expect(mockCalls[6]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [false] }) + expect(mockCalls[7]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segY' }, 'seg3'], + }) + expect(mockCalls[8]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [false] }) + expect(mockCalls[9]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segX' }, 'segY'], + }) + }) + + it('insert missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + } + + expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + it('delete missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'removeSegment', args: ['segX'] }) + }) + + it('update missing', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: { + payloadChanged: true, + }, + }, + } + + // should throw, as the segment does not exist +
expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + it('update without changes', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg1: {}, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(0) + }) + + it('change segment id', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + changedSegmentExternalIds: { + seg1: 'segX', + }, + } + + nrcsRundown.segments[1].externalId = 'segX' + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'rundown', name: 'changeSegmentExternalId', args: ['seg1', 'segX'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformSegmentPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ + target: 'rundown', + name: 'replaceSegment', + args: [{ externalId: 'segX' }, 'seg2'], + }) + }) + + it('change unknown segment id', async () => { + const nrcsRundown = createIngestRundownWithManySegments() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + segX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + changedSegmentExternalIds: { + segY: 'segX', + }, + } + + nrcsRundown.segments[1].externalId = 'segX' + + // should throw, as segY does not exist + expect(() => defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions)).toThrow( + /Segment(.*)not found/ + ) + }) + + describe('partOrderChanged', () => { + it('with single part', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg1: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'segment seg1', name: 'movePartBefore', args: ['part2', null] }) + }) + it('with multiple parts', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // reorder parts + const origParts = nrcsRundown.segments[2].parts + nrcsRundown.segments[2].parts = [origParts[1], origParts[0], origParts[2]] + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error +
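// the NRCS now orders seg2's parts as [part4, part3, part5], so moves rebuilding exactly that order are expected +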
defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part4', 'part3'], + }) + }) + + it('missing part in new order', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // remove a part + nrcsRundown.segments[2].parts.splice(1, 1) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartAfter', + args: ['part4', 'part3'], + }) + }) + + it('extra part in new order', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // add an extra NRCS part + nrcsRundown.segments[2].parts.splice(1, 0, { + externalId: 'partX', + rank: 0, + name: 'my part', + payload: { + partData: 'data', + }, + }) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg2: { + partOrderChanged: true, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + // performs reorder, ignoring partX + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg2', name: 'movePartBefore', args: ['part5', null] }) + expect(mockCalls[1]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part4', 'part5'], + }) + expect(mockCalls[2]).toEqual({ + target: 'segment seg2', + name: 'movePartBefore', + args: ['part3', 'part4'], + }) + }) + }) + + describe('partsChanges', () => { + it('mix of operations', async () => { + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + createMediumIngestRundown() + ) + + // make a mix of part changes to the rundown + const modifiedRundown = createMediumIngestRundown() + const segment0 = modifiedRundown.segments[0] + segment0.parts[0].externalId = 'partX' // replace part0 + const segment2 = modifiedRundown.segments[2] + segment2.parts[0].externalId = 'partY' // replace part3 + segment2.parts.splice(1, 1) // remove part4 + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Deleted, + partX: NrcsIngestPartChangeDetails.Inserted, + part1: NrcsIngestPartChangeDetails.Updated, + }, + }, + seg2: { + partChanges: { + part3: NrcsIngestPartChangeDetails.Deleted, + partY: NrcsIngestPartChangeDetails.Inserted, + part4: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + } + +
defaultApplyIngestChanges(mutableIngestRundown, modifiedRundown, changes, defaultOptions) + + // performs deletes and inserts + expect(mockCalls).toHaveLength(10) + + // Note: this happens in the order of the changes object, but that is not guaranteed in the future + + // first segment + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['part0'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ target: 'part part1', name: 'replacePayload' }) + expect(mockCalls[3]).toMatchObject({ target: 'part part1', name: 'setName' }) + expect(mockCalls[4]).toEqual({ target: 'options', name: 'transformPartPayload', args: [false] }) + expect(mockCalls[5]).toMatchObject({ + target: 'segment seg0', + name: 'replacePart', + args: [{ externalId: 'partX' }, 'part1'], + }) + + // second segment + expect(mockCalls[6]).toEqual({ target: 'segment seg2', name: 'removePart', args: ['part3'] }) + expect(mockCalls[7]).toEqual({ target: 'segment seg2', name: 'removePart', args: ['part4'] }) + expect(mockCalls[8]).toEqual({ target: 'options', name: 'transformPartPayload', args: [false] }) + expect(mockCalls[9]).toMatchObject({ + target: 'segment seg2', + name: 'replacePart', + args: [{ externalId: 'partY' }, 'part5'], + }) + }) + + it('insert missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + } + + expect(() => + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + ).toThrow(/Part(.*)not found/) + }) + + it('delete missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(1) + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['partX'] }) + }) + + it('update missing', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions } = createMutableIngestRundown(clone(nrcsRundown)) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + } + + // should throw, as the part does not exist + expect(() => + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + ).toThrow(/Part(.*)not found/) + }) + + it('update without changes', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: {}, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) +
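// an empty partChanges object describes nothing to apply, so no mutation calls are expected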
+ expect(mockCalls).toHaveLength(0) + }) + + it('move part across segments', async () => { + const nrcsRundown = createMediumIngestRundown() + const { mutableIngestRundown, defaultOptions, mockCalls } = createMutableIngestRundown( + clone(nrcsRundown) + ) + + // Move the part + const removed = nrcsRundown.segments[0].parts.splice(0, 1) + nrcsRundown.segments[1].parts.unshift(...removed) + + const changes: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + seg0: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Deleted, + }, + }, + seg1: { + partChanges: { + part0: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + } + + // should run without error + defaultApplyIngestChanges(mutableIngestRundown, nrcsRundown, changes, defaultOptions) + + expect(mockCalls).toHaveLength(3) + expect(mockCalls[0]).toEqual({ target: 'segment seg0', name: 'removePart', args: ['part0'] }) + expect(mockCalls[1]).toEqual({ target: 'options', name: 'transformPartPayload', args: [true] }) + expect(mockCalls[2]).toMatchObject({ + target: 'segment seg1', + name: 'replacePart', + args: [{ externalId: 'part0' }, 'part2'], + }) + }) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts b/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts new file mode 100644 index 0000000000..18dfc9c39a --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/groupPartsInMosRundownAndChanges.spec.ts @@ -0,0 +1,698 @@ +import { + NrcsIngestChangeDetails, + NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, + IngestRundown, + GroupPartsInMosRundownAndChangesResult, + IngestChangeType, +} from '@sofie-automation/blueprints-integration' +import { Complete, clone } from '@sofie-automation/corelib/dist/lib' +import { groupMosPartsIntoIngestSegments, groupPartsInRundownAndChanges } from '../groupPartsInRundownAndChanges' +import { updateRanksBasedOnOrder } from '../../../ingest/mosDevice/lib' + +describe('groupPartsInMosRundownAndChanges', () => { + function groupMosPartsInRundownAndChanges( + nrcsIngestRundown: IngestRundown, + previousNrcsIngestRundown: IngestRundown | undefined, + ingestChanges: Omit<NrcsIngestChangeDetails, 'changedSegmentExternalIds'> + ) { + return groupPartsInRundownAndChanges(nrcsIngestRundown, previousNrcsIngestRundown, ingestChanges, (segments) => + groupMosPartsIntoIngestSegments(nrcsIngestRundown.externalId, segments, ';') + ) + } + + function createBasicMosIngestRundown(): { nrcsIngestRundown: IngestRundown; combinedIngestRundown: IngestRundown } { + const rawRundown: IngestRundown = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'segment-s1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s1p2', + name: 'SEGMENT1;PART2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s2p1', + name: 'SEGMENT2;PART1', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-s2p2', + name: 'SEGMENT2;PART2', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 0, + payload:
undefined, + }, + ], + }, + ], + } + const groupedRundown: IngestRundown = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'rundown0_s1p1', + name: 'SEGMENT1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p1', + name: 'SEGMENT2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } + + return { + nrcsIngestRundown: rawRundown, + combinedIngestRundown: groupedRundown, + } + } + + it('no previous rundown, always performs full regeneration', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-s1p1': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), undefined, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies Complete<GroupPartsInMosRundownAndChangesResult>) + }) + + it('no change in rundown', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { source: IngestChangeType.Ingest } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), nrcsIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: {}, + segmentOrderChanged: false, + }, + } satisfies Complete<GroupPartsInMosRundownAndChangesResult>) + }) + + it('propagate full regeneration', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + segmentOrderChanged: true, + } + + const result = groupMosPartsInRundownAndChanges(clone(nrcsIngestRundown), nrcsIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: {}, + segmentOrderChanged: false, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies Complete<GroupPartsInMosRundownAndChangesResult>) + }) + + describe('segment changes', () => { + it('part added to end of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(1, 1) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, +
changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('part added to beginning of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 1) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s2p2: 'rundown0_s2p1', + }, + segmentChanges: { + rundown0_s2p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('part removed from end of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 0, { + externalId: 'segment-s1p3', + name: 'SEGMENT1;PART3', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's1p3', + name: 'SEGMENT1;PART3', + rank: 0, + payload: undefined, + }, + ], + }) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p3: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('part removed from beginning of segment', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments.splice(2, 0, { + externalId: 'segment-s2p0', + name: 'SEGMENT2;PART0', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p0', + name: 'SEGMENT2;PART0', + rank: 0, + payload: undefined, + }, + ], + }) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s2p0: 'rundown0_s2p1', + }, + segmentChanges: { + rundown0_s2p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('part has changes', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: { + // Note: this is 
ignored for inserts/deletes + 'segment-s1p2': { anything: 'here' } as any, // Note: contents is ignored + 'segment-s2p2': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, // Explicitly force regeneration + }, + } + + const previousIngestRundown = clone(nrcsIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: false, + }, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: false, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('segment renamed', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].name = 'SEGMENT0;PART2' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + // name change counts as a payload change + s1p1: NrcsIngestPartChangeDetails.Updated, + s1p2: NrcsIngestPartChangeDetails.Updated, + }, + partOrderChanged: false, + payloadChanged: true, + }, + }, + segmentOrderChanged: false, + }, + } satisfies Complete) + }) + + it('segment id changed', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].externalId = 'segment-s1p1-old' + previousIngestRundown.segments[0].parts[0].externalId = 's1p1-old' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].externalId = 'segment-s1p2-old' + previousIngestRundown.segments[1].parts[0].externalId = 's1p2-old' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + 'rundown0_s1p1-old': 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('segment id changed and moved', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + 
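				// id changes are detected by comparing the grouped rundowns (matching names/parts), not from these hints
+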
segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments[0].externalId = 'segment-s1p1-old' + previousIngestRundown.segments[0].parts[0].externalId = 's1p1-old' + previousIngestRundown.segments[0].parts[0].name = 'SEGMENT0;PART1' + previousIngestRundown.segments[1].externalId = 'segment-s1p2-old' + previousIngestRundown.segments[1].parts[0].externalId = 's1p2-old' + previousIngestRundown.segments[1].parts[0].name = 'SEGMENT0;PART2' + previousIngestRundown.segments = [ + previousIngestRundown.segments[2], + previousIngestRundown.segments[3], + previousIngestRundown.segments[0], + previousIngestRundown.segments[1], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + 'rundown0_s1p1-old': 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('swap segment parts', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments = [ + previousIngestRundown.segments[1], + previousIngestRundown.segments[0], + previousIngestRundown.segments[2], + previousIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: { + rundown0_s1p2: 'rundown0_s1p1', + }, + segmentChanges: { + rundown0_s1p1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('merge segments', () => { + const { nrcsIngestRundown, combinedIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + previousIngestRundown.segments = [ + previousIngestRundown.segments[0], + previousIngestRundown.segments[2], + previousIngestRundown.segments[1], + previousIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(previousIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: combinedIngestRundown, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s1p2: NrcsIngestSegmentChangeDetailsEnum.Deleted, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Inserted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s2p2: 
NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + + it('split segments', () => { + const { nrcsIngestRundown } = createBasicMosIngestRundown() + + const ingestChanges: NrcsIngestChangeDetails = { + source: IngestChangeType.Ingest, + segmentChanges: {}, // Note: this is ignored for inserts/deletes + } + + const previousIngestRundown = clone(nrcsIngestRundown) + nrcsIngestRundown.segments = [ + nrcsIngestRundown.segments[0], + nrcsIngestRundown.segments[2], + nrcsIngestRundown.segments[1], + nrcsIngestRundown.segments[3], + ] + updateRanksBasedOnOrder(nrcsIngestRundown) + + const result = groupMosPartsInRundownAndChanges(nrcsIngestRundown, previousIngestRundown, ingestChanges) + + expect(result).toEqual({ + nrcsIngestRundown: { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'rundown0_s1p1', + name: 'SEGMENT1', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 's1p1', + name: 'SEGMENT1;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p1', + name: 'SEGMENT2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 's2p1', + name: 'SEGMENT2;PART1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s1p2', + name: 'SEGMENT1', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 's1p2', + name: 'SEGMENT1;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'rundown0_s2p2', + name: 'SEGMENT2', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 's2p2', + name: 'SEGMENT2;PART2', + rank: 0, + payload: undefined, + }, + ], + }, + ], + }, + ingestChanges: { + source: IngestChangeType.Ingest, + changedSegmentExternalIds: {}, + segmentChanges: { + rundown0_s1p1: { + partChanges: { + s1p2: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s1p2: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + rundown0_s2p1: { + partChanges: { + s2p2: NrcsIngestPartChangeDetails.Deleted, + }, + partOrderChanged: true, + payloadChanged: false, + }, + rundown0_s2p2: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies Complete) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/ingest/__tests__/util.ts b/packages/job-worker/src/blueprints/ingest/__tests__/util.ts new file mode 100644 index 0000000000..3ea6a573da --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/__tests__/util.ts @@ -0,0 +1,29 @@ +import type { + IngestPart, + SofieIngestPart, + IngestSegment, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import type { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import type { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +function toSofieIngestPart(ingestPart: IngestPart): SofieIngestPart { + return { + ...ingestPart, + userEditStates: {}, + } +} +function toSofieIngestSegment(ingestSegment: IngestSegment): SofieIngestSegment { + return { + ...ingestSegment, + userEditStates: {}, + parts: ingestSegment.parts.map(toSofieIngestPart), + } +} +export function toSofieIngestRundown(ingestRundown: IngestRundownWithSource): SofieIngestRundownWithSource { + return { + ...ingestRundown, + userEditStates: {}, + segments: ingestRundown.segments.map(toSofieIngestSegment), + } +} diff --git 
a/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts b/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts new file mode 100644 index 0000000000..b4ea1bee33 --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/defaultApplyIngestChanges.ts @@ -0,0 +1,437 @@ +import { + IngestRundown, + NrcsIngestChangeDetails, + IngestDefaultChangesOptions, + NrcsIngestRundownChangeDetails, + MutableIngestRundown, + NrcsIngestSegmentChangeDetails, + IngestSegment, + NrcsIngestSegmentChangeDetailsEnum, + MutableIngestSegment, + NrcsIngestSegmentChangeDetailsObject, + NrcsIngestPartChangeDetails, + IngestPart, + MutableIngestPart, +} from '@sofie-automation/blueprints-integration' +import { assertNever, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { ReadonlyDeep } from 'type-fest' + +/** + * Perform the default syncing of changes from the ingest data to the rundown. + * + * Please note that this may be overly aggressive at removing any changes made by user operations + * If you are using user operations, you may need to perform some pre and post fixups to ensure + * changes aren't wiped unnecessarily. + */ +export function defaultApplyIngestChanges( + mutableIngestRundown: MutableIngestRundown, + nrcsRundown: IngestRundown, + ingestChanges: NrcsIngestChangeDetails, + options: IngestDefaultChangesOptions +): void { + if (ingestChanges.source !== 'ingest') + throw new Error(`Changes passed to defaultApplyIngestChanges must be from ingest source`) + + const payloadTransformers = new PayloadTransformers(options, mutableIngestRundown) + + let regenerateAllContents = false + + switch (ingestChanges.rundownChanges) { + case NrcsIngestRundownChangeDetails.Regenerate: { + mutableIngestRundown.replacePayload( + payloadTransformers.transformRundownPayload(nrcsRundown, mutableIngestRundown) + ) + + mutableIngestRundown.setName(nrcsRundown.name) + regenerateAllContents = true + + break + } + case NrcsIngestRundownChangeDetails.Payload: { + mutableIngestRundown.replacePayload( + payloadTransformers.transformRundownPayload(nrcsRundown, mutableIngestRundown) + ) + + mutableIngestRundown.setName(nrcsRundown.name) + break + } + case undefined: + case null: + // No changes + break + default: + assertNever(ingestChanges.rundownChanges) + } + + // Perform any renames before any other changes + if (ingestChanges.changedSegmentExternalIds) { + applySegmentRenames(mutableIngestRundown, ingestChanges.changedSegmentExternalIds) + } + + if (regenerateAllContents) { + // Track any existing segment externalId changes + const existingSegmentExternalIdChanges = new Map() + for (const segment of mutableIngestRundown.segments) { + const originalExternalId = segment.originalExternalId + if (originalExternalId) { + existingSegmentExternalIdChanges.set(segment.externalId, originalExternalId) + } + } + + mutableIngestRundown.removeAllSegments() + mutableIngestRundown.forceFullRegenerate() + + // Regenerate all the segments + for (const nrcsSegment of nrcsRundown.segments) { + mutableIngestRundown.replaceSegment( + payloadTransformers.transformPayloadsOnSegmentAndParts( + nrcsSegment, + mutableIngestRundown.getSegment(nrcsSegment.externalId) + ), + null + ) + } + + // Preserve any segment externalIds changes that were performed before this + // This allows blueprints to do renames, and then trigger a full regeneration and remember the relationship + // this is important to avoid leaking adlibbed parts into segments that will get stuck until a reset + for (const nrcsSegment 
of nrcsRundown.segments) {
+			const originalExternalId = existingSegmentExternalIdChanges.get(nrcsSegment.externalId)
+			if (originalExternalId) {
+				mutableIngestRundown.changeSegmentOriginalExternalId(nrcsSegment.externalId, originalExternalId)
+			}
+		}
+	} else {
+		// Propagate segment changes
+		if (ingestChanges.segmentChanges) {
+			applyAllSegmentChanges(mutableIngestRundown, nrcsRundown, ingestChanges.segmentChanges, payloadTransformers)
+		}
+
+		if (ingestChanges.segmentOrderChanged) {
+			applySegmentOrder(mutableIngestRundown, nrcsRundown)
+		}
+	}
+}
+
+function applySegmentOrder(
+	mutableIngestRundown: MutableIngestRundown,
+	nrcsRundown: IngestRundown
+) {
+	// Figure out which segments don't have a new rank, and will need interpolating
+	const missingNewRank: Array<{ segmentId: string; afterId: string | null }> = []
+	const segmentIdRanksInRundown = normalizeArrayToMap(nrcsRundown.segments, 'externalId')
+	mutableIngestRundown.segments.forEach((segment, i) => {
+		if (!segmentIdRanksInRundown.has(segment.externalId)) {
+			missingNewRank.push({
+				segmentId: segment.externalId,
+				afterId: i > 0 ? mutableIngestRundown.segments[i - 1].externalId : null,
+			})
+		}
+	})
+
+	// Run through the segments in reverse order, so that we can insert them in the correct order
+	for (let i = nrcsRundown.segments.length - 1; i >= 0; i--) {
+		const nrcsSegment = nrcsRundown.segments[i]
+
+		// If the Segment doesn't exist, ignore it
+		if (!mutableIngestRundown.getSegment(nrcsSegment.externalId)) continue
+
+		// Find the first valid segment after this one
+		let beforeNrcsSegmentId: string | null = null
+		for (let o = i + 1; o < nrcsRundown.segments.length; o++) {
+			const otherSegment = nrcsRundown.segments[o]
+			if (mutableIngestRundown.getSegment(otherSegment.externalId)) {
+				beforeNrcsSegmentId = otherSegment.externalId
+				break
+			}
+		}
+
+		mutableIngestRundown.moveSegmentBefore(nrcsSegment.externalId, beforeNrcsSegmentId)
+	}
+
+	// Run through the segments without a defined rank, and ensure they are positioned after the same segment as before
+	for (const segmentInfo of missingNewRank) {
+		mutableIngestRundown.moveSegmentAfter(segmentInfo.segmentId, segmentInfo.afterId)
+	}
+}
+
+function applyAllSegmentChanges(
+	mutableIngestRundown: MutableIngestRundown,
+	nrcsRundown: IngestRundown,
+	changes: Record<string, NrcsIngestSegmentChangeDetails | undefined>,
+	payloadTransformers: PayloadTransformers
+) {
+	const nrcsSegmentMap = normalizeArrayToMap(nrcsRundown.segments, 'externalId')
+	const nrcsSegmentIds = nrcsRundown.segments.map((s) => s.externalId)
+
+	// Perform the inserts last, so that we can ensure they happen in a sensible order
+	const segmentsToInsert: IngestSegment[] = []
+
+	// Apply changes and delete segments
+	for (const [segmentId, change] of Object.entries(changes)) {
+		if (!change) continue
+
+		const nrcsSegment = nrcsSegmentMap.get(segmentId)
+		applyChangesForSingleSegment(
+			mutableIngestRundown,
+			nrcsSegment,
+			segmentsToInsert,
+			segmentId,
+			change,
+			payloadTransformers
+		)
+	}
+
+	// Now we can insert the new ones in descending order
+	segmentsToInsert.sort((a, b) => nrcsSegmentIds.indexOf(b.externalId) - nrcsSegmentIds.indexOf(a.externalId))
+	for (const nrcsSegment of segmentsToInsert) {
+		const segmentIndex = nrcsSegmentIds.indexOf(nrcsSegment.externalId)
+		const beforeSegmentId = segmentIndex !== -1 ? nrcsSegmentIds[segmentIndex + 1] ?? null : null
+
+		mutableIngestRundown.replaceSegment(
+			payloadTransformers.transformPayloadsOnSegmentAndParts(
+				nrcsSegment,
+				mutableIngestRundown.getSegment(nrcsSegment.externalId)
+			),
+			beforeSegmentId
+		)
+	}
+}
+
+function applySegmentRenames(
+	mutableIngestRundown: MutableIngestRundown,
+	changedSegmentExternalIds: Record<string, string>
+) {
+	for (const [oldExternalId, newExternalId] of Object.entries(changedSegmentExternalIds)) {
+		if (!oldExternalId || !newExternalId) continue
+
+		mutableIngestRundown.changeSegmentExternalId(oldExternalId, newExternalId)
+	}
+}
+
+function applyChangesForSingleSegment(
+	mutableIngestRundown: MutableIngestRundown,
+	nrcsSegment: IngestSegment | undefined,
+	segmentsToInsert: IngestSegment[],
+	segmentId: string,
+	change: NrcsIngestSegmentChangeDetails,
+	payloadTransformers: PayloadTransformers
+) {
+	const mutableSegment = mutableIngestRundown.getSegment(segmentId)
+
+	switch (change) {
+		case NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated: {
+			if (!nrcsSegment) throw new Error(`Segment ${segmentId} not found in nrcs rundown`)
+
+			segmentsToInsert.push(nrcsSegment)
+
+			break
+		}
+		case NrcsIngestSegmentChangeDetailsEnum.Deleted: {
+			mutableIngestRundown.removeSegment(segmentId)
+
+			break
+		}
+		default: {
+			if (!mutableSegment) throw new Error(`Segment ${segmentId} not found in rundown`)
+			if (!nrcsSegment) throw new Error(`Segment ${segmentId} not found in nrcs rundown`)
+
+			applyChangesObjectForSingleSegment(mutableSegment, nrcsSegment, change, payloadTransformers)
+
+			break
+		}
+	}
+}
+
+function applyChangesObjectForSingleSegment(
+	mutableSegment: MutableIngestSegment,
+	nrcsSegment: IngestSegment,
+	segmentChange: NrcsIngestSegmentChangeDetailsObject,
+	payloadTransformers: PayloadTransformers
+) {
+	if (segmentChange.payloadChanged) {
+		mutableSegment.replacePayload(payloadTransformers.transformSegmentPayload(nrcsSegment, mutableSegment))
+		mutableSegment.setName(nrcsSegment.name)
+	}
+
+	if (segmentChange.partChanges) {
+		const nrcsPartMap = normalizeArrayToMap(nrcsSegment.parts, 'externalId')
+		const nrcsPartIds = nrcsSegment.parts.map((s) => s.externalId)
+
+		// Perform the inserts last, so that we can ensure they happen in a sensible order
+		const partsToInsert: IngestPart[] = []
+
+		for (const [partId, change] of Object.entries(
+			segmentChange.partChanges
+		)) {
+			if (!change) continue
+
+			const nrcsPart = nrcsPartMap.get(partId)
+			applyChangesForPart(mutableSegment, nrcsPart, partsToInsert, partId, change, payloadTransformers)
+		}
+
+		// Now we can insert them in descending order
+		partsToInsert.sort((a, b) => nrcsPartIds.indexOf(b.externalId) - nrcsPartIds.indexOf(a.externalId))
+		for (const nrcsPart of partsToInsert) {
+			const partIndex = nrcsPartIds.indexOf(nrcsPart.externalId)
+			const beforePartId = partIndex !== -1 ? nrcsPartIds[partIndex + 1] ?? null : null
+
+			mutableSegment.replacePart(
+				payloadTransformers.transformPayloadOnPart(nrcsPart, mutableSegment.getPart(nrcsPart.externalId)),
+				beforePartId
+			)
+		}
+	}
+
+	if (segmentChange.partOrderChanged) {
+		applyPartOrder(mutableSegment, nrcsSegment)
+	}
+}
+
+function applyChangesForPart(
+	mutableSegment: MutableIngestSegment,
+	nrcsPart: IngestPart | undefined,
+	partsToInsert: IngestPart[],
+	partId: string,
+	change: NrcsIngestPartChangeDetails,
+	payloadTransformers: PayloadTransformers
+) {
+	const mutablePart = mutableSegment.getPart(partId)
+
+	switch (change) {
+		case NrcsIngestPartChangeDetails.Inserted: {
+			if (!nrcsPart) throw new Error(`Part ${partId} not found in nrcs rundown`)
+
+			// Batch the inserts to be performed last
+			partsToInsert.push(nrcsPart)
+			break
+		}
+		case NrcsIngestPartChangeDetails.Deleted: {
+			mutableSegment.removePart(partId)
+
+			break
+		}
+		case NrcsIngestPartChangeDetails.Updated: {
+			if (!mutablePart) throw new Error(`Part ${partId} not found in segment`)
+			if (!nrcsPart) throw new Error(`Part ${partId} not found in nrcs segment`)
+
+			mutablePart.replacePayload(payloadTransformers.transformPartPayload(nrcsPart, mutablePart))
+			mutablePart.setName(nrcsPart.name)
+
+			break
+		}
+		default: {
+			assertNever(change)
+		}
+	}
+}
+
+function applyPartOrder(mutableSegment: MutableIngestSegment, nrcsSegment: IngestSegment) {
+	// Figure out which parts don't have a new rank, and will need interpolating
+	const missingNewRank: Array<{ partId: string; afterId: string | null }> = []
+	const partIdRanksInSegment = normalizeArrayToMap(nrcsSegment.parts, 'externalId')
+	mutableSegment.parts.forEach((part, i) => {
+		if (!partIdRanksInSegment.has(part.externalId)) {
+			missingNewRank.push({
+				partId: part.externalId,
+				afterId: i > 0 ? mutableSegment.parts[i - 1].externalId : null,
+			})
+		}
+	})
+
+	// Run through the parts in reverse order, so that we can insert them in the correct order
+	for (let i = nrcsSegment.parts.length - 1; i >= 0; i--) {
+		const nrcsPart = nrcsSegment.parts[i]
+
+		// If the Part doesn't exist, ignore it
+		if (!mutableSegment.getPart(nrcsPart.externalId)) continue
+
+		// Find the first valid part after this one
+		let beforeNrcsPartId: string | null = null
+		for (let o = i + 1; o < nrcsSegment.parts.length; o++) {
+			const otherPart = nrcsSegment.parts[o]
+			if (mutableSegment.getPart(otherPart.externalId)) {
+				beforeNrcsPartId = otherPart.externalId
+				break
+			}
+		}
+
+		mutableSegment.movePartBefore(nrcsPart.externalId, beforeNrcsPartId)
+	}
+
+	// Run through the parts without a defined rank, and ensure they are positioned after the same part as before
+	for (const partInfo of missingNewRank) {
+		mutableSegment.movePartAfter(partInfo.partId, partInfo.afterId)
+	}
+}
+
+class PayloadTransformers<TRundownPayload = unknown, TSegmentPayload = unknown, TPartPayload = unknown> {
+	readonly #options: IngestDefaultChangesOptions<TRundownPayload, TSegmentPayload, TPartPayload>
+	readonly #initialMutableParts = new Map<string, MutableIngestPart<TPartPayload>>()
+	readonly #initialMutableSegments = new Map<string, MutableIngestSegment<TSegmentPayload, TPartPayload>>()
+
+	constructor(
+		options: IngestDefaultChangesOptions<TRundownPayload, TSegmentPayload, TPartPayload>,
+		mutableIngestRundown: MutableIngestRundown<TRundownPayload, TSegmentPayload, TPartPayload>
+	) {
+		this.#options = options
+
+		// Collect all of the Part payloads before any operation was run
+		for (const segment of mutableIngestRundown.segments) {
+			this.#initialMutableSegments.set(segment.externalId, segment)
+
+			for (const part of segment.parts) {
+				this.#initialMutableParts.set(part.externalId, part)
+			}
+		}
+	}
+
+	transformRundownPayload(
+		nrcsRundown: IngestRundown,
+		mutableIngestRundown: MutableIngestRundown<TRundownPayload, TSegmentPayload, TPartPayload>
+	): ReadonlyDeep<TRundownPayload> | TRundownPayload {
+		return this.#options.transformRundownPayload(nrcsRundown.payload, mutableIngestRundown.payload)
+	}
+
+	transformSegmentPayload(
+		nrcsSegment: IngestSegment,
+		mutableSegment: MutableIngestSegment<TSegmentPayload, TPartPayload>
+	): ReadonlyDeep<TSegmentPayload> | TSegmentPayload {
+		return this.#options.transformSegmentPayload(nrcsSegment.payload, mutableSegment?.payload)
+	}
+
+	transformPartPayload(
+		nrcsPart: IngestPart,
+		mutablePart: MutableIngestPart<TPartPayload>
+	): ReadonlyDeep<TPartPayload> | TPartPayload {
+		return this.#options.transformPartPayload(nrcsPart.payload, mutablePart?.payload)
+	}
+
+	transformPayloadsOnSegmentAndParts(
+		segment: IngestSegment,
+		mutableSegment: MutableIngestSegment<TSegmentPayload, TPartPayload> | undefined
+	): IngestSegment {
+		return {
+			...segment,
+			payload: this.#options.transformSegmentPayload(
+				segment.payload,
+				mutableSegment ? mutableSegment.payload : this.#initialMutableSegments.get(segment.externalId)?.payload
+			) as TSegmentPayload,
+			parts: segment.parts.map((part) =>
+				this.transformPayloadOnPart(part, mutableSegment?.getPart(part.externalId))
+			),
+		}
+	}
+	transformPayloadOnPart(
+		part: IngestPart,
+		mutablePart: MutableIngestPart<TPartPayload> | undefined
+	): IngestPart {
+		return {
+			...part,
+			payload: this.#options.transformPartPayload(
+				part.payload,
+				mutablePart ?
mutablePart.payload : this.#initialMutableParts.get(part.externalId)?.payload + ) as TPartPayload, + } + } +} diff --git a/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts b/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts new file mode 100644 index 0000000000..18f44a373a --- /dev/null +++ b/packages/job-worker/src/blueprints/ingest/groupPartsInRundownAndChanges.ts @@ -0,0 +1,299 @@ +import { + GroupPartsInMosRundownAndChangesResult, + IngestChangeType, + IngestPart, + IngestRundown, + IngestSegment, + NrcsIngestChangeDetails, + NrcsIngestPartChangeDetails, + NrcsIngestRundownChangeDetails, + NrcsIngestSegmentChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { Complete, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import _ = require('underscore') + +/** + * Groups parts in a MOS rundown into segments, using a separator of the part names. + * For example: + * - "UN;story A" + * - "UN;story B" + * becomes a "UN" segment with two parts (story A and story B) + * + * (The input to this function is actually expected to be segments with a single part in them. This is what the MOS ingest produces.) + */ +export function groupMosPartsIntoIngestSegments( + rundownExternalId: string, + ingestSegments: IngestSegment[], + separator: string +): IngestSegment[] { + const groupedParts: { name: string; parts: IngestPart[] }[] = [] + + for (const ingestSegment of ingestSegments) { + const segmentName = ingestSegment.name.split(separator)[0] || ingestSegment.name + + const lastSegment = _.last(groupedParts) + if (lastSegment && lastSegment.name === segmentName) { + lastSegment.parts.push(...ingestSegment.parts) + } else { + groupedParts.push({ name: segmentName, parts: [...ingestSegment.parts] }) + } + } + + return groupedParts.map( + (partGroup, i) => + ({ + externalId: `${rundownExternalId}_${partGroup.parts[0].externalId}`, + name: partGroup.name, + rank: i, + parts: partGroup.parts.map((part, i) => ({ ...part, rank: i })), + payload: undefined, + } satisfies IngestSegment) + ) +} + +/** + * Group Parts in a Rundown and return a new changes object + * + * Please note that this ignores some of the granularity of the `ingestChanges` object, and relies more on the `previousIngestRundown` instead + * If you are using user operations, you may need to perform some pre and post fixups to ensure changes aren't wiped unnecessarily. 
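+ *
+ * A minimal usage sketch (mirroring the MOS wrapper used in the tests, with ';' as the name separator;
+ * variable names here are illustrative):
+ * ```typescript
+ * const result = groupPartsInRundownAndChanges(nrcsRundown, previousNrcsRundown, changes, (segments) =>
+ * 	groupMosPartsIntoIngestSegments(nrcsRundown.externalId, segments, ';')
+ * )
+ * ```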
+ *
+ * @param nrcsIngestRundown The rundown whose parts need grouping
+ * @param previousNrcsIngestRundown The rundown prior to the changes, if known
+ * @param ingestChanges The changes which have been performed in `nrcsIngestRundown`, that need translating
+ * @param groupPartsIntoSegments A function to group parts into segments
+ * @returns A transformed rundown and changes object
+ */
+export function groupPartsInRundownAndChanges(
+	nrcsIngestRundown: IngestRundown,
+	previousNrcsIngestRundown: IngestRundown | undefined,
+	ingestChanges: Omit<NrcsIngestChangeDetails, 'source'>,
+	groupPartsIntoSegments: (ingestSegments: IngestSegment[]) => IngestSegment[]
+): GroupPartsInMosRundownAndChangesResult {
+	// Combine parts into segments
+	const combinedIngestRundown = groupPartsIntoNewIngestRundown(
+		nrcsIngestRundown,
+		groupPartsIntoSegments
+	)
+
+	// If there is no previous rundown, we need to regenerate everything
+	if (!previousNrcsIngestRundown) {
+		return {
+			nrcsIngestRundown: combinedIngestRundown,
+			ingestChanges: {
+				source: IngestChangeType.Ingest,
+				rundownChanges: NrcsIngestRundownChangeDetails.Regenerate,
+			},
+		}
+	}
+
+	// Combine parts into segments, in both the new and old ingest rundowns
+	const oldCombinedIngestRundown = groupPartsIntoNewIngestRundown(previousNrcsIngestRundown, groupPartsIntoSegments)
+
+	// Calculate the changes to each segment
+	const allPartsWithChanges = findAllPartsWithChanges(nrcsIngestRundown, ingestChanges)
+	const segmentChanges = calculateSegmentChanges(oldCombinedIngestRundown, combinedIngestRundown, allPartsWithChanges)
+
+	// Calculate other changes
+	const changedSegmentExternalIds = calculateSegmentExternalIdChanges(oldCombinedIngestRundown, combinedIngestRundown)
+	const segmentOrderChanged = hasSegmentOrderChanged(
+		combinedIngestRundown.segments,
+		oldCombinedIngestRundown.segments
+	)
+
+	// Ensure id changes aren't flagged as deletions
+	for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries(changedSegmentExternalIds)) {
+		if (!oldSegmentExternalId || !newSegmentExternalId) continue
+
+		if (segmentChanges[oldSegmentExternalId] === NrcsIngestSegmentChangeDetailsEnum.Deleted) {
+			delete segmentChanges[oldSegmentExternalId]
+		}
+	}
+
+	return {
+		nrcsIngestRundown: combinedIngestRundown,
+		ingestChanges: {
+			source: IngestChangeType.Ingest,
+			rundownChanges: ingestChanges.rundownChanges,
+			segmentOrderChanged,
+			segmentChanges,
+			changedSegmentExternalIds,
+		} satisfies Complete<NrcsIngestChangeDetails>,
+	}
+}
+
+function findAllPartsWithChanges(
+	nrcsIngestRundown: IngestRundown,
+	sourceChanges: NrcsIngestChangeDetails
+): Set<string> {
+	if (!sourceChanges.segmentChanges) return new Set()
+
+	const partChanges = new Set<string>()
+
+	for (const segment of nrcsIngestRundown.segments) {
+		const segmentChanges = sourceChanges.segmentChanges[segment.externalId]
+		if (!segmentChanges) continue
+
+		for (const part of segment.parts) {
+			switch (segmentChanges) {
+				case NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated:
+					// This could have been an update, ensure that gets propagated
+					partChanges.add(part.externalId)
+					break
+				case NrcsIngestSegmentChangeDetailsEnum.Deleted:
+					// Deletions will be tracked elsewhere
+					break
+				default:
+					if (typeof segmentChanges !== 'object')
+						throw new Error(`Unexpected segment change for "${segment.externalId}": ${segmentChanges}`)
+
+					// Something changed, this will cause the necessary propagation
+					partChanges.add(part.externalId)
+
+					break
+			}
+		}
+	}
+
+	return partChanges
+}
+
+function calculateSegmentChanges(
+	oldCombinedIngestRundown: IngestRundown,
+	combinedIngestRundown: IngestRundown,
+	allPartsWithChanges: Set<string>
+): Record<string, NrcsIngestSegmentChangeDetails> {
+	const oldIngestSegments = normalizeArrayToMap(oldCombinedIngestRundown.segments, 'externalId')
+
+	const segmentChanges: Record<string, NrcsIngestSegmentChangeDetails> = {}
+
+	// Track any segment changes
+	for (const segment of combinedIngestRundown.segments) {
+		const oldIngestSegment = oldIngestSegments.get(segment.externalId)
+
+		if (!oldIngestSegment) {
+			segmentChanges[segment.externalId] = NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated
+		} else {
+			const segmentPartChanges: Record<string, NrcsIngestPartChangeDetails> = {}
+
+			const newPartIds = new Set(segment.parts.map((p) => p.externalId))
+			const oldPartMap = normalizeArrayToMap(oldIngestSegment.parts, 'externalId')
+
+			for (const part of segment.parts) {
+				const oldPart = oldPartMap.get(part.externalId)
+				if (!oldPart) {
+					segmentPartChanges[part.externalId] = NrcsIngestPartChangeDetails.Inserted
+				} else if (
+					allPartsWithChanges.has(part.externalId) ||
+					oldPart.name !== part.name ||
+					!_.isEqual(oldPart.payload, part.payload)
+				) {
+					segmentPartChanges[part.externalId] = NrcsIngestPartChangeDetails.Updated
+				}
+			}
+			for (const oldPart of oldIngestSegment.parts) {
+				if (!newPartIds.has(oldPart.externalId)) {
+					segmentPartChanges[oldPart.externalId] = NrcsIngestPartChangeDetails.Deleted
+				}
+			}
+
+			const payloadChanged =
+				oldIngestSegment.name !== segment.name || !_.isEqual(oldIngestSegment.payload, segment.payload)
+
+			const partOrderChanged = hasPartOrderChanged(segment.parts, oldIngestSegment.parts)
+			if (partOrderChanged || payloadChanged || Object.keys(segmentPartChanges).length > 0) {
+				segmentChanges[segment.externalId] = {
+					partChanges: segmentPartChanges,
+					partOrderChanged,
+					payloadChanged,
+				}
+			}
+		}
+	}
+
+	// Track any segment deletions
+	if (oldCombinedIngestRundown) {
+		const newSegmentIds = new Set(combinedIngestRundown.segments.map((s) => s.externalId))
+		for (const oldSegment of oldCombinedIngestRundown.segments) {
+			if (!newSegmentIds.has(oldSegment.externalId)) {
+				segmentChanges[oldSegment.externalId] = NrcsIngestSegmentChangeDetailsEnum.Deleted
+			}
+		}
+	}
+
+	return segmentChanges
+}
+
+function hasSegmentOrderChanged(ingestSegments: IngestSegment[], oldIngestSegments: IngestSegment[]): boolean {
+	if (ingestSegments.length !== oldIngestSegments.length) return true
+
+	for (let i = 0; i < ingestSegments.length; i++) {
+		if (ingestSegments[i].externalId !== oldIngestSegments[i].externalId) return true
+	}
+
+	return false
+}
+
+function hasPartOrderChanged(ingestParts: IngestPart[], oldIngestParts: IngestPart[]): boolean {
+	if (ingestParts.length !== oldIngestParts.length) return true
+
+	for (let i = 0; i < ingestParts.length; i++) {
+		if (ingestParts[i].externalId !== oldIngestParts[i].externalId) return true
+	}
+
+	return false
+}
+
+function groupPartsIntoNewIngestRundown(
+	ingestRundown: IngestRundown,
+	groupPartsIntoIngestSegments: (ingestSegments: IngestSegment[]) => IngestSegment[]
+): IngestRundown {
+	return {
+		...(ingestRundown as IngestRundown),
+		segments: groupPartsIntoIngestSegments(ingestRundown.segments),
+	}
+}
+
+function calculateSegmentExternalIdChanges(
+	oldIngestRundown: IngestRundown,
+	newIngestRundown: IngestRundown
+): Record<string, string> {
+	const segmentExternalIdChanges: Record<string, string> = {}
+
+	const oldIngestSegmentMap = normalizeArrayToMap(oldIngestRundown.segments, 'externalId')
+	const newIngestSegmentMap = normalizeArrayToMap(newIngestRundown.segments, 'externalId')
+
+	const removedSegments = oldIngestRundown.segments.filter((s) =>
!newIngestSegmentMap.has(s.externalId)) + let addedSegments = newIngestRundown.segments.filter((s) => !oldIngestSegmentMap.has(s.externalId)) + + if (removedSegments.length === 0 || addedSegments.length === 0) return {} + + for (const removedSegment of removedSegments) { + let newSegmentExternalId: string | undefined + + // try finding "it" in the added, using name + // Future: this may not be particularly accurate, as multiple could have been formed + newSegmentExternalId = addedSegments.find((se) => se.name === removedSegment.name)?.externalId + + if (!newSegmentExternalId) { + // second try, match with any parts: + newSegmentExternalId = addedSegments.find((se) => { + for (const part of removedSegment.parts) { + if (se.parts.find((p) => p.externalId === part.externalId)) { + return true + } + } + + return false + })?.externalId + } + if (newSegmentExternalId) { + segmentExternalIdChanges[removedSegment.externalId] = newSegmentExternalId + + // Ensure the same id doesn't get used multiple times + addedSegments = addedSegments.filter((s) => s.externalId !== newSegmentExternalId) + } + } + + return segmentExternalIdChanges +} diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 6e9d2f8fca..52889f9189 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -44,6 +44,7 @@ import { setDefaultIdOnExpectedPackages } from '../ingest/expectedPackages' import { logger } from '../logging' import { validateTimeline } from 'superfly-timeline' import { ReadonlyDeep } from 'type-fest' +import { translateUserEditsFromBlueprint } from './context/lib' function getIdHash(docType: string, usedIds: Map, uniqueId: string): string { const count = usedIds.get(uniqueId) @@ -108,6 +109,7 @@ export function postProcessPieces( startPartId: partId, invalid: setInvalid ?? false, timelineObjectsString: EmptyPieceTimelineObjectsBlob, + userEditOperations: translateUserEditsFromBlueprint(orgPiece.userEditOperations, [blueprintId]), } if (piece.pieceType !== IBlueprintPieceType.Normal) { @@ -397,6 +399,7 @@ export function postProcessBucketAdLib( blueprintId: BlueprintId, bucketId: BucketId, rank: number | undefined, + name: string | undefined, importVersions: RundownImportVersions ): BucketAdLib { const id: PieceId = protectString( @@ -416,6 +419,7 @@ export function postProcessBucketAdLib( importVersions, ingestInfo, _rank: rank || itemOrig._rank, + name: name || itemOrig.name, timelineObjectsString: EmptyPieceTimelineObjectsBlob, } // Fill in ids of unnamed expectedPackages @@ -446,6 +450,7 @@ export function postProcessBucketAction( blueprintId: BlueprintId, bucketId: BucketId, rank: number | undefined, + label: string | undefined, importVersions: RundownImportVersions ): BucketAdLibAction { const id: AdLibActionId = protectString( @@ -463,7 +468,7 @@ export function postProcessBucketAction( bucketId, importVersions, ingestInfo, - ...processAdLibActionITranslatableMessages(itemOrig, blueprintId, rank), + ...processAdLibActionITranslatableMessages(itemOrig, blueprintId, rank, label), } // Fill in ids of unnamed expectedPackages @@ -498,12 +503,12 @@ function processAdLibActionITranslatableMessages< })[] }, T extends IBlueprintActionManifest ->(itemOrig: T, blueprintId: BlueprintId, rank?: number): Pick { +>(itemOrig: T, blueprintId: BlueprintId, rank?: number, label?: string): Pick { return { display: { ...itemOrig.display, _rank: rank ?? 
itemOrig.display._rank, - label: wrapTranslatableMessageFromBlueprints(itemOrig.display.label, [blueprintId]), + label: (label as any) ?? wrapTranslatableMessageFromBlueprints(itemOrig.display.label, [blueprintId]), triggerLabel: itemOrig.display.triggerLabel && wrapTranslatableMessageFromBlueprints(itemOrig.display.triggerLabel, [blueprintId]), diff --git a/packages/job-worker/src/db/collections.ts b/packages/job-worker/src/db/collections.ts index faa2a64fc2..1b9b714222 100644 --- a/packages/job-worker/src/db/collections.ts +++ b/packages/job-worker/src/db/collections.ts @@ -17,7 +17,8 @@ import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/Buck import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { ExpectedMediaItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { IngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' @@ -97,7 +98,8 @@ export interface IDirectCollections { BucketAdLibPieces: ICollection ExpectedMediaItems: ICollection ExpectedPlayoutItems: ICollection - IngestDataCache: ICollection + SofieIngestDataCache: ICollection + NrcsIngestDataCache: ICollection Parts: ICollection PartInstances: ICollection PeripheralDevices: IReadOnlyCollection @@ -158,7 +160,14 @@ export function getMongoCollections( database.collection(CollectionName.ExpectedPlayoutItems), allowWatchers ), - IngestDataCache: wrapMongoCollection(database.collection(CollectionName.IngestDataCache), allowWatchers), + SofieIngestDataCache: wrapMongoCollection( + database.collection(CollectionName.SofieIngestDataCache), + allowWatchers + ), + NrcsIngestDataCache: wrapMongoCollection( + database.collection(CollectionName.NrcsIngestDataCache), + allowWatchers + ), Parts: wrapMongoCollection(database.collection(CollectionName.Parts), allowWatchers), PartInstances: wrapMongoCollection(database.collection(CollectionName.PartInstances), allowWatchers), PeripheralDevices: wrapMongoCollection( diff --git a/packages/job-worker/src/ingest/__tests__/ingest.test.ts b/packages/job-worker/src/ingest/__tests__/ingest.test.ts index 76aacf22b5..e46a0f827b 100644 --- a/packages/job-worker/src/ingest/__tests__/ingest.test.ts +++ b/packages/job-worker/src/ingest/__tests__/ingest.test.ts @@ -45,27 +45,42 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { insertQueuedPartWithPieces } from '../../playout/adlibUtils' import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { PlayoutPartInstanceModel } from '../../playout/model/PlayoutPartInstanceModel' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { wrapGenericIngestJob, wrapGenericIngestJobWithPrecheck } from '../jobWrappers' + +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) +const handleUpdatedRundownWrapped = wrapGenericIngestJob(handleUpdatedRundown) +const 
handleUpdatedRundownMetaDataWrapped = wrapGenericIngestJob(handleUpdatedRundownMetaData) +const handleRemovedSegmentWrapped = wrapGenericIngestJob(handleRemovedSegment) +const handleUpdatedSegmentWrapped = wrapGenericIngestJobWithPrecheck(handleUpdatedSegment) +const handleUpdatedSegmentRanksWrapped = wrapGenericIngestJob(handleUpdatedSegmentRanks) +const handleRemovedPartWrapped = wrapGenericIngestJob(handleRemovedPart) +const handleUpdatedPartWrapped = wrapGenericIngestJob(handleUpdatedPart) const externalId = 'abcde' const rundownData1: IngestRundown = { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', name: 'Part 0', rank: 0, + payload: undefined, }, { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -73,11 +88,13 @@ const rundownData1: IngestRundown = { externalId: 'segment1', name: 'Segment 1', rank: 0, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -161,7 +178,7 @@ describe('Test ingest actions for rundowns and segments', () => { async function recreateRundown(data: IngestRundown): Promise { await context.clearAllRundownsAndPlaylists() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: data.externalId, ingestRundown: data, isCreateAction: true, @@ -177,6 +194,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeTruthy() await context.mockCollections.Rundowns.update({}, { $set: { orphaned: RundownOrphanedReason.DELETED } }) + await context.mockCollections.NrcsIngestDataCache.remove({}) } test('dataRundownCreate', async () => { @@ -184,7 +202,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -219,7 +237,7 @@ describe('Test ingest actions for rundowns and segments', () => { name: 'MyMockRundownRenamed', } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -259,16 +277,18 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -303,11 +323,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) @@ -315,9 +337,10 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'partZ', name: 'Part Z', rank: 0, + payload: undefined, }) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -355,17 +378,19 @@ describe('Test 
ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }) await recreateRundown(initialRundownData) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: false, @@ -399,7 +424,7 @@ describe('Test ingest actions for rundowns and segments', () => { const rundownData = clone(rundownData1) expect(rundownData.segments[0].parts.shift()).toBeTruthy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -434,9 +459,10 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundownRenamed', type: 'mock', + payload: undefined, } - await handleUpdatedRundownMetaData(context, { + await handleUpdatedRundownMetaDataWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, rundownSource: createRundownSource(device), @@ -469,7 +495,7 @@ describe('Test ingest actions for rundowns and segments', () => { test('dataRundownDelete', async () => { await recreateRundown(rundownData1) - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: externalId, }) @@ -482,7 +508,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: externalId, }) ).rejects.toThrow(/Rundown.*not found/i) @@ -518,16 +544,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -535,11 +564,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, @@ -547,7 +578,7 @@ describe('Test ingest actions for rundowns and segments', () => { } await expect( - handleUpdatedRundown(context, { + handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -571,16 +602,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -588,12 +622,14 @@ describe('Test ingest actions for rundowns and segments', () => { } // Submit an update trying to remove a segment - await handleUpdatedRundown(context, { - rundownExternalId: rundownData.externalId, - ingestRundown: rundownData, - isCreateAction: false, - rundownSource: createRundownSource(device), - }) + await expect( + handleUpdatedRundownWrapped(context, { + rundownExternalId: rundownData.externalId, + ingestRundown: rundownData, + isCreateAction: false, + rundownSource: 
createRundownSource(device2), + }) + ).rejects.toThrow(/Rundown(.+)not found/) // Segment count should not have changed const rundown1 = (await context.mockCollections.Rundowns.findOne({ externalId: externalId })) as DBRundown @@ -606,7 +642,7 @@ describe('Test ingest actions for rundowns and segments', () => { await recreateRundown(rundownData1) await setRundownsOrphaned() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -631,14 +667,17 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: true, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: true, + }) + ).rejects.toThrow(/Rundown(.+)not found/) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeFalsy() @@ -656,10 +695,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: true, @@ -694,10 +734,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [], } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: true, @@ -719,16 +760,18 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [ { externalId: 'part42', name: 'Part 42', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -754,6 +797,10 @@ describe('Test ingest actions for rundowns and segments', () => { { rundownId: rundown._id }, { $set: { orphaned: SegmentOrphanedReason.DELETED } } ) + await context.mockCollections.NrcsIngestDataCache.remove({ + type: NrcsIngestCacheType.SEGMENT, + rundownId: rundown._id, + }) const segExternalId = rundownData1.segments[0].externalId @@ -766,20 +813,24 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [ { externalId: 'part423', name: 'Part 423', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: false, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: false, + }) + ).rejects.toThrow(/Segment.*not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(2) @@ -805,20 +856,24 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment2', rank: 0, + payload: undefined, parts: [ { 
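+						// replacement data for the orphaned segment; the update below is expected to be rejected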
externalId: 'part423', name: 'Part 423', rank: 0, + payload: undefined, }, ], } - await handleUpdatedSegment(context, { - rundownExternalId: externalId, - ingestSegment: ingestSegment, - isCreateAction: false, - }) + await expect( + handleUpdatedSegmentWrapped(context, { + rundownExternalId: externalId, + ingestSegment: ingestSegment, + isCreateAction: false, + }) + ).rejects.toThrow(/Rundown.*not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(2) @@ -835,13 +890,14 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId2, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId2 })).resolves.toBeFalsy() await expect( - handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: 'wibble', ingestSegment: ingestSegment, isCreateAction: false, @@ -864,7 +920,7 @@ describe('Test ingest actions for rundowns and segments', () => { const ingestSegment = rundownData1.segments[0] - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -895,7 +951,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) expect(partsBefore).toHaveLength(2) - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -915,20 +971,25 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: '', name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect( - handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: externalId, ingestSegment: ingestSegment, isCreateAction: false, }) - ).rejects.toThrow(`getSegmentId: segmentExternalId must be set!`) + ).rejects.toThrow(`Segment externalId must be set!`) }) test('dataSegmentDelete already orphaned segment', async () => { const rundown = await recreateRundown(rundownData1) + await context.mockCollections.NrcsIngestDataCache.remove({ + type: NrcsIngestCacheType.SEGMENT, + rundownId: rundown._id, + }) const segExternalId = rundownData1.segments[0].externalId @@ -937,10 +998,12 @@ describe('Test ingest actions for rundowns and segments', () => { { $set: { orphaned: SegmentOrphanedReason.DELETED } } ) - await handleRemovedSegment(context, { - rundownExternalId: externalId, - segmentExternalId: segExternalId, - }) + await expect( + handleRemovedSegmentWrapped(context, { + rundownExternalId: externalId, + segmentExternalId: segExternalId, + }) + ).rejects.toThrow(/Rundown(.*) does not have a Segment(.*) to remove/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(2) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeTruthy() @@ -952,16 +1015,19 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part1', name: 'Part 1', rank: 0, + payload: undefined, }, ], }, @@ -969,11 +1035,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 0, + payload: undefined, 
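+					// payload appears to be a required (though unused) property on ingest objects after this change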
parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -981,18 +1049,20 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'Segment 3', rank: 0, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, ], } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1006,14 +1076,17 @@ describe('Test ingest actions for rundowns and segments', () => { ).resolves.toHaveLength(1) await context.mockCollections.Rundowns.update({}, { $set: { orphaned: RundownOrphanedReason.DELETED } }) + await context.mockCollections.NrcsIngestDataCache.remove({}) await context.mockCollections.Segments.update({ rundownId: rundown._id }, { $unset: { orphaned: 1 } }) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(3) - await handleRemovedSegment(context, { - rundownExternalId: externalId, - segmentExternalId: segExternalId, - }) + await expect( + handleRemovedSegmentWrapped(context, { + rundownExternalId: externalId, + segmentExternalId: segExternalId, + }) + ).rejects.toThrow(/Rundown(.+)not found/) await expect(context.mockCollections.Segments.findFetch({ rundownId: rundown._id })).resolves.toHaveLength(3) await expect(context.mockCollections.Segments.findOne({ externalId: segExternalId })).resolves.toBeTruthy() @@ -1024,7 +1097,7 @@ describe('Test ingest actions for rundowns and segments', () => { const segExternalId = rundownData1.segments[1].externalId - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: externalId, segmentExternalId: segExternalId, }) @@ -1040,7 +1113,7 @@ describe('Test ingest actions for rundowns and segments', () => { ).resolves.toHaveLength(0) await expect( - handleRemovedSegment(context, { + handleRemovedSegmentWrapped(context, { rundownExternalId: externalId, segmentExternalId: segExternalId, }) @@ -1054,7 +1127,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(rundown).toBeFalsy() await expect( - handleRemovedSegment(context, { + handleRemovedSegmentWrapped(context, { rundownExternalId: 'wibble', segmentExternalId: segExternalId, }) @@ -1069,10 +1142,11 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: segExternalId, name: 'MyMockSegment', rank: 0, + payload: undefined, parts: [], } await expect( - handleUpdatedSegment(context, { + handleUpdatedSegmentWrapped(context, { rundownExternalId: 'wibble', ingestSegment: ingestSegment, isCreateAction: true, @@ -1093,9 +1167,10 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'party', name: 'Part Y', rank: 0, + payload: undefined, } - await handleUpdatedPart(context, { + await handleUpdatedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, ingestPart: ingestPart, @@ -1125,7 +1200,7 @@ describe('Test ingest actions for rundowns and segments', () => { const ingestPart = clone(rundownData1.segments[0].parts[0]) ingestPart.name = 'My special part' - await handleUpdatedPart(context, { + await handleUpdatedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, ingestPart: ingestPart, @@ -1158,7 +1233,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) ).resolves.toHaveLength(1) - await 
handleRemovedPart(context, { + await handleRemovedPartWrapped(context, { rundownExternalId: externalId, segmentExternalId: segment.externalId, partExternalId: partExternalId, @@ -1180,52 +1255,53 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 1, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment1', name: 'Segment 1', rank: 2, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment2', name: 'Segment 2', rank: 3, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment3', name: 'Segment 3', rank: 4, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment4', name: 'Segment 4', rank: 5, - // payload?: any, + payload: undefined, parts: [], }, { externalId: 'segment5', name: 'Segment 5', rank: 6, - // payload?: any, + payload: undefined, parts: [], }, ], } - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1235,7 +1311,7 @@ describe('Test ingest actions for rundowns and segments', () => { const rundown = (await context.mockCollections.Rundowns.findOne({ externalId: externalId })) as DBRundown expect(rundown).toBeTruthy() - await handleUpdatedSegmentRanks(context, { + await handleUpdatedSegmentRanksWrapped(context, { rundownExternalId: externalId, newRanks: { ['segment0']: 6, @@ -1247,19 +1323,19 @@ describe('Test ingest actions for rundowns and segments', () => { const segments = await context.mockCollections.Segments.findFetch({ rundownId: rundown._id }) expect(segments).toHaveLength(6) - expect(segments.find((s) => s.externalId === 'segment0')?._rank).toBe(6) - expect(segments.find((s) => s.externalId === 'segment1')?._rank).toBe(2) - expect(segments.find((s) => s.externalId === 'segment2')?._rank).toBe(1) - expect(segments.find((s) => s.externalId === 'segment3')?._rank).toBe(4) - expect(segments.find((s) => s.externalId === 'segment4')?._rank).toBe(5) - expect(segments.find((s) => s.externalId === 'segment5')?._rank).toBe(3) + expect(segments.find((s) => s.externalId === 'segment0')?._rank).toBe(5) + expect(segments.find((s) => s.externalId === 'segment1')?._rank).toBe(1) + expect(segments.find((s) => s.externalId === 'segment2')?._rank).toBe(0) + expect(segments.find((s) => s.externalId === 'segment3')?._rank).toBe(3) + expect(segments.find((s) => s.externalId === 'segment4')?._rank).toBe(4) + expect(segments.find((s) => s.externalId === 'segment5')?._rank).toBe(2) }) test('unsyncing of rundown', async () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -1285,7 +1361,7 @@ describe('Test ingest actions for rundowns and segments', () => { const resyncRundown = async () => { // simulate a resync. 
we don't have a gateway to call out to, but this is how it will respond - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, ingestRundown: rundownData1, isCreateAction: true, @@ -1307,7 +1383,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(getRundownOrphaned()).resolves.toBeUndefined() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: rundownData1.externalId, }) ).rejects.toMatchUserError(UserErrorMessage.RundownRemoveWhileActive) @@ -1324,7 +1400,7 @@ describe('Test ingest actions for rundowns and segments', () => { }) expect(partInstance[0].segmentId).toEqual(segments[0]._id) - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: rundown.externalId, segmentExternalId: segments[0].externalId, }) @@ -1335,7 +1411,7 @@ describe('Test ingest actions for rundowns and segments', () => { await expect(getRundownOrphaned()).resolves.toBeUndefined() await expect(getSegmentOrphaned(segments[0]._id)).resolves.toBeUndefined() - await handleRemovedPart(context, { + await handleRemovedPartWrapped(context, { rundownExternalId: rundown.externalId, segmentExternalId: segments[0].externalId, partExternalId: parts[0].externalId, @@ -1353,11 +1429,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', @@ -1405,11 +1483,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment1', name: 'Segment 1', rank: 1, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1419,7 +1499,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1479,7 +1559,7 @@ describe('Test ingest actions for rundowns and segments', () => { const updatedSegmentData: IngestSegment = rundownData.segments[0] updatedSegmentData.parts[1].externalId = 'new-part' - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: updatedSegmentData, isCreateAction: false, @@ -1529,11 +1609,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', name: 'Segment 0', rank: 0, + payload: undefined, parts: [ { externalId: 'part0', @@ -1581,11 +1663,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment1', name: 'Segment 1', rank: 1, + payload: undefined, parts: [ { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1593,18 +1677,20 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2', rank: 1, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], }, ], } - await handleUpdatedRundown(context, { + await 
handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1692,11 +1778,13 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'segment2', name: 'Segment 2a', rank: 1, + payload: undefined, parts: [ { externalId: 'part3', name: 'Part 3', rank: 0, + payload: undefined, }, ], } @@ -1711,7 +1799,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(segment2.name).not.toBe(ingestSegment.name) } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -1788,7 +1876,7 @@ describe('Test ingest actions for rundowns and segments', () => { expect(segment2.name).not.toBe(ingestSegment.name) } - await handleUpdatedSegment(context, { + await handleUpdatedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, ingestSegment: ingestSegment, isCreateAction: false, @@ -1829,6 +1917,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', @@ -1884,6 +1973,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -1892,7 +1982,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, @@ -1944,7 +2034,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Delete segment 0, while on air const segmentExternalId = rundownData.segments[0].externalId - await handleRemovedSegment(context, { + await handleRemovedSegmentWrapped(context, { rundownExternalId: rundownData.externalId, segmentExternalId: segmentExternalId, }) @@ -1972,7 +2062,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Trigger an 'resync' of the rundown rundownData.segments.splice(0, 1) - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: false, @@ -1995,6 +2085,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: externalId, name: 'MyMockRundown', type: 'mock', + payload: undefined, segments: [ { externalId: 'segment0', @@ -2050,6 +2141,7 @@ describe('Test ingest actions for rundowns and segments', () => { externalId: 'part2', name: 'Part 2', rank: 0, + payload: undefined, }, ], }, @@ -2058,7 +2150,7 @@ describe('Test ingest actions for rundowns and segments', () => { // Preparation: set up rundown await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() - await handleUpdatedRundown(context, { + await handleUpdatedRundownWrapped(context, { rundownExternalId: rundownData.externalId, ingestRundown: rundownData, isCreateAction: true, diff --git a/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts new file mode 100644 index 0000000000..8bebfab3f5 --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestPartJobs.spec.ts @@ -0,0 +1,314 @@ +import { 
setupDefaultJobEnvironment } from '../../__mocks__/context' +import { handleRemovedPart, handleUpdatedPart } from '../ingestPartJobs' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { IngestChangeType, IngestPart, NrcsIngestPartChangeDetails } from '@sofie-automation/blueprints-integration' +import { UpdateIngestRundownChange } from '../runOperation' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + rundownSource: { type: 'http' }, + } +} + +describe('handleRemovedPart', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + partExternalId: 'part0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + partExternalId: 'part0', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('missing part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + partExternalId: 'partX', + }, + clone(ingestRundown) + ) + expect(changes).toEqual({ + ingestRundown, + changes: { + // No changes + source: IngestChangeType.Ingest, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('part belongs to different segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + partExternalId: 'part0', + }, + clone(ingestRundown) + ) + expect(changes).toEqual({ + ingestRundown, + changes: { + // No changes + source: IngestChangeType.Ingest, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + partExternalId: 'part2', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.splice(0, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + part2: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + 
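+// For context: these handlers are exercised here as pure functions over the cached ingest
+// rundown. In production they are instead passed to runIngestUpdateOperation (from
+// '../runOperation', as createAdlibTestingRundown.ts does later in this diff). A rough
+// sketch, assuming the same wiring applies to part removal:
+//
+//   const data = { rundownExternalId: 'rundown0', segmentExternalId: 'segment1', partExternalId: 'part2' }
+//   await runIngestUpdateOperation(context, data, (ingestRundown) =>
+//       handleRemovedPart(context, data, ingestRundown)
+//   )
+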
+describe('handleUpdatedPart', () => { + const newIngestPart: IngestPart = { + externalId: 'partX', + name: 'New Part', + rank: 66, + payload: { + val: 'my new part', + }, + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('insert part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + ingestPart: clone(newIngestPart), + isCreateAction: true, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.push(newIngestPart) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + partX: NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update part', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const newIngestPart2 = { ...newIngestPart, externalId: 'part2' } + + const changes = handleUpdatedPart( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + ingestPart: clone(newIngestPart2), + isCreateAction: true, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments[1].parts.splice(0, 1) + ingestRundown.segments[1].parts.push(newIngestPart2) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + partChanges: { + part2: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + // TODO: should this be a test case? 
+ // it('part belongs to different segment', () => { + // const context = setupDefaultJobEnvironment() + + // const ingestRundown = getDefaultIngestRundown() + + // const newIngestPart2 = { ...newIngestPart, externalId: 'part0' } + + // expect(() => + // handleUpdatedPart( + // context, + // { + // peripheralDeviceId: null, + // rundownExternalId: 'rundown0', + // segmentExternalId: 'segment1', + // ingestPart: clone(newIngestPart2), + // isCreateAction: true, + // }, + // clone(ingestRundown) + // ) + // ).toThrow('TODO fill out this error') + // }) +}) diff --git a/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts new file mode 100644 index 0000000000..0d241dafdf --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestRundownJobs.spec.ts @@ -0,0 +1,431 @@ +import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { IngestChangeType, NrcsIngestRundownChangeDetails } from '@sofie-automation/blueprints-integration' +import { ComputedIngestChangeAction, UpdateIngestRundownChange } from '../runOperation' +import { + handleRegenerateRundown, + handleRemovedRundown, + handleUpdatedRundown, + handleUpdatedRundownMetaData, + handleUserUnsyncRundown, +} from '../ingestRundownJobs' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { DBRundown, RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } +} + +describe('handleRemovedRundown', () => { + it('no rundown, normal delete', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + // forceDelete: false, + }, + undefined + ) + ).toBe(ComputedIngestChangeAction.DELETE) + }) + + it('no rundown, force delete', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: true, + }, + undefined + ) + ).toBe(ComputedIngestChangeAction.FORCE_DELETE) + }) + + it('with rundown, normal delete', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: false, + }, + ingestRundown + ) + ).toBe(ComputedIngestChangeAction.DELETE) + }) + + it('with rundown, force delete', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = 
getDefaultIngestRundown() + + expect( + handleRemovedRundown( + context, + { + rundownExternalId: 'rundown0', + forceDelete: true, + }, + ingestRundown + ) + ).toBe(ComputedIngestChangeAction.FORCE_DELETE) + }) +}) + +// TODO: handleUserRemoveRundown + +describe('handleRegenerateRundown', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRegenerateRundown( + context, + { + rundownExternalId: 'rundown0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRegenerateRundown( + context, + { + rundownExternalId: 'rundown0', + }, + clone(ingestRundown) + ) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUserUnsyncRundown', () => { + const rundownId: RundownId = protectString('rundown0') + + async function createRundown(context: MockJobContext, fragment?: Partial) { + await context.mockCollections.Rundowns.insertOne({ + _id: rundownId, + organizationId: protectString('organization0'), + studioId: context.studioId, + showStyleBaseId: protectString('showStyleBase0'), + showStyleVariantId: protectString('showStyleVariant0'), + created: 0, + modified: 0, + importVersions: {} as any, + externalId: 'rundownExternal0', + name: 'Rundown', + timing: {} as any, + playlistId: protectString('playlist0'), + source: { + type: 'testing', + showStyleVariantId: protectString('showStyleVariant0'), + }, + ...fragment, + }) + context.mockCollections.Rundowns.clearOpLog() + } + + it('no rundown', async () => { + const context = setupDefaultJobEnvironment() + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(1) + expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + }) + + it('already orphaned', async () => { + const context = setupDefaultJobEnvironment() + + await createRundown(context, { orphaned: RundownOrphanedReason.MANUAL }) + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(1) + expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + }) + + it('good', async () => { + const context = setupDefaultJobEnvironment() + + await createRundown(context, {}) + + await handleUserUnsyncRundown(context, { rundownId }) + + expect(context.mockCollections.Rundowns.operations).toHaveLength(2) + expect(context.mockCollections.Rundowns.operations[0]).toEqual({ + type: 'findOne', + args: ['rundown0', undefined], + }) + expect(context.mockCollections.Rundowns.operations[1]).toEqual({ + type: 'update', + args: [ + 'rundown0', + { + $set: { + orphaned: RundownOrphanedReason.MANUAL, + }, + }, + ], + }) + }) +}) + +describe('handleUpdatedRundown', () => { + const newIngestRundown: IngestRundownWithSource = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown2', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0b', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0b', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1b', + 
rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment2', + name: 'Segment 2', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part4', + name: 'Part 4', + rank: 0, + payload: undefined, + }, + { + externalId: 'part5', + name: 'Part 5', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } + + it('create rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(ingestRundown), + isCreateAction: true, + rundownSource: { type: 'http' }, + }, + undefined + ) + + expect(changes).toEqual({ + ingestRundown: ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update missing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(ingestRundown), + isCreateAction: false, + rundownSource: { type: 'http' }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('update existing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundown( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + isCreateAction: false, + rundownSource: { type: 'http' }, + }, + clone(ingestRundown) + ) + + expect(changes).toEqual({ + ingestRundown: newIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedRundownMetaData', () => { + const newIngestRundown: IngestRundownWithSource = { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown2', + rundownSource: { type: 'http' }, + segments: [], + payload: { + key: 'value', + }, + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedRundownMetaData( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + rundownSource: { type: 'http' }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('update existing rundown', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedRundownMetaData( + context, + { + rundownExternalId: 'rundown0', + ingestRundown: clone(newIngestRundown), + rundownSource: { type: 'http' }, + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + const expectedIngestRundown: IngestRundownWithSource = { + ...newIngestRundown, + segments: ingestRundown.segments, + } + + expect(changes).toEqual({ + ingestRundown: expectedIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, + } satisfies UpdateIngestRundownChange) + }) +}) diff --git a/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts b/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts new file mode 100644 index 0000000000..7060dfce61 --- /dev/null +++ b/packages/job-worker/src/ingest/__tests__/ingestSegmentJobs.spec.ts @@ -0,0 +1,409 @@ +import { setupDefaultJobEnvironment } from 
'../../__mocks__/context' +import { + handleRegenerateSegment, + handleRemovedSegment, + handleUpdatedSegment, + handleUpdatedSegmentRanks, +} from '../ingestSegmentJobs' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { + IngestChangeType, + IngestSegment, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { UpdateIngestRundownChange } from '../runOperation' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment0', + name: 'Segment 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + { + externalId: 'part1', + name: 'Part 1', + rank: 1, + payload: undefined, + }, + ], + }, + { + externalId: 'segment1', + name: 'Segment 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + { + externalId: 'part3', + name: 'Part 3', + rank: 1, + payload: undefined, + }, + ], + }, + ], + } +} + +describe('handleRegenerateSegment', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRegenerateSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + // ingestRundown.modified = 1 + // ingestRundown.segments.splice(1, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: { + payloadChanged: true, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleRemovedSegment', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment0', + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segmentX', + }, + clone(ingestRundown) + ) + ).toThrow(/Rundown(.*)does not have a Segment/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleRemovedSegment( + context, + { + rundownExternalId: 'rundown0', + segmentExternalId: 'segment1', + }, + clone(ingestRundown) + ) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 
1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedSegment', () => { + const newIngestSegment: IngestSegment = { + externalId: 'segmentX', + name: 'New Segment', + rank: 66, + payload: { + val: 'my new segment', + }, + parts: [ + { + externalId: 'partX', + name: 'New Part', + rank: 0, + payload: undefined, + }, + ], + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: true, + })(undefined) + ).toThrow(/Rundown(.*)not found/) + }) + + it('missing id', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const customIngestSegment = clone(newIngestSegment) + customIngestSegment.externalId = '' + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: customIngestSegment, + isCreateAction: true, + })(clone(ingestRundown)) + ).toThrow(/Segment externalId must be set!/) + }) + + it('insert segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const changes = handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: true, + })(clone(ingestRundown)) as UpdateIngestRundownChange + + // update the expected ingestRundown + ingestRundown.segments.push(newIngestSegment) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segmentX: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('update missing segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + expect(() => + handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(newIngestSegment), + isCreateAction: false, + })(clone(ingestRundown)) + ).toThrow(/Segment(.*)not found/) + }) + + it('update segment', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const customIngestSegment = clone(newIngestSegment) + customIngestSegment.externalId = 'segment1' + + const changes = handleUpdatedSegment(context, { + rundownExternalId: 'rundown0', + ingestSegment: clone(customIngestSegment), + isCreateAction: false, // has no impact + })(clone(ingestRundown)) as UpdateIngestRundownChange + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1, customIngestSegment) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + segment1: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleUpdatedSegmentRanks', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + expect(() => + handleUpdatedSegmentRanks( + context, + { + rundownExternalId: 'rundown0', + newRanks: { + segment0: 1, + segment1: 0, + }, + }, + undefined + ) + ).toThrow(/Rundown(.*)not found/) + }) + + it('no valid changes', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + 
const changes = handleUpdatedSegmentRanks(
+			context,
+			{
+				rundownExternalId: 'rundown0',
+				newRanks: {
+					segmentX: 2,
+				},
+			},
+			clone(ingestRundown)
+		)
+
+		expect(changes).toEqual({
+			ingestRundown,
+			changes: {
+				source: IngestChangeType.Ingest,
+				segmentOrderChanged: false,
+			},
+		} satisfies UpdateIngestRundownChange)
+	})
+
+	it('update some segments', () => {
+		const context = setupDefaultJobEnvironment()
+
+		const ingestRundown = getDefaultIngestRundown()
+
+		const changes = handleUpdatedSegmentRanks(
+			context,
+			{
+				rundownExternalId: 'rundown0',
+				newRanks: {
+					segmentX: 2,
+					segment0: 5,
+				},
+			},
+			clone(ingestRundown)
+		)
+
+		ingestRundown.segments[0].rank = 5
+		expect(changes).toEqual({
+			ingestRundown,
+			changes: {
+				source: IngestChangeType.Ingest,
+				segmentOrderChanged: true,
+			},
+		} satisfies UpdateIngestRundownChange)
+	})
+
+	it('invalid rank value type', () => {
+		const context = setupDefaultJobEnvironment()
+
+		const ingestRundown = getDefaultIngestRundown()
+
+		const changes = handleUpdatedSegmentRanks(
+			context,
+			{
+				rundownExternalId: 'rundown0',
+				newRanks: {
+					segmentX: 2,
+					segment0: 'a' as any,
+				},
+			},
+			clone(ingestRundown)
+		)
+
+		expect(changes).toEqual({
+			ingestRundown,
+			changes: {
+				source: IngestChangeType.Ingest,
+				segmentOrderChanged: false,
+			},
+		} satisfies UpdateIngestRundownChange)
+	})
+})
+
+// Future: tests for handleRemoveOrphanedSegments
diff --git a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts
index 9fd132c416..2c6e7d8ce4 100644
--- a/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts
+++ b/packages/job-worker/src/ingest/__tests__/selectShowStyleVariant.test.ts
@@ -15,6 +15,8 @@ describe('selectShowStyleVariant', () => {
			type: 'mock',
			segments: [],
			coreData: undefined,
+			userEditStates: {},
+			payload: undefined,
		}
	}
	function createBlueprintContext(context: MockJobContext): StudioUserContext {
@@ -22,7 +24,6 @@ describe('selectShowStyleVariant', () => {
		{
			name: 'test',
			identifier: 'test',
-			tempSendUserNotesIntoBlackHole: true,
		},
		context.studio,
		context.getStudioBlueprintConfig()
diff --git a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts
index 4df4a21361..e051cfddd3 100644
--- a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts
+++ b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts
@@ -69,7 +69,6 @@ async function createMockRO(context: MockJobContext): Promise {
				externalId: 's1',
				rundownId: rundownId,
				name: 'Segment1',
-				externalModified: 1,
			}),
			literal({
				_id: protectString('mock_segment2'),
@@ -77,7 +76,6 @@ async function createMockRO(context: MockJobContext): Promise {
				externalId: 's2',
				rundownId: rundownId,
				name: 'Segment2',
-				externalModified: 1,
			}),
			literal({
				_id: protectString('mock_segment3'),
@@ -85,7 +83,6 @@ async function createMockRO(context: MockJobContext): Promise {
				externalId: 's3',
				rundownId: rundownId,
				name: 'Segment3',
-				externalModified: 1,
			}),
			literal({
				_id: protectString('mock_segment4'),
@@ -93,7 +90,6 @@ async function createMockRO(context: MockJobContext): Promise {
				externalId: 's4',
				rundownId: rundownId,
				name: 'Segment4',
-				externalModified: 1,
			}),
		]
	)
diff --git a/packages/job-worker/src/ingest/bucket/import.ts b/packages/job-worker/src/ingest/bucket/import.ts
index e2d44faac2..c7216ef48f 100644
--- a/packages/job-worker/src/ingest/bucket/import.ts
+++ 
b/packages/job-worker/src/ingest/bucket/import.ts @@ -30,10 +30,17 @@ import { BucketId, ShowStyleBaseId } from '@sofie-automation/corelib/dist/dataMo import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' export async function handleBucketItemImport(context: JobContext, data: BucketItemImportProps): Promise { - await regenerateBucketItemFromIngestInfo(context, data.bucketId, data.showStyleBaseId, { - limitToShowStyleVariantIds: data.showStyleVariantIds, - payload: data.payload, - }) + await regenerateBucketItemFromIngestInfo( + context, + data.bucketId, + data.showStyleBaseId, + { + limitToShowStyleVariantIds: data.showStyleVariantIds, + payload: data.payload, + }, + undefined, + undefined + ) } export async function handleBucketItemRegenerate(context: JobContext, data: BucketItemRegenerateProps): Promise { @@ -49,9 +56,11 @@ export async function handleBucketItemRegenerate(context: JobContext, data: Buck projection: { showStyleBaseId: 1, ingestInfo: 1, + name: 1, + _rank: 1, }, } - ) as Promise> | undefined, + ) as Promise> | undefined, context.directCollections.BucketAdLibActions.findOne( { externalId: data.externalId, @@ -62,9 +71,10 @@ export async function handleBucketItemRegenerate(context: JobContext, data: Buck projection: { showStyleBaseId: 1, ingestInfo: 1, + display: 1, }, } - ) as Promise> | undefined, + ) as Promise> | undefined, ]) // TODO - UserErrors? @@ -74,7 +84,9 @@ export async function handleBucketItemRegenerate(context: JobContext, data: Buck context, data.bucketId, adlibAction.showStyleBaseId, - adlibAction.ingestInfo + adlibAction.ingestInfo, + adlibAction.display._rank, + typeof adlibAction.display.label === 'string' ? adlibAction.display.label : undefined ) } else if (adlibPiece) { if (!adlibPiece.ingestInfo) throw new Error(`Bucket AdLibPiece cannot be resynced, it has no ingest data`) @@ -82,7 +94,9 @@ export async function handleBucketItemRegenerate(context: JobContext, data: Buck context, data.bucketId, adlibPiece.showStyleBaseId, - adlibPiece.ingestInfo + adlibPiece.ingestInfo, + adlibPiece._rank, + adlibPiece.name ) } else { throw new Error(`No Bucket Items with externalId "${data.externalId}" were found`) @@ -93,7 +107,9 @@ async function regenerateBucketItemFromIngestInfo( context: JobContext, bucketId: BucketId, showStyleBaseId: ShowStyleBaseId, - ingestInfo: BucketAdLibIngestInfo + ingestInfo: BucketAdLibIngestInfo, + oldRank: number | undefined, + oldLabel: string | undefined ): Promise { const [showStyleBase, allShowStyleVariants, allOldAdLibPieces, allOldAdLibActions, blueprint] = await Promise.all([ context.getShowStyleBase(showStyleBaseId), @@ -130,7 +146,7 @@ async function regenerateBucketItemFromIngestInfo( const actionIdsToRemove = new Set(allOldAdLibActions.map((p) => p._id)) let isFirstShowStyleVariant = true - let newRank: number | undefined = undefined + const newRank: number | undefined = oldRank ?? 
(await calculateHighestRankInBucket(context, bucketId)) + 1
	let onlyGenerateOneItem = false
	const ps: Promise[] = []
@@ -150,13 +166,6 @@ async function regenerateBucketItemFromIngestInfo(
				core: getSystemVersion(),
			}
-			// Cache the newRank, so we only have to calculate it once:
-			if (newRank === undefined) {
-				newRank = (await calculateHighestRankInBucket(context, bucketId)) + 1
-			} else {
-				newRank++
-			}
-
			if (isAdlibAction(rawAdlib)) {
				if (isFirstShowStyleVariant) {
					if (rawAdlib.allVariants) {
@@ -174,6 +183,7 @@ async function regenerateBucketItemFromIngestInfo(
						blueprint.blueprintId,
						bucketId,
						newRank,
+						oldLabel,
						importVersions
					)
@@ -194,6 +204,7 @@ async function regenerateBucketItemFromIngestInfo(
					blueprint.blueprintId,
					bucketId,
					newRank,
+					oldLabel,
					importVersions
				)
diff --git a/packages/job-worker/src/ingest/commit.ts b/packages/job-worker/src/ingest/commit.ts
index 069e96aea0..62416afe5e 100644
--- a/packages/job-worker/src/ingest/commit.ts
+++ b/packages/job-worker/src/ingest/commit.ts
@@ -42,6 +42,7 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
import { DatabasePersistedModel } from '../modelBase'
import { updateSegmentIdsForAdlibbedPartInstances } from './commit/updateSegmentIdsForAdlibbedPartInstances'
import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
+import { AnyBulkWriteOperation } from 'mongodb'
export type BeforePartMapItem = { id: PartId; rank: number }
export type BeforeIngestOperationPartMap = ReadonlyMap<SegmentId, Array<BeforePartMapItem>>
@@ -176,6 +177,9 @@ export async function CommitIngestOperation(
	// Ensure any adlibbed parts are updated to follow the segmentId of the previous part
	await updateSegmentIdsForAdlibbedPartInstances(context, ingestModel, beforePartMap)
+	if (data.renamedSegments && data.renamedSegments.size > 0) {
+		logger.debug(`Renamed segments: ${JSON.stringify(Array.from(data.renamedSegments.entries()))}`)
+	}
	// ensure instances have matching segmentIds with the parts
	await updatePartInstancesSegmentIds(context, ingestModel, data.renamedSegments, beforePartMap)
@@ -261,11 +265,17 @@ export async function CommitIngestOperation(
}
function canRemoveSegment(
+	prevPartInstance: ReadonlyDeep<DBPartInstance> | undefined,
	currentPartInstance: ReadonlyDeep<DBPartInstance> | undefined,
	nextPartInstance: ReadonlyDeep<DBPartInstance> | undefined,
	segmentId: SegmentId
): boolean {
-	if (currentPartInstance?.segmentId === segmentId || nextPartInstance?.segmentId === segmentId) {
+	if (prevPartInstance?.segmentId === segmentId) {
+		// Don't allow removing the segment that was just on air
+		logger.warn(`Not allowing removal of previous playing segment "${segmentId}", making segment unsynced instead`)
+		return false
+	}
+	if (currentPartInstance?.segmentId === segmentId) {
		// Don't allow removing an active segment
		logger.warn(`Not allowing removal of current playing segment "${segmentId}", making segment unsynced instead`)
		return false
@@ -294,26 +304,32 @@ async function updatePartInstancesSegmentIds(
	renamedSegments: ReadonlyMap<SegmentId, SegmentId> | null,
	beforePartMap: BeforeIngestOperationPartMap
) {
-	// A set of rules which can be translated to mongo queries for PartInstances to update
+	/**
+	 * Maps new SegmentId ->
+	 * A set of rules which can be translated to mongo queries for PartInstances to update
+	 */
	const renameRules = new Map<
		SegmentId,
		{
+			/** Parts that have been moved to the new SegmentId */
			partIds: PartId[]
-			fromSegmentId: SegmentId | null
+			/** Segments that have been renamed to the new SegmentId */
+			fromSegmentIds: SegmentId[]
		}
	>()
	// Add whole segment renames to the set of rules
	if (renamedSegments) {
		for (const [fromSegmentId, toSegmentId] of renamedSegments) {
-			const rule = renameRules.get(toSegmentId) ?? { partIds: [], fromSegmentId: null }
+			const rule = renameRules.get(toSegmentId) ?? { partIds: [], fromSegmentIds: [] }
			renameRules.set(toSegmentId, rule)
-			rule.fromSegmentId = fromSegmentId
+			rule.fromSegmentIds.push(fromSegmentId)
		}
	}
-	// Reverse the structure
+	// Reverse the Map structure
+	/** Maps Part -> SegmentId-of-the-part-before-ingest-changes */
	const beforePartSegmentIdMap = new Map<PartId, SegmentId>()
	for (const [segmentId, partItems] of beforePartMap.entries()) {
		for (const partItem of partItems) {
@@ -324,8 +340,11 @@
	// Some parts may have gotten a different segmentId to the base rule, so track those separately in the rules
	for (const partModel of ingestModel.getAllOrderedParts()) {
		const oldSegmentId = beforePartSegmentIdMap.get(partModel.part._id)
+
		if (oldSegmentId && oldSegmentId !== partModel.part.segmentId) {
-			const rule = renameRules.get(partModel.part.segmentId) ?? { partIds: [], fromSegmentId: null }
+			// The part has moved to another segment; add a rule to update its corresponding PartInstances:
+
+			const rule = renameRules.get(partModel.part.segmentId) ?? { partIds: [], fromSegmentIds: [] }
			renameRules.set(partModel.part.segmentId, rule)
			rule.partIds.push(partModel.part._id)
@@ -334,30 +353,80 @@
	// Perform a mongo update to modify the PartInstances
	if (renameRules.size > 0) {
-		await context.directCollections.PartInstances.bulkWrite(
-			Array.from(renameRules.entries()).map(([newSegmentId, rule]) => ({
-				updateMany: {
-					filter: {
-						$or: _.compact([
-							rule.fromSegmentId
-								? {
-										segmentId: rule.fromSegmentId,
-									}
-								: undefined,
-							{
-								'part._id': { $in: rule.partIds },
+		const rulesInOrder = Array.from(renameRules.entries()).sort((a, b) => {
+			// Ensure that the ones with partIds are processed last,
+			// as that should take precedence.
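+			// For example (hypothetical ids): with
+			//   renameRules = Map { 'segB' => { partIds: [], fromSegmentIds: ['segA'] },
+			//                       'segC' => { partIds: ['partX'], fromSegmentIds: [] } }
+			// the whole-segment rename (segA -> segB) is written before the per-part rule
+			// for partX, so the per-part update wins when both match a PartInstance.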
+ + if (a[1].partIds.length && !b[1].partIds.length) return 1 + if (!a[1].partIds.length && b[1].partIds.length) return -1 + return 0 + }) + + const writeOps: AnyBulkWriteOperation[] = [] + + for (const [newSegmentId, rule] of rulesInOrder) { + if (rule.fromSegmentIds.length) { + writeOps.push({ + updateMany: { + filter: { + rundownId: ingestModel.rundownId, + segmentId: { $in: rule.fromSegmentIds }, + }, + update: { + $set: { + segmentId: newSegmentId, + 'part.segmentId': newSegmentId, }, - ]), + }, }, - update: { - $set: { - segmentId: newSegmentId, - 'part.segmentId': newSegmentId, + }) + } + if (rule.partIds.length) { + writeOps.push({ + updateMany: { + filter: { + rundownId: ingestModel.rundownId, + 'part._id': { $in: rule.partIds }, + }, + update: { + $set: { + segmentId: newSegmentId, + 'part.segmentId': newSegmentId, + }, }, }, - }, - })) - ) + }) + } + } + if (writeOps.length) await context.directCollections.PartInstances.bulkWrite(writeOps) + + // Double check that there are no parts using the old segment ids: + const oldSegmentIds = Array.from(renameRules.keys()) + const [badPartInstances, badParts] = await Promise.all([ + await context.directCollections.PartInstances.findFetch({ + rundownId: ingestModel.rundownId, + segmentId: { $in: oldSegmentIds }, + }), + await context.directCollections.Parts.findFetch({ + rundownId: ingestModel.rundownId, + segmentId: { $in: oldSegmentIds }, + }), + ]) + if (badPartInstances.length > 0) { + logger.error( + `updatePartInstancesSegmentIds: Failed to update all PartInstances using old SegmentIds "${JSON.stringify( + oldSegmentIds + )}": ${JSON.stringify(badPartInstances)}, writeOps: ${JSON.stringify(writeOps)}` + ) + } + + if (badParts.length > 0) { + logger.error( + `updatePartInstancesSegmentIds: Failed to update all Parts using old SegmentIds "${JSON.stringify( + oldSegmentIds + )}": ${JSON.stringify(badParts)}, writeOps: ${JSON.stringify(writeOps)}` + ) + } } } @@ -661,7 +730,7 @@ async function removeSegments( _changedSegmentIds: ReadonlyDeep, removedSegmentIds: ReadonlyDeep ) { - const { currentPartInstance, nextPartInstance } = await getSelectedPartInstances( + const { previousPartInstance, currentPartInstance, nextPartInstance } = await getSelectedPartInstances( context, newPlaylist, rundownsInPlaylist.map((r) => r._id) @@ -671,7 +740,7 @@ async function removeSegments( const orphanDeletedSegmentIds = new Set() const orphanHiddenSegmentIds = new Set() for (const segmentId of removedSegmentIds) { - if (canRemoveSegment(currentPartInstance, nextPartInstance, segmentId)) { + if (canRemoveSegment(previousPartInstance, currentPartInstance, nextPartInstance, segmentId)) { purgeSegmentIds.add(segmentId) } else { logger.warn( @@ -684,8 +753,10 @@ async function removeSegments( for (const segment of ingestModel.getAllSegments()) { const segmentId = segment.segment._id if (segment.segment.isHidden) { - if (!canRemoveSegment(currentPartInstance, nextPartInstance, segmentId)) { - // Protect live segment from being hidden + // Blueprints want to hide the Segment + + if (!canRemoveSegment(previousPartInstance, currentPartInstance, nextPartInstance, segmentId)) { + // The Segment is live, so we need to protect it from being hidden logger.warn(`Cannot hide live segment ${segmentId}, it will be orphaned`) switch (segment.segment.orphaned) { case SegmentOrphanedReason.DELETED: @@ -705,7 +776,7 @@ async function removeSegments( } else if (!orphanDeletedSegmentIds.has(segmentId) && segment.parts.length === 0) { // No parts in segment - if 
(!canRemoveSegment(currentPartInstance, nextPartInstance, segmentId)) { + if (!canRemoveSegment(previousPartInstance, currentPartInstance, nextPartInstance, segmentId)) { // Protect live segment from being hidden logger.warn(`Cannot hide live segment ${segmentId}, it will be orphaned`) orphanHiddenSegmentIds.add(segmentId) diff --git a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts index e24a942db4..e19c681005 100644 --- a/packages/job-worker/src/ingest/createAdlibTestingRundown.ts +++ b/packages/job-worker/src/ingest/createAdlibTestingRundown.ts @@ -1,17 +1,21 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import type { CreateAdlibTestingRundownForShowStyleVariantProps } from '@sofie-automation/corelib/dist/worker/ingest' +import type { + CreateAdlibTestingRundownForShowStyleVariantProps, + IngestUpdateRundownProps, +} from '@sofie-automation/corelib/dist/worker/ingest' import type { JobContext } from '../jobs' -import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { convertShowStyleVariantToBlueprints } from '../blueprints/context/lib' import { ShowStyleUserContext } from '../blueprints/context' import { WatchedPackagesHelper } from '../blueprints/context/watchedPackages' -import { handleUpdatedRundown } from './ingestRundownJobs' import type { IShowStyleUserContext, IBlueprintShowStyleVariant, IngestRundown, } from '@sofie-automation/blueprints-integration' import { logger } from '../logging' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { handleUpdatedRundown } from './ingestRundownJobs' +import { runIngestUpdateOperation } from './runOperation' export async function handleCreateAdlibTestingRundownForShowStyleVariant( context: JobContext, @@ -49,7 +53,7 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( `Creating adlib testing rundown "${ingestRundown.name}" for showStyleVariant "${showStyleVariant.name}"` ) - return handleUpdatedRundown(context, { + const updateData: IngestUpdateRundownProps = { rundownExternalId: ingestRundown.externalId, ingestRundown, isCreateAction: true, @@ -57,7 +61,10 @@ export async function handleCreateAdlibTestingRundownForShowStyleVariant( type: 'testing', showStyleVariantId: showStyleVariant._id, }, - }) + } + return runIngestUpdateOperation(context, updateData, (ingestRundown) => + handleUpdatedRundown(context, updateData, ingestRundown) + ) } function fallbackBlueprintMethod( diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts index f41ed7db49..ef63fd2781 100644 --- a/packages/job-worker/src/ingest/generationRundown.ts +++ b/packages/job-worker/src/ingest/generationRundown.ts @@ -1,5 +1,5 @@ import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { BlueprintId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBRundown, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' @@ -16,7 +16,6 @@ import { import { logger } from '../logging' import _ = require('underscore') import { IngestModel } from './model/IngestModel' -import { LocalIngestRundown } 
from './ingestCache' import { extendIngestRundownCore, canRundownBeUpdated } from './lib' import { JobContext } from '../jobs' import { CommitIngestData } from './lock' @@ -25,8 +24,19 @@ import { updateExpectedPackagesForRundownBaseline } from './expectedPackages' import { ReadonlyDeep } from 'type-fest' import { BlueprintResultRundown, ExtendedIngestRundown } from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' -import { convertRundownToBlueprintSegmentRundown } from '../blueprints/context/lib' +import { convertRundownToBlueprintSegmentRundown, translateUserEditsFromBlueprint } from '../blueprints/context/lib' import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +export enum GenerateRundownMode { + Create = 'create', + Update = 'update', + MetadataChange = 'metadata-change', +} + +export interface CommitIngestDataExt extends CommitIngestData { + didRegenerateRundown: boolean +} /** * Regenerate and save a whole Rundown @@ -40,106 +50,64 @@ import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment' export async function updateRundownFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, - isCreateAction: boolean, - rundownSource: RundownSource -): Promise { + ingestRundown: SofieIngestRundownWithSource, + generateMode: GenerateRundownMode +): Promise { const span = context.startSpan('ingest.rundownInput.updateRundownFromIngestData') - if (!canRundownBeUpdated(ingestModel.rundown, isCreateAction)) return null - - logger.info(`${ingestModel.rundown ? 
'Updating' : 'Adding'} rundown ${ingestModel.rundownId}`) - - // canBeUpdated is to be run by the callers - - const extendedIngestRundown = extendIngestRundownCore(ingestRundown, ingestModel.rundown) - - const selectShowStyleContext = new StudioUserContext( - { - name: 'selectShowStyleVariant', - identifier: `studioId=${context.studio._id},rundownId=${ingestModel.rundownId},ingestRundownId=${ingestModel.rundownExternalId}`, - tempSendUserNotesIntoBlackHole: true, - }, - context.studio, - context.getStudioBlueprintConfig() - ) - // TODO-CONTEXT save any user notes from selectShowStyleContext - const showStyle = await selectShowStyleVariant( - context, - selectShowStyleContext, - extendedIngestRundown, - rundownSource - ) - if (!showStyle) { - logger.debug('Blueprint rejected the rundown') - throw new Error('Blueprint rejected the rundown') - } - - const pAllRundownWatchedPackages = WatchedPackagesHelper.createForIngestRundown(context, ingestModel) - - const showStyleBlueprint = await context.getShowStyleBlueprint(showStyle.base._id) - const allRundownWatchedPackages = await pAllRundownWatchedPackages - - // Call blueprints, get rundown - const dbRundown = await regenerateRundownAndBaselineFromIngestData( + const regenerateAllContents = await updateRundownFromIngestDataInner( context, ingestModel, - extendedIngestRundown, - rundownSource, - showStyle, - showStyleBlueprint, - allRundownWatchedPackages + ingestRundown, + generateMode ) - if (!dbRundown) { - // We got no rundown, abort: - return null - } - // TODO - store notes from rundownNotesContext + if (!regenerateAllContents) return null - const { changedSegmentIds, removedSegmentIds } = await calculateSegmentsAndRemovalsFromIngestData( - context, - ingestModel, - ingestRundown, - allRundownWatchedPackages - ) + const regenerateSegmentsChanges = regenerateAllContents.regenerateAllContents + ? await calculateSegmentsAndRemovalsFromIngestData( + context, + ingestModel, + ingestRundown, + regenerateAllContents.allRundownWatchedPackages + ) + : undefined - logger.info(`Rundown ${dbRundown._id} update complete`) + logger.info(`Rundown ${ingestModel.rundownId} update complete`) span?.end() - return literal({ - changedSegmentIds: changedSegmentIds, - removedSegmentIds: removedSegmentIds, - renamedSegments: null, + return literal({ + changedSegmentIds: regenerateSegmentsChanges?.changedSegmentIds ?? [], + removedSegmentIds: regenerateSegmentsChanges?.removedSegmentIds ?? 
[], + renamedSegments: new Map(), + + didRegenerateRundown: regenerateAllContents.regenerateAllContents, removeRundown: false, }) } -/** - * Regenerate Rundown if necessary from metadata change - * Note: callers are expected to check the change is allowed by calling `canBeUpdated` prior to this - * @param context Context for the running job - * @param ingestModel The ingest model of the rundown - * @param ingestRundown The rundown to regenerate - * @param rundownSource Source of this Rundown - * @returns CommitIngestData describing the change - */ -export async function updateRundownMetadataFromIngestData( +export interface UpdateRundownInnerResult { + allRundownWatchedPackages: WatchedPackagesHelper + regenerateAllContents: boolean +} + +export async function updateRundownFromIngestDataInner( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, - rundownSource: RundownSource -): Promise { - if (!canRundownBeUpdated(ingestModel.rundown, false)) return null + ingestRundown: SofieIngestRundownWithSource, + generateMode: GenerateRundownMode +): Promise { + if (!canRundownBeUpdated(ingestModel.rundown, generateMode === GenerateRundownMode.Create)) return null + const existingRundown = ingestModel.rundown - if (!existingRundown) { + if (!existingRundown && generateMode === GenerateRundownMode.MetadataChange) { throw new Error(`Rundown "${ingestRundown.externalId}" does not exist`) } - const span = context.startSpan('ingest.rundownInput.handleUpdatedRundownMetaDataInner') + logger.info(`${ingestModel.rundown ? 'Updating' : 'Adding'} rundown ${ingestModel.rundownId}`) - logger.info(`Updating rundown ${ingestModel.rundownId}`) + // canBeUpdated is to be run by the callers const extendedIngestRundown = extendIngestRundownCore(ingestRundown, ingestModel.rundown) @@ -147,18 +115,16 @@ export async function updateRundownMetadataFromIngestData( { name: 'selectShowStyleVariant', identifier: `studioId=${context.studio._id},rundownId=${ingestModel.rundownId},ingestRundownId=${ingestModel.rundownExternalId}`, - tempSendUserNotesIntoBlackHole: true, }, context.studio, context.getStudioBlueprintConfig() ) - // TODO-CONTEXT save any user notes from selectShowStyleContext const showStyle = await selectShowStyleVariant( context, selectShowStyleContext, extendedIngestRundown, - rundownSource + ingestRundown.rundownSource ) if (!showStyle) { logger.debug('Blueprint rejected the rundown') @@ -170,50 +136,49 @@ export async function updateRundownMetadataFromIngestData( const showStyleBlueprint = await context.getShowStyleBlueprint(showStyle.base._id) const allRundownWatchedPackages = await pAllRundownWatchedPackages + const extraRundownNotes: RundownNote[] = selectShowStyleContext.notes.map((note) => ({ + type: note.type, + message: wrapTranslatableMessageFromBlueprints(note.message, [showStyleBlueprint.blueprintId]), + origin: { + name: 'selectShowStyleVariant', + }, + })) + // Call blueprints, get rundown const dbRundown = await regenerateRundownAndBaselineFromIngestData( context, ingestModel, extendedIngestRundown, - rundownSource, + ingestRundown.rundownSource, showStyle, showStyleBlueprint, - allRundownWatchedPackages + allRundownWatchedPackages, + extraRundownNotes ) if (!dbRundown) { // We got no rundown, abort: return null } - let changedSegmentIds: SegmentId[] | undefined - let removedSegmentIds: SegmentId[] | undefined - if ( - !_.isEqual( - convertRundownToBlueprintSegmentRundown(existingRundown, true), - convertRundownToBlueprintSegmentRundown(dbRundown, true) - ) - ) { - 
logger.info(`MetaData of rundown ${dbRundown.externalId} has been modified, regenerating segments`) - const changes = await calculateSegmentsAndRemovalsFromIngestData( - context, - ingestModel, - ingestRundown, - allRundownWatchedPackages - ) - changedSegmentIds = changes.changedSegmentIds - removedSegmentIds = changes.removedSegmentIds - } - - logger.info(`Rundown ${dbRundown._id} update complete`) + // TODO - store notes from rundownNotesContext - span?.end() - return literal({ - changedSegmentIds: changedSegmentIds ?? [], - removedSegmentIds: removedSegmentIds ?? [], - renamedSegments: null, + let regenerateAllContents = true + if (generateMode == GenerateRundownMode.MetadataChange) { + regenerateAllContents = + !existingRundown || + !_.isEqual( + convertRundownToBlueprintSegmentRundown(existingRundown, true), + convertRundownToBlueprintSegmentRundown(dbRundown, true) + ) + if (regenerateAllContents) { + logger.info(`MetaData of rundown ${dbRundown.externalId} has been modified, regenerating segments`) + } + } - removeRundown: false, - }) + return { + allRundownWatchedPackages, + regenerateAllContents, + } } /** @@ -225,6 +190,7 @@ export async function updateRundownMetadataFromIngestData( * @param showStyle ShowStyle to regenerate for * @param showStyleBlueprint ShowStyle Blueprint to regenerate with * @param allRundownWatchedPackages WatchedPackagesHelper for all packages belonging to the rundown + * @param extraRundownNotes Additional notes to add to the Rundown, produced earlier in the ingest process * @returns Generated documents or null if Blueprints reject the Rundown */ export async function regenerateRundownAndBaselineFromIngestData( @@ -234,7 +200,8 @@ rundownSource: RundownSource, showStyle: SelectedShowStyleVariant, showStyleBlueprint: ReadonlyDeep, - allRundownWatchedPackages: WatchedPackagesHelper + allRundownWatchedPackages: WatchedPackagesHelper, + extraRundownNotes: RundownNote[] ): Promise | null> { const rundownBaselinePackages = allRundownWatchedPackages.filter( context, @@ -297,15 +264,17 @@ } // Ensure the ids in the notes are clean - const rundownNotes = blueprintContext.notes.map((note) => - literal({ - type: note.type, - message: wrapTranslatableMessageFromBlueprints(note.message, translationNamespaces), - origin: { - name: `${showStyle.base.name}-${showStyle.variant.name}`, - }, - }) - ) + const rundownNotes = blueprintContext.notes + .map((note) => + literal({ + type: note.type, + message: wrapTranslatableMessageFromBlueprints(note.message, translationNamespaces), + origin: { + name: `${showStyle.base.name}-${showStyle.variant.name}`, + }, + }) + ) + .concat(extraRundownNotes) ingestModel.setRundownData( rundownRes.rundown, @@ -313,7 +282,8 @@ showStyle.variant, showStyleBlueprint, rundownSource, - rundownNotes + rundownNotes, + translateUserEditsFromBlueprint(rundownRes.rundown.userEditOperations, translationNamespaces) ) // get the rundown separately to ensure it exists now diff --git a/packages/job-worker/src/ingest/generationSegment.ts b/packages/job-worker/src/ingest/generationSegment.ts index e90d17dcdf..2898cef866 100644 --- a/packages/job-worker/src/ingest/generationSegment.ts +++ b/packages/job-worker/src/ingest/generationSegment.ts @@ -8,15 +8,15 @@ import {
postProcessAdLibActions, postProcessAdLibPieces, postProcessPieces } from '../blueprints/postProcess' import { logger } from '../logging' import { IngestModel, IngestModelReadonly, IngestReplaceSegmentType } from './model/IngestModel' -import { LocalIngestSegment, LocalIngestRundown } from './ingestCache' import { getSegmentId, canSegmentBeUpdated } from './lib' import { JobContext, ProcessedShowStyleCompound } from '../jobs' import { CommitIngestData } from './lock' import { BlueprintResultPart, BlueprintResultSegment, - IngestSegment, NoteSeverity, + SofieIngestRundown, + SofieIngestSegment, } from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' import { updateExpectedPackagesForPartModel } from './expectedPackages' @@ -24,12 +24,13 @@ import { IngestReplacePartType, IngestSegmentModel } from './model/IngestSegment import { ReadonlyDeep } from 'type-fest' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { WrappedShowStyleBlueprint } from '../blueprints/cache' +import { translateUserEditsFromBlueprint } from '../blueprints/context/lib' async function getWatchedPackagesHelper( context: JobContext, allRundownWatchedPackages0: WatchedPackagesHelper | null, ingestModel: IngestModelReadonly, - ingestSegments: LocalIngestSegment[] + ingestSegments: SofieIngestSegment[] ): Promise { if (allRundownWatchedPackages0) { return allRundownWatchedPackages0 @@ -50,7 +51,7 @@ async function getWatchedPackagesHelper( export async function calculateSegmentsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestSegments: LocalIngestSegment[], + ingestSegments: SofieIngestSegment[], allRundownWatchedPackages0: WatchedPackagesHelper | null ): Promise { const span = context.startSpan('ingest.rundownInput.calculateSegmentsFromIngestData') @@ -97,7 +98,7 @@ async function regenerateSegmentAndUpdateModelFull( blueprint: ReadonlyDeep, allRundownWatchedPackages: WatchedPackagesHelper, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment + ingestSegment: SofieIngestSegment ): Promise { // Ensure the parts are sorted by rank ingestSegment.parts.sort((a, b) => a.rank - b.rank) @@ -151,7 +152,7 @@ async function regenerateSegmentAndUpdateModel( showStyle: ReadonlyDeep, blueprint: ReadonlyDeep, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, watchedPackages: WatchedPackagesHelper ): Promise { const rundown = ingestModel.getRundown() @@ -214,7 +215,7 @@ async function generateSegmentWithBlueprints( showStyle: ReadonlyDeep, blueprint: ReadonlyDeep, rundown: ReadonlyDeep, - ingestSegment: IngestSegment, + ingestSegment: SofieIngestSegment, watchedPackages: WatchedPackagesHelper ): Promise<{ blueprintSegment: BlueprintResultSegment @@ -247,11 +248,10 @@ async function generateSegmentWithBlueprints( function createInternalErrorSegment( blueprintId: BlueprintId, - ingestSegment: LocalIngestSegment + ingestSegment: SofieIngestSegment ): IngestReplaceSegmentType { return { externalId: ingestSegment.externalId, - externalModified: ingestSegment.modified, _rank: ingestSegment.rank, notes: [ { @@ -275,7 +275,7 @@ function updateModelWithGeneratedSegment( context: JobContext, blueprintId: BlueprintId, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, blueprintSegment: BlueprintResultSegment, blueprintNotes: RawPartNote[] ): IngestSegmentModel { @@ -288,9 +288,11 @@ function 
updateModelWithGeneratedSegment( literal({ ...blueprintSegment.segment, externalId: ingestSegment.externalId, - externalModified: ingestSegment.modified, _rank: ingestSegment.rank, notes: segmentNotes, + userEditOperations: translateUserEditsFromBlueprint(blueprintSegment.segment.userEditOperations, [ + blueprintId, + ]), }) ) @@ -373,6 +375,7 @@ function updateModelWithGeneratedPart( ]), } : undefined, + userEditOperations: translateUserEditsFromBlueprint(blueprintPart.part.userEditOperations, [blueprintId]), }) // Update pieces @@ -479,7 +482,7 @@ function preserveOrphanedSegmentPositionInRundown( export async function updateSegmentFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestSegment: LocalIngestSegment, + ingestSegment: SofieIngestSegment, isNewSegment: boolean ): Promise { const span = context.startSpan('ingest.rundownInput.handleUpdatedPartInner') @@ -515,7 +518,7 @@ export async function updateSegmentFromIngestData( export async function regenerateSegmentsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, + ingestRundown: SofieIngestRundown, segmentIds: SegmentId[] ): Promise<{ result: CommitIngestData | null; skippedSegments: SegmentId[] }> { const span = context.startSpan('ingest.rundownInput.handleUpdatedPartInner') @@ -527,7 +530,7 @@ export async function regenerateSegmentsFromIngestData( const rundown = ingestModel.getRundown() const skippedSegments: SegmentId[] = [] - const ingestSegments: LocalIngestSegment[] = [] + const ingestSegments: SofieIngestSegment[] = [] for (const segmentId of segmentIds) { const segment = ingestModel.getSegment(segmentId) @@ -573,7 +576,7 @@ export async function regenerateSegmentsFromIngestData( export async function calculateSegmentsAndRemovalsFromIngestData( context: JobContext, ingestModel: IngestModel, - ingestRundown: LocalIngestRundown, + ingestRundown: SofieIngestRundown, allRundownWatchedPackages: WatchedPackagesHelper ): Promise<{ changedSegmentIds: SegmentId[]; removedSegmentIds: SegmentId[] }> { const changedSegmentIds = await calculateSegmentsFromIngestData( @@ -591,6 +594,8 @@ export async function calculateSegmentsAndRemovalsFromIngestData( removedSegmentIds.push(oldSegment.segment._id) changedSegmentIds.push(oldSegment.segment._id) oldSegment.setOrphaned(SegmentOrphanedReason.DELETED) + + oldSegment.removeAllParts() } return { changedSegmentIds, removedSegmentIds } diff --git a/packages/job-worker/src/ingest/ingestCache.ts b/packages/job-worker/src/ingest/ingestCache.ts deleted file mode 100644 index 6fcb5a5904..0000000000 --- a/packages/job-worker/src/ingest/ingestCache.ts +++ /dev/null @@ -1,219 +0,0 @@ -import { RundownId, SegmentId, IngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { - IngestDataCacheObj, - IngestCacheType, - IngestDataCacheObjRundown, - IngestDataCacheObjSegment, - IngestDataCacheObjPart, -} from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' -import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import { getCurrentTime } from '../lib' -import _ = require('underscore') -import { IngestRundown, IngestSegment, IngestPart } from '@sofie-automation/blueprints-integration' -import { JobContext } from '../jobs' -import { getPartId, getSegmentId } from './lib' -import { SetOptional } from 'type-fest' -import { groupByToMap, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' -import { AnyBulkWriteOperation } from 'mongodb' -import { 
diffAndReturnLatestObjects } from './model/implementation/utils' - -interface LocalIngestBase { - modified: number -} -export interface LocalIngestRundown extends IngestRundown, LocalIngestBase { - segments: LocalIngestSegment[] -} -export interface LocalIngestSegment extends IngestSegment, LocalIngestBase { - parts: LocalIngestPart[] -} -export interface LocalIngestPart extends IngestPart, LocalIngestBase {} -export function isLocalIngestRundown(o: IngestRundown | LocalIngestRundown): o is LocalIngestRundown { - return 'modified' in o -} -export function makeNewIngestRundown(ingestRundown: SetOptional): LocalIngestRundown { - return { - ...ingestRundown, - segments: ingestRundown.segments ? _.map(ingestRundown.segments, makeNewIngestSegment) : [], - modified: getCurrentTime(), - } -} -export function makeNewIngestSegment(ingestSegment: IngestSegment): LocalIngestSegment { - return { - ...ingestSegment, - parts: _.map(ingestSegment.parts, makeNewIngestPart), - modified: getCurrentTime(), - } -} -export function makeNewIngestPart(ingestPart: IngestPart): LocalIngestPart { - return { ...ingestPart, modified: getCurrentTime() } -} - -export class RundownIngestDataCache { - readonly #changedDocumentIds = new Set() - - private constructor( - private readonly context: JobContext, - private readonly rundownId: RundownId, - private documents: IngestDataCacheObj[] - ) {} - - static async create(context: JobContext, rundownId: RundownId): Promise { - const docs = await context.directCollections.IngestDataCache.findFetch({ rundownId }) - - return new RundownIngestDataCache(context, rundownId, docs) - } - - fetchRundown(): LocalIngestRundown | undefined { - const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') - - const cachedRundown = this.documents.find((e) => e.type === IngestCacheType.RUNDOWN) - if (!cachedRundown) { - span?.end() - return undefined - } - - const ingestRundown = cachedRundown.data as LocalIngestRundown - ingestRundown.modified = cachedRundown.modified - - const hasSegmentId = (obj: IngestDataCacheObj): obj is IngestDataCacheObjSegment | IngestDataCacheObjPart => { - return !!obj.segmentId - } - - const segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') - for (const objs of segmentMap.values()) { - const segmentEntry = objs.find((e) => e.type === IngestCacheType.SEGMENT) - if (segmentEntry) { - const ingestSegment = segmentEntry.data as LocalIngestSegment - ingestSegment.modified = segmentEntry.modified - - for (const entry of objs) { - if (entry.type === IngestCacheType.PART) { - const ingestPart = entry.data as LocalIngestPart - ingestPart.modified = entry.modified - - ingestSegment.parts.push(ingestPart) - } - } - - ingestSegment.parts = _.sortBy(ingestSegment.parts, (s) => s.rank) - ingestRundown.segments.push(ingestSegment) - } - } - - ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) - - span?.end() - return ingestRundown - } - - update(ingestRundown: LocalIngestRundown): void { - const cacheEntries: IngestDataCacheObj[] = generateCacheForRundown(this.rundownId, ingestRundown) - - this.documents = diffAndReturnLatestObjects(this.#changedDocumentIds, this.documents, cacheEntries) - } - - delete(): void { - // Mark each document for deletion - for (const doc of this.documents) { - this.#changedDocumentIds.add(doc._id) - } - - this.documents = [] - } - - async saveToDatabase(): Promise { - const documentsMap = normalizeArrayToMap(this.documents, '_id') - - const updates: AnyBulkWriteOperation[] = [] - 
const removedIds: IngestDataCacheObjId[] = [] - for (const changedId of this.#changedDocumentIds) { - const newDoc = documentsMap.get(changedId) - if (!newDoc) { - removedIds.push(changedId) - } else { - updates.push({ - replaceOne: { - filter: { - _id: changedId, - }, - replacement: newDoc, - upsert: true, - }, - }) - } - } - - if (removedIds.length) { - updates.push({ - deleteMany: { - filter: { - _id: { $in: removedIds as any }, - }, - }, - }) - } - - await this.context.directCollections.IngestDataCache.bulkWrite(updates) - } -} - -function generateCacheForRundown(rundownId: RundownId, ingestRundown: LocalIngestRundown): IngestDataCacheObj[] { - // cache the Data - const cacheEntries: IngestDataCacheObj[] = [] - const rundown: IngestDataCacheObjRundown = { - _id: protectString(unprotectString(rundownId)), - type: IngestCacheType.RUNDOWN, - rundownId: rundownId, - modified: ingestRundown.modified, - data: { - ..._.omit(ingestRundown, 'modified'), - segments: [], // omit the segments, they come as separate objects - }, - } - cacheEntries.push(rundown) - - for (const segment of ingestRundown.segments) { - cacheEntries.push(...generateCacheForSegment(rundownId, segment)) - } - - return cacheEntries -} -function generateCacheForSegment(rundownId: RundownId, ingestSegment: LocalIngestSegment): IngestDataCacheObj[] { - const segmentId = getSegmentId(rundownId, ingestSegment.externalId) - const cacheEntries: Array = [] - - const segment: IngestDataCacheObjSegment = { - _id: protectString(`${rundownId}_${segmentId}`), - type: IngestCacheType.SEGMENT, - rundownId: rundownId, - segmentId: segmentId, - modified: ingestSegment.modified, - data: { - ..._.omit(ingestSegment, 'modified'), - parts: [], // omit the parts, they come as separate objects - }, - } - cacheEntries.push(segment) - - for (const part of ingestSegment.parts) { - cacheEntries.push(generateCacheForPart(rundownId, segmentId, part)) - } - - return cacheEntries -} -function generateCacheForPart( - rundownId: RundownId, - segmentId: SegmentId, - part: LocalIngestPart -): IngestDataCacheObjPart { - const partId = getPartId(rundownId, part.externalId) - return { - _id: protectString(`${rundownId}_${partId}`), - type: IngestCacheType.PART, - rundownId: rundownId, - segmentId: segmentId, - partId: partId, - modified: part.modified, - data: _.omit(part, 'modified'), - } -} diff --git a/packages/job-worker/src/ingest/ingestPartJobs.ts b/packages/job-worker/src/ingest/ingestPartJobs.ts index 2886deeb24..5ed0b148bb 100644 --- a/packages/job-worker/src/ingest/ingestPartJobs.ts +++ b/packages/job-worker/src/ingest/ingestPartJobs.ts @@ -1,71 +1,90 @@ -import { getCurrentTime } from '../lib' import { JobContext } from '../jobs' -import { updateSegmentFromIngestData } from './generationSegment' -import { makeNewIngestPart } from './ingestCache' -import { runIngestJob } from './lock' import { IngestRemovePartProps, IngestUpdatePartProps } from '@sofie-automation/corelib/dist/worker/ingest' +import { UpdateIngestRundownChange } from './runOperation' +import { IngestChangeType, NrcsIngestPartChangeDetails } from '@sofie-automation/blueprints-integration' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Remove a Part from a Segment */ -export async function handleRemovedPart(context: JobContext, data: IngestRemovePartProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestSegment = ingestRundown.segments.find((s) => s.externalId 
=== data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.partExternalId) - ingestSegment.modified = getCurrentTime() +export function handleRemovedPart( + _context: JobContext, + data: IngestRemovePartProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + const partCountBefore = ingestSegment.parts.length + ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.partExternalId) + + if (partCountBefore === ingestSegment.parts.length) { + return { + // No change + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + }, + } satisfies UpdateIngestRundownChange + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + partChanges: { + [data.partExternalId]: NrcsIngestPartChangeDetails.Deleted, + }, + }, + }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } satisfies UpdateIngestRundownChange } /** * Insert or update a Part in a Segment */ -export async function handleUpdatedPart(context: JobContext, data: IngestUpdatePartProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.ingestPart.externalId) - ingestSegment.parts.push(makeNewIngestPart(data.ingestPart)) - ingestSegment.modified = getCurrentTime() +export function handleUpdatedPart( + _context: JobContext, + data: IngestUpdatePartProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + const partCountBefore = ingestSegment.parts.length + ingestSegment.parts = ingestSegment.parts.filter((p) => p.externalId !== data.ingestPart.externalId) + const isUpdate = partCountBefore !== ingestSegment.parts.length + + ingestSegment.parts.push(data.ingestPart) - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } 
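// The isUpdate flag computed above is what selects between the two change
// details returned below: the part was filtered out first, so a shrunken
// array means "it already existed" (Updated), otherwise it is brand new
// (Inserted). A sketch of the resulting change object, using the enums
// imported at the top of this file (the externalIds are illustrative):
//
//   {
//     ingestRundown,
//     changes: {
//       source: IngestChangeType.Ingest,
//       segmentChanges: {
//         'segment-A': {
//           partChanges: { 'part-1': NrcsIngestPartChangeDetails.Updated },
//         },
//       },
//     },
//   }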
+ return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + partChanges: { + [data.ingestPart.externalId]: isUpdate + ? NrcsIngestPartChangeDetails.Updated + : NrcsIngestPartChangeDetails.Inserted, + }, + }, + }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } satisfies UpdateIngestRundownChange } diff --git a/packages/job-worker/src/ingest/ingestRundownJobs.ts b/packages/job-worker/src/ingest/ingestRundownJobs.ts index 12c13dfb82..dc949929bb 100644 --- a/packages/job-worker/src/ingest/ingestRundownJobs.ts +++ b/packages/job-worker/src/ingest/ingestRundownJobs.ts @@ -1,11 +1,8 @@ import { JobContext } from '../jobs' import { logger } from '../logging' -import { updateRundownFromIngestData, updateRundownMetadataFromIngestData } from './generationRundown' -import { makeNewIngestRundown } from './ingestCache' -import { canRundownBeUpdated, getRundownId } from './lib' -import { CommitIngestData, runIngestJob, runWithRundownLock, UpdateIngestRundownAction } from './lock' +import { runWithRundownLock } from './lock' +import { getRundownId } from './lib' import { removeRundownFromDb } from '../rundownPlaylists' -import { literal } from '@sofie-automation/corelib/dist/lib' import { DBRundown, RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { IngestRegenerateRundownProps, @@ -15,36 +12,27 @@ import { UserRemoveRundownProps, UserUnsyncRundownProps, } from '@sofie-automation/corelib/dist/worker/ingest' -import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { ComputedIngestChangeAction, UpdateIngestRundownChange, UpdateIngestRundownResult } from './runOperation' +import { + IngestChangeType, + IngestRundown, + NrcsIngestRundownChangeDetails, +} from '@sofie-automation/blueprints-integration' +import { wrapGenericIngestJob } from './jobWrappers' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Attempt to remove a rundown, or orphan it */ -export async function handleRemovedRundown(context: JobContext, data: IngestRemoveRundownProps): Promise { - await runIngestJob( - context, - data, - () => { - // Remove it - return UpdateIngestRundownAction.DELETE - }, - async (_context, ingestModel) => { - const rundown = ingestModel.getRundown() - - const canRemove = data.forceDelete || canRundownBeUpdated(rundown, false) - if (!canRemove) throw UserError.create(UserErrorMessage.RundownRemoveWhileActive, { name: rundown.name }) - - return literal({ - changedSegmentIds: [], - removedSegmentIds: [], - renamedSegments: null, - removeRundown: true, - returnRemoveFailure: true, - }) - } - ) +export function handleRemovedRundown( + _context: JobContext, + data: IngestRemoveRundownProps, + _ingestRundown: IngestRundown | undefined +): UpdateIngestRundownResult { + // Remove it + return data.forceDelete ? 
ComputedIngestChangeAction.FORCE_DELETE : ComputedIngestChangeAction.DELETE } +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) /** * User requested removing a rundown @@ -81,8 +69,8 @@ } }) } else { - // The ids match, meaning the typical ingest operation flow will work - return handleRemovedRundown(context, { + // It's a real rundown, so defer to the proper route for deletion + return handleRemovedRundownWrapped(context, { rundownExternalId: tmpRundown.externalId, forceDelete: data.force, }) @@ -92,84 +80,66 @@ /** * Insert or update a rundown with a new IngestRundown */ -export async function handleUpdatedRundown(context: JobContext, data: IngestUpdateRundownProps): Promise { - return runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown || data.isCreateAction) { - // We want to regenerate unmodified - return makeNewIngestRundown(data.ingestRundown) - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`regenerateRundown lost the IngestRundown...`) +export function handleUpdatedRundown( + _context: JobContext, + data: IngestUpdateRundownProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown && !data.isCreateAction) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - return updateRundownFromIngestData( - context, - ingestModel, - ingestRundown, - data.isCreateAction, - data.rundownSource - ) - } - ) + return { + ingestRundown: { + ...data.ingestRundown, + rundownSource: data.rundownSource, + }, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange } /** * Update a rundown from a new IngestRundown (ignoring IngestSegments) */ -export async function handleUpdatedRundownMetaData( - context: JobContext, - data: IngestUpdateRundownMetaDataProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - return { - ...makeNewIngestRundown(data.ingestRundown), - segments: ingestRundown.segments, - } - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleUpdatedRundownMetaData lost the IngestRundown...`) +export function handleUpdatedRundownMetaData( + _context: JobContext, + data: IngestUpdateRundownMetaDataProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, data.rundownSource) - } - ) + return { + ingestRundown: { + ...data.ingestRundown, + rundownSource: data.rundownSource, + segments: ingestRundown.segments, + }, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, + } satisfies UpdateIngestRundownChange }
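// Note the pattern shared by the handlers above: they no longer drive the
// ingest model through runIngestJob callbacks, but return an
// UpdateIngestRundownChange whose `changes` field tells the downstream
// diffing how much work to do. Regenerate discards and rebuilds everything;
// Payload touches only rundown-level data while keeping the cached segments.
// A sketch of calling one of them directly, e.g. in a unit test (the
// `cachedRundown` fixture is hypothetical):
//
//   const result = handleUpdatedRundownMetaData(context, data, cachedRundown)
//   // result.ingestRundown -> written back to the ingest cache
//   // result.changes.rundownChanges === NrcsIngestRundownChangeDetails.Payload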
/** * Regenerate a Rundown from the cached IngestRundown */ -export async function handleRegenerateRundown(context: JobContext, data: IngestRegenerateRundownProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // We want to regenerate unmodified - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - // If the rundown is orphaned, then we can't regenerate as there wont be any data to use! - if (!ingestRundown) return null +export function handleRegenerateRundown( + _context: JobContext, + data: IngestRegenerateRundownProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - if (!ingestModel.rundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) - - return updateRundownFromIngestData(context, ingestModel, ingestRundown, false, ingestModel.rundown.source) - } - ) + return { + // We want to regenerate unmodified + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } satisfies UpdateIngestRundownChange } /** @@ -177,16 +147,16 @@ */ export async function handleUserUnsyncRundown(context: JobContext, data: UserUnsyncRundownProps): Promise { return runWithRundownLock(context, data.rundownId, async (rundown) => { - if (rundown) { - if (!rundown.orphaned) { - await context.directCollections.Rundowns.update(rundown._id, { - $set: { - orphaned: RundownOrphanedReason.MANUAL, - }, - }) - } else { - logger.info(`Rundown "${rundown._id}" was already unsynced`) - } + if (!rundown) return // Ignore if rundown is not found + + if (!rundown.orphaned) { + await context.directCollections.Rundowns.update(rundown._id, { + $set: { + orphaned: RundownOrphanedReason.MANUAL, + }, + }) + } else { + logger.info(`Rundown "${rundown._id}" was already unsynced`) } }) } diff --git a/packages/job-worker/src/ingest/ingestSegmentJobs.ts b/packages/job-worker/src/ingest/ingestSegmentJobs.ts index bfe4fc20a1..06ac710f40 100644 --- a/packages/job-worker/src/ingest/ingestSegmentJobs.ts +++ b/packages/job-worker/src/ingest/ingestSegmentJobs.ts @@ -1,11 +1,6 @@ -import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { getCurrentTime } from '../lib' import { JobContext } from '../jobs' -import { logger } from '../logging' -import { regenerateSegmentsFromIngestData, updateSegmentFromIngestData } from './generationSegment' -import { makeNewIngestSegment } from './ingestCache' -import { canSegmentBeUpdated, getSegmentId } from './lib' -import { CommitIngestData, runIngestJob, UpdateIngestRundownAction } from './lock' +import { regenerateSegmentsFromIngestData } from './generationSegment' +import { CommitIngestData } from './lock' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { literal } from '@sofie-automation/corelib/dist/lib' import { @@ -15,157 +10,140 @@ import { IngestUpdateSegmentRanksProps, RemoveOrphanedSegmentsProps, } from '@sofie-automation/corelib/dist/worker/ingest' +import { IngestUpdateOperationFunction, UpdateIngestRundownChange, UpdateIngestRundownResult } from './runOperation' +import { + IngestChangeType, + NrcsIngestSegmentChangeDetailsEnum, + SofieIngestRundown, +} from '@sofie-automation/blueprints-integration' +import { IngestModel } from './model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Regenerate a Segment from the cached IngestSegment */ -export async
function handleRegenerateSegment(context: JobContext, data: IngestRegenerateSegmentProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // Ensure the target segment exists in the cache - const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` - ) - } - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } +export function handleRegenerateSegment( + _context: JobContext, + data: IngestRegenerateSegmentProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + // Ensure the target segment exists in the cache + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === data.segmentExternalId) + if (!ingestSegment) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to update` + ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: { + payloadChanged: true, + }, + }, }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === data.segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${data.segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) - } - ) + } } /** * Attempt to remove a segment, or orphan it */ -export async function handleRemovedSegment(context: JobContext, data: IngestRemoveSegmentProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const oldSegmentsLength = ingestRundown.segments.length - ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== data.segmentExternalId) - ingestRundown.modified = getCurrentTime() - - if (ingestRundown.segments.length === oldSegmentsLength) { - throw new Error( - `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to remove` - ) - } - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } +export function handleRemovedSegment( + _context: JobContext, + data: IngestRemoveSegmentProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownChange { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + const oldSegmentsLength = ingestRundown.segments.length + ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== data.segmentExternalId) + + if (ingestRundown.segments.length === oldSegmentsLength) { + throw new Error( + `Rundown "${data.rundownExternalId}" does not have a Segment "${data.segmentExternalId}" to remove` + ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [data.segmentExternalId]: NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, }, - async (_context, ingestModel) => { - const rundown = ingestModel.getRundown() - const segmentId = getSegmentId(rundown._id, data.segmentExternalId) - const segment = ingestModel.getSegment(segmentId) - - if 
(!canSegmentBeUpdated(rundown, segment, false)) { - // segment has already been deleted - return null - } else { - return literal({ - changedSegmentIds: [], - removedSegmentIds: [segmentId], - renamedSegments: null, - - removeRundown: false, - }) - } - } - ) + } } /** * Insert or update a segment from a new IngestSegment */ -export async function handleUpdatedSegment(context: JobContext, data: IngestUpdateSegmentProps): Promise { +export function handleUpdatedSegment( + _context: JobContext, + data: IngestUpdateSegmentProps +): IngestUpdateOperationFunction { const segmentExternalId = data.ingestSegment.externalId - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== segmentExternalId) - ingestRundown.segments.push(makeNewIngestSegment(data.ingestSegment)) - ingestRundown.modified = getCurrentTime() - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => s.externalId === segmentExternalId) - if (!ingestSegment) throw new Error(`IngestSegment "${segmentExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, data.isCreateAction) + if (!segmentExternalId) throw new Error('Segment externalId must be set!') + + return (ingestRundown) => { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + const countBefore = ingestRundown.segments.length + ingestRundown.segments = ingestRundown.segments.filter((s) => s.externalId !== segmentExternalId) + if (countBefore === ingestRundown.segments.length && !data.isCreateAction) + throw new Error(`Segment "${data.ingestSegment.externalId}" not found`) + + ingestRundown.segments.push(data.ingestSegment) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [segmentExternalId]: NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, // This forces downstream to do a full diff themselves + }, + }, } - ) + } } /** * Update the ranks of the Segments in a Rundown */ -export async function handleUpdatedSegmentRanks( - context: JobContext, - data: IngestUpdateSegmentRanksProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // Update ranks on ingest data - for (const segment of ingestRundown.segments) { - segment.rank = data.newRanks[segment.externalId] ?? 
segment.rank - } - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (_context, ingestModel) => { - const changedSegmentIds: SegmentId[] = [] - for (const [externalId, rank] of Object.entries(data.newRanks)) { - const segment = ingestModel.getSegmentByExternalId(externalId) - if (segment) { - const changed = segment.setRank(rank) - - if (!changed) { - logger.warn(`Failed to update rank of segment "${externalId}" (${data.rundownExternalId})`) - } else { - changedSegmentIds.push(segment?.segment._id) - } - } - } - - return literal({ - changedSegmentIds, - removedSegmentIds: [], - renamedSegments: null, - removeRundown: false, - }) +export function handleUpdatedSegmentRanks( + _context: JobContext, + data: IngestUpdateSegmentRanksProps, + ingestRundown: IngestRundownWithSource | undefined +): UpdateIngestRundownResult { + if (!ingestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + let hasChange = false + + // Update ranks on ingest data + for (const segment of ingestRundown.segments) { + const newRank = Number(data.newRanks[segment.externalId]) + if (!isNaN(newRank)) { + segment.rank = newRank + hasChange = true } - ) + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: hasChange, + }, + } } /** @@ -173,70 +151,57 @@ export async function handleUpdatedSegmentRanks( */ export async function handleRemoveOrphanedSegemnts( context: JobContext, - data: RemoveOrphanedSegmentsProps -): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => ingestRundown ?? UpdateIngestRundownAction.DELETE, - async (_context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleRemoveOrphanedSegemnts lost the IngestRundown...`) - - // Find the segments that are still orphaned (in case they have resynced before this executes) - // We flag them for deletion again, and they will either be kept if they are somehow playing, or purged if they are not - const stillOrphanedSegments = ingestModel.getOrderedSegments().filter((s) => !!s.segment.orphaned) - - // Note: AdlibTesting segments are ignored here, as they will never be in the ingestModel - - const stillHiddenSegments = stillOrphanedSegments.filter( - (s) => - s.segment.orphaned === SegmentOrphanedReason.HIDDEN && - data.orphanedHiddenSegmentIds.includes(s.segment._id) - ) - - const stillDeletedSegmentIds = stillOrphanedSegments - .filter( - (s) => - s.segment.orphaned === SegmentOrphanedReason.DELETED && - data.orphanedDeletedSegmentIds.includes(s.segment._id) - ) - .map((s) => s.segment._id) - - const hiddenSegmentIds = ingestModel - .getOrderedSegments() - .filter((s) => !!stillHiddenSegments.find((a) => a.segment._id === s.segment._id)) - .map((s) => s.segment._id) - - const { result } = await regenerateSegmentsFromIngestData( - context, - ingestModel, - ingestRundown, - hiddenSegmentIds - ) - - const changedHiddenSegments = result?.changedSegmentIds ?? 
[] - - // Make sure any orphaned hidden segments arent marked as hidden - for (const segment of stillHiddenSegments) { - if (!changedHiddenSegments.includes(segment.segment._id)) { - if (segment.segment.isHidden && segment.segment.orphaned === SegmentOrphanedReason.HIDDEN) { - segment.setOrphaned(undefined) - changedHiddenSegments.push(segment.segment._id) - } - } - } + data: RemoveOrphanedSegmentsProps, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundown +): Promise { + // Find the segments that are still orphaned (in case they have resynced before this executes) + // We flag them for deletion again, and they will either be kept if they are somehow playing, or purged if they are not + const stillOrphanedSegments = ingestModel.getOrderedSegments().filter((s) => !!s.segment.orphaned) + + // Note: AdlibTesting segments are ignored here, as they will never be in the ingestModel + + const stillHiddenSegments = stillOrphanedSegments.filter( + (s) => + s.segment.orphaned === SegmentOrphanedReason.HIDDEN && data.orphanedHiddenSegmentIds.includes(s.segment._id) + ) - if (changedHiddenSegments.length === 0 && stillDeletedSegmentIds.length === 0) { - // Nothing could have changed, so take a shortcut and skip any saving - return null + const stillDeletedSegmentIds = stillOrphanedSegments + .filter( + (s) => + s.segment.orphaned === SegmentOrphanedReason.DELETED && + data.orphanedDeletedSegmentIds.includes(s.segment._id) + ) + .map((s) => s.segment._id) + + const hiddenSegmentIds = ingestModel + .getOrderedSegments() + .filter((s) => !!stillHiddenSegments.find((a) => a.segment._id === s.segment._id)) + .map((s) => s.segment._id) + + const { result } = await regenerateSegmentsFromIngestData(context, ingestModel, ingestRundown, hiddenSegmentIds) + + const changedHiddenSegments = result?.changedSegmentIds ?? 
[ ] + + // Make sure any orphaned hidden segments aren't marked as hidden + for (const segment of stillHiddenSegments) { + if (!changedHiddenSegments.includes(segment.segment._id)) { + if (segment.segment.isHidden && segment.segment.orphaned === SegmentOrphanedReason.HIDDEN) { + segment.setOrphaned(undefined) + changedHiddenSegments.push(segment.segment._id) } - - return literal({ - changedSegmentIds: changedHiddenSegments, - removedSegmentIds: stillDeletedSegmentIds, - renamedSegments: null, - removeRundown: false, - }) } - ) + } + + if (changedHiddenSegments.length === 0 && stillDeletedSegmentIds.length === 0) { + // Nothing could have changed, so take a shortcut and skip any saving + return null + } + + return literal({ + changedSegmentIds: changedHiddenSegments, + removedSegmentIds: stillDeletedSegmentIds, + renamedSegments: new Map(), + removeRundown: false, + }) } diff --git a/packages/job-worker/src/ingest/jobWrappers.ts b/packages/job-worker/src/ingest/jobWrappers.ts new file mode 100644 index 0000000000..c4fbc9024c --- /dev/null +++ b/packages/job-worker/src/ingest/jobWrappers.ts @@ -0,0 +1,83 @@ +import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' +import { JobContext } from '../jobs' +import { + IngestUpdateOperationFunction, + UpdateIngestRundownResult, + runCustomIngestUpdateOperation, + runIngestUpdateOperation, +} from './runOperation' +import { CommitIngestData } from './lock' +import { IngestModel } from './model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' + +/** + * Wrap a MOS-specific ingest job to be an ingest update operation, with a provided function which runs a precheck and returns the final ingestRundown mutator + * @param fcn Function to generate the ingestRundown mutator + */ +export function wrapMosIngestJob( + fcn: (context: JobContext, data: TData) => IngestUpdateOperationFunction | null +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + const executeFcn = fcn(context, data) + if (!executeFcn) return + + await runIngestUpdateOperation(context, data, (ingestRundown) => { + if (ingestRundown && ingestRundown.type !== 'mos') { + throw new Error(`Rundown "${data.rundownExternalId}" is not a MOS rundown`) + } + + return executeFcn(ingestRundown) + }) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function which can mutate the ingestRundown + * @param fcn Function to mutate the ingestRundown + */ +export function wrapGenericIngestJob( + fcn: ( + context: JobContext, + data: TData, + oldIngestRundown: IngestRundownWithSource | undefined + ) => UpdateIngestRundownResult +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + await runIngestUpdateOperation(context, data, (ingestRundown) => fcn(context, data, ingestRundown)) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function which runs a precheck and returns the final ingestRundown mutator + * @param fcn Function to generate the ingestRundown mutator + */ +export function wrapGenericIngestJobWithPrecheck( + fcn: (context: JobContext, data: TData) => IngestUpdateOperationFunction | null +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + const executeFcn = fcn(context, data) + if (!executeFcn) return + + await
runIngestUpdateOperation(context, data, (ingestRundown) => executeFcn(ingestRundown)) + } +} + +/** + * Wrap an ingest job to be an ingest update operation, with a provided function to run the job to modify the IngestModel + * @param fcn Function to mutate the IngestModel + */ +export function wrapCustomIngestJob( + fcn: ( + context: JobContext, + data: TData, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource + ) => Promise +): (context: JobContext, data: TData) => Promise { + return async (context, data) => { + await runCustomIngestUpdateOperation(context, data, async (_context, ingestModel, ingestRundown) => { + return fcn(context, data, ingestModel, ingestRundown) + }) + } +} diff --git a/packages/job-worker/src/ingest/lib.ts b/packages/job-worker/src/ingest/lib.ts index c059bf10e2..af031cce0f 100644 --- a/packages/job-worker/src/ingest/lib.ts +++ b/packages/job-worker/src/ingest/lib.ts @@ -4,7 +4,7 @@ import { getHash } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { ReadonlyDeep } from 'type-fest' import { logger } from '../logging' -import { ExtendedIngestRundown, IngestRundown } from '@sofie-automation/blueprints-integration' +import { ExtendedIngestRundown, SofieIngestRundown } from '@sofie-automation/blueprints-integration' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { convertRundownToBlueprints } from '../blueprints/context/lib' import { IngestSegmentModel } from './model/IngestSegmentModel' @@ -61,7 +61,7 @@ export function canSegmentBeUpdated( } export function extendIngestRundownCore( - ingestRundown: IngestRundown, + ingestRundown: SofieIngestRundown, existingDbRundown: ReadonlyDeep | undefined ): ExtendedIngestRundown { const extendedIngestRundown: ExtendedIngestRundown = { diff --git a/packages/job-worker/src/ingest/lock.ts b/packages/job-worker/src/ingest/lock.ts index 67999b33be..7645ff3abf 100644 --- a/packages/job-worker/src/ingest/lock.ts +++ b/packages/job-worker/src/ingest/lock.ts @@ -1,15 +1,7 @@ -import { SegmentId, PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestModel, IngestModelReadonly } from './model/IngestModel' -import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit' -import { LocalIngestRundown, RundownIngestDataCache } from './ingestCache' -import { getRundownId } from './lib' +import { SegmentId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { JobContext } from '../jobs' -import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { RundownLock } from '../jobs/lock' -import { UserError } from '@sofie-automation/corelib/dist/error' -import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel' -import { clone } from '@sofie-automation/corelib/dist/lib' /** * The result of the initial stage of an Ingest operation @@ -34,106 +26,6 @@ export interface CommitIngestData { returnRemoveFailure?: boolean } -export enum UpdateIngestRundownAction { - REJECT = 'reject', - DELETE = 'delete', -} - -/** - * Perform an ingest update operation on a rundown - * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes - * @param context Context of the job being run - * @param studioId Id of the studio the rundown belongs to - 
* @param rundownExternalId ExternalId of the rundown to lock - * @param updateCacheFcn Function to mutate the ingestData. Throw if the requested change is not valid. Return undefined to indicate the ingestData should be deleted - * @param calcFcn Function to run to update the Rundown. Return the blob of data about the change to help the post-update perform its duties. Return null to indicate that nothing changed - */ -export async function runIngestJob( - context: JobContext, - data: IngestPropsBase, - updateCacheFcn: ( - oldIngestRundown: LocalIngestRundown | undefined - ) => LocalIngestRundown | UpdateIngestRundownAction, - calcFcn: ( - context: JobContext, - ingestModel: IngestModel, - newIngestRundown: LocalIngestRundown | undefined, - oldIngestRundown: LocalIngestRundown | undefined - ) => Promise -): Promise { - if (!data.rundownExternalId) { - throw new Error(`Job is missing rundownExternalId`) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - return runWithRundownLockInner(context, rundownId, async (rundownLock) => { - const span = context.startSpan(`ingestLockFunction.${context.studioId}`) - - // Load the old ingest data - const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) - const ingestObjCache = await RundownIngestDataCache.create(context, rundownId) - - // Recalculate the ingest data - const oldIngestRundown = ingestObjCache.fetchRundown() - const updatedIngestRundown = updateCacheFcn(clone(oldIngestRundown)) - let newIngestRundown: LocalIngestRundown | undefined - switch (updatedIngestRundown) { - case UpdateIngestRundownAction.REJECT: - // Reject change - return rundownId - case UpdateIngestRundownAction.DELETE: - ingestObjCache.delete() - newIngestRundown = undefined - break - default: - ingestObjCache.update(updatedIngestRundown) - newIngestRundown = updatedIngestRundown - break - } - // Start saving the ingest data - const pSaveIngestChanges = ingestObjCache.saveToDatabase() - - let resultingError: UserError | void | undefined - - try { - const ingestModel = await pIngestModel - - // Load any 'before' data for the commit - const beforeRundown = ingestModel.rundown - const beforePartMap = generatePartMap(ingestModel) - - const span = context.startSpan('ingest.calcFcn') - const commitData = await calcFcn(context, ingestModel, newIngestRundown, oldIngestRundown) - span?.end() - - if (commitData) { - const span = context.startSpan('ingest.commit') - // The change is accepted. Perform some playout calculations and save it all - resultingError = await CommitIngestOperation( - context, - ingestModel, - beforeRundown, - beforePartMap, - commitData - ) - span?.end() - } else { - // Should be no changes - ingestModel.assertNoChanges() - } - } finally { - // Ensure we save the ingest data - await pSaveIngestChanges - - span?.end() - } - - if (resultingError) throw resultingError - - return rundownId - }) -} - /** * Run a minimal rundown job. 
/** * Run a minimal rundown job. This is an alternative to `runIngestJob`, for operations to operate on a Rundown without the full Ingest flow * This automatically acquires the RundownLock, loads the Rundown and does a basic access check @@ -151,7 +43,7 @@ export async function runWithRundownLock( throw new Error(`Job is missing rundownId`) } - return runWithRundownLockInner(context, rundownId, async (lock) => { + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (lock) => { const rundown = await context.directCollections.Rundowns.findOne(rundownId) if (rundown && rundown.studioId !== context.studioId) { throw new Error(`Job rundown "${rundownId}" not found or for another studio`) @@ -164,7 +56,7 @@ export async function runWithRundownLock( /** * Lock the rundown for a quick task without the cache */ -async function runWithRundownLockInner<TRes>( +export async function runWithRundownLockWithoutFetchingRundown<TRes>( context: JobContext, rundownId: RundownId, fcn: (lock: RundownLock) => Promise<TRes> @@ -178,17 +70,3 @@ async function runWithRundownLockInner( await rundownLock.release() } } - -function generatePartMap(ingestModel: IngestModelReadonly): BeforeIngestOperationPartMap { - const rundown = ingestModel.rundown - if (!rundown) return new Map() - - const res = new Map<SegmentId, Array<{ id: PartId; rank: number }>>() - for (const segment of ingestModel.getAllSegments()) { - res.set( - segment.segment._id, - segment.parts.map((p) => ({ id: p.part._id, rank: p.part._rank })) - ) - } - return res -} diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts index 305a4197cd..f58ad02439 100644 --- a/packages/job-worker/src/ingest/model/IngestModel.ts +++ b/packages/job-worker/src/ingest/model/IngestModel.ts @@ -15,6 +15,7 @@ import { SegmentId, } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBRundown, RundownOrphanedReason, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { LazyInitialiseReadonly } from '../../lib/lazy' @@ -109,7 +110,7 @@ export interface IngestModelReadonly { */ getSegment(id: SegmentId): IngestSegmentModelReadonly | undefined /** - * Get the Segments of this Rundown, in order + * Get the Segments of this Rundown, in no particular order */ getAllSegments(): IngestSegmentModelReadonly[] @@ -234,7 +235,8 @@ export interface IngestModel extends IngestModelReadonly, BaseModel { showStyleVariant: ReadonlyDeep<DBShowStyleVariant>, showStyleBlueprint: ReadonlyDeep<WrappedShowStyleBlueprint>, source: RundownSource, - rundownNotes: RundownNote[] + rundownNotes: RundownNote[], + userEdits: CoreUserEditingDefinition[] | undefined ): ReadonlyDeep<DBRundown> /** diff --git a/packages/job-worker/src/ingest/model/IngestSegmentModel.ts b/packages/job-worker/src/ingest/model/IngestSegmentModel.ts index d708cb5228..74010848d0 100644 --- a/packages/job-worker/src/ingest/model/IngestSegmentModel.ts +++ b/packages/job-worker/src/ingest/model/IngestSegmentModel.ts @@ -67,7 +67,7 @@ export interface IngestSegmentModel extends IngestSegmentModelReadonly { setOrphaned(orphaned: SegmentOrphanedReason | undefined): void /** - * Mark this Part as being hidden + * Mark this Segment as being hidden * @param hidden New hidden state */ setHidden(hidden: boolean): void
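A quick sketch (illustrative, not in this diff) of using the renamed `runWithRundownLockWithoutFetchingRundown` for a lightweight task; the `findOne` call mirrors the one in `runWithRundownLock` above:

```ts
// Take the RundownLock for a short-lived task without loading an IngestModel.
// The lock is released in a finally block, even if the callback throws.
const rundown = await runWithRundownLockWithoutFetchingRundown(context, rundownId, async (_lock) => {
	return context.directCollections.Rundowns.findOne(rundownId)
})
```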
diff --git a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts index 7cf7d4bfbf..377784191c 100644 --- a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts +++ b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts @@ -94,6 +94,10 @@ export class DocumentChangeTracker<TDoc extends { _id: ProtectedString<any> }> { } } + getDeletedIds(): TDoc['_id'][] { + return Array.from(this.#deletedIds.values()) + } + /** * Generate the mongodb BulkWrite operations for the documents known to this tracker * @returns mongodb BulkWrite operations diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index f441921f1a..747da75b21 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -20,6 +20,7 @@ import { import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { Piece, PieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBRundown, RundownOrphanedReason, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions' import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { RundownBaselineObj } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineObj' @@ -291,7 +292,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return undefined } /** - * Get the Segments of this Rundown, in order + * Get the Segments of this Rundown, in no particular order */ getAllSegments(): IngestSegmentModel[] { const segments: IngestSegmentModel[] = [] @@ -421,7 +422,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { showStyleVariant: ReadonlyDeep<DBShowStyleVariant>, showStyleBlueprint: ReadonlyDeep<WrappedShowStyleBlueprint>, source: RundownSource, - rundownNotes: RundownNote[] + rundownNotes: RundownNote[], + userEditOperations: CoreUserEditingDefinition[] | undefined ): ReadonlyDeep<DBRundown> { const newRundown = literal<Complete<DBRundown>>({ ...clone(rundownData as Complete<IBlueprintRundown>), @@ -432,6 +434,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { studioId: this.context.studio._id, showStyleVariantId: showStyleVariant._id, showStyleBaseId: showStyleBase._id, + userEditOperations: clone(userEditOperations), orphaned: undefined, importVersions: { diff --git a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts index c2995869cf..63dbd7fd7f 100644 --- a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts @@ -11,6 +11,8 @@ import { JobContext } from '../../../jobs' import { ExpectedPackagesStore } from './ExpectedPackagesStore' import { IngestSegmentModelImpl } from './IngestSegmentModelImpl' import { DocumentChangeTracker } from './DocumentChangeTracker' +import { logger } from '../../../logging' +import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' export class SaveIngestModelHelper { #expectedPackages = new DocumentChangeTracker()
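To make the new accessor concrete, a small hedged sketch (names illustrative) of what `getDeletedIds` yields; the `TDoc['_id']` return type keeps the id type tied to the tracked document, and the hunk that follows uses it purely for debug logging:

```ts
import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids'

// A tracker for DBPart documents; for this instantiation TDoc['_id'] is PartId.
const partTracker = new DocumentChangeTracker<DBPart>()
// ...ingest processing marks some parts as deleted...
const deletedPartIds: PartId[] = partTracker.getDeletedIds()
```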
@@ -55,6 +57,23 @@ export class SaveIngestModelHelper { } commit(context: JobContext): Array<Promise<unknown>> { + // Log deleted ids: + const deletedIds: { [key: string]: ProtectedString<any>[] } = { + expectedPackages: this.#expectedPackages.getDeletedIds(), + expectedPlayoutItems: this.#expectedPlayoutItems.getDeletedIds(), + expectedMediaItems: this.#expectedMediaItems.getDeletedIds(), + segments: this.#segments.getDeletedIds(), + parts: this.#parts.getDeletedIds(), + pieces: this.#pieces.getDeletedIds(), + adLibPieces: this.#adLibPieces.getDeletedIds(), + adLibActions: this.#adLibActions.getDeletedIds(), + } + for (const [key, ids] of Object.entries<ProtectedString<any>[]>(deletedIds)) { + if (ids.length > 0) { + logger.debug(`Deleted ${key}: ${JSON.stringify(ids)} `) + } + } + return [ context.directCollections.ExpectedPackages.bulkWrite(this.#expectedPackages.generateWriteOps()), context.directCollections.ExpectedPlayoutItems.bulkWrite(this.#expectedPlayoutItems.generateWriteOps()), diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap index d949eb684e..b234b72d5f 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap @@ -9,6 +9,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -174,38 +175,34 @@ exports[`Test recieved mos ingest payloads mosRoCreate 2`] = ` exports[`Test recieved mos ingest payloads mosRoCreate 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -221,7 +218,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_",
"title": "SEGMENT3;PART2", }, { @@ -231,7 +228,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -241,7 +238,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -251,7 +248,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -261,7 +258,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -271,7 +268,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -281,7 +278,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -291,7 +288,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -306,6 +303,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -471,29 +469,26 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 2`] = ` exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 3`] = ` [ { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], 
"rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -509,7 +504,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -519,7 +514,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -529,7 +524,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -539,7 +534,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -549,7 +544,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -559,7 +554,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -569,7 +564,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -579,7 +574,7 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -594,6 +589,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -762,38 +758,34 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 2`] = ` exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": 
"SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -809,7 +801,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -819,7 +811,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -829,7 +821,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -839,7 +831,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -849,7 +841,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -859,7 +851,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -877,7 +869,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` }, }, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -887,7 +879,7 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -904,6 +896,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ 
@@ -1072,38 +1065,34 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 2`] = ` exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1119,7 +1108,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -1129,7 +1118,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -1139,7 +1128,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1149,7 +1138,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1159,7 +1148,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -1169,7 +1158,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1179,7 +1168,7 @@ 
exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1189,7 +1178,7 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1204,6 +1193,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1370,38 +1360,34 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 2`] = ` exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1417,7 +1403,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -1427,7 +1413,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -1437,7 +1423,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1447,7 +1433,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": 
[], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1457,7 +1443,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -1467,7 +1453,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1477,7 +1463,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1487,7 +1473,7 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1502,6 +1488,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 1`] "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1670,20 +1657,18 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 2`] exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 3`] = ` [ { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1699,7 +1684,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -1709,7 +1694,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -1719,7 +1704,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -1729,7 +1714,7 @@ 
exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -1739,7 +1724,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -1749,7 +1734,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 4`] "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -1766,6 +1751,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -1934,38 +1920,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -1981,7 +1963,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -1991,7 +1973,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2000,7 +1982,7 @@ exports[`Test recieved mos ingest 
payloads mosRoStoryInsert: Into segment 4`] = "externalId": "ro1;s1;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;new1", }, { @@ -2010,7 +1992,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2020,7 +2002,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2030,7 +2012,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2040,7 +2022,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2050,7 +2032,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2060,7 +2042,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2077,6 +2059,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 1`] = ` "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2245,47 +2228,42 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 2`] = ` exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 4, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 4, + 
"externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "9VE_IbHiHyW6VjY6Fi8fMJEgtS4_", + "_id": "Rjo_e_rlOh2eE8XOyVmXZCMgTNY_", "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1B_ro1;s1b;newPart1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1b;newPart1", "name": "SEGMENT1B", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2301,7 +2279,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2311,7 +2289,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2320,7 +2298,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "externalId": "ro1;s1b;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "9VE_IbHiHyW6VjY6Fi8fMJEgtS4_", + "segmentId": "Rjo_e_rlOh2eE8XOyVmXZCMgTNY_", "title": "SEGMENT1B;new1", }, { @@ -2330,7 +2308,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2340,7 +2318,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2350,7 +2328,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2360,7 +2338,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2370,7 +2348,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2380,7 +2358,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 4`] = ` "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": 
"Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2397,6 +2375,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2565,38 +2544,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to end 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2612,7 +2587,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2622,7 +2597,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2632,7 +2607,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2642,7 +2617,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2652,7 +2627,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": 
"8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2662,7 +2637,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2672,7 +2647,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2682,7 +2657,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -2699,6 +2674,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -2867,38 +2843,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -2914,7 +2886,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -2924,7 +2896,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + 
"segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -2934,7 +2906,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -2944,7 +2916,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -2954,7 +2926,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -2964,7 +2936,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -2974,7 +2946,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -2984,7 +2956,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3001,6 +2973,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 1`] = "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3169,38 +3142,34 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 2`] = exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": 
"8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3216,7 +3185,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -3226,7 +3195,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -3235,7 +3204,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "externalId": "ro1;s1;newPart1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;new1", }, { @@ -3245,7 +3214,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -3255,7 +3224,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -3265,7 +3234,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -3275,7 +3244,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3285,7 +3254,7 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 4`] = "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3302,6 +3271,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3470,29 +3440,26 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments 3`] = ` [ { - "_id": "sLfUx9cadyquE07Vw9byoX35G9I_", + "_id": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p2", - "externalModified": 0, + "externalId": 
"SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p2", "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3508,7 +3475,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "sLfUx9cadyquE07Vw9byoX35G9I_", + "segmentId": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "title": "SEGMENT3;PART2", }, { @@ -3518,7 +3485,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART3", }, { @@ -3528,7 +3495,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -3538,7 +3505,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -3548,7 +3515,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "sLfUx9cadyquE07Vw9byoX35G9I_", + "segmentId": "o6BHLNEWMc9FbHBRRWMOiwQ3IN0_", "title": "SEGMENT3;PART1", }, { @@ -3558,7 +3525,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3568,7 +3535,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -3578,7 +3545,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3595,6 +3562,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], 
"organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -3763,65 +3731,58 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 3`] = ` [ { - "_id": "oLlO42uuh1jzxrJrFmnAqDH5_Do_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p2", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "zz3BgLI_xxlvfTOTR55skUkKWHk_", + "_rank": 4, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p2", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "apDVfF5nk1_StK474hEUxLMZIag_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p3", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "yVemxI_brsRMvHAeFVtG2tahCgU_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p2", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 6, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 5, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p3", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 5, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 6, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "6tE1R4qyvS2U8gUoAc23Y8R50UI_", - "_rank": 4, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p2", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -3837,7 +3798,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -3847,7 +3808,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -3857,7 +3818,7 @@ exports[`Test 
recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "oLlO42uuh1jzxrJrFmnAqDH5_Do_", + "segmentId": "yVemxI_brsRMvHAeFVtG2tahCgU_", "title": "SEGMENT2;PART2", }, { @@ -3867,7 +3828,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART3", }, { @@ -3877,7 +3838,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -3887,7 +3848,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -3897,7 +3858,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "6tE1R4qyvS2U8gUoAc23Y8R50UI_", + "segmentId": "zz3BgLI_xxlvfTOTR55skUkKWHk_", "title": "SEGMENT1;PART2", }, { @@ -3907,7 +3868,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments2 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -3924,6 +3885,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -4092,41 +4054,37 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same segment 3`] = ` [ { - "_id": "apDVfF5nk1_StK474hEUxLMZIag_", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", + "notes": [], + "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", + }, + { + "_id": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p3", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p3", "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, - { - "_id": 
"8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", - "notes": [], - "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - }, ] `; @@ -4139,7 +4097,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -4149,7 +4107,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -4159,7 +4117,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -4169,7 +4127,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART3", }, { @@ -4179,7 +4137,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -4189,7 +4147,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART1", }, { @@ -4199,7 +4157,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "apDVfF5nk1_StK474hEUxLMZIag_", + "segmentId": "o0rZ5k7WadtZ2XSmf_c3txGILuw_", "title": "SEGMENT1;PART2", }, { @@ -4209,7 +4167,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART1", }, ] @@ -4226,6 +4184,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 1 "modified": 0, "name": "All effect1 into clip combinations", "nextPartInfo": null, + "notes": [], "organizationId": null, "previousPartInfo": null, "rundownIdsInOrder": [ @@ -4394,38 +4353,34 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 2 exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 3`] = ` [ { - "_id": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", - "_rank": 3, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s4;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "rSEZMzZhJ55454sqsU_7TOq_DIk_", + "_rank": 2, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s3;p1", + "name": "SEGMENT3", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": 
"Qz1OqWVatX_W4Sp5C0m8VhTTfME_", - "_rank": 1, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT2_ro1;s2;p1", - "externalModified": 0, - "name": "SEGMENT2", + "_id": "baQfD5zawLDmJTRumGpHDH2MwaM_", + "_rank": 0, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s1;p1", + "name": "SEGMENT1", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", - "_rank": 0, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT1_ro1;s1;p1", - "externalModified": 0, - "name": "SEGMENT1", + "_id": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", + "_rank": 3, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s4;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, { - "_id": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", - "_rank": 2, - "externalId": "5meLdE_m5k28xXw1vtX2JX8mSYQ__SEGMENT3_ro1;s3;p1", - "externalModified": 0, - "name": "SEGMENT3", + "_id": "6cEU5uY8M93lfQssMy9XaGxT23E_", + "_rank": 1, + "externalId": "SLENPS01;P_NDSL\\W;68E40DE6-2D08-487D-aaaaa_ro1;s2;p1", + "name": "SEGMENT2", "notes": [], "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", }, @@ -4441,7 +4396,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART2", }, { @@ -4451,7 +4406,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "XF9ZBDI5IouvkmTbounEfoJ6ijY_", + "segmentId": "YXMZjMqslZFcM3K4sGelyBYJ_rA_", "title": "SEGMENT2;PART3", }, { @@ -4461,7 +4416,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", "title": "SEGMENT2;PART2", }, { @@ -4471,7 +4426,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART3", }, { @@ -4481,7 +4436,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "8GUNgE7zUulco2K3yuhJ1Fyceeo_", + "segmentId": "rSEZMzZhJ55454sqsU_7TOq_DIk_", "title": "SEGMENT3;PART1", }, { @@ -4491,7 +4446,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART1", }, { @@ -4501,7 +4456,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "MCxHIjO7_t3PRHpLiX0vbzwx4gg_", + "segmentId": "baQfD5zawLDmJTRumGpHDH2MwaM_", "title": "SEGMENT1;PART2", }, { @@ -4511,7 +4466,7 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 4 "notes": [], "privateData": {}, "rundownId": "5meLdE_m5k28xXw1vtX2JX8mSYQ_", - "segmentId": "Qz1OqWVatX_W4Sp5C0m8VhTTfME_", + "segmentId": "6cEU5uY8M93lfQssMy9XaGxT23E_", 
"title": "SEGMENT2;PART1", }, ] diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts deleted file mode 100644 index a53f8f43b8..0000000000 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/diffSegmentEntries.test.ts +++ /dev/null @@ -1,555 +0,0 @@ -import * as _ from 'underscore' - -import { diffSegmentEntries, compileSegmentEntries } from '../diff' -import { IngestSegment } from '@sofie-automation/blueprints-integration' -import { LocalIngestSegment } from '../../ingestCache' - -function clone(o: T): T { - return JSON.parse(JSON.stringify(o)) -} -function recalculateRank(ingestSegments: IngestSegment[]) { - ingestSegments.sort((a, b) => { - if (a.rank < b.rank) return -1 - if (a.rank > b.rank) return 1 - return 0 - }) - _.each(ingestSegments, (ingestSegment, i) => { - ingestSegment.rank = i - }) -} -describe('Ingest: MOS', () => { - describe('diffSegmentEntries', () => { - const ingestSegments: LocalIngestSegment[] = [ - { - rank: 0, - externalId: 'first', - name: 'Overblik', - parts: [ - { - name: 'AA3D07094F51297F', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;C355011E-B1E2-425E-AA3D07094F51297F', - modified: Date.now(), - }, - { - name: 'AC9369C6A140CEBB', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;484B952B-DE0F-40A0-AC9369C6A140CEBB', - modified: Date.now(), - }, - { - name: '8DAE5BF534A0EAD8', - rank: 2, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;F285B8FA-BD5F-492B-8DAE5BF534A0EAD8', - modified: Date.now(), - }, - { - name: 'B7D35BBDBFD9A4D2', - rank: 3, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;AE66B8D8-4595-4CA5-B7D35BBDBFD9A4D2', - modified: Date.now(), - }, - { - name: '8A872A00510269E', - rank: 4, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;5C4EE8B8-5459-4A94-8A872A00510269E8', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 1, - externalId: 'second', - name: 'Møller og DBU', - parts: [ - { - name: 'BB605A012DFAF93E', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;0D1D077D-9720-4560-BB605A012DFAF93E', - modified: Date.now(), - }, - { - name: 'B21E0F016576BC73', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D7CCC920-28E9-41AC-B21E0F016576BC73', - modified: Date.now(), - }, - { - name: '8E100AB374A15DEA', - rank: 2, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D87D86D3-FD0A-42AC-8E100AB374A15DEA', - modified: Date.now(), - }, - { - name: '86360F634827C56A', - rank: 3, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;950A200E-31CA-4DEA-86360F634827C56A', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 2, - externalId: 'third', - name: 'Webhenvisning TV 2 Sporten', - parts: [ - { - name: 'A0C24CCA21FE9969', - rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;56CB0445-5782-4F92-A0C24CCA21FE9969', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - { - rank: 3, - externalId: 'fourth', - name: 'Møller og DBU', - parts: [ - { - name: 'B41C095014F35C2E', - 
rank: 0, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;3487F683-5939-4A37-B41C095014F35C2E', - modified: Date.now(), - }, - { - name: 'B9D0B70BA3F30F69', - rank: 1, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;A0BF2B88-EE9E-40B7-B9D0B70BA3F30F69', - modified: Date.now(), - }, - { - name: '87B4F8206386BBDD', - rank: 2, - externalId: - '2012R2ENPS8VM;P_ENPSMOS\\W\\F_HOLD ROs\\R_B10067B2-434B-4CF3-AFB1A02EEF8760CB;D05B62C7-19F8-4CD7-87B4F8206386BBDD', - modified: Date.now(), - }, - ], - modified: Date.now(), - }, - ] - - test('No changes', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - const diff = diffSegmentEntries(segmentEntries, segmentEntries, null) - expect(_.keys(diff.added)).toHaveLength(0) - expect(_.keys(diff.changed)).toHaveLength(0) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['first', 'second', 'third', 'fourth']) - }) - test('Remove middle of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(1, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(1, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(1, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove start of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(0, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(0, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - 
expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(0, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove end of segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(4, 1) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(3, 1) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(2, 1) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Remove whole segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice(0, 1) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toHaveLength(3) - expect(diff.onlyRankChanged).toEqual({ - second: 0, - third: 1, - fourth: 2, - }) - expect(_.keys(diff.removed)).toEqual(['first']) - expect(_.keys(diff.unchanged)).toHaveLength(0) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice(1, 1) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toHaveLength(2) - expect(diff2.onlyRankChanged).toEqual({ - third: 1, - fourth: 2, - // { oldRank: 2, newRank: 1 }, - // { oldRank: 3, newRank: 2 } - }) - expect(_.keys(diff2.removed)).toEqual(['second']) - expect(_.keys(diff2.unchanged)).toEqual(['first']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4.splice(3, 1) - recalculateRank(ingestSegments4) - const segmentEntries4 = 
compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - // expect(_.keys(diff3.changed)).toHaveLength(3) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toEqual(['fourth']) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - test('Remove to combine segments', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[1].parts = ingestSegments2[1].parts.concat(ingestSegments2[3].parts) - - ingestSegments2.splice(2, 2) - // ingestSegments2.splice(3, 1) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['second']) - expect(_.keys(diff.onlyRankChanged)).toEqual([]) - expect(_.keys(diff.removed)).toEqual(['third', 'fourth']) - expect(_.keys(diff.unchanged)).toEqual(['first']) - }) - - test('Rename/replace segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].externalId = 'NEW' - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.added)).toEqual(['NEW']) - expect(_.keys(diff.changed)).toEqual([]) - expect(_.keys(diff.removed)).toEqual(['first']) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - expect(diff.externalIdChanged).toEqual({ - first: 'NEW', - }) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].externalId = 'NEW' - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.added)).toEqual(['NEW']) - expect(_.keys(diff2.changed)).toEqual([]) - expect(_.keys(diff2.removed)).toEqual(['second']) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(diff2.externalIdChanged).toEqual({ - second: 'NEW', - }) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].externalId = 'NEW' - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.added)).toEqual(['NEW']) - expect(_.keys(diff3.changed)).toEqual([]) - expect(_.keys(diff3.removed)).toEqual(['fourth']) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(diff3.externalIdChanged).toEqual({ - fourth: 'NEW', - }) - }) - - test('Insert into segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2[0].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries2['first'].parts.splice(2, 0, 'abc') - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first']) - expect(_.keys(diff.onlyRankChanged)).toHaveLength(0) - 
expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['second', 'third', 'fourth']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3[1].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries3['second'].parts.splice(2, 0, 'abc') - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second']) - expect(_.keys(diff2.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'third', 'fourth']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4[3].parts.splice(2, 0, { name: 'abc', rank: 2.5, externalId: 'abc', modified: Date.now() }) - // segmentEntries4['fourth'].parts.splice(2, 0, 'abc') - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.changed)).toEqual(['fourth']) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third']) - }) - - test('Insert new segment', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice(0, 0, { - rank: -1, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.changed)).toEqual(['first', 'second', 'third', 'fourth']) - expect(diff.onlyRankChanged).toEqual({ - first: 1, - second: 2, - third: 3, - fourth: 4, - }) - expect(_.keys(diff.added)).toEqual(['new']) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toHaveLength(0) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice(1, 0, { - rank: 0.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.changed)).toEqual(['second', 'third', 'fourth']) - expect(diff2.onlyRankChanged).toEqual({ - second: 2, - third: 3, - fourth: 4, - }) - expect(_.keys(diff.added)).toEqual(['new']) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first']) - - // Last segment - const ingestSegments4 = clone(ingestSegments) - ingestSegments4.splice(-1, 0, { - rank: 99, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }) - recalculateRank(ingestSegments4) - const segmentEntries4 = compileSegmentEntries(ingestSegments4) - - const diff3 = diffSegmentEntries(segmentEntries, segmentEntries4, null) - expect(_.keys(diff3.onlyRankChanged)).toHaveLength(0) - expect(_.keys(diff3.removed)).toHaveLength(0) - expect(_.keys(diff.added)).toEqual(['new']) - 
expect(_.keys(diff3.changed)).toEqual([]) - expect(_.keys(diff3.unchanged)).toEqual(['first', 'second', 'third', 'fourth']) - }) - test('Insert new segment, split existing', async () => { - const segmentEntries = compileSegmentEntries(ingestSegments) - // // First segment - const ingestSegments2 = clone(ingestSegments) - ingestSegments2.splice( - 1, - 0, - { - rank: 0.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }, - { - rank: 0.7, - externalId: 'new2', - name: 'New Name2', - parts: [{ name: 'abc2', rank: 0, externalId: 'abc2', modified: Date.now() }], - modified: Date.now(), - } - ) - recalculateRank(ingestSegments2) - const segmentEntries2 = compileSegmentEntries(ingestSegments2) - - const diff = diffSegmentEntries(segmentEntries, segmentEntries2, null) - expect(_.keys(diff.added)).toEqual(['new', 'new2']) - expect(_.keys(diff.changed)).toEqual(['second', 'third', 'fourth']) - expect(diff.onlyRankChanged).toEqual({ - second: 3, - third: 4, - fourth: 5, - }) - expect(_.keys(diff.removed)).toHaveLength(0) - expect(_.keys(diff.unchanged)).toEqual(['first']) - - // Middle segment - const ingestSegments3 = clone(ingestSegments) - ingestSegments3.splice( - 2, - 0, - { - rank: 1.5, - externalId: 'new', - name: 'New Name', - parts: [{ name: 'abc', rank: 0, externalId: 'abc', modified: Date.now() }], - modified: Date.now(), - }, - { - rank: 1.7, - externalId: 'new2', - name: 'New Name2', - parts: [{ name: 'abc2', rank: 0, externalId: 'abc2', modified: Date.now() }], - modified: Date.now(), - } - ) - recalculateRank(ingestSegments3) - const segmentEntries3 = compileSegmentEntries(ingestSegments3) - - const diff2 = diffSegmentEntries(segmentEntries, segmentEntries3, null) - expect(_.keys(diff2.added)).toEqual(['new', 'new2']) - expect(_.keys(diff2.changed)).toEqual(['third', 'fourth']) - expect(diff2.onlyRankChanged).toEqual({ - third: 4, - fourth: 5, - }) - expect(_.keys(diff2.removed)).toHaveLength(0) - expect(_.keys(diff2.unchanged)).toEqual(['first', 'second']) - }) - }) -}) diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts index 3dadee7153..eb560b3a8a 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mock-mos-data.ts @@ -85,19 +85,23 @@ export const mockRO = { segmentIdMap: () => [ { parts: ['ro1;s1;p1', 'ro1;s1;p2', 'ro1;s1;p3'], - segmentId: 'MCxHIjO7_t3PRHpLiX0vbzwx4gg_', + segmentName: 'SEGMENT1', + segmentId: 'baQfD5zawLDmJTRumGpHDH2MwaM_', }, { parts: ['ro1;s2;p1', 'ro1;s2;p2'], - segmentId: 'Qz1OqWVatX_W4Sp5C0m8VhTTfME_', + segmentName: 'SEGMENT2', + segmentId: '6cEU5uY8M93lfQssMy9XaGxT23E_', }, { parts: ['ro1;s3;p1', 'ro1;s3;p2'], - segmentId: '8GUNgE7zUulco2K3yuhJ1Fyceeo_', + segmentName: 'SEGMENT3', + segmentId: 'rSEZMzZhJ55454sqsU_7TOq_DIk_', }, { parts: ['ro1;s4;p1'], - segmentId: 'XF9ZBDI5IouvkmTbounEfoJ6ijY_', + segmentName: 'SEGMENT2', + segmentId: 'YXMZjMqslZFcM3K4sGelyBYJ_rA_', }, ], newItem: (id: string, slug: string): MOS.IMOSROStory => diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts index 6e495e96b6..f251bf1586 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mosIngest.test.ts @@ -3,8 +3,7 @@ 
import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' -import _ = require('underscore') +import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { sortPartsInSortedSegments, sortSegmentsInRundowns } from '@sofie-automation/corelib/dist/playout/playlist' import { handleMosDeleteStory, @@ -14,7 +13,7 @@ import { handleMosSwapStories, } from '../mosStoryJobs' import { handleMosRundownData, handleMosRundownReadyToAir, handleMosRundownStatus } from '../mosRundownJobs' -import { parseMosString } from '../lib' +import { getMosIngestSegmentExternalId, parseMosString } from '../lib' import { MockJobContext, setupDefaultJobEnvironment } from '../../../__mocks__/context' import { setupMockIngestDevice, setupMockShowStyleCompound } from '../../../__mocks__/presetCollections' import { fixSnapshot } from '../../../__mocks__/helpers/snapshot' @@ -23,18 +22,19 @@ import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { MongoQuery } from '../../../db' import { handleRemovedRundown } from '../../ingestRundownJobs' import { MOS } from '@sofie-automation/corelib' -import { literal } from '@sofie-automation/corelib/dist/lib' -import { IngestCacheType } from '@sofie-automation/corelib/dist/dataModel/IngestDataCache' +import { groupByToMap, literal, normalizeArrayToMap, omit } from '@sofie-automation/corelib/dist/lib' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' import { getPartId } from '../../lib' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { handleSetNextPart } from '../../../playout/setNextJobs' import { handleTakeNextPart } from '../../../playout/take' import { handleActivateRundownPlaylist, handleDeactivateRundownPlaylist } from '../../../playout/activePlaylistJobs' import { removeRundownPlaylistFromDb } from '../../__tests__/lib' +import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' +import { wrapCustomIngestJob, wrapGenericIngestJob, wrapMosIngestJob } from '../../jobWrappers' jest.mock('../../updateNext') import { ensureNextPartIsValid } from '../../updateNext' -import { UserErrorMessage } from '@sofie-automation/corelib/dist/error' type TensureNextPartIsValid = jest.MockedFunction<typeof ensureNextPartIsValid> const ensureNextPartIsValidMock = ensureNextPartIsValid as TensureNextPartIsValid @@ -43,18 +43,33 @@ const mosTypes = MOS.getMosTypes(true) function getPartIdMap(segments: DBSegment[], parts: DBPart[]) { const sortedParts = sortPartsInSortedSegments(parts, segments) - const groupedParts = _.groupBy(sortedParts, (p) => unprotectString(p.segmentId)) - const arr: [string, DBPart[]][] = _.pairs(groupedParts) - const idMap = _.map(arr, (g) => ({ - segmentId: protectString(g[0]), - parts: _.map(g[1], (p) => p.externalId), + const segmentMap = normalizeArrayToMap(segments, '_id') + + const groupedParts = groupByToMap(sortedParts, 'segmentId') + const arr: [SegmentId, DBPart[]][] = Array.from(groupedParts.entries()) + const idMap = arr.map((group) => ({ + segmentId: group[0], + segmentName: segmentMap.get(group[0])?.name ??
null, + parts: group[1].map((p) => p.externalId), })) - return _.sortBy(idMap, (s) => { - const obj = _.find(segments, (s2) => s2._id === s.segmentId) - return obj ? obj._rank : 99999 + + return idMap.sort((a, b) => { + const aRank = segmentMap.get(a.segmentId)?._rank ?? 99999 + const bRank = segmentMap.get(b.segmentId)?._rank ?? 99999 + + return aRank - bRank }) } +const handleMosDeleteStoryWrapped = wrapMosIngestJob(handleMosDeleteStory) +const handleMosFullStoryWrapped = wrapMosIngestJob(handleMosFullStory) +const handleMosInsertStoriesWrapped = wrapMosIngestJob(handleMosInsertStories) +const handleMosMoveStoriesWrapped = wrapMosIngestJob(handleMosMoveStories) +const handleMosSwapStoriesWrapped = wrapMosIngestJob(handleMosSwapStories) +const handleMosRundownDataWrapped = wrapMosIngestJob(handleMosRundownData) +const handleRemovedRundownWrapped = wrapGenericIngestJob(handleRemovedRundown) +const handleMosRundownReadyToAirWrapped = wrapCustomIngestJob(handleMosRundownReadyToAir) + function createRundownSource(peripheralDevice: PeripheralDevice): RundownSource { return { type: 'nrcs', @@ -87,7 +102,7 @@ describe('Test recieved mos ingest payloads', () => { // Reset RO const roData = mockRO.roCreate() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -150,7 +165,7 @@ describe('Test recieved mos ingest payloads', () => { await expect(context.mockCollections.Rundowns.findOne()).resolves.toBeFalsy() const roData = mockRO.roCreate() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -184,7 +199,7 @@ describe('Test recieved mos ingest payloads', () => { await context.mockCollections.Rundowns.findOne({ externalId: mosTypes.mosString128.stringify(roData.ID) }) ).toBeTruthy() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -221,7 +236,7 @@ describe('Test recieved mos ingest payloads', () => { await context.mockCollections.Rundowns.findOne({ externalId: mosTypes.mosString128.stringify(roData.ID) }) ).toBeTruthy() - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: parseMosString(roData.ID), mosRunningOrder: roData, isUpdateOperation: false, @@ -255,7 +270,7 @@ describe('Test recieved mos ingest payloads', () => { expect(await context.mockCollections.RundownPlaylists.findOne(rundown.playlistId)).toBeTruthy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) ).rejects.toMatchUserError(UserErrorMessage.RundownRemoveWhileActive) @@ -277,7 +292,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown.orphaned).toBeFalsy() expect(await context.mockCollections.RundownPlaylists.findOne(rundown.playlistId)).toBeTruthy() - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) @@ -292,7 +307,7 @@ describe('Test recieved mos ingest payloads', () => { expect(await context.mockCollections.RundownPlaylists.findOne()).toBeFalsy() await expect( - handleRemovedRundown(context, { + handleRemovedRundownWrapped(context, { rundownExternalId: parseMosString(roData.ID), }) 
).rejects.toThrow(/Rundown.*not found/i) @@ -363,7 +378,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown.status).not.toEqual(newStatus.toString()) expect((rundown.privateData as any)?.airStatus).not.toEqual(newStatus.toString()) - await handleMosRundownReadyToAir(context, { + await handleMosRundownReadyToAirWrapped(context, { rundownExternalId: rundown.externalId, status: newStatus, }) @@ -386,7 +401,7 @@ describe('Test recieved mos ingest payloads', () => { expect(rundown).toBeTruthy() expect(rundown.status).not.toEqual(newStatus.toString()) - await handleMosRundownReadyToAir(context, { + await handleMosRundownReadyToAirWrapped(context, { rundownExternalId: rundown.externalId, status: newStatus, }) @@ -405,7 +420,7 @@ describe('Test recieved mos ingest payloads', () => { expect(await context.mockCollections.Rundowns.findOne({ externalId: externalId })).toBeFalsy() await expect( - handleMosRundownReadyToAir(context, { + handleMosRundownReadyToAirWrapped(context, { rundownExternalId: externalId, status: newStatus, }) @@ -421,7 +436,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart1', 'SEGMENT1;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -441,9 +456,9 @@ describe('Test recieved mos ingest payloads', () => { // Clean up after ourselves: const partsToRemove = await context.mockCollections.Parts.findFetch({ externalId: 'ro1;s1;newPart1' }) await context.mockCollections.Parts.remove({ _id: { $in: partsToRemove.map((p) => p._id) } }) - await context.mockCollections.IngestDataCache.remove({ + await context.mockCollections.NrcsIngestDataCache.remove({ rundownId: rundown._id, - type: IngestCacheType.PART, + type: NrcsIngestCacheType.PART, partId: { $in: partsToRemove.map((p) => p._id) }, }) }) @@ -459,7 +474,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart2', 'SEGMENT1;new2') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -485,7 +500,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1b;newPart1', 'SEGMENT1B;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], @@ -498,12 +513,13 @@ describe('Test recieved mos ingest payloads', () => { const partMap = mockRO.segmentIdMap() partMap.splice(1, 0, { - segmentId: '9VE_IbHiHyW6VjY6Fi8fMJEgtS4_', + segmentId: 'Rjo_e_rlOh2eE8XOyVmXZCMgTNY_', + segmentName: 'SEGMENT1B', parts: [mosTypes.mosString128.stringify(newPartData.ID)], }) - partMap[2].segmentId = 'Qz1OqWVatX_W4Sp5C0m8VhTTfME_' - partMap[3].segmentId = '8GUNgE7zUulco2K3yuhJ1Fyceeo_' - partMap[4].segmentId = 'XF9ZBDI5IouvkmTbounEfoJ6ijY_' + partMap[2].segmentId = '6cEU5uY8M93lfQssMy9XaGxT23E_' + partMap[3].segmentId = 'rSEZMzZhJ55454sqsU_7TOq_DIk_' + partMap[4].segmentId = 'YXMZjMqslZFcM3K4sGelyBYJ_rA_' expect(getPartIdMap(segments, parts)).toEqual(partMap) await expectRundownToMatchSnapshot(rundown._id, true, true) @@ -520,7 +536,7 @@ describe('Test recieved mos ingest payloads', () 
=> { const beforeStoryId = mosTypes.mosString128.create('newFakePart') await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, newStories: [newPartData], @@ -546,14 +562,16 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.roCreate().Stories[0] await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], replace: false, }) ).rejects.toThrow( - `Parts ${mosTypes.mosString128.stringify(newPartData.ID)} already exist in rundown ${rundown.externalId}` + `Parts ${getMosIngestSegmentExternalId( + mosTypes.mosString128.stringify(newPartData.ID) + )} already exist in rundown ${rundown.externalId}` ) }) @@ -595,7 +613,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart1', 'SEGMENT1;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p2'), newStories: [newPartData], @@ -624,7 +642,7 @@ describe('Test recieved mos ingest payloads', () => { const newPartData = mockRO.newItem('ro1;s1;newPart2', 'SEGMENT1;new2') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p3'), newStories: [newPartData], @@ -650,7 +668,7 @@ describe('Test recieved mos ingest payloads', () => { const beforeStoryId = mosTypes.mosString128.create('fakeId2') await expect( - handleMosInsertStories(context, { + handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, newStories: [newPartData], @@ -678,7 +696,7 @@ describe('Test recieved mos ingest payloads', () => { const partExternalIds = ['ro1;s3;p1', 'ro1;s3;p2'] - await handleMosDeleteStory(context, { + await handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: partExternalIds.map((i) => mosTypes.mosString128.create(i)), }) @@ -704,11 +722,11 @@ describe('Test recieved mos ingest payloads', () => { const partExternalIds = ['ro1;s1;p2', 'fakeId'] await expect( - handleMosDeleteStory(context, { + handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: partExternalIds.map((i) => mosTypes.mosString128.create(i)), }) - ).rejects.toThrow(`Parts fakeId in rundown ${rundown.externalId} were not found`) + ).rejects.toThrow(`Parts segment-fakeId in rundown ${rundown.externalId} were not found`) expect(await context.mockCollections.Parts.findFetch({ externalId: { $in: partExternalIds } })).toHaveLength(1) }) @@ -725,7 +743,7 @@ describe('Test recieved mos ingest payloads', () => { Body: [], }) - await handleMosFullStory(context, { + await handleMosFullStoryWrapped(context, { rundownExternalId: rundown.externalId, story: story, }) @@ -750,7 +768,7 @@ describe('Test recieved mos ingest payloads', () => { }) await expect( - handleMosFullStory(context, { + handleMosFullStoryWrapped(context, { rundownExternalId: rundown.externalId, story: story, }) @@ -772,13 +790,11 @@ describe('Test recieved mos ingest payloads', () => { }) await expect( - handleMosFullStory(context, { + handleMosFullStoryWrapped(context, { rundownExternalId: 
mosTypes.mosString128.stringify(story.RunningOrderId), story: story, }) - ).rejects.toThrow( - `handleMosFullStory: Missing MOS Rundown "${mosTypes.mosString128.stringify(story.RunningOrderId)}"` - ) + ).rejects.toThrow(`Rundown "${mosTypes.mosString128.stringify(story.RunningOrderId)}" not found`) }) test('mosRoStorySwap: Within same segment', async () => { @@ -793,7 +809,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p2') const story1 = mosTypes.mosString128.create('ro1;s1;p3') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -823,7 +839,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p1') const story1 = mosTypes.mosString128.create('ro1;s1;p3') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -834,7 +850,7 @@ describe('Test recieved mos ingest payloads', () => { const { segments, parts } = await getRundownData({ _id: rundown._id }) const partMap = mockRO.segmentIdMap() - partMap[0].segmentId = 'apDVfF5nk1_StK474hEUxLMZIag_' + partMap[0].segmentId = 'o0rZ5k7WadtZ2XSmf_c3txGILuw_' partMap[0].parts[0] = 'ro1;s1;p3' partMap[0].parts[2] = 'ro1;s1;p1' expect(getPartIdMap(segments, parts)).toEqual(partMap) @@ -843,20 +859,24 @@ describe('Test recieved mos ingest payloads', () => { }) test('mosRoStorySwap: Swap with self', async () => { + await resetOrphanedRundown() + const rundown = (await context.mockCollections.Rundowns.findOne()) as DBRundown expect(rundown).toBeTruthy() const story0 = mosTypes.mosString128.create('ro1;s1;p1') - await expect( - handleMosSwapStories(context, { - rundownExternalId: rundown.externalId, - story0, - story1: story0, - }) - ).rejects.toThrow( - `Cannot swap part ${mosTypes.mosString128.stringify(story0)} with itself in rundown ${rundown.externalId}` - ) + // Swap should happen without error + await handleMosSwapStoriesWrapped(context, { + rundownExternalId: rundown.externalId, + story0, + story1: story0, + }) + + // should match the default + const { segments, parts } = await getRundownData({ _id: rundown._id }) + const partMap = mockRO.segmentIdMap() + expect(getPartIdMap(segments, parts)).toEqual(partMap) }) test('mosRoStorySwap: Story not found', async () => { @@ -867,7 +887,7 @@ describe('Test recieved mos ingest payloads', () => { const story1 = mosTypes.mosString128.create('ro1;s1;p99') await expect( - handleMosSwapStories(context, { + handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -875,7 +895,7 @@ describe('Test recieved mos ingest payloads', () => { ).rejects.toThrow(`Story ${mosTypes.mosString128.stringify(story1)} not found in rundown ${rundown.externalId}`) await expect( - handleMosSwapStories(context, { + handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0: story1, story1: story0, @@ -895,7 +915,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s3;p1') const story1 = mosTypes.mosString128.create('ro1;s4;p1') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -907,7 +927,7 @@ describe('Test recieved mos ingest payloads', () => { const partMap = mockRO.segmentIdMap() partMap[1].parts.push('ro1;s4;p1') - 
partMap[2].segmentId = 'sLfUx9cadyquE07Vw9byoX35G9I_' + partMap[2].segmentId = 'o6BHLNEWMc9FbHBRRWMOiwQ3IN0_' partMap[2].parts.reverse() partMap.splice(3, 1) expect(getPartIdMap(segments, parts)).toEqual(partMap) @@ -927,7 +947,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = mosTypes.mosString128.create('ro1;s1;p2') const story1 = mosTypes.mosString128.create('ro1;s2;p2') - await handleMosSwapStories(context, { + await handleMosSwapStoriesWrapped(context, { rundownExternalId: rundown.externalId, story0, story1, @@ -935,7 +955,46 @@ describe('Test recieved mos ingest payloads', () => { expect(ensureNextPartIsValid).toHaveBeenCalledTimes(1) - // Don't care about the result here, just making sure there isnt an exception while updating the db + const { segments, parts } = await getRundownData({ _id: rundown._id }) + + const partMap = [ + { + parts: ['ro1;s1;p1'], + segmentName: 'SEGMENT1', + segmentId: 'baQfD5zawLDmJTRumGpHDH2MwaM_', + }, + { + parts: ['ro1;s2;p2'], + segmentName: 'SEGMENT2', + segmentId: 'yVemxI_brsRMvHAeFVtG2tahCgU_', + }, + { + parts: ['ro1;s1;p3'], + segmentName: 'SEGMENT1', + segmentId: 'o0rZ5k7WadtZ2XSmf_c3txGILuw_', + }, + { + parts: ['ro1;s2;p1'], + segmentName: 'SEGMENT2', + segmentId: '6cEU5uY8M93lfQssMy9XaGxT23E_', + }, + { + parts: ['ro1;s1;p2'], + segmentName: 'SEGMENT1', + segmentId: 'zz3BgLI_xxlvfTOTR55skUkKWHk_', + }, + { + parts: ['ro1;s3;p1', 'ro1;s3;p2'], + segmentName: 'SEGMENT3', + segmentId: 'rSEZMzZhJ55454sqsU_7TOq_DIk_', + }, + { + parts: ['ro1;s4;p1'], + segmentName: 'SEGMENT2', + segmentId: 'YXMZjMqslZFcM3K4sGelyBYJ_rA_', + }, + ] + expect(getPartIdMap(segments, parts)).toEqual(partMap) await expectRundownToMatchSnapshot(rundown._id, true, true) }) @@ -951,7 +1010,7 @@ describe('Test recieved mos ingest payloads', () => { const story0 = 'ro1;s1;p3' - await handleMosMoveStories(context, { + await handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s1;p2'), stories: [mosTypes.mosString128.create(story0)], @@ -983,7 +1042,7 @@ describe('Test recieved mos ingest payloads', () => { mosTypes.mosString128.create('ro1;s1;p3'), ] - await handleMosMoveStories(context, { + await handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create(''), stories, @@ -1014,13 +1073,15 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - handleMosMoveStories(context, { + handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} was not found in rundown ${rundown.externalId}` + `Part ${getMosIngestSegmentExternalId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) }) @@ -1036,13 +1097,15 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - handleMosMoveStories(context, { + handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, }) ).rejects.toThrow( - `Part ${mosTypes.mosString128.stringify(beforeStoryId)} was not found in rundown ${rundown.externalId}` + `Part ${getMosIngestSegmentExternalId(mosTypes.mosString128.stringify(beforeStoryId))} in rundown ${ + rundown.externalId + } not found` ) }) @@ -1060,7 +1123,7 @@ describe('Test recieved mos ingest payloads', () => { ] await expect( - 
handleMosMoveStories(context, { + handleMosMoveStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: beforeStoryId, stories, @@ -1093,7 +1156,7 @@ describe('Test recieved mos ingest payloads', () => { expect(partsInSegmentBefore).toHaveLength(3) // This should only remove the first part in the segment. The other parts will be regenerated - await handleMosDeleteStory(context, { + await handleMosDeleteStoryWrapped(context, { rundownExternalId: rundown.externalId, stories: [mosTypes.mosString128.create(partExternalId)], }) @@ -1110,9 +1173,9 @@ describe('Test recieved mos ingest payloads', () => { expect(partsInSegmentAfter).toHaveLength(2) // The other parts in the segment should not not have changed: - expect(partsInSegmentAfter[0]).toMatchObject(_.omit(partsInSegmentBefore[1], ['segmentId', '_rank'])) + expect(partsInSegmentAfter[0]).toMatchObject(omit(partsInSegmentBefore[1], 'segmentId', '_rank')) - expect(partsInSegmentAfter[1]).toMatchObject(_.omit(partsInSegmentBefore[2], ['segmentId', '_rank'])) + expect(partsInSegmentAfter[1]).toMatchObject(omit(partsInSegmentBefore[2], 'segmentId', '_rank')) }) async function mosReplaceBasicStory( @@ -1121,7 +1184,7 @@ describe('Test recieved mos ingest payloads', () => { newStoryId: string, newStoryName: string ): Promise { - return handleMosInsertStories(context, { + return handleMosInsertStoriesWrapped(context, { rundownExternalId: runningOrderId, insertBeforeStoryId: mosTypes.mosString128.create(oldStoryId), newStories: literal>([ @@ -1148,7 +1211,7 @@ describe('Test recieved mos ingest payloads', () => { const newSegment = newSegments.find((s) => s.name === newName) if (newSegment) { const oldSegmentId = oldSegment._id - expect(oldSegmentId).not.toEqual(newSegment._id) // If the id doesn't change, then the whole test is invalid + expect(oldSegmentId).toEqual(newSegment._id) // If the id doesn't change, then the whole test is invalid oldSegment.name = newSegment.name oldSegment._id = newSegment._id oldSegment.externalId = newSegment.externalId @@ -1229,7 +1292,7 @@ describe('Test recieved mos ingest payloads', () => { // cleanup await handleDeactivateRundownPlaylist(context, { playlistId: rundown.playlistId, - }) + }).catch(() => null) } }) @@ -1273,7 +1336,7 @@ describe('Test recieved mos ingest payloads', () => { } // regenerate the rundown - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: rundown.externalId, mosRunningOrder: mosRO, isUpdateOperation: false, @@ -1307,7 +1370,7 @@ describe('Test recieved mos ingest payloads', () => { // cleanup await handleDeactivateRundownPlaylist(context, { playlistId: rundown.playlistId, - }) + }).catch(() => null) } }) @@ -1327,7 +1390,7 @@ describe('Test recieved mos ingest payloads', () => { roData1.ID = mosTypes.mosString128.create('Rundown1') roData1.Slug = mosTypes.mosString128.create('Test Rundown 1') ;(roData1 as any).ForcePlaylistExternalId = 'playlist1' - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(roData1.ID), mosRunningOrder: roData1, isUpdateOperation: false, @@ -1338,7 +1401,7 @@ describe('Test recieved mos ingest payloads', () => { roData2.ID = mosTypes.mosString128.create('Rundown2') roData2.Slug = mosTypes.mosString128.create('Test Rundown 2') ;(roData2 as any).ForcePlaylistExternalId = 'playlist1' - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: 
mosTypes.mosString128.stringify(roData2.ID), mosRunningOrder: roData2, isUpdateOperation: false, @@ -1364,7 +1427,7 @@ describe('Test recieved mos ingest payloads', () => { expect(playlist.name).not.toEqual(rundown2.name) // Remove the first rundown in the playlist - await handleRemovedRundown(context, { + await handleRemovedRundownWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(roData1.ID), }) expect(await context.mockCollections.Rundowns.findOne(rundown1._id)).toBeFalsy() @@ -1384,7 +1447,7 @@ describe('Test recieved mos ingest payloads', () => { const mosRO = mockRO.roCreate() // regenerate the rundown - await handleMosRundownData(context, { + await handleMosRundownDataWrapped(context, { rundownExternalId: mosTypes.mosString128.stringify(mosRO.ID), mosRunningOrder: mosRO, isUpdateOperation: false, @@ -1401,7 +1464,7 @@ describe('Test recieved mos ingest payloads', () => { // insert a part after segment1 const newPartData = mockRO.newItem('ro1;s2a;newPart1', 'SEGMENT2pre;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: mosTypes.mosString128.create('ro1;s2;p1'), newStories: [newPartData], @@ -1435,7 +1498,7 @@ describe('Test recieved mos ingest payloads', () => { // Replace the story with itself, but different slug const replacementPartData = mockRO.newItem('ro1;s2a;newPart1', 'SEGMENT2;new1') - await handleMosInsertStories(context, { + await handleMosInsertStoriesWrapped(context, { rundownExternalId: rundown.externalId, insertBeforeStoryId: replacementPartData.ID, newStories: [replacementPartData], diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts b/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts new file mode 100644 index 0000000000..e6182af5fe --- /dev/null +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/mosStoryJobs.spec.ts @@ -0,0 +1,622 @@ +import { setupDefaultJobEnvironment } from '../../../__mocks__/context' +import { clone } from '@sofie-automation/corelib/dist/lib' +import { + IngestChangeType, + MOS, + NrcsIngestPartChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { + handleMosDeleteStory, + handleMosFullStory, + handleMosInsertStories, + handleMosMoveStories, + handleMosSwapStories, +} from '../mosStoryJobs' +import { IngestUpdateOperationFunction, UpdateIngestRundownChange } from '../../runOperation' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' + +function getDefaultIngestRundown(): IngestRundownWithSource { + return { + externalId: 'rundown0', + type: 'mos', + name: 'Rundown', + rundownSource: { type: 'http' }, + payload: undefined, + segments: [ + { + externalId: 'segment-part0', + name: 'Part 0', + rank: 0, + payload: undefined, + parts: [ + { + externalId: 'part0', + name: 'Part 0', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part1', + name: 'Part 1', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'part1', + name: 'Part 1', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part2', + name: 'Part 2', + rank: 2, + payload: undefined, + parts: [ + { + externalId: 'part2', + name: 'Part 2', + rank: 0, + payload: undefined, + }, + ], + }, + { + externalId: 'segment-part3', + name: 'Part 3', + rank: 3, + payload: undefined, + parts: [ + { + externalId: 'part3', + name: 'Part 3', + rank: 0, + 
payload: undefined, + }, + ], + }, + ], + } +} + +const mosTypes = MOS.getMosTypes(false) + +describe('handleMosDeleteStory', () => { + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + expect( + handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [], + }) + ).toBeNull() + }) + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [mosTypes.mosString128.create('story0')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [mosTypes.mosString128.create('story0')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/in rundown(.*)were not found/) + }) + + it('mixed found and missing', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [ + mosTypes.mosString128.create('story0'), // missing + mosTypes.mosString128.create('part1'), // exists + ], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/in rundown(.*)were not found/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosDeleteStory(context, { + rundownExternalId: 'rundown0', + stories: [ + mosTypes.mosString128.create('part1'), // exists + ], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': NrcsIngestSegmentChangeDetailsEnum.Deleted, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosFullStory', () => { + const newMosStory: MOS.IMOSROFullStory = { + ID: mosTypes.mosString128.create('part1'), + RunningOrderId: mosTypes.mosString128.create('rundown0'), + Body: [ + { + itemType: 'other', + Type: 'p', + Content: 'Hello World!', + }, + ], + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: clone(newMosStory), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: { + ...clone(newMosStory), + ID: mosTypes.mosString128.create('storyX'), + }, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Missing MOS Story(.*)in Rundown/) + }) + + it('good', () => { + const context = setupDefaultJobEnvironment() + + const 
ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosFullStory(context, { + rundownExternalId: 'rundown0', + story: clone(newMosStory), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments[1].parts[0].payload = newMosStory + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': { + partChanges: { + part1: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosInsertStories', () => { + function createMockStory(id: string, slug: string): MOS.IMOSROStory { + return { + ID: mosTypes.mosString128.create(id), + Slug: mosTypes.mosString128.create(slug), + Items: [], + } + } + + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: null, + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [], + insertBeforeStoryId: null, + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('unknown insertBeforeStoryId', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('storyX'), + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Part (.*)in rundown(.*)not found/) + }) + + it('insert in middle', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + replace: false, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 0, { + externalId: 'segment-partX', + name: 'Part X', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + ingestRundown.segments[2].rank = 2 + ingestRundown.segments[3].rank = 3 + ingestRundown.segments[4].rank = 4 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-partX': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('insert in middle, with replace', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 
'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + replace: true, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.splice(1, 1, { + externalId: 'segment-partX', + name: 'Part X', + rank: 1, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-part1': NrcsIngestSegmentChangeDetailsEnum.Deleted, + 'segment-partX': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('insert at end', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + const newStory = createMockStory('partX', 'Part X') + + const executeJob = handleMosInsertStories(context, { + rundownExternalId: 'rundown0', + newStories: [newStory], + insertBeforeStoryId: null, + replace: true, + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + ingestRundown.segments.push({ + externalId: 'segment-partX', + name: 'Part X', + rank: 4, + payload: undefined, + parts: [ + { + externalId: 'partX', + name: 'Part X', + rank: 0, + payload: undefined, + }, + ], + }) + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + 'segment-partX': NrcsIngestSegmentChangeDetailsEnum.InsertedOrUpdated, + }, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosSwapStories', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('swap with itself', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part1'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('missing story0', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('partX'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Story (.*)not found in rundown(.*)/) + }) + + it('missing story1', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('partX'), + }) as 
IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/Story (.*)not found in rundown(.*)/) + }) + + it('swap', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosSwapStories(context, { + rundownExternalId: 'rundown0', + story0: mosTypes.mosString128.create('part1'), + story1: mosTypes.mosString128.create('part3'), + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart3 = ingestRundown.segments.splice(3, 1, ingestRundown.segments[1]) + ingestRundown.segments.splice(1, 1, ...oldPart3) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[3].rank = 3 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) + +describe('handleMosMoveStories', () => { + it('no rundown', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('part3')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(undefined)).toThrow(/Rundown(.*)not found/) + }) + + it('no stories', () => { + const context = setupDefaultJobEnvironment() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + stories: [], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeNull() + }) + + it('missing story', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('partX'), mosTypes.mosString128.create('part3')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + expect(() => executeJob(clone(ingestRundown))).toThrow(/were not found(.*)in rundown/) + }) + + it('move to end', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: null, + stories: [mosTypes.mosString128.create('part1')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart1 = ingestRundown.segments.splice(1, 1) + ingestRundown.segments.push(...oldPart1) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[2].rank = 2 + ingestRundown.segments[3].rank = 3 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) + + it('move to middle', () => { + const context = setupDefaultJobEnvironment() + + const ingestRundown = getDefaultIngestRundown() + + const executeJob = handleMosMoveStories(context, { + rundownExternalId: 'rundown0', + insertBeforeStoryId: mosTypes.mosString128.create('part1'), + stories: [mosTypes.mosString128.create('part2')], + }) as IngestUpdateOperationFunction + expect(executeJob).toBeTruthy() + + const 
changes = executeJob(clone(ingestRundown)) + + // update the expected ingestRundown + const oldPart2 = ingestRundown.segments.splice(2, 1) + ingestRundown.segments.splice(1, 0, ...oldPart2) + ingestRundown.segments[1].rank = 1 + ingestRundown.segments[2].rank = 2 + + expect(changes).toEqual({ + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } satisfies UpdateIngestRundownChange) + }) +}) diff --git a/packages/job-worker/src/ingest/mosDevice/diff.ts b/packages/job-worker/src/ingest/mosDevice/diff.ts deleted file mode 100644 index 32f42a7d5d..0000000000 --- a/packages/job-worker/src/ingest/mosDevice/diff.ts +++ /dev/null @@ -1,305 +0,0 @@ -import { JobContext } from '../../jobs' -import { ReadonlyDeep } from 'type-fest' -import { IngestModel } from '../model/IngestModel' -import { LocalIngestRundown, LocalIngestSegment } from '../ingestCache' -import { canRundownBeUpdated, getSegmentId } from '../lib' -import { calculateSegmentsFromIngestData } from '../generationSegment' -import _ = require('underscore') -import { clone, deleteAllUndefinedProperties, literal, normalizeArrayFunc } from '@sofie-automation/corelib/dist/lib' -import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { IngestSegment } from '@sofie-automation/blueprints-integration' -import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { CommitIngestData } from '../lock' -import { IngestSegmentModel } from '../model/IngestSegmentModel' - -/** - * Update the Ids of Segments based on new Ingest data - * This assumes that no segments/parts were added or removed between the two LocalIngestRundowns provided - * @param context Context of the job being run - * @param ingestModel Ingest model for Rundown being updated - * @param oldIngestRundown Last known ingest data - * @param newIngestRundown New ingest data - * @returns Map of the SegmentId changes - */ -export function diffAndUpdateSegmentIds( - context: JobContext, - ingestModel: IngestModel, - oldIngestRundown: ReadonlyDeep<LocalIngestRundown>, - newIngestRundown: ReadonlyDeep<LocalIngestRundown> -): CommitIngestData['renamedSegments'] { - const span = context.startSpan('mosDevice.ingest.diffAndApplyChanges') - - const oldSegments = ingestModel.getOrderedSegments() - const oldSegmentEntries = compileSegmentEntries(oldIngestRundown.segments) - const newSegmentEntries = compileSegmentEntries(newIngestRundown.segments) - const segmentDiff = diffSegmentEntries(oldSegmentEntries, newSegmentEntries, oldSegments) - - // Updated segments that has had their segment.externalId changed: - const renamedSegments = applyExternalIdDiff(ingestModel, segmentDiff, false) - - span?.end() - return renamedSegments -} - -/** - * Update the Rundown for new Ingest data - * Performs a diff of the ingest data, and applies the changes including re-running blueprints on any changed segments - * @param context Context of the job being run - * @param ingestModel Ingest model for Rundown being updated - * @param newIngestRundown New ingest data (if any) - * @param oldIngestRundown Last known ingest data (if any) - * @returns Map of the SegmentId changes - */ -export async function diffAndApplyChanges( - context: JobContext, - ingestModel: IngestModel, - newIngestRundown: ReadonlyDeep<LocalIngestRundown> | undefined, - oldIngestRundown: ReadonlyDeep<LocalIngestRundown> | undefined - // newIngestParts: AnnotatedIngestPart[] -): Promise<CommitIngestData | null> { - if (!newIngestRundown) throw new Error(`diffAndApplyChanges lost the new IngestRundown...`) - if (!oldIngestRundown) throw new Error(`diffAndApplyChanges lost the old IngestRundown...`) - - const rundown = ingestModel.getRundown() - if (!canRundownBeUpdated(rundown, false)) return null - - const span = context.startSpan('mosDevice.ingest.diffAndApplyChanges') - - // Fetch all existing segments: - const oldSegments = ingestModel.getOrderedSegments() - - const oldSegmentEntries = compileSegmentEntries(oldIngestRundown.segments) - const newSegmentEntries = compileSegmentEntries(newIngestRundown.segments) - const segmentDiff = diffSegmentEntries(oldSegmentEntries, newSegmentEntries, oldSegments) - - // Note: We may not need to do some of these quick updates anymore, but they are cheap so can stay for now - - // Update segment ranks: - for (const [segmentExternalId, newRank] of Object.entries(segmentDiff.onlyRankChanged)) { - const segment = ingestModel.getSegmentByExternalId(segmentExternalId) - if (segment) { - segment.setRank(newRank) - } - } - - // Updated segments that has had their segment.externalId changed: - const renamedSegments = applyExternalIdDiff(ingestModel, segmentDiff, true) - - // Figure out which segments need to be regenerated - const segmentsToRegenerate = Object.values(segmentDiff.added) - for (const changedSegment of Object.values(segmentDiff.changed)) { - // Rank changes are handled above - if (!segmentDiff.onlyRankChanged[changedSegment.externalId]) { - segmentsToRegenerate.push(changedSegment) - } - } - - // Create/Update segments - const changedSegmentIds = await calculateSegmentsFromIngestData( - context, - ingestModel, - _.sortBy(segmentsToRegenerate, (se) => se.rank), - null - ) - - // Remove/orphan old segments - const orphanedSegmentIds: SegmentId[] = [] - for (const segmentExternalId of Object.keys(segmentDiff.removed)) { - const segment = ingestModel.getSegmentByExternalId(segmentExternalId) - if (segment) { - // We orphan it and queue for deletion. the commit phase will complete if possible - orphanedSegmentIds.push(segment.segment._id) - segment.setOrphaned(SegmentOrphanedReason.DELETED) - - segment.removeAllParts() - } - } - - span?.end() - return literal<CommitIngestData>({ - changedSegmentIds: changedSegmentIds, - removedSegmentIds: orphanedSegmentIds, // Only inform about the ones that werent renamed - renamedSegments: renamedSegments, - - removeRundown: false, - }) -} - -/** - * Apply the externalId renames from a DiffSegmentEntries - * @param ingestModel Ingest model of the rundown being updated - * @param segmentDiff Calculated Diff - * @returns Map of the SegmentId changes - */ -function applyExternalIdDiff( - ingestModel: IngestModel, - segmentDiff: Pick<DiffSegmentEntries, 'externalIdChanged' | 'onlyRankChanged'>, - canDiscardParts: boolean -): CommitIngestData['renamedSegments'] { - // Updated segments that has had their segment.externalId changed: - const renamedSegments = new Map<SegmentId, SegmentId>() - for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries(segmentDiff.externalIdChanged)) { - const oldSegmentId = getSegmentId(ingestModel.rundownId, oldSegmentExternalId) - const newSegmentId = getSegmentId(ingestModel.rundownId, newSegmentExternalId) - - // Track the rename - renamedSegments.set(oldSegmentId, newSegmentId) - - // If the segment doesnt exist (it should), then there isn't a segment to rename - const oldSegment = ingestModel.getSegment(oldSegmentId) - if (!oldSegment) continue - - if (ingestModel.getSegment(newSegmentId)) { - // If the new SegmentId already exists, we need to discard the old one rather than trying to merge it. - // This can only be done if the caller is expecting to regenerate Segments - const canDiscardPartsForSegment = canDiscardParts && !segmentDiff.onlyRankChanged[oldSegmentExternalId] - if (!canDiscardPartsForSegment) { - throw new Error(`Cannot merge Segments with only rank changes`) - } - - // Remove the old Segment and it's contents, the new one will be generated shortly - ingestModel.removeSegment(oldSegmentId) - } else { - // Perform the rename - ingestModel.changeSegmentId(oldSegmentId, newSegmentId) - } - } - - return renamedSegments -} - -/** - * Object of IngestSegment against their external ids - */ -export type SegmentEntries = { [segmentExternalId: string]: LocalIngestSegment } -/** - * Convert an array of IngestSegment into SegmentEntries - */ -export function compileSegmentEntries(ingestSegments: ReadonlyDeep<Array<LocalIngestSegment>>): SegmentEntries { - const segmentEntries: SegmentEntries = {} - - for (const ingestSegment of ingestSegments) { - if (segmentEntries[ingestSegment.externalId]) { - throw new Error(`compileSegmentEntries: Non-unique segment external ID: "${ingestSegment.externalId}"`) - } - segmentEntries[ingestSegment.externalId] = clone(ingestSegment) - } - - return segmentEntries -} - -/** - * Result of diffing two SegmentEntries - */ -export interface DiffSegmentEntries { - added: { [segmentExternalId: string]: LocalIngestSegment } - changed: { [segmentExternalId: string]: LocalIngestSegment } - removed: { [segmentExternalId: string]: LocalIngestSegment } - unchanged: { [segmentExternalId: string]: LocalIngestSegment } - - // Note: The objects present below are also present in the collections above - - /** Reference to segments which only had their ranks updated */ - onlyRankChanged: { [segmentExternalId: string]: number } // contains the new rank - - /** Reference to segments which has been REMOVED, but it looks like there is an ADDED segment that is closely related to the removed one */ - externalIdChanged: { [removedSegmentExternalId: string]: string } // contains the added segment's externalId -} - -/** - * Perform a diff of SegmentEntries, to calculate what has changed. - * Considers that the ids of some IngestSegments could have changed - * @param oldSegmentEntries The last known SegmentEntries - * @param newSegmentEntries The new SegmentEntries - * @param oldSegments The Segments in the DB. This allows for maintaining a stable modified timestamp, and ranks - * @returns DiffSegmentEntries describing the found changes - */ -export function diffSegmentEntries( - oldSegmentEntries: SegmentEntries, - newSegmentEntries: SegmentEntries, - oldSegments: IngestSegmentModel[] | null -): DiffSegmentEntries { - const diff: DiffSegmentEntries = { - added: {}, - changed: {}, - removed: {}, - unchanged: {}, - - onlyRankChanged: {}, - externalIdChanged: {}, - } - const oldSegmentMap: { [externalId: string]: IngestSegmentModel } | null = - oldSegments === null ? null : normalizeArrayFunc(oldSegments, (segment) => segment.segment.externalId) - - _.each(newSegmentEntries, (newSegmentEntry, segmentExternalId) => { - const oldSegmentEntry = oldSegmentEntries[segmentExternalId] as IngestSegment | undefined - let oldSegment: IngestSegmentModel | undefined - if (oldSegmentMap) { - oldSegment = oldSegmentMap[newSegmentEntry.externalId] - if (!oldSegment) { - // Segment has been added - diff.added[segmentExternalId] = newSegmentEntry - return - } - } - if (oldSegmentEntry) { - const modifiedIsEqual = oldSegment ? 
newSegmentEntry.modified === oldSegment.segment.externalModified : true - - // ensure there are no 'undefined' properties - deleteAllUndefinedProperties(oldSegmentEntry) - deleteAllUndefinedProperties(newSegmentEntry) - - // deep compare: - const ingestContentIsEqual = _.isEqual(_.omit(newSegmentEntry, 'rank'), _.omit(oldSegmentEntry, 'rank')) - const rankIsEqual = oldSegment - ? newSegmentEntry.rank === oldSegment.segment._rank - : newSegmentEntry.rank === oldSegmentEntry.rank - - // Compare the modified timestamps: - if (modifiedIsEqual && ingestContentIsEqual && rankIsEqual) { - diff.unchanged[segmentExternalId] = newSegmentEntry - } else { - // Something has changed - diff.changed[segmentExternalId] = newSegmentEntry - - // Check if it's only the rank that has changed: - if (ingestContentIsEqual && !rankIsEqual) { - diff.onlyRankChanged[segmentExternalId] = newSegmentEntry.rank - } - } - } else { - // Segment has been added - diff.added[segmentExternalId] = newSegmentEntry - } - }) - - _.each(oldSegmentEntries, (oldSegmentEntry, segmentExternalId) => { - const newSegmentEntry = newSegmentEntries[segmentExternalId] as IngestSegment | undefined - if (!newSegmentEntry) { - diff.removed[segmentExternalId] = oldSegmentEntry - } - }) - - // Handle when the externalId has change - _.each(diff.removed, (segmentEntry, segmentExternalId) => { - // try finding "it" in the added, using name - let newSegmentEntry = _.find(diff.added, (se) => se.name === segmentEntry.name) - if (!newSegmentEntry) { - // second try, match with any parts: - newSegmentEntry = _.find(diff.added, (se) => { - let found = false - _.each(segmentEntry.parts, (part) => { - if (found || _.find(se.parts, (p) => p.externalId === part.externalId)) { - found = true - } - }) - return found - }) - } - if (newSegmentEntry) { - diff.externalIdChanged[segmentExternalId] = newSegmentEntry.externalId - } - }) - - return diff -} diff --git a/packages/job-worker/src/ingest/mosDevice/lib.ts b/packages/job-worker/src/ingest/mosDevice/lib.ts index 07f8b0d425..cde75a8de2 100644 --- a/packages/job-worker/src/ingest/mosDevice/lib.ts +++ b/packages/job-worker/src/ingest/mosDevice/lib.ts @@ -1,18 +1,7 @@ import { MOS } from '@sofie-automation/corelib' -import { IngestPart } from '@sofie-automation/blueprints-integration' -import { getPartId } from '../lib' -import { PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { IngestRundown, IngestSegment } from '@sofie-automation/blueprints-integration' import _ = require('underscore') -export function getPartIdFromMosStory(rundownId: RundownId, partMosId: MOS.IMOSString128 | string): PartId { - if (!partMosId) throw new Error('parameter partMosId missing!') - return getPartId(rundownId, typeof partMosId === 'string' ? 
partMosId : parseMosString(partMosId)) -} - -export function getSegmentExternalId(rundownId: RundownId, ingestPart: IngestPart): string { - return `${rundownId}_${ingestPart.name.split(';')[0]}_${ingestPart.externalId}` -} - export function fixIllegalObject(o: unknown): void { if (_.isArray(o)) { _.each(o, (val, _key) => { @@ -38,3 +27,37 @@ export function parseMosString(str: MOS.IMOSString128): string { if (mosTypes.mosString128.is(str)) return mosTypes.mosString128.stringify(str) return (str as any).toString() } + +export function getMosIngestSegmentExternalId(partExternalId: string): string { + return `segment-${partExternalId}` +} + +export function updateRanksBasedOnOrder(ingestRundown: IngestRundown): void { + ingestRundown.segments.forEach((segment, i) => { + segment.rank = i + + segment.parts.forEach((part, j) => { + part.rank = j + }) + }) +} + +export function mosStoryToIngestSegment(mosStory: MOS.IMOSStory, undefinedPayload: boolean): IngestSegment { + const partExternalId = parseMosString(mosStory.ID) + + const name = mosStory.Slug ? parseMosString(mosStory.Slug) : '' + return { + externalId: getMosIngestSegmentExternalId(partExternalId), + name: name, + rank: 0, // Set later + parts: [ + { + externalId: partExternalId, + name: name, + rank: 0, + payload: undefinedPayload ? undefined : {}, + }, + ], + payload: undefined, + } +} diff --git a/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts b/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts index b0f9e50a90..445e52c0d0 100644 --- a/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts +++ b/packages/job-worker/src/ingest/mosDevice/mosRundownJobs.ts @@ -1,5 +1,4 @@ -import { IngestPart } from '@sofie-automation/blueprints-integration' -import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { NrcsIngestRundownChangeDetails, IngestPart, IngestChangeType } from '@sofie-automation/blueprints-integration' import { literal } from '@sofie-automation/corelib/dist/lib' import { MosRundownProps, @@ -8,117 +7,95 @@ import { MosRundownReadyToAirProps, } from '@sofie-automation/corelib/dist/worker/ingest' import { JobContext } from '../../jobs' -import { getCurrentTime } from '../../lib' -import _ = require('underscore') -import { LocalIngestRundown } from '../ingestCache' -import { getRundownId, getPartId, canRundownBeUpdated } from '../lib' -import { runIngestJob, CommitIngestData, runWithRundownLock } from '../lock' -import { diffAndUpdateSegmentIds } from './diff' -import { parseMosString } from './lib' -import { groupedPartsToSegments, groupIngestParts, storiesToIngestParts } from './mosToIngest' -import { updateRundownFromIngestData, updateRundownMetadataFromIngestData } from '../generationRundown' +import { getRundownId, canRundownBeUpdated } from '../lib' +import { CommitIngestData, runWithRundownLock } from '../lock' +import { mosStoryToIngestSegment, parseMosString, updateRanksBasedOnOrder } from './lib' +import { GenerateRundownMode, updateRundownFromIngestData } from '../generationRundown' +import { IngestUpdateOperationFunction } from '../runOperation' +import { IngestModel } from '../model/IngestModel' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' /** * Insert or update a mos rundown */ -export async function handleMosRundownData(context: JobContext, data: MosRundownProps): Promise<void> { +export function handleMosRundownData( + _context: JobContext, + data: MosRundownProps +): IngestUpdateOperationFunction | null { // Create or update a rundown (ie from rundownCreate or rundownList) if (parseMosString(data.mosRunningOrder.ID) !== data.rundownExternalId) throw new Error('mosRunningOrder.ID and rundownExternalId mismatch!') - await runIngestJob( - context, - data, - (ingestRundown) => { - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - const parts = _.compact( - storiesToIngestParts(context, rundownId, data.mosRunningOrder.Stories || [], data.isUpdateOperation, []) - ) - const groupedStories = groupIngestParts(parts) - - // If this is a reload of a RO, then use cached data to make the change more seamless - if (data.isUpdateOperation && ingestRundown) { - const partCacheMap = new Map<PartId, IngestPart>() - for (const segment of ingestRundown.segments) { - for (const part of segment.parts) { - partCacheMap.set(getPartId(rundownId, part.externalId), part) - } - } - - for (const annotatedPart of parts) { - const cached = partCacheMap.get(annotatedPart.partId) - if (cached && !annotatedPart.ingest.payload) { - annotatedPart.ingest.payload = cached.payload - } + return (ingestRundown) => { + const ingestSegments = (data.mosRunningOrder.Stories || []).map((story) => + mosStoryToIngestSegment(story, data.isUpdateOperation) + ) + + // If this is a reload of a RO, then use cached data to make the change more seamless + if (data.isUpdateOperation && ingestRundown) { + const partCacheMap = new Map<string, IngestPart>() + for (const segment of ingestRundown.segments) { + for (const part of segment.parts) { + partCacheMap.set(part.externalId, part) } } - const ingestSegments = groupedPartsToSegments(rundownId, groupedStories) - - return literal<LocalIngestRundown>({ - externalId: data.rundownExternalId, - name: parseMosString(data.mosRunningOrder.Slug), - type: 'mos', - segments: ingestSegments, - payload: data.mosRunningOrder, - modified: getCurrentTime(), - }) - }, - async (context, ingestModel, newIngestRundown, oldIngestRundown) => { - if (!newIngestRundown) throw new Error(`handleMosRundownData lost the IngestRundown...`) - - if (!canRundownBeUpdated(ingestModel.rundown, !data.isUpdateOperation)) return null - - let renamedSegments: CommitIngestData['renamedSegments'] = null - if (ingestModel.rundown && oldIngestRundown) { - // If we already have a rundown, update any modified segment ids - renamedSegments = diffAndUpdateSegmentIds(context, ingestModel, oldIngestRundown, newIngestRundown) - } + for (const newIngestSegment of ingestSegments) { + const ingestPart = newIngestSegment.parts[0] + if (!ingestPart) continue - const res = await updateRundownFromIngestData( - context, - ingestModel, - newIngestRundown, - !data.isUpdateOperation, - data.rundownSource - ) - if (res) { - return { - ...res, - renamedSegments: renamedSegments, + const cached = partCacheMap.get(ingestPart.externalId) + if (cached && !ingestPart.payload) { + ingestPart.payload = cached.payload } - } else { - return null } } - ) + + const newIngestRundown = literal<IngestRundownWithSource>({ + externalId: data.rundownExternalId, + name: parseMosString(data.mosRunningOrder.Slug), + type: 'mos', + segments: ingestSegments, + payload: data.mosRunningOrder, + rundownSource: data.rundownSource, + }) + updateRanksBasedOnOrder(newIngestRundown) + + return { + ingestRundown: newIngestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Regenerate, + }, + } + } } /** * Update the payload of a mos rundown, without changing any parts or segments */ -export async function handleMosRundownMetadata(context: JobContext, data: MosRundownMetadataProps): Promise<void> { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - ingestRundown.payload = _.extend(ingestRundown.payload, data.mosRunningOrderBase) - ingestRundown.modified = getCurrentTime() - +export function handleMosRundownMetadata( + _context: JobContext, + data: MosRundownMetadataProps +): IngestUpdateOperationFunction | null { + return (ingestRundown) => { + if (ingestRundown) { + ingestRundown.payload = Object.assign(ingestRundown.payload as object, data.mosRunningOrderBase) + + return { // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + rundownChanges: NrcsIngestRundownChangeDetails.Payload, + }, } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleMosRundownMetadata lost the IngestRundown...`) - - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, data.rundownSource) + } else { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) } - ) + } } /** @@ -143,29 +120,18 @@ export async function handleMosRundownStatus(context: JobContext, data: MosRundo /** * Update the ready to air state of a mos rundown */ -export async function handleMosRundownReadyToAir(context: JobContext, data: MosRundownReadyToAirProps): Promise<void> { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - // No change - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error(`handleMosRundownReadyToAir lost the IngestRundown...`) - - if (!ingestModel.rundown || ingestModel.rundown.airStatus === data.status) return null +export async function handleMosRundownReadyToAir( + context: JobContext, + data: MosRundownReadyToAirProps, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource +): Promise<CommitIngestData | null> { + if (!ingestModel.rundown || ingestModel.rundown.airStatus === data.status) return null - // If rundown is orphaned, then it should be ignored - if (ingestModel.rundown.orphaned) return null + // If rundown is orphaned, then it should be ignored + if (ingestModel.rundown.orphaned) return null - ingestModel.setRundownAirStatus(data.status) + ingestModel.setRundownAirStatus(data.status) - return updateRundownMetadataFromIngestData(context, ingestModel, ingestRundown, ingestModel.rundown.source) - } - ) + return updateRundownFromIngestData(context, ingestModel, ingestRundown, GenerateRundownMode.MetadataChange) }
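The rewritten rundown handlers above share the new shape used throughout this patch: instead of running the ingest job themselves via runIngestJob, they validate their input up front and return an IngestUpdateOperationFunction (or null for a no-op) that the generic plumbing in ../runOperation later invokes with the cached NRCS ingest rundown. A minimal TypeScript sketch of that contract, assuming a cachedIngestRundown variable in place of the real cache lookup and with the props trimmed to the fields visible in this diff (the driver code is illustrative, not the actual runOperation implementation):

	const executeJob = handleMosRundownMetadata(context, {
		rundownExternalId: 'rundown0',
		mosRunningOrderBase, // IMOSRunningOrderBase payload from the MOS device
		rundownSource: { type: 'http' },
	})
	if (executeJob) {
		// Called with the last cached IngestRundownWithSource, or undefined when none exists
		const { ingestRundown, changes } = executeJob(cachedIngestRundown)
		// `changes` tells the ingest pipeline what to regenerate (here: rundown payload only)
	}

The unit tests in mosStoryJobs.spec.ts above exercise exactly this two-step pattern.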
diff --git a/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts b/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts index 2e4cac9be0..b657bdf03f 100644 --- a/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts +++ b/packages/job-worker/src/ingest/mosDevice/mosStoryJobs.ts @@ -1,4 +1,3 @@ -import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { MosDeleteStoryProps, MosFullStoryProps, @@ -7,298 +6,303 @@ import { MosSwapStoryProps, } from '@sofie-automation/corelib/dist/worker/ingest' import { logger } from '../../logging' -import _ = require('underscore') import { JobContext } from '../../jobs' -import { updateSegmentFromIngestData } from '../generationSegment' -import { LocalIngestRundown } from '../ingestCache' -import { getRundownId } from '../lib' -import { runIngestJob } from '../lock' -import { diffAndApplyChanges } from './diff' -import { fixIllegalObject, parseMosString } from './lib' -import { AnnotatedIngestPart, makeChangeToIngestParts, storiesToIngestParts } from './mosToIngest' - -function getAnnotatedIngestParts(context: JobContext, ingestRundown: LocalIngestRundown): AnnotatedIngestPart[] { - const span = context.startSpan('mosDevice.ingest.getAnnotatedIngestParts') - const ingestParts: AnnotatedIngestPart[] = [] - _.each(ingestRundown.segments, (s) => { - _.each(s.parts, (p) => { - ingestParts.push({ - externalId: p.externalId, - partId: protectString(''), // Not used - segmentName: s.name, - ingest: p, - }) - }) - }) - - span?.end() - return ingestParts -} +import { + fixIllegalObject, + getMosIngestSegmentExternalId, + mosStoryToIngestSegment, + parseMosString, + updateRanksBasedOnOrder, +} from './lib' +import { + IngestChangeType, + IngestSegment, + MOS, + NrcsIngestPartChangeDetails, + NrcsIngestSegmentChangeDetails, + NrcsIngestSegmentChangeDetailsEnum, +} from '@sofie-automation/blueprints-integration' +import { IngestUpdateOperationFunction } from '../runOperation' +import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' /** * Update the payload of a mos story */ -export async function handleMosFullStory(context: JobContext, data: MosFullStoryProps): Promise<void> { +export function handleMosFullStory( + _context: JobContext, + data: MosFullStoryProps +): IngestUpdateOperationFunction | null { fixIllegalObject(data.story) const partExternalId = parseMosString(data.story.ID) - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestPart = ingestRundown.segments - .map((s) => s.parts) - .flat() - .find((p) => p.externalId === partExternalId) - if (!ingestPart) { - throw new Error( - `handleMosFullStory: Missing MOS Story "${partExternalId}" in Rundown ingest data for "${data.rundownExternalId}"` - ) - } - - // TODO - can the name change during a fullStory? If so then we need to be sure to update the segment groupings too - // ingestPart.name = story.Slug ? parseMosString(story.Slug) : '' - ingestPart.payload = data.story - - // We modify in-place - return ingestRundown - } else { - throw new Error(`handleMosFullStory: Missing MOS Rundown "${data.rundownExternalId}"`) - } - }, - async (context, ingestModel, ingestRundown) => { - const ingestSegment = ingestRundown?.segments?.find((s) => - s.parts.find((p) => p.externalId === partExternalId) + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + // It appears that the name can't change during a fullStory. (based on a few years of usage) + // If it can then we need to be sure to update the segment groupings too + + const segmentExternalId = getMosIngestSegmentExternalId(partExternalId) + + const ingestSegment = ingestRundown.segments.find((s) => s.externalId === segmentExternalId) + const ingestPart = ingestSegment?.parts.find((p) => p.externalId === partExternalId) + + if (!ingestPart) + // Part was not found + throw new Error( + `handleMosFullStory: Missing MOS Story "${partExternalId}" in Rundown ingest data for "${data.rundownExternalId}"` ) - if (!ingestSegment) throw new Error(`IngestSegment for story "${partExternalId}" is missing!`) - return updateSegmentFromIngestData(context, ingestModel, ingestSegment, false) + + // We modify in-place + ingestPart.payload = data.story + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges: { + [segmentExternalId]: { + partChanges: { + [ingestPart.externalId]: NrcsIngestPartChangeDetails.Updated, + }, + }, + }, + }, } - ) + } } /** * Delete a mos story */ -export async function handleMosDeleteStory(context: JobContext, data: MosDeleteStoryProps): Promise<void> { - if (data.stories.length === 0) return - - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - const ingestPartIds = new Set(ingestParts.map((part) => part.externalId)) - - const storyIds = data.stories.map(parseMosString) - - logger.debug(`handleMosDeleteStory storyIds: [${storyIds.join(',')}]`) - - const missingIds = storyIds.filter((id) => !ingestPartIds.has(id)) - if (missingIds.length > 0) { - throw new Error( - `Parts ${missingIds.join(', ')} in rundown ${data.rundownExternalId} were not found` - ) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - ingestRundown.segments = makeChangeToIngestParts(context, rundownId, ingestParts, (rundownParts) => { - const storyIdsSet = new Set(storyIds) - const filteredParts = rundownParts.filter((p) => !storyIdsSet.has(p.externalId)) - - logger.debug( - `handleMosDeleteStory, new part count ${filteredParts.length} (was ${rundownParts.length})` - ) - - return filteredParts - }) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosDeleteStory( + _context: JobContext, + data: MosDeleteStoryProps +): IngestUpdateOperationFunction | null { + if (data.stories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const storyIdsToDelete = data.stories.map(parseMosString) + const segmentExternalIdsToDelete = storyIdsToDelete.map(getMosIngestSegmentExternalId) + + logger.debug(`handleMosDeleteStory storyIds: [${storyIdsToDelete.join(',')}]`) + + const ingestSegmentIds = new Set(ingestRundown.segments.map((segment) => segment.externalId)) + + const missingIds = segmentExternalIdsToDelete.filter((id) => !ingestSegmentIds.has(id)) + if (missingIds.length > 0) { + throw new Error(`Parts ${missingIds.join(', ')} in rundown ${data.rundownExternalId} were not found`) + } + + // Remove any segments + const segmentExternalIdsToDeleteSet = new Set(segmentExternalIdsToDelete) + ingestRundown.segments = ingestRundown.segments.filter( + (segment) => !segmentExternalIdsToDeleteSet.has(segment.externalId) + ) + 
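+ // Each MOS story is modelled as its own single-part segment (see mosStoryToIngestSegment in ./lib), + // so removing stories here means dropping whole segments rather than trimming parts within one.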
+ // compute changes + const segmentChanges: Record<string, NrcsIngestSegmentChangeDetails> = {} + for (const segmentId of segmentExternalIdsToDelete) { + segmentChanges[segmentId] = NrcsIngestSegmentChangeDetailsEnum.Deleted + } + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentChanges, + }, + } + } } /** * Insert a mos story before the referenced existing story */ -export async function handleMosInsertStories(context: JobContext, data: MosInsertStoryProps): Promise<void> { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - - // The part of which we are about to insert stories after - const insertBeforePartExternalId = data.insertBeforeStoryId - ? parseMosString(data.insertBeforeStoryId) || '' - : '' - const insertIndex = !insertBeforePartExternalId // insert last - ? ingestParts.length - : ingestParts.findIndex((p) => p.externalId === insertBeforePartExternalId) - if (insertIndex === -1) { - throw new Error(`Part ${insertBeforePartExternalId} in rundown ${data.rundownExternalId} not found`) - } - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - const newParts = storiesToIngestParts( - context, - rundownId, - data.newStories || [], - true, - ingestParts - ).filter( - (p): p is AnnotatedIngestPart => !!p // remove falsy values from array - ) - - ingestRundown.segments = makeChangeToIngestParts( - context, - rundownId, - ingestParts, - (ingestPartsToModify) => { - const modifiedIngestParts = [...ingestPartsToModify] // clone - - if (data.replace) { - modifiedIngestParts.splice(insertIndex, 1) // Replace the previous part with new parts - } - - const newPartExtenalIds = new Set(newParts.map((part) => part.externalId)) - const collidingPartIds = modifiedIngestParts - .filter((part) => newPartExtenalIds.has(part.externalId)) - .map((part) => part.externalId) - - if (collidingPartIds.length > 0) { - throw new Error( - `Parts ${collidingPartIds.join(', ')} already exist in rundown ${ - data.rundownExternalId - }` - ) - } - // Update parts list - modifiedIngestParts.splice(insertIndex, 0, ...newParts) - - return modifiedIngestParts - } - ) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosInsertStories( + _context: JobContext, + data: MosInsertStoryProps +): IngestUpdateOperationFunction | null { + if (data.newStories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const newIngestSegments = data.newStories.map((story) => mosStoryToIngestSegment(story, true)) + + // The segment before which the new stories are to be inserted (null means append at the end) + const insertBeforeSegmentExternalId = storyIdToSegmentExternalId(data.insertBeforeStoryId) + const insertIndex = insertBeforeSegmentExternalId + ? ingestRundown.segments.findIndex((p) => p.externalId === insertBeforeSegmentExternalId) + : ingestRundown.segments.length // insert last + if (insertIndex === -1) { + throw new Error( + `Part ${data.insertBeforeStoryId && parseMosString(data.insertBeforeStoryId)} in rundown ${ + data.rundownExternalId + } not found` + ) + } + + const oldSegmentIds = new Set(ingestRundown.segments.map((s) => s.externalId)) + // Allow replacing with itself + if (data.replace && insertBeforeSegmentExternalId) oldSegmentIds.delete(insertBeforeSegmentExternalId) + + const duplicateSegments = newIngestSegments.filter((segment) => oldSegmentIds.has(segment.externalId)) + if (duplicateSegments.length > 0) { + throw new Error( + `Parts ${duplicateSegments.map((s) => s.externalId).join(', ')} already exist in rundown ${ + data.rundownExternalId + }` + ) + } + + // Perform the change + ingestRundown.segments.splice(insertIndex, data.replace ? 1 : 0, ...newIngestSegments) + updateRanksBasedOnOrder(ingestRundown)
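+ // updateRanksBasedOnOrder (./lib) rewrites segment and part ranks from the array order, keeping ranks contiguous after the splice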
(story0Index === -1) { + throw new Error(`Story ${story0Str} not found in rundown ${data.rundownExternalId}`) + } + + const segment1Id = getMosIngestSegmentExternalId(parseMosString(data.story1)) + const story1Index = ingestRundown.segments.findIndex((s) => s.externalId === segment1Id) + if (story1Index === -1) { + throw new Error(`Story ${story1Str} not found in rundown ${data.rundownExternalId}`) + } + + // Fetch the values + const story0Segment = ingestRundown.segments[story0Index] + const story1Segment = ingestRundown.segments[story1Index] + + // Store the values + ingestRundown.segments[story0Index] = story1Segment + ingestRundown.segments[story1Index] = story0Segment + + updateRanksBasedOnOrder(ingestRundown) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } + } } /** * Move a list of mos stories */ -export async function handleMosMoveStories(context: JobContext, data: MosMoveStoryProps): Promise { - await runIngestJob( - context, - data, - (ingestRundown) => { - if (ingestRundown) { - const ingestParts = getAnnotatedIngestParts(context, ingestRundown) - - // Get story data - const storyIds = data.stories.map(parseMosString) - - const rundownId = getRundownId(context.studioId, data.rundownExternalId) - ingestRundown.segments = makeChangeToIngestParts(context, rundownId, ingestParts, (rundownParts) => { - // Extract the parts-to-be-moved: - const movingParts = _.sortBy( - rundownParts.filter((p) => storyIds.indexOf(p.externalId) !== -1), - (p) => storyIds.indexOf(p.externalId) - ) - const filteredParts = rundownParts.filter((p) => storyIds.indexOf(p.externalId) === -1) - - // Ensure all stories to move were found - const movingIds = _.map(movingParts, (p) => p.externalId) - const missingIds = _.filter(storyIds, (id) => movingIds.indexOf(id) === -1) - if (missingIds.length > 0) { - throw new Error( - `Parts ${missingIds.join(', ')} were not found in rundown ${data.rundownExternalId}` - ) - } - - // Find insert point - const insertBeforePartExternalId = data.insertBeforeStoryId - ? parseMosString(data.insertBeforeStoryId) || '' - : '' - const insertIndex = !insertBeforePartExternalId // insert last - ? 
filteredParts.length - : filteredParts.findIndex((p) => p.externalId === insertBeforePartExternalId) - if (insertIndex === -1) { - throw new Error( - `Part ${insertBeforePartExternalId} was not found in rundown ${data.rundownExternalId}` - ) - } - - // Reinsert parts - filteredParts.splice(insertIndex, 0, ...movingParts) - - return filteredParts - }) - - // We modify in-place - return ingestRundown - } else { - throw new Error(`Rundown "${data.rundownExternalId}" not found`) - } - }, - diffAndApplyChanges - ) +export function handleMosMoveStories( + _context: JobContext, + data: MosMoveStoryProps +): IngestUpdateOperationFunction | null { + if (data.stories.length === 0) return null + + return (ingestRundown: IngestRundownWithSource | undefined) => { + if (!ingestRundown) { + throw new Error(`Rundown "${data.rundownExternalId}" not found`) + } + + const oldIngestSegmentMap = normalizeArrayToMap(ingestRundown.segments, 'externalId') + + const moveStoryIds = data.stories.map(parseMosString) + + const moveIngestSegments: IngestSegment[] = [] + const missingIds: string[] = [] + for (const storyId of moveStoryIds) { + const segment = oldIngestSegmentMap.get(getMosIngestSegmentExternalId(storyId)) + if (segment) moveIngestSegments.push(segment) + else missingIds.push(storyId) + } + + if (missingIds.length > 0) { + throw new Error(`Parts ${missingIds.join(', ')} were not found in rundown ${data.rundownExternalId}`) + } + + // Remove the segments that are about to be moved + const moveIngestSegmentIds = moveIngestSegments.map((s) => s.externalId) + ingestRundown.segments = ingestRundown.segments.filter((s) => !moveIngestSegmentIds.includes(s.externalId)) + + // The segment before which the moved stories are to be inserted (when unset, they are appended at the end) + const insertBeforeSegmentExternalId = storyIdToSegmentExternalId(data.insertBeforeStoryId) + const insertIndex = insertBeforeSegmentExternalId + ?
ingestRundown.segments.findIndex((p) => p.externalId === insertBeforeSegmentExternalId) + : ingestRundown.segments.length + if (insertIndex === -1) { + throw new Error(`Part ${insertBeforeSegmentExternalId} in rundown ${data.rundownExternalId} not found`) + } + + // Perform the change + ingestRundown.segments.splice(insertIndex, 0, ...moveIngestSegments) + updateRanksBasedOnOrder(ingestRundown) + + return { + // We modify in-place + ingestRundown, + changes: { + source: IngestChangeType.Ingest, + segmentOrderChanged: true, + }, + } + } +} + +function storyIdToSegmentExternalId(storyId: MOS.IMOSString128 | null | undefined): string | undefined { + if (!storyId) return undefined + const partExternalId = parseMosString(storyId) + if (!partExternalId) return undefined + return getMosIngestSegmentExternalId(partExternalId) } diff --git a/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts b/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts deleted file mode 100644 index 3e909a81e7..0000000000 --- a/packages/job-worker/src/ingest/mosDevice/mosToIngest.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { MOS } from '@sofie-automation/corelib' -import { PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { normalizeArray, literal } from '@sofie-automation/corelib/dist/lib' -import _ = require('underscore') -import { JobContext } from '../../jobs' -import { getCurrentTime } from '../../lib' -import { LocalIngestPart, LocalIngestSegment } from '../ingestCache' -import { parseMosString, getPartIdFromMosStory, getSegmentExternalId } from './lib' - -export interface AnnotatedIngestPart { - externalId: string - partId: PartId - segmentName: string - ingest: LocalIngestPart -} -export function storiesToIngestParts( - context: JobContext, - rundownId: RundownId, - stories: MOS.IMOSStory[], - undefinedPayload: boolean, - existingIngestParts: AnnotatedIngestPart[] -): (AnnotatedIngestPart | null)[] { - const span = context.startSpan('ingest.storiesToIngestParts') - - const existingIngestPartsMap = normalizeArray(existingIngestParts, 'externalId') - - const parts = stories.map((s, i) => { - if (!s) return null - - const externalId = parseMosString(s.ID) - const existingIngestPart = existingIngestPartsMap[externalId] - - const name = s.Slug ? parseMosString(s.Slug) : '' - return { - externalId: externalId, - partId: getPartIdFromMosStory(rundownId, s.ID), - segmentName: name.split(';')[0], - ingest: literal({ - externalId: parseMosString(s.ID), - name: name, - rank: i, - payload: undefinedPayload ? undefined : {}, - modified: existingIngestPart ? 
existingIngestPart.ingest.modified : getCurrentTime(), - }), - } - }) - - span?.end() - return parts -} -/** Group IngestParts together into something that could be Segments */ -export function groupIngestParts(parts: AnnotatedIngestPart[]): { name: string; parts: LocalIngestPart[] }[] { - const groupedParts: { name: string; parts: LocalIngestPart[] }[] = [] - _.each(parts, (part) => { - const lastSegment = _.last(groupedParts) - if (lastSegment && lastSegment.name === part.segmentName) { - lastSegment.parts.push(part.ingest) - } else { - groupedParts.push({ name: part.segmentName, parts: [part.ingest] }) - } - }) - - // Ensure ranks are correct - _.each(groupedParts, (group) => { - for (let i = 0; i < group.parts.length; i++) { - group.parts[i].rank = i - } - }) - - return groupedParts -} -export function groupedPartsToSegments( - rundownId: RundownId, - groupedParts: { name: string; parts: LocalIngestPart[] }[] -): LocalIngestSegment[] { - return _.map(groupedParts, (grp, i) => { - return literal({ - externalId: getSegmentExternalId(rundownId, grp.parts[0]), - name: grp.name, - rank: i, - parts: grp.parts, - modified: Math.max(...grp.parts.map((p) => p.modified)), // pick the latest - }) - }) -} - -/** Takes a list of ingestParts, modify it, then output them grouped together into ingestSegments, keeping track of the modified property */ -export function makeChangeToIngestParts( - context: JobContext, - rundownId: RundownId, - ingestParts: AnnotatedIngestPart[], - modifyFunction: (ingestParts: AnnotatedIngestPart[]) => AnnotatedIngestPart[] -): LocalIngestSegment[] { - const span = context.startSpan('mosDevice.ingest.makeChangeToIngestParts') - - // Before making the modification to ingestParts, create a list of segments from the original data, to use for calculating the - // .modified property below. - const referenceIngestSegments = groupPartsIntoIngestSegments(rundownId, ingestParts) - - const modifiedParts = modifyFunction(ingestParts) - - // Compare to reference, to make sure that ingestSegment.modified is updated in case of a change - const newIngestSegments = groupPartsIntoIngestSegments(rundownId, modifiedParts) - - _.each(newIngestSegments, (ingestSegment) => { - if (!ingestSegment.modified) { - ingestSegment.modified = getCurrentTime() - } else { - const ref = referenceIngestSegments.find((s) => s.externalId === ingestSegment.externalId) - if (ref) { - if (ref.parts.length !== ingestSegment.parts.length) { - // A part has been added, or removed - ingestSegment.modified = getCurrentTime() - } else { - // No obvious change. 
- // (If an individual part has been updated, the segment.modified property has already been updated anyway) - } - } else { - // The reference doesn't exist (can happen for example if a segment has been merged, or split into two) - ingestSegment.modified = getCurrentTime() - } - } - }) - - span?.end() - return newIngestSegments -} -function groupPartsIntoIngestSegments( - rundownId: RundownId, - newIngestParts: AnnotatedIngestPart[] -): LocalIngestSegment[] { - // Group the parts and make them into Segments: - const newGroupedParts = groupIngestParts(newIngestParts) - return groupedPartsToSegments(rundownId, newGroupedParts) -} diff --git a/packages/job-worker/src/ingest/nrcsIngestCache.ts b/packages/job-worker/src/ingest/nrcsIngestCache.ts new file mode 100644 index 0000000000..3a203451f8 --- /dev/null +++ b/packages/job-worker/src/ingest/nrcsIngestCache.ts @@ -0,0 +1,248 @@ +import { RundownId, SegmentId, NrcsIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + NrcsIngestDataCacheObj, + NrcsIngestCacheType, + NrcsIngestDataCacheObjRundown, + NrcsIngestDataCacheObjSegment, + NrcsIngestDataCacheObjPart, + IngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { ProtectedString, protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import _ = require('underscore') +import { IngestPart, IngestSegment } from '@sofie-automation/blueprints-integration' +import { JobContext } from '../jobs' +import { getPartId, getSegmentId } from './lib' +import { SetOptional } from 'type-fest' +import { groupByToMap, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' +import { diffAndReturnLatestObjects } from './model/implementation/utils' +import { ICollection } from '../db' +import { getCurrentTime } from '../lib' + +/** + * Represents a Rundown in the NRCSIngestDataCache collection and provides methods for interacting with it. 
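+ *
+ * @example
+ * // Illustrative usage sketch only (not part of this change). It assumes a `context` and
+ * // `rundownId` provided by the calling ingest job, and `clone` from corelib, since
+ * // fetchRundown() returns objects that must not be mutated in place:
+ * const nrcsCache = await NrcsIngestRundownDataCache.create(context, rundownId)
+ * const ingestRundown = clone(nrcsCache.fetchRundown())
+ * if (ingestRundown) {
+ * 	ingestRundown.name = 'Renamed rundown'
+ * 	nrcsCache.replace(ingestRundown) // diffs against the currently cached documents
+ * 	await nrcsCache.saveToDatabase() // persists the diff in a single bulkWrite
+ * }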
+ */ +export class NrcsIngestRundownDataCache { + readonly #changedDocumentIds = new Set() + + private constructor( + private readonly context: JobContext, + private readonly collection: ICollection, + private readonly rundownId: RundownId, + private documents: NrcsIngestDataCacheObj[] + ) {} + + static async create(context: JobContext, rundownId: RundownId): Promise { + const docs = await context.directCollections.NrcsIngestDataCache.findFetch({ rundownId }) + + return new NrcsIngestRundownDataCache(context, context.directCollections.NrcsIngestDataCache, rundownId, docs) + } + + /** + * Fetch the IngestRundown contained in the cache + * Note: This does not deep clone the objects, so the returned object should not be modified + */ + fetchRundown(): IngestRundownWithSource | undefined { + const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') + + const cachedRundown = this.documents.find( + (e): e is NrcsIngestDataCacheObjRundown => e.type === NrcsIngestCacheType.RUNDOWN + ) + if (!cachedRundown) { + span?.end() + return undefined + } + + const ingestRundown: IngestRundownWithSource = { + ...cachedRundown.data, + segments: [], + } + + const hasSegmentId = ( + obj: NrcsIngestDataCacheObj + ): obj is NrcsIngestDataCacheObjSegment | NrcsIngestDataCacheObjPart => { + return !!obj.segmentId + } + + const segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') + for (const objs of segmentMap.values()) { + const segmentEntry = objs.find( + (e): e is NrcsIngestDataCacheObjSegment => e.type === NrcsIngestCacheType.SEGMENT + ) + if (segmentEntry) { + const ingestSegment: IngestSegment = { + ...segmentEntry.data, + parts: [], + } + + for (const entry of objs) { + if (entry.type === NrcsIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) + } + } + + ingestSegment.parts = _.sortBy(ingestSegment.parts, (s) => s.rank) + ingestRundown.segments.push(ingestSegment) + } + } + + ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) + + span?.end() + return ingestRundown + } + + /** + * Replace the contents of the cache with the given IngestRundown + * This will diff and replace the documents in the cache + * @param ingestRundown The new IngestRundown to store in the cache + */ + replace(ingestRundown: IngestRundownWithSource): void { + const generator = new RundownIngestDataCacheGenerator(this.rundownId) + const cacheEntries: NrcsIngestDataCacheObj[] = generator.generateCacheForRundown(ingestRundown) + + this.documents = diffAndReturnLatestObjects(this.#changedDocumentIds, this.documents, cacheEntries) + } + + /** + * Delete the contents of the cache + */ + delete(): void { + // Mark each document for deletion + for (const doc of this.documents) { + this.#changedDocumentIds.add(doc._id) + } + + this.documents = [] + } + + /** + * Write any changes in the cache to the database + */ + async saveToDatabase(): Promise { + if (this.#changedDocumentIds.size === 0) return + + const documentsMap = normalizeArrayToMap(this.documents, '_id') + + const modifiedTime = getCurrentTime() + + const updates: AnyBulkWriteOperation[] = [] + const removedIds: NrcsIngestDataCacheObjId[] = [] + for (const changedId of this.#changedDocumentIds) { + const newDoc = documentsMap.get(changedId) + if (!newDoc) { + removedIds.push(changedId) + } else { + updates.push({ + replaceOne: { + filter: { + _id: changedId, + }, + replacement: { + ...newDoc, + modified: modifiedTime, + }, + upsert: true, + }, + }) + } + } + + if (removedIds.length) { + updates.push({ + 
deleteMany: { + filter: { + _id: { $in: removedIds as any }, + }, + }, + }) + } + + await this.collection.bulkWrite(updates) + } +} + +/** + * Convenience methods useful when interacting with the NrcsIngestRundownDataCache + */ +class RundownIngestDataCacheGenerator> { + constructor(public readonly rundownId: RundownId) {} + + getPartObjectId(partExternalId: string): TId { + return protectString(`${this.rundownId}_part_${partExternalId}`) + } + getSegmentObjectId(segmentExternalId: string): TId { + return protectString(`${this.rundownId}_segment_${segmentExternalId}`) + } + getRundownObjectId(): TId { + return protectString(unprotectString(this.rundownId)) + } + + generatePartObject(segmentId: SegmentId, part: IngestPart): NrcsIngestDataCacheObjPart { + return { + _id: this.getPartObjectId(part.externalId), + type: NrcsIngestCacheType.PART, + rundownId: this.rundownId, + segmentId: segmentId, + partId: getPartId(this.rundownId, part.externalId), + modified: 0, // Populated when saving + data: part, + } + } + + generateSegmentObject(ingestSegment: SetOptional): NrcsIngestDataCacheObjSegment { + return { + _id: this.getSegmentObjectId(ingestSegment.externalId), + type: NrcsIngestCacheType.SEGMENT, + rundownId: this.rundownId, + segmentId: getSegmentId(this.rundownId, ingestSegment.externalId), + modified: 0, // Populated when saving + data: { + ...ingestSegment, + parts: [], // omit the parts, they come as separate objects + }, + } + } + + generateRundownObject( + ingestRundown: SetOptional + ): NrcsIngestDataCacheObjRundown { + return { + _id: this.getRundownObjectId(), + type: NrcsIngestCacheType.RUNDOWN, + rundownId: this.rundownId, + modified: 0, // Populated when saving + data: { + ...ingestRundown, + segments: [], // omit the segments, they come as separate objects + }, + } + } + + generateCacheForRundown(ingestRundown: IngestRundownWithSource): NrcsIngestDataCacheObj[] { + const cacheEntries: NrcsIngestDataCacheObj[] = [] + + const rundown = this.generateRundownObject(ingestRundown) + cacheEntries.push(rundown) + + for (const segment of ingestRundown.segments) { + cacheEntries.push(...this.generateCacheForSegment(segment)) + } + + return cacheEntries + } + + private generateCacheForSegment(ingestSegment: IngestSegment): NrcsIngestDataCacheObj[] { + const cacheEntries: Array = [] + + const segment = this.generateSegmentObject(ingestSegment) + cacheEntries.push(segment) + + const segmentId = getSegmentId(this.rundownId, ingestSegment.externalId) + for (const part of ingestSegment.parts) { + cacheEntries.push(this.generatePartObject(segmentId, part)) + } + + return cacheEntries + } +} diff --git a/packages/job-worker/src/ingest/packageInfo.ts b/packages/job-worker/src/ingest/packageInfo.ts index a669f65d19..728c27069e 100644 --- a/packages/job-worker/src/ingest/packageInfo.ts +++ b/packages/job-worker/src/ingest/packageInfo.ts @@ -7,9 +7,10 @@ import { import { logger } from '../logging' import { JobContext } from '../jobs' import { regenerateSegmentsFromIngestData } from './generationSegment' -import { UpdateIngestRundownAction, runIngestJob, runWithRundownLock } from './lock' +import { runWithRundownLock } from './lock' import { updateExpectedPackagesForPartModel, updateExpectedPackagesForRundownBaseline } from './expectedPackages' import { loadIngestModelFromRundown } from './model/implementation/LoadIngestModel' +import { runCustomIngestUpdateOperation } from './runOperation' /** * Debug: Regenerate ExpectedPackages for a Rundown @@ -18,7 +19,7 @@ export async function 
handleExpectedPackagesRegenerate( context: JobContext, data: ExpectedPackagesRegenerateProps ): Promise { - await runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { + return runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { if (!rundown) throw new Error(`Rundown "${data.rundownId}" not found`) const ingestModel = await loadIngestModelFromRundown(context, rundownLock, rundown) @@ -44,74 +45,66 @@ export async function handleUpdatedPackageInfoForRundown( return } - await runIngestJob( - context, - data, - (ingestRundown) => { - if (!ingestRundown) { - logger.error( - `onUpdatedPackageInfoForRundown called but ingestRundown is undefined (rundownExternalId: "${data.rundownExternalId}")` - ) - return UpdateIngestRundownAction.REJECT - } - return ingestRundown // don't mutate any ingest data - }, - async (context, ingestModel, ingestRundown) => { - if (!ingestRundown) throw new Error('onUpdatedPackageInfoForRundown called but ingestRundown is undefined') + await runCustomIngestUpdateOperation(context, data, async (context, ingestModel, ingestRundown) => { + if (!ingestRundown) { + logger.error( + `onUpdatedPackageInfoForRundown called but ingestRundown is undefined (rundownExternalId: "${data.rundownExternalId}")` + ) + return null + } - /** All segments that need updating */ - const segmentsToUpdate = new Set() - let regenerateRundownBaseline = false + /** All segments that need updating */ + const segmentsToUpdate = new Set() + let regenerateRundownBaseline = false - for (const packageId of data.packageIds) { - const pkg = ingestModel.findExpectedPackage(packageId) - if (pkg) { - if ( - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) { - segmentsToUpdate.add(pkg.segmentId) - } else if ( - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - ) { - regenerateRundownBaseline = true - } - } else { - logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) + for (const packageId of data.packageIds) { + const pkg = ingestModel.findExpectedPackage(packageId) + if (pkg) { + if ( + pkg.fromPieceType === ExpectedPackageDBType.PIECE || + pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || + pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION + ) { + segmentsToUpdate.add(pkg.segmentId) + } else if ( + pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || + pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || + pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + ) { + regenerateRundownBaseline = true } + } else { + logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) } + } - logger.info( - `onUpdatedPackageInfoForRundown: PackageInfo for "${data.packageIds.join( - ', ' - )}" will trigger update of segments: ${Array.from(segmentsToUpdate).join(', ')}` - ) - - if (regenerateRundownBaseline) { - // trigger a re-generation of the rundown baseline - // TODO - to be implemented. 
- } + logger.info( + `onUpdatedPackageInfoForRundown: PackageInfo for "${data.packageIds.join( + ', ' + )}" will trigger update of segments: ${Array.from(segmentsToUpdate).join(', ')}` + ) - const { result, skippedSegments } = await regenerateSegmentsFromIngestData( - context, - ingestModel, - ingestRundown, - Array.from(segmentsToUpdate) - ) + if (regenerateRundownBaseline) { + // trigger a re-generation of the rundown baseline + // TODO - to be implemented. + } - if (skippedSegments.length > 0) { - logger.warn( - `onUpdatedPackageInfoForRundown: Some segments were skipped during update: ${skippedSegments.join( - ', ' - )}` - ) - } + const { result, skippedSegments } = await regenerateSegmentsFromIngestData( + context, + ingestModel, + ingestRundown, + Array.from(segmentsToUpdate) + ) - logger.warn(`onUpdatedPackageInfoForRundown: Changed ${result?.changedSegmentIds.length ?? 0} segments`) - return result + if (skippedSegments.length > 0) { + logger.warn( + `onUpdatedPackageInfoForRundown: Some segments were skipped during update: ${skippedSegments.join( + ', ' + )}` + ) } - ) + + logger.warn(`onUpdatedPackageInfoForRundown: Changed ${result?.changedSegmentIds.length ?? 0} segments`) + return result + }) } diff --git a/packages/job-worker/src/ingest/runOperation.ts b/packages/job-worker/src/ingest/runOperation.ts new file mode 100644 index 0000000000..b8db3c275f --- /dev/null +++ b/packages/job-worker/src/ingest/runOperation.ts @@ -0,0 +1,581 @@ +import { IngestModel, IngestModelReadonly } from './model/IngestModel' +import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit' +import { SofieIngestRundownDataCache, SofieIngestRundownDataCacheGenerator } from './sofieIngestCache' +import { canRundownBeUpdated, getRundownId, getSegmentId } from './lib' +import { JobContext } from '../jobs' +import { IngestPropsBase } from '@sofie-automation/corelib/dist/worker/ingest' +import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' +import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel' +import { Complete, clone } from '@sofie-automation/corelib/dist/lib' +import { CommitIngestData, runWithRundownLockWithoutFetchingRundown } from './lock' +import { DatabasePersistedModel } from '../modelBase' +import { + NrcsIngestChangeDetails, + IngestRundown, + UserOperationChange, + SofieIngestSegment, +} from '@sofie-automation/blueprints-integration' +import { MutableIngestRundownImpl } from '../blueprints/ingest/MutableIngestRundownImpl' +import { ProcessIngestDataContext } from '../blueprints/context' +import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { GenerateRundownMode, updateRundownFromIngestData, updateRundownFromIngestDataInner } from './generationRundown' +import { calculateSegmentsAndRemovalsFromIngestData, calculateSegmentsFromIngestData } from './generationSegment' +import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' +import { IngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { NrcsIngestRundownDataCache } from './nrcsIngestCache' +import { logger } from '../logging' + +export enum ComputedIngestChangeAction { + DELETE = 'delete', + FORCE_DELETE = 'force-delete', +} + +export interface UpdateIngestRundownChange { + ingestRundown: IngestRundownWithSource + changes: 
NrcsIngestChangeDetails | UserOperationChange +} + +export type UpdateIngestRundownResult = UpdateIngestRundownChange | ComputedIngestChangeAction + +export interface ComputedIngestChangeObject { + ingestRundown: SofieIngestRundownWithSource + + // define what needs regenerating + segmentsToRemove: string[] + segmentsUpdatedRanks: Record // contains the new rank + segmentsToRegenerate: SofieIngestSegment[] + regenerateRundown: boolean // Future: full vs metadata? + + segmentExternalIdChanges: Record // old -> new +} + +export type ComputedIngestChanges = ComputedIngestChangeObject | ComputedIngestChangeAction + +/** + * Perform an 'ingest' update operation which modifies a Rundown without modifying the ingest data + * This will automatically do some post-update data changes, to ensure the playout side (partinstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param doWorkFcn Function to run to update the Rundown. Return the blob of data about the change to help the post-update perform its duties. Return null to indicate that nothing changed + */ +export async function runCustomIngestUpdateOperation( + context: JobContext, + data: IngestPropsBase, + doWorkFcn: ( + context: JobContext, + ingestModel: IngestModel, + ingestRundown: SofieIngestRundownWithSource + ) => Promise +): Promise { + if (!data.rundownExternalId) { + throw new Error(`Job is missing rundownExternalId`) + } + + const rundownId = getRundownId(context.studioId, data.rundownExternalId) + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (rundownLock) => { + const span = context.startSpan(`ingestLockFunction.${context.studioId}`) + + // Load the old ingest data + const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) + pIngestModel.catch((e) => logger.error(e)) // Prevent unhandled promise rejection + + const sofieIngestObjectCache = await SofieIngestRundownDataCache.create(context, rundownId) + const sofieIngestRundown = sofieIngestObjectCache.fetchRundown() + if (!sofieIngestRundown) throw new Error(`SofieIngestRundown "${rundownId}" not found`) + + let resultingError: UserError | void | undefined + + try { + const ingestModel = await pIngestModel + + // Load any 'before' data for the commit, + const beforeRundown = ingestModel.rundown + const beforePartMap = generatePartMap(ingestModel) + + // Perform the update operation + const commitData = await doWorkFcn(context, ingestModel, sofieIngestRundown) + + if (commitData) { + const commitSpan = context.startSpan('ingest.commit') + // The change is accepted. 
Perform some playout calculations and save it all + resultingError = await CommitIngestOperation( + context, + ingestModel, + beforeRundown, + beforePartMap, + commitData + ) + commitSpan?.end() + } else { + // Should be no changes + ingestModel.assertNoChanges() + } + } finally { + span?.end() + } + + if (resultingError) throw resultingError + + return rundownId + }) +} + +export type IngestUpdateOperationFunction = ( + oldIngestRundown: IngestRundownWithSource | undefined +) => UpdateIngestRundownResult + +/** + * Perform an ingest update operation on a rundown + * This will automatically do some post-update data changes, to ensure the playout side (partInstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param updateNrcsIngestModelFcn Function to mutate the ingestData. Throw if the requested change is not valid. Return undefined to indicate the ingestData should be deleted + */ +export async function runIngestUpdateOperation( + context: JobContext, + data: IngestPropsBase, + updateNrcsIngestModelFcn: IngestUpdateOperationFunction +): Promise { + return runIngestUpdateOperationBase(context, data, async (nrcsIngestObjectCache) => + updateNrcsIngestObjects(context, nrcsIngestObjectCache, updateNrcsIngestModelFcn) + ) +} + +/** + * Perform an ingest update operation on a rundown + * This will automatically do some post-update data changes, to ensure the playout side (partInstances etc) is updated with the changes + * @param context Context of the job being run + * @param data Ids for the rundown and peripheral device + * @param executeFcn Function to mutate the ingestData. Throw if the requested change is not valid. Return undefined to indicate the ingestData should be deleted + */ +export async function runIngestUpdateOperationBase( + context: JobContext, + data: IngestPropsBase, + executeFcn: (nrcsIngestObjectCache: NrcsIngestRundownDataCache) => Promise +): Promise { + if (!data.rundownExternalId) { + throw new Error(`Job is missing rundownExternalId`) + } + + const rundownId = getRundownId(context.studioId, data.rundownExternalId) + return runWithRundownLockWithoutFetchingRundown(context, rundownId, async (rundownLock) => { + const span = context.startSpan(`ingestLockFunction.${context.studioId}`) + + // Load the old ingest data + const pIngestModel = loadIngestModelFromRundownExternalId(context, rundownLock, data.rundownExternalId) + pIngestModel.catch((e) => logger.error(e)) // Prevent unhandled promise rejection + + const pSofieIngestObjectCache = SofieIngestRundownDataCache.create(context, rundownId) + pSofieIngestObjectCache.catch((e) => logger.error(e)) // Prevent unhandled promise rejection + + const nrcsIngestObjectCache = await NrcsIngestRundownDataCache.create(context, rundownId) + const originalNrcsIngestRundown = clone(nrcsIngestObjectCache.fetchRundown()) + + const ingestRundownChanges = await executeFcn(nrcsIngestObjectCache) + + // Start saving the nrcs ingest data + const pSaveNrcsIngestChanges = nrcsIngestObjectCache.saveToDatabase() + pSaveNrcsIngestChanges.catch((e) => logger.error(e)) // Prevent unhandled promise rejection + + let resultingError: UserError | void | undefined + + try { + // Update the Sofie ingest view + const sofieIngestObjectCache = await pSofieIngestObjectCache + const computedChanges = await updateSofieIngestRundown( + context, + rundownId, + sofieIngestObjectCache, + ingestRundownChanges, + originalNrcsIngestRundown + ) + + // Start saving the 
Sofie ingest data + const pSaveSofieIngestChanges = sofieIngestObjectCache.saveToDatabase() + + try { + resultingError = await updateSofieRundownModel(context, pIngestModel, computedChanges) + } finally { + // Ensure we save the sofie ingest data + await pSaveSofieIngestChanges + } + } finally { + // Ensure we save the nrcs ingest data + await pSaveNrcsIngestChanges + + span?.end() + } + + if (resultingError) throw resultingError + + return rundownId + }) +} + +function updateNrcsIngestObjects( + context: JobContext, + nrcsIngestObjectCache: NrcsIngestRundownDataCache, + updateNrcsIngestModelFcn: (oldIngestRundown: IngestRundownWithSource | undefined) => UpdateIngestRundownResult +): UpdateIngestRundownResult { + const updateNrcsIngestModelSpan = context.startSpan('ingest.calcFcn') + const oldNrcsIngestRundown = nrcsIngestObjectCache.fetchRundown() + const updatedIngestRundown = updateNrcsIngestModelFcn(clone(oldNrcsIngestRundown)) + updateNrcsIngestModelSpan?.end() + + switch (updatedIngestRundown) { + // case UpdateIngestRundownAction.REJECT: + // // Reject change + // return + case ComputedIngestChangeAction.DELETE: + case ComputedIngestChangeAction.FORCE_DELETE: + nrcsIngestObjectCache.delete() + break + default: + nrcsIngestObjectCache.replace(updatedIngestRundown.ingestRundown) + break + } + + return updatedIngestRundown +} + +async function updateSofieIngestRundown( + context: JobContext, + rundownId: RundownId, + sofieIngestObjectCache: SofieIngestRundownDataCache, + ingestRundownChanges: UpdateIngestRundownResult, + previousNrcsIngestRundown: IngestRundown | undefined +): Promise<ComputedIngestChanges | null> { + if ( + ingestRundownChanges === ComputedIngestChangeAction.DELETE || + ingestRundownChanges === ComputedIngestChangeAction.FORCE_DELETE + ) { + // Also delete the Sofie view of the Rundown, so that future ingest calls know it has been deleted + sofieIngestObjectCache.delete() + + return ingestRundownChanges + } else { + const studioBlueprint = context.studioBlueprint.blueprint + + const nrcsIngestRundown = ingestRundownChanges.ingestRundown + const sofieIngestRundown = sofieIngestObjectCache.fetchRundown() + + sortIngestRundown(nrcsIngestRundown) + + const mutableIngestRundown = sofieIngestRundown + ?
new MutableIngestRundownImpl(clone(sofieIngestRundown), true) + : new MutableIngestRundownImpl( + { + externalId: nrcsIngestRundown.externalId, + name: nrcsIngestRundown.name, + type: nrcsIngestRundown.type, + segments: [], + payload: undefined, + userEditStates: {}, + rundownSource: nrcsIngestRundown.rundownSource, + } satisfies Complete<SofieIngestRundownWithSource>, + false + ) + + const blueprintContext = new ProcessIngestDataContext( + { + name: 'processIngestData', + identifier: `studio:${context.studioId},blueprint:${studioBlueprint.blueprintId}`, + }, + context.studio, + context.getStudioBlueprintConfig() + ) + + // Let blueprints apply changes to the Sofie ingest data + if (typeof studioBlueprint.processIngestData === 'function') { + await studioBlueprint.processIngestData( + blueprintContext, + mutableIngestRundown, + nrcsIngestRundown, + previousNrcsIngestRundown, + ingestRundownChanges.changes + ) + } else if (ingestRundownChanges.changes.source === 'ingest') { + // Backwards compatible mode: the Blueprint has not defined a processIngestData() + // so we'll simply accept the incoming changes as-is: + + if (nrcsIngestRundown.type === 'mos') { + // MOS has a special flow to group parts into segments + const groupedResult = blueprintContext.groupMosPartsInRundownAndChangesWithSeparator( + nrcsIngestRundown, + previousNrcsIngestRundown, + ingestRundownChanges.changes, + ';' // Backwards compatibility + ) + + blueprintContext.defaultApplyIngestChanges( + mutableIngestRundown, + groupedResult.nrcsIngestRundown, + groupedResult.ingestChanges + ) + } else { + blueprintContext.defaultApplyIngestChanges( + mutableIngestRundown, + nrcsIngestRundown, + ingestRundownChanges.changes + ) + } + } else { + throw new Error(`Blueprint missing processIngestData function`) + } + + // Ensure the rundownSource is propagated + mutableIngestRundown.updateRundownSource(nrcsIngestRundown.rundownSource) + + const ingestObjectGenerator = new SofieIngestRundownDataCacheGenerator(rundownId) + const resultChanges = mutableIngestRundown.intoIngestRundown(ingestObjectGenerator) + + // Sync changes to the cache + sofieIngestObjectCache.replaceDocuments(resultChanges.changedCacheObjects) + sofieIngestObjectCache.removeAllOtherDocuments(resultChanges.allCacheObjectIds) + + return resultChanges.computedChanges + } +} + +function sortIngestRundown(rundown: IngestRundown): void { + rundown.segments.sort((a, b) => a.rank - b.rank) + for (const segment of rundown.segments) { + segment.parts.sort((a, b) => a.rank - b.rank) + } +} + +async function updateSofieRundownModel( + context: JobContext, + pIngestModel: Promise<IngestModel & DatabasePersistedModel>, + computedIngestChanges: ComputedIngestChanges | null +) { + const ingestModel = await pIngestModel + + // Load any 'before' data for the commit + const beforeRundown = ingestModel.rundown + const beforePartMap = generatePartMap(ingestModel) + + let commitData: CommitIngestData | null = null + + if ( + computedIngestChanges === ComputedIngestChangeAction.DELETE || + computedIngestChanges === ComputedIngestChangeAction.FORCE_DELETE + ) { + // Get the rundown, and fail if it doesn't exist + const rundown = ingestModel.getRundown() + + // Check if it can be deleted + const canRemove = + computedIngestChanges === ComputedIngestChangeAction.FORCE_DELETE || canRundownBeUpdated(rundown, false) + if (!canRemove) throw UserError.create(UserErrorMessage.RundownRemoveWhileActive, { name: rundown.name }) + + // The rundown has been deleted + commitData = { + changedSegmentIds: [], + removedSegmentIds: [], + renamedSegments: new Map(), + +
removeRundown: true, + returnRemoveFailure: true, + } + } else if (computedIngestChanges) { + const calcSpan = context.startSpan('ingest.calcFcn') + commitData = await applyCalculatedIngestChangesToModel(context, ingestModel, computedIngestChanges) + calcSpan?.end() + } + + let resultingError: UserError | void | undefined + + if (commitData) { + const commitSpan = context.startSpan('ingest.commit') + // The change is accepted. Perform some playout calculations and save it all + resultingError = await CommitIngestOperation(context, ingestModel, beforeRundown, beforePartMap, commitData) + commitSpan?.end() + } else { + // Should be no changes + ingestModel.assertNoChanges() + } + + return resultingError +} + +async function applyCalculatedIngestChangesToModel( + context: JobContext, + ingestModel: IngestModel, + computedIngestChanges: ComputedIngestChangeObject +): Promise<CommitIngestData | null> { + const newIngestRundown = computedIngestChanges.ingestRundown + + // Ensure the rundown can be updated + const rundown = ingestModel.rundown + // if (!canRundownBeUpdated(rundown, false)) return null + if (!canRundownBeUpdated(rundown, computedIngestChanges.regenerateRundown)) return null + + const span = context.startSpan('ingest.applyCalculatedIngestChangesToModel') + + if (!rundown || computedIngestChanges.regenerateRundown) { + // Do a full regeneration + + // Perform any segment id changes, to ensure the contents remain correctly linked + const renamedSegments = applyExternalIdDiff(ingestModel, computedIngestChanges, true) + + // Perform the regeneration + const result = await updateRundownFromIngestData( + context, + ingestModel, + newIngestRundown, + GenerateRundownMode.Create + ) + + span?.end() + if (result) { + return { + ...result, + renamedSegments, + } + } else { + return { + changedSegmentIds: [], + removedSegmentIds: [], + removeRundown: false, + renamedSegments, + } + } + } else { + // Update segment ranks: + for (const [segmentExternalId, newRank] of Object.entries(computedIngestChanges.segmentsUpdatedRanks)) { + const segment = ingestModel.getSegmentByExternalId(segmentExternalId) + if (segment) { + segment.setRank(newRank) + } + } + + // Updated segments that have had their segment.externalId changed: + const renamedSegments = applyExternalIdDiff(ingestModel, computedIngestChanges, true) + + // If requested, regenerate the rundown in the 'metadata' mode + if (computedIngestChanges.regenerateRundown) { + const regenerateCommitData = await updateRundownFromIngestDataInner( + context, + ingestModel, + newIngestRundown, + GenerateRundownMode.MetadataChange + ) + if (regenerateCommitData?.regenerateAllContents) { + const regeneratedSegmentIds = await calculateSegmentsAndRemovalsFromIngestData( + context, + ingestModel, + newIngestRundown, + regenerateCommitData.allRundownWatchedPackages + ) + + // TODO - should this include the ones which were renamed/updated ranks above?
+ return { + changedSegmentIds: regeneratedSegmentIds.changedSegmentIds, + removedSegmentIds: regeneratedSegmentIds.removedSegmentIds, + renamedSegments: renamedSegments, + + removeRundown: false, + } satisfies CommitIngestData + } + } + + // Create/Update segments + const changedSegmentIds = await calculateSegmentsFromIngestData( + context, + ingestModel, + computedIngestChanges.segmentsToRegenerate, + null + ) + + const changedSegmentIdsSet = new Set(changedSegmentIds) + for (const segmentId of Object.keys(computedIngestChanges.segmentsUpdatedRanks)) { + changedSegmentIdsSet.add(ingestModel.getSegmentIdFromExternalId(segmentId)) + } + + // Remove/orphan old segments + const orphanedSegmentIds: SegmentId[] = [] + for (const segmentExternalId of computedIngestChanges.segmentsToRemove) { + const segment = ingestModel.getSegmentByExternalId(segmentExternalId) + if (segment) { + // We orphan it and queue it for deletion. The commit phase will complete the deletion if possible + orphanedSegmentIds.push(segment.segment._id) + segment.setOrphaned(SegmentOrphanedReason.DELETED) + + segment.removeAllParts() + + // It can't also have been changed if it is deleted + changedSegmentIdsSet.delete(segment.segment._id) + } + } + + span?.end() + return { + changedSegmentIds: Array.from(changedSegmentIdsSet), + removedSegmentIds: orphanedSegmentIds, // Only inform about the ones that weren't renamed + renamedSegments: renamedSegments, + + removeRundown: false, + } satisfies CommitIngestData + } +} + +/** + * Apply the externalId renames from a computed ingest change + * @param ingestModel Ingest model of the rundown being updated + * @param segmentDiff Calculated diff + * @param canDiscardParts Whether the Parts of a renamed Segment may be discarded when the new id collides with an existing Segment + * @returns Map of the SegmentId changes + */ +function applyExternalIdDiff( + ingestModel: IngestModel, + segmentDiff: Pick<ComputedIngestChangeObject, 'segmentExternalIdChanges' | 'segmentsUpdatedRanks'>, + canDiscardParts: boolean +): CommitIngestData['renamedSegments'] { + // Updated segments that have had their segment.externalId changed: + const renamedSegments = new Map<SegmentId, SegmentId>() + for (const [oldSegmentExternalId, newSegmentExternalId] of Object.entries( + segmentDiff.segmentExternalIdChanges + )) { + const oldSegmentId = getSegmentId(ingestModel.rundownId, oldSegmentExternalId) + const newSegmentId = getSegmentId(ingestModel.rundownId, newSegmentExternalId) + + // Track the rename + renamedSegments.set(oldSegmentId, newSegmentId) + + // If the segment doesn't exist (it should), then there isn't a segment to rename + const oldSegment = ingestModel.getSegment(oldSegmentId) + if (!oldSegment) continue + + if (ingestModel.getSegment(newSegmentId)) { + // If the new SegmentId already exists, we need to discard the old one rather than trying to merge it.
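+ // (There is no mechanism for merging the contents of two Segments, so the old one must be removed and the new one regenerated.)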
+ // This can only be done if the caller is expecting to regenerate Segments + const canDiscardPartsForSegment = canDiscardParts && !segmentDiff.segmentsUpdatedRanks[oldSegmentExternalId] + if (!canDiscardPartsForSegment) { + throw new Error(`Cannot merge Segments with only rank changes`) + } + + // Remove the old Segment and its contents, the new one will be generated shortly + ingestModel.removeSegment(oldSegmentId) + } else { + // Perform the rename + ingestModel.changeSegmentId(oldSegmentId, newSegmentId) + } + } + + return renamedSegments +} + +function generatePartMap(ingestModel: IngestModelReadonly): BeforeIngestOperationPartMap { + const rundown = ingestModel.rundown + if (!rundown) return new Map() + + const res = new Map<SegmentId, Array<{ id: PartId; rank: number }>>() + for (const segment of ingestModel.getAllSegments()) { + res.set( + segment.segment._id, + segment.parts.map((p) => ({ id: p.part._id, rank: p.part._rank })) + ) + } + return res +} diff --git a/packages/job-worker/src/ingest/sofieIngestCache.ts b/packages/job-worker/src/ingest/sofieIngestCache.ts new file mode 100644 index 0000000000..8340682fa5 --- /dev/null +++ b/packages/job-worker/src/ingest/sofieIngestCache.ts @@ -0,0 +1,269 @@ +import { RundownId, SegmentId, SofieIngestDataCacheObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + SofieIngestDataCacheObj, + SofieIngestCacheType, + SofieIngestDataCacheObjRundown, + SofieIngestDataCacheObjSegment, + SofieIngestDataCacheObjPart, + SofieIngestRundownWithSource, +} from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import _ = require('underscore') +import { SofieIngestPart, SofieIngestSegment } from '@sofie-automation/blueprints-integration' +import { JobContext } from '../jobs' +import { getPartId, getSegmentId } from './lib' +import { SetOptional } from 'type-fest' +import { groupByToMap, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' +import { ICollection } from '../db' +import { getCurrentTime } from '../lib' + +/** + * Represents a Rundown in the SofieIngestDataCache collection and provides methods for interacting with it.
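+ *
+ * @example
+ * // Illustrative sketch only (not part of this change); a simplified variant of how
+ * // runOperation.ts writes blueprint-processed ingest data back into this cache.
+ * // `context`, `rundownId` and `sofieIngestRundown` (a SofieIngestRundownWithSource) are assumed:
+ * const sofieCache = await SofieIngestRundownDataCache.create(context, rundownId)
+ * const generator = new SofieIngestRundownDataCacheGenerator(rundownId)
+ * const cacheObjs = generator.generateCacheForRundown(sofieIngestRundown)
+ * sofieCache.replaceDocuments(cacheObjs)
+ * sofieCache.removeAllOtherDocuments(cacheObjs.map((obj) => obj._id))
+ * await sofieCache.saveToDatabase()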
+ */ +export class SofieIngestRundownDataCache { + readonly #changedDocumentIds = new Set() + + private constructor( + private readonly context: JobContext, + private readonly collection: ICollection, + private documents: SofieIngestDataCacheObj[] + ) {} + + static async create(context: JobContext, rundownId: RundownId): Promise { + const docs = await context.directCollections.SofieIngestDataCache.findFetch({ rundownId }) + + return new SofieIngestRundownDataCache(context, context.directCollections.SofieIngestDataCache, docs) + } + + /** + * Fetch the IngestRundown contained in the cache + * Note: This does not deep clone the objects, so the returned object should not be modified + */ + fetchRundown(): SofieIngestRundownWithSource | undefined { + const span = this.context.startSpan('ingest.ingestCache.loadCachedRundownData') + + const cachedRundown = this.documents.find( + (e): e is SofieIngestDataCacheObjRundown => e.type === SofieIngestCacheType.RUNDOWN + ) + if (!cachedRundown) { + span?.end() + return undefined + } + + const ingestRundown: SofieIngestRundownWithSource = { + ...cachedRundown.data, + segments: [], + } + + const hasSegmentId = ( + obj: SofieIngestDataCacheObj + ): obj is SofieIngestDataCacheObjSegment | SofieIngestDataCacheObjPart => { + return !!obj.segmentId + } + + const segmentMap = groupByToMap(this.documents.filter(hasSegmentId), 'segmentId') + for (const objs of segmentMap.values()) { + const segmentEntry = objs.find( + (e): e is SofieIngestDataCacheObjSegment => e.type === SofieIngestCacheType.SEGMENT + ) + if (segmentEntry) { + const ingestSegment: SofieIngestSegment = { + ...segmentEntry.data, + parts: [], + } + + for (const entry of objs) { + if (entry.type === SofieIngestCacheType.PART) { + ingestSegment.parts.push(entry.data) + } + } + + ingestSegment.parts = _.sortBy(ingestSegment.parts, (s) => s.rank) + ingestRundown.segments.push(ingestSegment) + } + } + + ingestRundown.segments = _.sortBy(ingestRundown.segments, (s) => s.rank) + + span?.end() + return ingestRundown + } + + /** + * Delete the contents of the cache + */ + delete(): void { + // Mark each document for deletion + for (const doc of this.documents) { + this.#changedDocumentIds.add(doc._id) + } + + this.documents = [] + } + + /** + * Remove all documents from the cache other than the ids provided + * @param documentIdsToKeep The IDs of the documents to keep in the cache + */ + removeAllOtherDocuments(documentIdsToKeep: SofieIngestDataCacheObjId[]): void { + const documentIdsToKeepSet = new Set(documentIdsToKeep) + + const newDocuments: SofieIngestDataCacheObj[] = [] + for (const document of this.documents) { + if (!documentIdsToKeepSet.has(document._id)) { + this.#changedDocumentIds.add(document._id) + } else { + newDocuments.push(document) + } + } + this.documents = newDocuments + } + + /** + * Replace/insert a set of documents into the cache + * This can be used to insert or update multiple documents at once + * This does not diff the documents, it assumes that has already been done prior to calling this method + * @param changedCacheObjects Documents to store in the cache + */ + replaceDocuments(changedCacheObjects: SofieIngestDataCacheObj[]): void { + const newDocumentsMap = normalizeArrayToMap(this.documents, '_id') + + for (const newDocument of changedCacheObjects) { + this.#changedDocumentIds.add(newDocument._id) + newDocumentsMap.set(newDocument._id, newDocument) + } + + this.documents = Array.from(newDocumentsMap.values()) + } + + /** + * Write any changes in the cache to the database + 
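* Changed documents are upserted via replaceOne (stamped with a fresh `modified` time) and removed ids are deleted with a single deleteMany, all in one bulkWrite. +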
*/ + async saveToDatabase(): Promise { + if (this.#changedDocumentIds.size === 0) return + + const documentsMap = normalizeArrayToMap(this.documents, '_id') + + const modifiedTime = getCurrentTime() + + const updates: AnyBulkWriteOperation[] = [] + const removedIds: SofieIngestDataCacheObjId[] = [] + for (const changedId of this.#changedDocumentIds) { + const newDoc = documentsMap.get(changedId) + if (!newDoc) { + removedIds.push(changedId) + } else { + updates.push({ + replaceOne: { + filter: { + _id: changedId, + }, + replacement: { + ...newDoc, + modified: modifiedTime, + }, + upsert: true, + }, + }) + } + } + + if (removedIds.length) { + updates.push({ + deleteMany: { + filter: { + _id: { $in: removedIds as any }, + }, + }, + }) + } + + await this.collection.bulkWrite(updates) + } +} + +/** + * Convenience methods useful when interacting with the SofieIngestRundownDataCache + */ +export class SofieIngestRundownDataCacheGenerator { + constructor(public readonly rundownId: RundownId) {} + + getPartObjectId(partExternalId: string): SofieIngestDataCacheObjId { + return protectString(`${this.rundownId}_part_${partExternalId}`) + } + getSegmentObjectId(segmentExternalId: string): SofieIngestDataCacheObjId { + return protectString(`${this.rundownId}_segment_${segmentExternalId}`) + } + getRundownObjectId(): SofieIngestDataCacheObjId { + return protectString(unprotectString(this.rundownId)) + } + + generatePartObject(segmentId: SegmentId, part: SofieIngestPart): SofieIngestDataCacheObjPart { + return { + _id: this.getPartObjectId(part.externalId), + type: SofieIngestCacheType.PART, + rundownId: this.rundownId, + segmentId: segmentId, + partId: getPartId(this.rundownId, part.externalId), + modified: 0, // Populated when saving + data: part, + } + } + + generateSegmentObject(ingestSegment: SetOptional): SofieIngestDataCacheObjSegment { + return { + _id: this.getSegmentObjectId(ingestSegment.externalId), + type: SofieIngestCacheType.SEGMENT, + rundownId: this.rundownId, + segmentId: getSegmentId(this.rundownId, ingestSegment.externalId), + modified: 0, // Populated when saving + data: { + ...ingestSegment, + parts: [], // omit the parts, they come as separate objects + }, + } + } + + generateRundownObject( + ingestRundown: SetOptional + ): SofieIngestDataCacheObjRundown { + return { + _id: this.getRundownObjectId(), + type: SofieIngestCacheType.RUNDOWN, + rundownId: this.rundownId, + modified: 0, // Populated when saving + data: { + ...ingestRundown, + segments: [], // omit the segments, they come as separate objects + }, + } + } + + generateCacheForRundown(ingestRundown: SofieIngestRundownWithSource): SofieIngestDataCacheObj[] { + const cacheEntries: SofieIngestDataCacheObj[] = [] + + const rundown = this.generateRundownObject(ingestRundown) + cacheEntries.push(rundown) + + for (const segment of ingestRundown.segments) { + cacheEntries.push(...this.generateCacheForSegment(segment)) + } + + return cacheEntries + } + + private generateCacheForSegment(ingestSegment: SofieIngestSegment): SofieIngestDataCacheObj[] { + const cacheEntries: Array = [] + + const segment = this.generateSegmentObject(ingestSegment) + cacheEntries.push(segment) + + const segmentId = getSegmentId(this.rundownId, ingestSegment.externalId) + for (const part of ingestSegment.parts) { + cacheEntries.push(this.generatePartObject(segmentId, part)) + } + + return cacheEntries + } +} diff --git a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts index 
ac90df10db..9cb61b334b 100644 --- a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts +++ b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts @@ -31,6 +31,7 @@ import { import { validateAdlibTestingPartInstanceProperties } from '../playout/adlibTesting' import { ReadonlyDeep } from 'type-fest' import { convertIngestModelToPlayoutRundownWithSegments } from './commit' +import { PlayoutRundownModel } from '../playout/model/PlayoutRundownModel' type PlayStatus = 'previous' | 'current' | 'next' type SyncedInstance = { @@ -131,12 +132,22 @@ export async function syncChangesToPartInstances( pieceInstances: pieceInstancesInPart.map((p) => convertPieceInstanceToBlueprints(p.pieceInstance)), } + const part = newPart ?? existingPartInstance.partInstance.part + + let playoutRundownModelForPart: PlayoutRundownModel | undefined = playoutRundownModel + // Handle a case where the part is in a different rundown than the playoutRundownModel: + if (playoutRundownModel.rundown._id !== part.rundownId) { + playoutRundownModelForPart = playoutModel.getRundown(part.rundownId) + } + if (!playoutRundownModelForPart) + throw new Error(`Internal Error: playoutRundownModelForPart is undefined (it should never be)`) + const proposedPieceInstances = getPieceInstancesForPart( context, playoutModel, previousPartInstance, - playoutRundownModel, - newPart ?? existingPartInstance.partInstance.part, + playoutRundownModelForPart, + part, await piecesThatMayBeActive, existingPartInstance.partInstance._id ) @@ -211,10 +222,11 @@ export async function syncChangesToPartInstances( // TODO - these dont get shown to the user currently // TODO - old notes from the sync may need to be pruned, or we will end up with duplicates and 'stuck' notes?+ existingPartInstance.appendNotes(newNotes) - - validateAdlibTestingPartInstanceProperties(context, playoutModel, existingPartInstance) } + // Make sure an adlib-testing part is still labeled correctly. 
This could happen if the partInstance used any recently updated adlibs + validateAdlibTestingPartInstanceProperties(context, playoutModel, existingPartInstance) + if (existingPartInstance.partInstance._id === playoutModel.playlist.currentPartInfo?.partInstanceId) { // This should be run after 'current', before 'next': await syncPlayheadInfinitesForNextPartInstance( diff --git a/packages/job-worker/src/ingest/userOperation.ts b/packages/job-worker/src/ingest/userOperation.ts new file mode 100644 index 0000000000..c3e068715b --- /dev/null +++ b/packages/job-worker/src/ingest/userOperation.ts @@ -0,0 +1,23 @@ +import { UserExecuteChangeOperationProps } from '@sofie-automation/corelib/dist/worker/ingest' +import { JobContext } from '../jobs' +import { UpdateIngestRundownResult, runIngestUpdateOperationBase } from './runOperation' +import { IngestChangeType } from '@sofie-automation/blueprints-integration' + +export async function handleUserExecuteChangeOperation( + context: JobContext, + data: UserExecuteChangeOperationProps +): Promise { + await runIngestUpdateOperationBase(context, data, async (nrcsIngestObjectCache) => { + const nrcsIngestRundown = nrcsIngestObjectCache.fetchRundown() + if (!nrcsIngestRundown) throw new Error(`Rundown "${data.rundownExternalId}" not found`) + + return { + ingestRundown: nrcsIngestRundown, + changes: { + source: IngestChangeType.User, + operation: data.operation as unknown as any, + operationTarget: data.operationTarget, + }, + } satisfies UpdateIngestRundownResult + }) +} diff --git a/packages/job-worker/src/jobs/index.ts b/packages/job-worker/src/jobs/index.ts index c2e71c4ec9..f8ab90e9f4 100644 --- a/packages/job-worker/src/jobs/index.ts +++ b/packages/job-worker/src/jobs/index.ts @@ -64,6 +64,26 @@ export interface JobContext extends StudioCacheContext { /** Hack: fast-track the timeline out to the playout-gateway. */ hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void + + /** + * Set whether a routeset for this studio is active. + * Any routeset `exclusivityGroup` will be respected. 
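+ * (Only one routeSet in an `exclusivityGroup` can be active at a time, so activating one will deactivate the others in its group.)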
+ * The changes will be immediately visible in subsequent calls to the `studio` getter + * @param routeSetId The routeSetId to change + * @param isActive Whether the routeSet should be active, or toggle + * @returns Whether the change could affect playout + */ + setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean + + /** + * Save any changes to the routesets for this studio to the database + */ + saveRouteSetChanges(): Promise + + /** + * Discard any unsaved changes to the routesets for this studio + */ + discardRouteSetChanges(): void } /** diff --git a/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts b/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts index bc410b2897..deea4059bc 100644 --- a/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts +++ b/packages/job-worker/src/playout/__tests__/helpers/rundowns.ts @@ -59,7 +59,6 @@ export async function setupRundownBase( externalId: 'MOCK_SEGMENT_0', rundownId: rundown._id, name: 'Segment 0', - externalModified: 1, } await context.mockCollections.Segments.insertOne(segment0) /* tslint:disable:ter-indent*/ diff --git a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts index 2d5f84d4be..bdf8a6dbec 100644 --- a/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts +++ b/packages/job-worker/src/playout/__tests__/selectNextPart.test.ts @@ -417,4 +417,11 @@ describe('selectNextPart', () => { expect(nextPart).toEqual({ index: 6, part: defaultParts[6], consumesQueuedSegmentId: false }) } }) + + test('on last part, with queued segment', () => { + // On the last part in the rundown, with a queuedSegment id set to earlier + defaultPlaylist.queuedSegmentId = segment2 + const nextPart = selectNextPart2(defaultParts[8].toPartInstance(), defaultParts[8].toPartInstance()) + expect(nextPart).toEqual({ index: 4, part: defaultParts[4], consumesQueuedSegmentId: true }) + }) }) diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts index 10f0739173..e6ac20f779 100644 --- a/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts +++ b/packages/job-worker/src/playout/abPlayback/__tests__/abPlayback.spec.ts @@ -1,4 +1,5 @@ import { + AbPlayerId, ABResolverOptions, IBlueprintPieceType, PieceAbSessionInfo, @@ -79,7 +80,7 @@ function resolveAbSessions( timelineObjs: OnGenerateTimelineObjExt[], previousAssignmentMap: ABSessionAssignments, sessionPool: string, - playerIds: Array, + playerIds: Array, now: number ): AssignmentResult { const sessionRequests = calculateSessionTimeRanges( @@ -458,7 +459,7 @@ describe('resolveMediaPlayers', () => { }, inst_1_clip_def: { sessionId: 'inst_1_clip_def', - playerId: 3, + playerId: 1, lookahead: true, }, } @@ -482,13 +483,13 @@ describe('resolveMediaPlayers', () => { [1, 2], 0 ) - expect(assignments.failedRequired).toHaveLength(0) + expect(assignments.failedRequired).toEqual(['inst_2_clip_ghi']) expect(assignments.failedOptional).toHaveLength(0) expect(assignments.requests).toHaveLength(3) expect(assignments.requests).toEqual([ - { end: 7400, id: 'inst_0_clip_abc', playerId: 5, start: 2400, optional: false }, - { end: 7400, id: 'inst_1_clip_def', playerId: 3, start: 2400, optional: false }, - { end: 6800, id: 'inst_2_clip_ghi', playerId: 1, start: 2800, optional: false }, + { end: 7400, id: 'inst_0_clip_abc', playerId: 2, start: 2400, optional: false }, + { end: 
7400, id: 'inst_1_clip_def', playerId: 1, start: 2400, optional: false }, + { end: 6800, id: 'inst_2_clip_ghi', playerId: undefined, start: 2800, optional: false }, ]) expect(mockGetPieceSessionId).toHaveBeenCalledTimes(3) diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts index 5cc5d4c267..cf961f0490 100644 --- a/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts +++ b/packages/job-worker/src/playout/abPlayback/__tests__/abPlaybackResolver.spec.ts @@ -628,4 +628,149 @@ describe('resolveAbAssignmentsFromRequests', () => { expectGotPlayer(res, 'e', 3) expectGotPlayer(res, 'f', undefined) }) + + describe('add/remove players', () => { + test('reshuffle lookahead when removing player', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: undefined, + playerId: 2, + }, + // previous clip + { + id: 'b', + start: 0, + playerId: 1, + end: 5000, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 3, // From before + end: undefined, + lookaheadRank: 2, + }, + { + id: 'f', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 3, + playerId: 2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, TWO_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) + expectGotPlayer(res, 'a', 2) + expectGotPlayer(res, 'b', 1) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', undefined) + expectGotPlayer(res, 'f', undefined) + }) + + test('reshuffle current when removing player', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: undefined, + playerId: 3, + }, + // previous clip + { + id: 'b', + start: 0, + playerId: 1, + end: 5000, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 2, + end: undefined, + lookaheadRank: 2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, TWO_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) + expectGotPlayer(res, 'a', 2) + expectGotPlayer(res, 'b', 1) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', undefined) + }) + + test('add player allows distributing timed clips', () => { + const requests: SessionRequest[] = [ + // current clip + { + id: 'a', + start: 1000, + end: 11000, + playerId: 1, + }, + { + id: 'b', + start: 13000, // soon + end: undefined, + playerId: 1, + }, + { + id: 'c', + start: 1000, + end: undefined, + playerId: 2, + }, + // lookaheads + { + id: 'd', + start: Number.POSITIVE_INFINITY, + end: undefined, + lookaheadRank: 1, + playerId: 1, + }, + { + id: 'e', + start: Number.POSITIVE_INFINITY, + playerId: 2, + end: undefined, + lookaheadRank: 2, + }, + ] + + const res = resolveAbAssignmentsFromRequests(resolverOptions, THREE_SLOTS, requests, 10000) + expect(res).toBeTruthy() + expect(res.failedOptional).toEqual([]) + expect(res.failedRequired).toEqual([]) + expectGotPlayer(res, 'a', 1) + expectGotPlayer(res, 'b', 3) + expectGotPlayer(res, 'c', 2) + expectGotPlayer(res, 'd', 1) + expectGotPlayer(res, 'e', 
undefined)
+		})
+	})
 })
diff --git a/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts
new file mode 100644
index 0000000000..88228e04ec
--- /dev/null
+++ b/packages/job-worker/src/playout/abPlayback/__tests__/routeSetDisabling.spec.ts
@@ -0,0 +1,211 @@
+import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio'
+import { abPoolFilterDisabled, findPlayersInRouteSets } from '../routeSetDisabling'
+import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration'
+import { clone } from '@sofie-automation/corelib/dist/lib'
+
+describe('route set disabling ab players', () => {
+	const POOL_NAME = '_test_'
+	function runDisablePlayersFiltering(
+		routeSets: Record<string, StudioRouteSet>,
+		players: ABPlayerDefinition[]
+	): ABPlayerDefinition[] {
+		const members = findPlayersInRouteSets(routeSets)
+		return abPoolFilterDisabled(POOL_NAME, players, members)
+	}
+
+	const DEFAULT_PLAYERS: ABPlayerDefinition[] = [
+		{ playerId: 1 },
+		{ playerId: 2 },
+		{ playerId: 3 },
+		{ playerId: 4 },
+		{ playerId: 5 },
+	]
+
+	test('no routesets', () => {
+		const result = runDisablePlayersFiltering({}, DEFAULT_PLAYERS)
+		expect(result).toEqual(DEFAULT_PLAYERS)
+	})
+
+	test('mismatch of playerId types', () => {
+		const routesets: Record<string, StudioRouteSet> = {
+			route1: {
+				name: '',
+				active: false,
+				behavior: StudioRouteBehavior.TOGGLE,
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: '1', // because ui field is always a string
+					},
+				],
+			},
+		}
+
+		const players: ABPlayerDefinition[] = [
+			{
+				playerId: 1, // number because blueprint defined it as such
+			},
+			{ playerId: 2 },
+		]
+
+		const result = runDisablePlayersFiltering(routesets, players)
+
+		const expectedPlayers = players.filter((p) => p.playerId !== 1)
+		expect(result).toEqual(expectedPlayers)
+	})
+
+	describe('single routeset per player', () => {
+		const ROUTESETS_SEPARATE: Record<string, StudioRouteSet> = {
+			pl1: {
+				name: '',
+				active: true,
+				behavior: StudioRouteBehavior.TOGGLE,
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 1,
+					},
+				],
+			},
+			pl2: {
+				name: '',
+				active: true,
+				behavior: StudioRouteBehavior.TOGGLE,
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 2,
+					},
+				],
+			},
+			pl3: {
+				name: '',
+				active: true,
+				behavior: StudioRouteBehavior.TOGGLE,
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 3,
+					},
+				],
+			},
+		}
+
+		test('active routes', () => {
+			const result = runDisablePlayersFiltering(ROUTESETS_SEPARATE, DEFAULT_PLAYERS)
+			expect(result).toEqual(DEFAULT_PLAYERS)
+		})
+
+		test('inactive routes', () => {
+			const routesets = clone(ROUTESETS_SEPARATE)
+			routesets['pl3'].active = false
+
+			// deactivate this, but for a different pool
+			routesets['pl2'].active = false
+			routesets['pl2'].abPlayers[0].poolName = 'ANOTHER'
+
+			const result = runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS)
+
+			const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 3)
+			expect(result).toEqual(expectedPlayers)
+		})
+	})
+
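The 'mismatch of playerId types' test above pins down a subtle detail: routeset members edited in the UI arrive as strings, while blueprints may declare numeric player ids. A short sketch of the normalisation that makes the two comparable (this mirrors the `String()` coercion used by `findPlayersInRouteSets` and `abPoolFilterDisabled` in this diff):

```ts
// Routeset members store playerId as the UI delivered it (a string); blueprints
// may use numbers. Keying by String(playerId) makes the two comparable.
const enabledByPlayer = new Map<string, boolean>()
enabledByPlayer.set('1', false) // member of an inactive routeset, stored as a string

const blueprintPlayerId = 1 // a number, as the blueprint defined it
const state = enabledByPlayer.get(String(blueprintPlayerId))
// state === false, so player 1 is filtered out - exactly what the test asserts
```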
+	describe('multiple routesets per player', () => {
+		/**
+		 * This is testing the scenario of these 3 routesets where only one can be active at a time
+		 */
+		const ROUTESETS_GROUPED: Record<string, StudioRouteSet> = {
+			all: {
+				name: '',
+				active: true,
+				behavior: StudioRouteBehavior.TOGGLE,
+				exclusivityGroup: 'ab',
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 1,
+					},
+					{
+						poolName: POOL_NAME,
+						playerId: 2,
+					},
+					{
+						poolName: POOL_NAME,
+						playerId: 3,
+					},
+					{
+						poolName: POOL_NAME,
+						playerId: 4,
+					},
+				],
+			},
+			first: {
+				name: '',
+				active: false,
+				behavior: StudioRouteBehavior.TOGGLE,
+				exclusivityGroup: 'ab',
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 1,
+					},
+					{
+						poolName: POOL_NAME,
+						playerId: 2,
+					},
+				],
+			},
+			second: {
+				name: '',
+				active: false,
+				behavior: StudioRouteBehavior.TOGGLE,
+				exclusivityGroup: 'ab',
+				routes: [],
+				abPlayers: [
+					{
+						poolName: POOL_NAME,
+						playerId: 3,
+					},
+					{
+						poolName: POOL_NAME,
+						playerId: 4,
+					},
+				],
+			},
+		}
+
+		test('all', () => {
+			const result = runDisablePlayersFiltering(ROUTESETS_GROUPED, DEFAULT_PLAYERS)
+			expect(result).toEqual(DEFAULT_PLAYERS)
+		})
+
+		test('first', () => {
+			const routesets = clone(ROUTESETS_GROUPED)
+			routesets['all'].active = false
+			routesets['first'].active = true
+
+			const result = runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS)
+
+			const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 3 && p.playerId !== 4)
+			expect(result).toEqual(expectedPlayers)
+		})
+
+		test('second', () => {
+			const routesets = clone(ROUTESETS_GROUPED)
+			routesets['all'].active = false
+			routesets['second'].active = true
+
+			const result = runDisablePlayersFiltering(routesets, DEFAULT_PLAYERS)
+
+			const expectedPlayers = DEFAULT_PLAYERS.filter((p) => p.playerId !== 1 && p.playerId !== 2)
+			expect(result).toEqual(expectedPlayers)
+		})
+	})
+})
diff --git a/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts b/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts
index 782a8b9229..217f9bab51 100644
--- a/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts
+++ b/packages/job-worker/src/playout/abPlayback/abPlaybackResolver.ts
@@ -1,16 +1,14 @@
-import { ABResolverOptions } from '@sofie-automation/blueprints-integration'
+import type { AbPlayerId, ABResolverOptions } from '@sofie-automation/blueprints-integration'
 import { clone } from '@sofie-automation/corelib/dist/lib'
 import * as _ from 'underscore'
 
-export type PlayerId = number | string
-
 export interface SessionRequest {
 	readonly id: string
 	readonly start: number
 	readonly end: number | undefined
 	readonly optional?: boolean
 	readonly lookaheadRank?: number
-	playerId?: PlayerId
+	playerId?: AbPlayerId
 }
 
 export interface AssignmentResult {
@@ -23,7 +21,7 @@ export interface AssignmentResult {
 }
 
 interface SlotAvailability {
-	id: PlayerId
+	id: AbPlayerId
 	before: (SessionRequest & { end: number }) | null
 	after: SessionRequest | null
 	clashes: SessionRequest[]
@@ -57,10 +55,26 @@ function safeMin<T>(arr: T[], func: (val: T) => number): T | undefined {
 */
 export function resolveAbAssignmentsFromRequests(
 	resolverOptions: ABResolverOptions,
-	playerIds: PlayerId[],
+	playerIds: AbPlayerId[],
 	rawRequests: SessionRequest[],
 	now: number // Current time
 ): AssignmentResult {
+	// Check that the player assigned still exists
+	const validPlayerIdsSet = new Set(playerIds)
+	for (const req of rawRequests) {
+		if (req.playerId !== undefined && !validPlayerIdsSet.has(req.playerId)) {
+			delete req.playerId
+		}
+	}
+
+	const originalLookaheadAssignments: Record<string, AbPlayerId> = {}
+	for (const req of rawRequests) {
+		if (req.lookaheadRank !== undefined && req.playerId !== undefined) {
+			originalLookaheadAssignments[req.id] = req.playerId
+			delete req.playerId
+		}
+	}
+
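The two loops just added run before any resolving happens: any session pinned to a player that no longer exists loses its assignment, then lookahead assignments are set aside so they can be re-applied by rank later. A compact sketch of the observable effect, with types simplified from this file:

```ts
// Simplified stand-ins for this module's types.
type AbPlayerId = number | string
interface Req {
	id: string
	playerId?: AbPlayerId
	lookaheadRank?: number
}

function prepareRequests(playerIds: AbPlayerId[], requests: Req[]): Record<string, AbPlayerId> {
	const valid = new Set(playerIds)
	for (const req of requests) {
		// A player removed from the pool (eg disabled via a routeset) must not
		// keep sessions pinned to it
		if (req.playerId !== undefined && !valid.has(req.playerId)) delete req.playerId
	}
	const originalLookahead: Record<string, AbPlayerId> = {}
	for (const req of requests) {
		if (req.lookaheadRank !== undefined && req.playerId !== undefined) {
			originalLookahead[req.id] = req.playerId
			delete req.playerId
		}
	}
	return originalLookahead
}

// prepareRequests([1, 2], [{ id: 'a', playerId: 3 }]) leaves 'a' unassigned,
// ready to be given one of the surviving players.
```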
 	const res: AssignmentResult = {
 		requests: _.sortBy(rawRequests, (r) => r.start).map((v) => clone(v)),
 		failedRequired: [],
@@ -82,14 +96,6 @@
 		return res
 	}
 
-	const originalLookaheadAssignments: Record<string, PlayerId> = {}
-	for (const req of rawRequests) {
-		if (req.lookaheadRank !== undefined && req.playerId !== undefined) {
-			originalLookaheadAssignments[req.id] = req.playerId
-			delete req.playerId
-		}
-	}
-
 	const safeNow = now + resolverOptions.nowWindow // Treat now + nowWindow as now, as it is likely that anything changed within that window will be late to air
 
 	// Clear assignments for anything which has no chance of being preloaded yet
@@ -106,7 +112,7 @@
 	pendingRequests = grouped[undefined as any]
 
 	// build map of slots and what they already have assigned
-	const slots = new Map<PlayerId, SessionRequest[]>()
+	const slots = new Map<AbPlayerId, SessionRequest[]>()
 	_.each(playerIds, (id) => slots.set(id, grouped[id] || []))
 
 	const beforeHasGap = (p: SlotAvailability, req: SessionRequest): boolean =>
@@ -319,14 +325,14 @@
 }
 
 function assignPlayersForLookahead(
-	slots: Map<PlayerId, SessionRequest[]>,
+	slots: Map<AbPlayerId, SessionRequest[]>,
 	res: AssignmentResult,
-	originalLookaheadAssignments: Record<string, PlayerId>,
+	originalLookaheadAssignments: Record<string, AbPlayerId>,
 	safeNow: number
 ) {
 	// Ensure lookahead gets assigned based on priority not some randomness
 	// Includes slots which have either no sessions, or the last has a known end time
-	const lastSessionPerSlot = new Map<PlayerId, number | undefined>() // playerId, end
+	const lastSessionPerSlot = new Map<AbPlayerId, number | undefined>() // playerId, end
 	for (const [playerId, sessions] of slots) {
 		const last = _.last(sessions.filter((s) => s.lookaheadRank === undefined))
 		if (!last) {
@@ -373,7 +379,7 @@
 	}
 }
 
-function getAvailability(id: PlayerId, thisReq: SessionRequest, orderedRequests: SessionRequest[]): SlotAvailability {
+function getAvailability(id: AbPlayerId, thisReq: SessionRequest, orderedRequests: SessionRequest[]): SlotAvailability {
 	const res: SlotAvailability = {
 		id,
 		before: null,
diff --git a/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts b/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts
index 94fb3b7213..1a9fd75e50 100644
--- a/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts
+++ b/packages/job-worker/src/playout/abPlayback/abPlaybackSessions.ts
@@ -5,6 +5,7 @@ import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataModel/Timeline'
 import * as _ from 'underscore'
 import { SessionRequest } from './abPlaybackResolver'
 import { AbSessionHelper } from './abSessionHelper'
+import { ReadonlyDeep } from 'type-fest'
 
 /**
 * Calculate all of the AB-playback sessions currently on the timeline
@@ -19,7 +20,7 @@ export function calculateSessionTimeRanges(
 	abSessionHelper: AbSessionHelper,
 	resolvedPieces: ResolvedPieceInstance[],
 	timelineObjects: OnGenerateTimelineObjExt[],
-	previousAssignmentMap: ABSessionAssignments,
+	previousAssignmentMap: ReadonlyDeep<ABSessionAssignments> | undefined,
 	poolName: string
 ): SessionRequest[] {
 	const sessionRequests: { [sessionId: string]: SessionRequest | undefined } = {}
@@ -47,7 +48,7 @@ export function calculateSessionTimeRanges(
 				end: val.end === undefined || end === undefined ? undefined : Math.max(val.end, end),
 				optional: val.optional && (session.optional ?? false),
 				lookaheadRank: undefined,
-				playerId: previousAssignmentMap[sessionId]?.playerId, // Persist previous assignments
+				playerId: previousAssignmentMap?.[sessionId]?.playerId, // Persist previous assignments
 			}
 		} else {
 			// New session
@@ -57,7 +58,7 @@
 				start,
 				end,
 				optional: session.optional ?? false,
 				lookaheadRank: undefined,
-				playerId: previousAssignmentMap[sessionId]?.playerId, // Persist previous assignments
+				playerId: previousAssignmentMap?.[sessionId]?.playerId, // Persist previous assignments
 			}
 		}
 	}
@@ -102,7 +103,7 @@
 				start: Number.MAX_SAFE_INTEGER, // Distant future
 				end: undefined,
 				lookaheadRank: i + 1, // This is so that we can easily work out which to use first
-				playerId: previousAssignmentMap[grp.id]?.playerId,
+				playerId: previousAssignmentMap?.[grp.id]?.playerId,
 			})
 		}
 	})
diff --git a/packages/job-worker/src/playout/abPlayback/applyAssignments.ts b/packages/job-worker/src/playout/abPlayback/applyAssignments.ts
index 42125a702b..62cae611fc 100644
--- a/packages/job-worker/src/playout/abPlayback/applyAssignments.ts
+++ b/packages/job-worker/src/playout/abPlayback/applyAssignments.ts
@@ -4,6 +4,7 @@
 	ABResolverConfiguration,
 	ICommonContext,
 	ABTimelineLayerChangeRules,
+	AbPlayerId,
 } from '@sofie-automation/blueprints-integration'
 import { ABSessionAssignment, ABSessionAssignments } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
 import { OnGenerateTimelineObjExt } from '@sofie-automation/corelib/dist/dataModel/Timeline'
@@ -11,6 +12,7 @@
 import { logger } from '../../logging'
 import * as _ from 'underscore'
 import { SessionRequest } from './abPlaybackResolver'
 import { AbSessionHelper } from './abSessionHelper'
+import { ReadonlyDeep } from 'type-fest'
 
 /**
 * Apply the ab assignments for a pool to the timeline
@@ -28,12 +30,12 @@ export function applyAbPlayerObjectAssignments(
 	blueprintContext: ICommonContext,
 	abConfiguration: Pick<ABResolverConfiguration, 'timelineObjectLayerChangeRules' | 'customApplyToObject'>,
 	timelineObjs: OnGenerateTimelineObjExt[],
-	previousAssignmentMap: ABSessionAssignments,
+	previousAssignmentMap: ReadonlyDeep<ABSessionAssignments> | undefined,
 	resolvedAssignments: Readonly<SessionRequest[]>,
 	poolName: string
 ): ABSessionAssignments {
 	const newAssignments: ABSessionAssignments = {}
-	const persistAssignment = (sessionId: string, playerId: number | string, lookahead: boolean): void => {
+	const persistAssignment = (sessionId: string, playerId: AbPlayerId, lookahead: boolean): void => {
 		// Track the assignment, so that the next onTimelineGenerate can try to reuse the same session
 		if (newAssignments[sessionId]) {
 			// TODO - warn?
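applyAbPlayerObjectAssignments threads the previous generation's assignments through as a now-optional `ReadonlyDeep<ABSessionAssignments>`; when a session fails to resolve, the old assignment is reused as a best effort. A sketch of that fallback contract, with shapes assumed from the surrounding hunks:

```ts
// Assumed shape of a persisted assignment; the real type lives in corelib.
interface ABSessionAssignment {
	sessionId: string
	playerId: number | string
	lookahead: boolean
}
type ABSessionAssignments = Record<string, ABSessionAssignment | undefined>

function pickPlayerForUnexpectedSession(
	previous: ABSessionAssignments | undefined,
	sessionId: string
): number | string | undefined {
	// No resolver result for this session: fall back to wherever it played last
	// time, which is "hopefully better than nothing", as the comment below puts it.
	return previous?.[sessionId]?.playerId
}
```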
@@ -86,7 +88,7 @@
 			unexpectedSessions.push(`${sessionId}(${objs.map((obj) => obj.id).join(',')})`)
 
 			// If there was a previous assignment, hopefully that is better than nothing
-			const prev = previousAssignmentMap[sessionId]
+			const prev = previousAssignmentMap?.[sessionId]
 			if (prev) {
 				failedObjects.push(
 					...updateObjectsToAbPlayer(blueprintContext, abConfiguration, poolName, prev.playerId, objs)
@@ -119,7 +121,7 @@
 function updateObjectsToAbPlayer(
 	context: ICommonContext,
 	abConfiguration: Pick<ABResolverConfiguration, 'timelineObjectLayerChangeRules' | 'customApplyToObject'>,
 	poolName: string,
-	playerId: number | string,
+	playerId: AbPlayerId,
 	objs: OnGenerateTimelineObj[]
 ): OnGenerateTimelineObj[] {
 	const failedObjects: OnGenerateTimelineObj[] = []
@@ -142,7 +144,7 @@
 
 function applyUpdateToKeyframes(
 	poolName: string,
-	playerId: number | string,
+	playerId: AbPlayerId,
 	obj: OnGenerateTimelineObj
 ): boolean {
 	if (!obj.keyframes) return false
@@ -174,7 +176,7 @@
 
 function applylayerMoveRule(
 	timelineObjectLayerChangeRules: ABTimelineLayerChangeRules | undefined,
 	poolName: string,
-	playerId: number | string,
+	playerId: AbPlayerId,
 	obj: OnGenerateTimelineObj
 ): boolean {
 	const ruleId = obj.isLookahead ? obj.lookaheadForLayer || obj.layer : obj.layer
diff --git a/packages/job-worker/src/playout/abPlayback/index.ts b/packages/job-worker/src/playout/abPlayback/index.ts
index 0de0ce19a9..d803ecac46 100644
--- a/packages/job-worker/src/playout/abPlayback/index.ts
+++ b/packages/job-worker/src/playout/abPlayback/index.ts
@@ -17,6 +17,8 @@
 import { AbSessionHelper } from './abSessionHelper'
 import { ShowStyleContext } from '../../blueprints/context'
 import { logger } from '../../logging'
 import { ABPlayerDefinition } from '@sofie-automation/blueprints-integration'
+import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides'
+import { abPoolFilterDisabled, findPlayersInRouteSets } from './routeSetDisabling'
 
 /**
 * Resolve and apply AB-playback for the given timeline
@@ -71,8 +73,14 @@
 	const now = getCurrentTime()
 
 	const abConfiguration = blueprint.blueprint.getAbResolverConfiguration(blueprintContext)
 
+	const routeSetMembers = findPlayersInRouteSets(applyAndValidateOverrides(context.studio.routeSetsWithOverrides).obj)
+
 	for (const [poolName, players] of Object.entries(abConfiguration.pools)) {
-		const previousAssignmentMap: ABSessionAssignments = previousAbSessionAssignments[poolName] || {}
+		// Filter out offline devices
+		const filteredPlayers = abPoolFilterDisabled(poolName, players, routeSetMembers)
+
+		const previousAssignmentMap: ReadonlyDeep<ABSessionAssignments> | undefined =
+			playlist.assignedAbSessions?.[poolName]
 		const sessionRequests = calculateSessionTimeRanges(
 			abSessionHelper,
 			resolvedPieces,
@@ -83,7 +91,7 @@
 
 		const assignments = resolveAbAssignmentsFromRequests(
 			abConfiguration.resolverOptions,
-			players.map((player) => player.playerId),
+			filteredPlayers.map((player) => player.playerId),
 			sessionRequests,
 			now
 		)
diff --git a/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts
new file mode 100644
index 0000000000..d6be0a4ab6
--- /dev/null
+++ b/packages/job-worker/src/playout/abPlayback/routeSetDisabling.ts
@@ -0,0 +1,47 @@
+import type { ABPlayerDefinition } from '@sofie-automation/blueprints-integration'
+import type { StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio'
+import { logger } from '../../logging'
+
+/**
+ * Map<poolName, Map<playerId, isEnabled>>
+ * Note: this explicitly uses a string for the playerId, to avoid issues with types for values from the ui
+ */
+type MembersOfRouteSets = Map<string, Map<string, boolean>>
+
+export function findPlayersInRouteSets(routeSets: Record<string, StudioRouteSet>): MembersOfRouteSets {
+	const routeSetEnabledPlayers: MembersOfRouteSets = new Map()
+	for (const [_key, routeSet] of Object.entries(routeSets)) {
+		for (const abPlayer of routeSet.abPlayers) {
+			let poolEntry = routeSetEnabledPlayers.get(abPlayer.poolName)
+			if (!poolEntry) {
+				poolEntry = new Map()
+				routeSetEnabledPlayers.set(abPlayer.poolName, poolEntry)
+			}
+
+			// Make sure player is marked as enabled
+			const currentState = poolEntry.get(String(abPlayer.playerId))
+			poolEntry.set(String(abPlayer.playerId), currentState || routeSet.active)
+		}
+	}
+	return routeSetEnabledPlayers
+}
+
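Taken together with `abPoolFilterDisabled` below, the intended call pattern is two-phase: build the membership map once per timeline generation, then filter each pool. A usage sketch (the routeset literal is abbreviated to the two fields this module reads, so it is cast rather than fully populated):

```ts
import type { StudioRouteSet } from '@sofie-automation/corelib/dist/dataModel/Studio'
import { findPlayersInRouteSets, abPoolFilterDisabled } from './routeSetDisabling'

// Abbreviated fixture: only `active` and `abPlayers` matter to these functions.
const routeSets = {
	server1: {
		active: false,
		abPlayers: [{ poolName: 'clips', playerId: 1 }],
	},
} as unknown as Record<string, StudioRouteSet>

const members = findPlayersInRouteSets(routeSets)
const players = abPoolFilterDisabled('clips', [{ playerId: 1 }, { playerId: 2 }], members)
// players === [{ playerId: 2 }]: player 1 is dropped because its only routeset
// is inactive; player 2 passes through because no routeset mentions it.
```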
+export function abPoolFilterDisabled(
+	poolName: string,
+	players: ABPlayerDefinition[],
+	membersOfRouteSets: MembersOfRouteSets
+): ABPlayerDefinition[] {
+	const poolRouteSetEnabledPlayers = membersOfRouteSets.get(poolName)
+	if (!poolRouteSetEnabledPlayers || poolRouteSetEnabledPlayers.size == 0) return players
+
+	// Filter out any disabled players:
+	return players.filter((player) => {
+		const playerState = poolRouteSetEnabledPlayers.get(String(player.playerId))
+		if (playerState === false) {
+			logger.silly(`AB Pool ${poolName} playerId: ${player.playerId} is disabled`)
+			return false
+		}
+
+		return true
+	})
+}
diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts
index 85761622b4..317216d5c1 100644
--- a/packages/job-worker/src/playout/adlibAction.ts
+++ b/packages/job-worker/src/playout/adlibAction.ts
@@ -102,6 +102,8 @@ export async function executeAdlibActionAndSaveModel(
 		userData: data.userData,
 		triggerMode: data.triggerMode,
 		privateData: adLibActionDoc?.privateData,
+		publicData: adLibActionDoc?.publicData,
+		actionOptions: data.actionOptions,
 	}
 
 	try {
@@ -159,6 +161,10 @@ export interface ExecuteActionParameters {
 	userData: ActionUserData
 	/** Arbitrary data storage for internal use in the blueprints */
 	privateData: unknown | undefined
+	/** Optional arbitrary data used to modify the action parameters */
+	publicData: unknown | undefined
+	/** Optional arbitrary data used to modify the action parameters */
+	actionOptions: { [key: string]: any } | undefined
 
 	triggerMode: string | undefined
 }
@@ -208,7 +214,9 @@ export async function executeActionInner(
 				actionParameters.actionId,
 				actionParameters.userData,
 				actionParameters.triggerMode,
-				actionParameters.privateData
+				actionParameters.privateData,
+				actionParameters.publicData,
+				actionParameters.actionOptions ?? {}
 			)
 		} catch (err) {
 			logger.error(`Error in showStyleBlueprint.executeAction: ${stringifyError(err)}`)
@@ -234,6 +242,7 @@ async function applyAnyExecutionSideEffects(
 	if (actionContext.takeAfterExecute) {
 		await performTakeToNextedPart(context, playoutModel, now)
 	} else if (
+		actionContext.forceRegenerateTimeline ||
 		actionContext.currentPartState !== ActionPartChange.NONE ||
 		actionContext.nextPartState !== ActionPartChange.NONE
 	) {
diff --git a/packages/job-worker/src/playout/adlibJobs.ts b/packages/job-worker/src/playout/adlibJobs.ts
index 31eeb8382c..25fc2e0d55 100644
--- a/packages/job-worker/src/playout/adlibJobs.ts
+++ b/packages/job-worker/src/playout/adlibJobs.ts
@@ -35,6 +35,12 @@ import { PlayoutPieceInstanceModel } from './model/PlayoutPieceInstanceModel'
 
 /**
 * Play an existing Piece in the Rundown as an AdLib
 */
 export async function handleTakePieceAsAdlibNow(context: JobContext, data: TakePieceAsAdlibNowProps): Promise<void> {
+	if (!context.studio.settings.allowPieceDirectPlay) {
+		// Piece direct play isn't allowed, making this a noop
+		logger.debug(`Piece direct play isn't allowed, skipping`)
+		return
+	}
+
 	return runJobWithPlayoutModel(
 		context,
 		data,
@@ -115,6 +121,8 @@ export async function handleTakePieceAsAdlibNow(
 					...executeProps,
 					triggerMode: undefined,
 					privateData: undefined,
+					publicData: undefined,
+					actionOptions: undefined,
 				}
 			)
 			break
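Callers of executeActionInner now carry two extra fields end to end. A sketch of how an invocation might populate them; only the fields visible in this diff are shown, and the `actionOptions` content is a hypothetical example, since its meaning is defined by the blueprints:

```ts
// Hypothetical source document; the real AdLibAction type lives in corelib.
declare const adLibActionDoc: {
	actionId: string
	userData: Record<string, unknown>
	privateData: unknown
	publicData: unknown
}

const executeProps = {
	actionId: adLibActionDoc.actionId,
	userData: adLibActionDoc.userData,
	triggerMode: undefined as string | undefined,
	privateData: adLibActionDoc.privateData,
	publicData: adLibActionDoc.publicData, // new: blueprint-visible counterpart to privateData
	actionOptions: { variant: 'clean' }, // new: per-invocation tweaks, defaulted to {} in executeActionInner
}
```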
diff --git a/packages/job-worker/src/playout/adlibUtils.ts b/packages/job-worker/src/playout/adlibUtils.ts
index 2c6f0a00b8..8392d9ec51 100644
--- a/packages/job-worker/src/playout/adlibUtils.ts
+++ b/packages/job-worker/src/playout/adlibUtils.ts
@@ -298,61 +298,69 @@ export function innerStopPieces(
 	for (const resolvedPieceInstance of resolvedPieces) {
 		const pieceInstance = resolvedPieceInstance.instance
-		if (
-			!pieceInstance.userDuration &&
-			!pieceInstance.piece.virtual &&
-			filter(pieceInstance) &&
-			resolvedPieceInstance.resolvedStart !== undefined &&
-			resolvedPieceInstance.resolvedStart <= relativeStopAt &&
-			!pieceInstance.plannedStoppedPlayback
-		) {
-			switch (pieceInstance.piece.lifespan) {
-				case PieceLifespan.WithinPart:
-				case PieceLifespan.OutOnSegmentChange:
-				case PieceLifespan.OutOnRundownChange: {
-					logger.info(`Blueprint action: Cropping PieceInstance "${pieceInstance._id}" to ${stopAt}`)
-
-					const pieceInstanceModel = playoutModel.findPieceInstance(pieceInstance._id)
-					if (pieceInstanceModel) {
-						const newDuration: Required<PieceInstance>['userDuration'] = playoutModel.isMultiGatewayMode
-							? {
-									endRelativeToNow: offsetRelativeToNow,
-							  }
-							: {
-									endRelativeToPart: relativeStopAt,
-							  }
-
-						pieceInstanceModel.pieceInstance.setDuration(newDuration)
-
-						stoppedInstances.push(pieceInstance._id)
-					} else {
-						logger.warn(
-							`Blueprint action: Failed to crop PieceInstance "${pieceInstance._id}", it was not found`
-						)
-					}
-
-					break
-				}
-				case PieceLifespan.OutOnSegmentEnd:
-				case PieceLifespan.OutOnRundownEnd:
-				case PieceLifespan.OutOnShowStyleEnd: {
-					logger.info(
-						`Blueprint action: Cropping PieceInstance "${pieceInstance._id}" to ${stopAt} with a virtual`
-					)
-					currentPartInstance.insertVirtualPiece(
-						relativeStopAt,
-						pieceInstance.piece.lifespan,
-						pieceInstance.piece.sourceLayerId,
-						pieceInstance.piece.outputLayerId
-					)
+		// Virtual pieces aren't allowed a timed end
+		if (pieceInstance.piece.virtual) continue
+
+		// Check if piece has already had an end defined
+		if (pieceInstance.userDuration) continue
+
+		// Caller can filter out pieces
+		if (!filter(pieceInstance)) continue
+
+		// Check if piece has started yet
+		if (resolvedPieceInstance.resolvedStart == undefined || resolvedPieceInstance.resolvedStart > relativeStopAt)
+			continue
+
+		// If the end time of the piece is already known, make sure it is in the future
+		if (pieceInstance.plannedStoppedPlayback && pieceInstance.plannedStoppedPlayback <= stopAt) continue
+
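Rewriting the combined condition as early `continue`s also changed one behaviour: previously any `plannedStoppedPlayback` blocked the stop, while now only an end time at or before `stopAt` does, so a piece with a planned stop in the future can still be cropped. The equivalent predicate, with simplified stand-in types, for comparison:

```ts
// Simplified stand-in for the real PieceInstance type.
interface PieceInstanceLike {
	userDuration?: unknown
	plannedStoppedPlayback?: number
	piece: { virtual: boolean }
}

function canCropPiece(
	instance: PieceInstanceLike,
	resolvedStart: number | undefined,
	relativeStopAt: number,
	stopAt: number,
	filter: (p: PieceInstanceLike) => boolean
): boolean {
	if (instance.piece.virtual) return false // virtual pieces aren't given a timed end
	if (instance.userDuration) return false // an end was already defined by the user
	if (!filter(instance)) return false // caller-provided filter
	if (resolvedStart === undefined || resolvedStart > relativeStopAt) return false // hasn't started yet
	if (instance.plannedStoppedPlayback && instance.plannedStoppedPlayback <= stopAt) return false // already ending
	return true
}
```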
+		switch (pieceInstance.piece.lifespan) {
+			case PieceLifespan.WithinPart:
+			case PieceLifespan.OutOnSegmentChange:
+			case PieceLifespan.OutOnRundownChange: {
+				logger.info(`Blueprint action: Cropping PieceInstance "${pieceInstance._id}" to ${stopAt}`)
+
+				const pieceInstanceModel = playoutModel.findPieceInstance(pieceInstance._id)
+				if (pieceInstanceModel) {
+					const newDuration: Required<PieceInstance>['userDuration'] = playoutModel.isMultiGatewayMode
+						? {
+								endRelativeToNow: offsetRelativeToNow,
+						  }
+						: {
+								endRelativeToPart: relativeStopAt,
+						  }
+
+					pieceInstanceModel.pieceInstance.setDuration(newDuration)
 
 					stoppedInstances.push(pieceInstance._id)
-					break
+				} else {
+					logger.warn(
+						`Blueprint action: Failed to crop PieceInstance "${pieceInstance._id}", it was not found`
+					)
 				}
-				default:
-					assertNever(pieceInstance.piece.lifespan)
+
+				break
+			}
+			case PieceLifespan.OutOnSegmentEnd:
+			case PieceLifespan.OutOnRundownEnd:
+			case PieceLifespan.OutOnShowStyleEnd: {
+				logger.info(
+					`Blueprint action: Cropping PieceInstance "${pieceInstance._id}" to ${stopAt} with a virtual`
+				)
+
+				currentPartInstance.insertVirtualPiece(
+					relativeStopAt,
+					pieceInstance.piece.lifespan,
+					pieceInstance.piece.sourceLayerId,
+					pieceInstance.piece.outputLayerId
+				)
+
+				stoppedInstances.push(pieceInstance._id)
+				break
 			}
+			default:
+				assertNever(pieceInstance.piece.lifespan)
 		}
 	}
diff --git a/packages/job-worker/src/playout/debug.ts b/packages/job-worker/src/playout/debug.ts
index 596a0941b6..b46cadcad5 100644
--- a/packages/job-worker/src/playout/debug.ts
+++ b/packages/job-worker/src/playout/debug.ts
@@ -8,7 +8,8 @@
 import { logger } from '../logging'
 import { syncPlayheadInfinitesForNextPartInstance } from './infinites'
 import { setNextPart } from './setNext'
 import { runJobWithPlayoutModel } from './lock'
-import { updateStudioTimeline, updateTimeline } from './timeline/generate'
+import { updateTimeline } from './timeline/generate'
+import { updateTimelineFromStudioPlayoutModel } from './lib'
 
 /**
 * Ensure that the infinite pieces on the nexted-part are correct
@@ -80,17 +81,6 @@
 */
 export async function handleDebugUpdateTimeline(context: JobContext, _data: void): Promise<void> {
 	await runJobWithStudioPlayoutModel(context, async (studioPlayoutModel) => {
-		const activePlaylists = studioPlayoutModel.getActiveRundownPlaylists()
-		if (activePlaylists.length > 1) {
-			throw new Error(`Too many active playlists`)
-		} else if (activePlaylists.length > 0) {
-			const playlist = activePlaylists[0]
-
-			await runJobWithPlayoutModel(context, { playlistId: playlist._id }, null, async (playoutModel) => {
-				await updateTimeline(context, playoutModel)
-			})
-		} else {
-			await updateStudioTimeline(context, studioPlayoutModel)
-		}
+		await updateTimelineFromStudioPlayoutModel(context, studioPlayoutModel)
	})
 }
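handleDebugUpdateTimeline above now delegates to a shared helper whose definition is extracted to lib.ts in a hunk further below. A sketch of how any other studio-scoped job could reuse the same "is a playlist active?" branching; the import paths here are assumptions, not taken from this diff:

```ts
import type { JobContext } from '../jobs'
import type { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel'
import { runJobWithStudioPlayoutModel } from '../studio/lock' // assumed path
import { updateTimelineFromStudioPlayoutModel } from './lib'

// Regenerate the studio timeline, picking the playlist-aware or studio-only
// code path exactly as the debug job does.
export async function regenerateStudioTimeline(context: JobContext): Promise<void> {
	await runJobWithStudioPlayoutModel(context, async (studioPlayoutModel: StudioPlayoutModel) => {
		await updateTimelineFromStudioPlayoutModel(context, studioPlayoutModel)
	})
}
```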
diff --git a/packages/job-worker/src/playout/holdJobs.ts b/packages/job-worker/src/playout/holdJobs.ts
index ab00c41738..3c88aca6a6 100644
--- a/packages/job-worker/src/playout/holdJobs.ts
+++ b/packages/job-worker/src/playout/holdJobs.ts
@@ -5,11 +5,18 @@
 import { ActivateHoldProps, DeactivateHoldProps } from '@sofie-automation/corelib/dist/worker/studio'
 import { JobContext } from '../jobs'
 import { runJobWithPlayoutModel } from './lock'
 import { updateTimeline } from './timeline/generate'
+import { logger } from '../logging'
 
 /**
 * Activate Hold
 */
 export async function handleActivateHold(context: JobContext, data: ActivateHoldProps): Promise<void> {
+	if (!context.studio.settings.allowHold) {
+		// Hold isn't allowed, making this a noop
+		logger.debug(`Hold isn't allowed, skipping`)
+		return
+	}
+
 	return runJobWithPlayoutModel(
 		context,
 		data,
@@ -59,6 +66,8 @@
 * Deactivate Hold
 */
 export async function handleDeactivateHold(context: JobContext, data: DeactivateHoldProps): Promise<void> {
+	// This should be possible even when hold is not allowed, as it is a way to get out of a stuck state
+
 	return runJobWithPlayoutModel(
 		context,
 		data,
diff --git a/packages/job-worker/src/playout/infinites.ts b/packages/job-worker/src/playout/infinites.ts
index 5924cb4a5c..222791d0f1 100644
--- a/packages/job-worker/src/playout/infinites.ts
+++ b/packages/job-worker/src/playout/infinites.ts
@@ -23,6 +23,7 @@
 import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment'
 import { sortRundownIDsInPlaylist } from '@sofie-automation/corelib/dist/playout/playlist'
 import { mongoWhere } from '@sofie-automation/corelib/dist/mongo'
 import { PlayoutRundownModel } from './model/PlayoutRundownModel'
+import { logger } from '../logging'
 
 /** When we crop a piece, set the piece as "it has definitely ended" this far into the future. */
 export const DEFINITELY_ENDED_FUTURE_DURATION = 1 * 1000
@@ -330,7 +331,26 @@
 		if (!playingRundown) throw new Error(`Rundown "${playingPartInstance.partInstance.rundownId}" not found!`)
 
 		playingSegment = playingRundown.getSegment(playingPartInstance.partInstance.segmentId)
-		if (!playingSegment) throw new Error(`Segment "${playingPartInstance.partInstance.segmentId}" not found!`)
+		if (!playingSegment) {
+			const rundownId = playingRundown.rundown._id
+			context.directCollections.Segments.findFetch({
+				rundownId: rundownId,
+			})
+				.then((segment) => {
+					logger.error(
+						`TROUBLESHOOT: Segment not found, rundown "${rundownId}", segments in db: ${JSON.stringify(
+							segment.map((s) => s._id)
+						)}`
+					)
+				})
+				.catch((e) => logger.error(e))
+
+			throw new Error(
+				`Segment "${playingPartInstance.partInstance.segmentId}" in Rundown "${
+					playingRundown.rundown._id
+				}" not found! (other segments: ${JSON.stringify(playingRundown.segments.map((s) => s.segment._id))})`
+			)
+		}
 	}
 
 	const segment = rundown.getSegment(part.segmentId)
diff --git a/packages/job-worker/src/playout/lib.ts b/packages/job-worker/src/playout/lib.ts
index c2589d381d..c2c67bf0e2 100644
--- a/packages/job-worker/src/playout/lib.ts
+++ b/packages/job-worker/src/playout/lib.ts
@@ -10,6 +10,9 @@
 import { MongoQuery } from '../db'
 import { mongoWhere } from '@sofie-automation/corelib/dist/mongo'
 import { setNextPart } from './setNext'
 import { selectNextPart } from './selectNextPart'
+import { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel'
+import { runJobWithPlayoutModel } from './lock'
+import { updateTimeline, updateStudioTimeline } from './timeline/generate'
 
 /**
 * Reset the rundownPlaylist (all of the rundowns within the playlist):
@@ -175,3 +178,21 @@
 		return obj
 	})
 }
+
+export async function updateTimelineFromStudioPlayoutModel(
+	context: JobContext,
+	studioPlayoutModel: StudioPlayoutModel
+): Promise<void> {
+	const activePlaylists = studioPlayoutModel.getActiveRundownPlaylists()
+	if (activePlaylists.length > 1) {
+		throw new Error(`Too many active playlists`)
+	} else if (activePlaylists.length > 0) {
+		const playlist = activePlaylists[0]
+
+		await runJobWithPlayoutModel(context, { playlistId: playlist._id }, null, async (playoutModel) => {
+			await updateTimeline(context, playoutModel)
+		})
+	} else {
+		await updateStudioTimeline(context, studioPlayoutModel)
+	}
+}
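The allowHold guard above and the allowPieceDirectPlay guard in adlibJobs.ts earlier share a deliberate asymmetry: starting the operation is refused when the studio setting is off, but the path that gets you back out always runs. A distilled sketch of that gating (field names taken from the guards in this diff; everything else is simplified):

```ts
interface StudioSettingsLike {
	allowHold: boolean
	allowPieceDirectPlay: boolean
}

function shouldRunJob(
	settings: StudioSettingsLike,
	job: 'activateHold' | 'deactivateHold' | 'pieceDirectPlay'
): boolean {
	switch (job) {
		case 'activateHold':
			return settings.allowHold
		case 'pieceDirectPlay':
			return settings.allowPieceDirectPlay
		case 'deactivateHold':
			// Always allowed, so an operator can recover a stuck hold even after
			// the setting has been switched off
			return true
	}
}
```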
diff --git a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts
index b1762bce98..4c3cc76bd8 100644
--- a/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts
+++ b/packages/job-worker/src/playout/lookahead/__tests__/util.test.ts
@@ -86,7 +86,6 @@ describe('getOrderedPartsAfterPlayhead', () => {
 			externalId: 'MOCK_SEGMENT_0',
 			rundownId: rundownId,
 			name: 'Segment 0',
-			externalModified: 1,
 		}),
 		context.mockCollections.Segments.insertOne({
 			_id: protectString(rundownId + '_segment01'),
@@ -94,7 +93,6 @@
 			externalId: 'MOCK_SEGMENT_1',
 			rundownId: rundownId,
 			name: 'Segment 1',
-			externalModified: 1,
 		}),
 		context.mockCollections.Segments.insertOne({
 			_id: protectString(rundownId + '_segment2'),
@@ -102,7 +100,6 @@
 			externalId: 'MOCK_SEGMENT_2',
 			rundownId: rundownId,
 			name: 'Segment 2',
-			externalModified: 1,
 		}),
 	])
 	segmentId0 = segmentIds[0]
diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts
index a21b3502a6..420209a4a6 100644
--- a/packages/job-worker/src/playout/model/PlayoutModel.ts
+++ b/packages/job-worker/src/playout/model/PlayoutModel.ts
@@ -7,6 +7,7 @@
 	RundownPlaylistActivationId,
 	RundownPlaylistId,
 	SegmentId,
+	SegmentPlayoutId,
 } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { BaseModel } from '../../modelBase'
 import {
@@ -58,7 +59,7 @@
 	 */
 	readonly playlist: ReadonlyDeep<DBRundownPlaylist>
 	/**
-	 * The unwrapped Rundowns in this RundownPlaylist
+	 * The unwrapped Rundowns in this RundownPlaylist, sorted in the order specified by the RundownPlaylist
 	 */
 	readonly rundowns: ReadonlyDeep<DBRundown[]>
 
@@ -191,6 +192,14 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBase {
 	 */
 	activatePlaylist(rehearsal: boolean): RundownPlaylistActivationId
+
+	/**
+	 * Update the active state of a RouteSet
+	 * @param routeSetId
+	 * @param isActive
+	 * @returns Whether the change may affect timeline generation
+	 */
+	switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean
+
 	/**
 	 * Clear the currently selected PartInstances, so that nothing is selected for playback
 	 */
@@ -323,10 +332,10 @@
 	/**
 	 * Track a Segment as having started playback
-	 * @param segmentId Id of the Segment
+	 * @param segmentPlayoutId Playout id of the Segment
 	 * @param timestamp Timestamp playback started
 	 */
-	setSegmentStartedPlayback(segmentId: SegmentId, timestamp: number): void
+	setSegmentStartedPlayback(segmentPlayoutId: SegmentPlayoutId, timestamp: number): void
 
 	/**
 	 * Set or clear a QuickLoop Marker
@@ -335,6 +344,14 @@
 	 */
 	setQuickLoopMarker(type: 'start' | 'end', marker: QuickLoopMarker | null): void
+
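Keying segmentsStartedPlayback by SegmentPlayoutId rather than SegmentId means re-entering the same segment later in the show produces a fresh timestamp entry. The prune-and-set behaviour behind setSegmentStartedPlayback (implemented in PlayoutModelImpl further below) can be sketched as:

```ts
// A minimal sketch of the tracking logic; the real model mutates state in place.
function trackSegmentStarted(
	existing: Record<string, number> | undefined,
	keepIds: string[], // segmentPlayoutIds of the previous and current PartInstances
	startedId: string,
	timestamp: number
): Record<string, number> {
	const next: Record<string, number> = {}
	for (const id of keepIds) {
		const t = existing?.[id]
		if (t !== undefined) next[id] = t // keep only entries still in play
	}
	next[startedId] = timestamp
	return next
}
```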
+	/**
+	 * Returns any segmentIds found between 2 quickloop markers; none will be returned if
+	 * the end is before the start.
+	 * @param start A quickloop marker
+	 * @param end A quickloop marker
+	 */
+	getSegmentsBetweenQuickLoopMarker(start: QuickLoopMarker, end: QuickLoopMarker): SegmentId[]
+
 	calculatePartTimings(
 		fromPartInstance: PlayoutPartInstanceModel | null,
 		toPartInstance: PlayoutPartInstanceModel,
diff --git a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
index 15693bf8fb..e546e9fb5b 100644
--- a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
+++ b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
@@ -6,6 +6,7 @@
 import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes'
 import { IBlueprintMutatablePart, PieceLifespan, Time } from '@sofie-automation/blueprints-integration'
 import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings'
 import { PlayoutPieceInstanceModel } from './PlayoutPieceInstanceModel'
+import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions'
 
 /**
 * Token returned when making a backup copy of a PlayoutPartInstanceModel
@@ -15,6 +16,10 @@
 	__isPlayoutPartInstanceModelBackup: true
 }
 
+export interface PlayoutMutatablePart extends Omit<IBlueprintMutatablePart, 'userEditOperations'> {
+	userEditOperations?: CoreUserEditingDefinition[]
+}
+
 export interface PlayoutPartInstanceModel {
 	/**
 	 * The PartInstance properties
@@ -176,6 +181,11 @@
 	 * @param time Reported stopped time
 	 */
 	setReportedStoppedPlayback(time: Time): boolean
+	/**
+	 * Set the Reported stopped playback time, including still-playing PieceInstances
+	 * @param time Reported stopped time on all available objects
+	 */
+	setReportedStoppedPlaybackWithPieceInstances(time: Time): boolean
 
 	/**
 	 * Set the rank of this PartInstance, to update its position in the Segment
@@ -207,7 +217,7 @@
 	 * @param props New properties for the Part being wrapped
 	 * @returns True if any valid properties were provided
 	 */
-	updatePartProps(props: Partial<IBlueprintMutatablePart>): boolean
+	updatePartProps(props: Partial<PlayoutMutatablePart>): boolean
 
 	/**
 	 * Ensure that this PartInstance is setup correctly for being in the AdlibTesting Segment
diff --git a/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts b/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts
index fdbf67d72b..210524ff2d 100644
--- a/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts
+++ b/packages/job-worker/src/playout/model/implementation/LoadPlayoutModel.ts
@@ -21,6 +21,8 @@
 import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice'
 import { PlayoutModel, PlayoutModelPreInit } from '../PlayoutModel'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { RundownBaselineObj } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineObj'
+import { sortRundownsWithinPlaylist } from '@sofie-automation/corelib/dist/playout/playlist'
+import { logger } from '../../../logging'
 
 /**
 * Load a PlayoutModelPreInit for the given RundownPlaylist
@@ -58,7 +60,7 @@
 		peripheralDevices: PeripheralDevices,
 		playlist: Playlist,
-		rundowns: Rundowns,
+		rundowns: sortRundownsWithinPlaylist(Playlist.rundownIdsInOrder, Rundowns),
 		getRundown: (id: RundownId) => Rundowns.find((rd) => rd._id === id),
 	}
@@ -88,7 +90,7 @@
 	const [{ partInstances,
groupedPieceInstances }, rundownsWithContent, timeline] = await Promise.all([ loadPartInstances(context, loadedPlaylist, rundownIds), - loadRundowns(context, ingestModel, rundowns), + loadRundowns(context, ingestModel, sortRundownsWithinPlaylist(playlist.rundownIdsInOrder, rundowns)), loadTimeline(context), ]) @@ -188,7 +190,7 @@ async function loadRundowns( context.directCollections.Segments.findFetch({ $or: [ { - // In a different rundown + // Either in rundown when ingestModel === null or not available in ingestModel rundownId: { $in: loadRundownIds }, }, { @@ -233,14 +235,25 @@ async function loadRundowns( } } - return rundowns.map( - (rundown) => - new PlayoutRundownModelImpl( - rundown, - groupedSegmentsWithParts.get(rundown._id) ?? [], - groupedBaselineObjects.get(rundown._id) ?? [] + return rundowns.map((rundown) => { + const groupedSegmentsWithPartsForRundown = groupedSegmentsWithParts.get(rundown._id) + if (!groupedSegmentsWithPartsForRundown) { + logger.debug( + `groupedSegmentsWithPartsForRundown for Rundown "${rundown._id}" is undefined (has the rundown no segments?)` ) - ) + } + const groupedBaselineObjectsForRundown = groupedBaselineObjects.get(rundown._id) + if (!groupedBaselineObjectsForRundown) + logger.debug( + `groupedBaselineObjectsForRundown for Rundown "${rundown._id}" is undefined (has the rundown no baseline objects?)` + ) + + return new PlayoutRundownModelImpl( + rundown, + groupedSegmentsWithPartsForRundown ?? [], + groupedBaselineObjectsForRundown ?? [] + ) + }) } async function loadPartInstances( diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index cb53c18586..dcb0b1429e 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -480,6 +480,10 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou return partInstance } + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.context.setRouteSetActive(routeSetId, isActive) + } + cycleSelectedPartInstances(): void { this.playlistImpl.previousPartInfo = this.playlistImpl.currentPartInfo this.playlistImpl.currentPartInfo = this.playlistImpl.nextPartInfo @@ -498,6 +502,11 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou deactivatePlaylist(): void { delete this.playlistImpl.activationId + if (this.currentPartInstance) { + this.currentPartInstance.setReportedStoppedPlaybackWithPieceInstances(getCurrentTime()) + this.queuePartInstanceTimingEvent(this.currentPartInstance.partInstance._id) + } + this.clearSelectedPartInstances() this.playlistImpl.quickLoop = this.quickLoopService.getUpdatedPropsByClearingMarkers() @@ -583,13 +592,18 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou delete this.playlistImpl.lastTakeTime delete this.playlistImpl.startedPlayback delete this.playlistImpl.rundownsStartedPlayback + delete this.playlistImpl.segmentsStartedPlayback delete this.playlistImpl.previousPersistentState delete this.playlistImpl.trackedAbSessions delete this.playlistImpl.queuedSegmentId if (regenerateActivationId) this.playlistImpl.activationId = getRandomId() - if (this.playlistImpl.quickLoop?.running) this.playlistImpl.quickLoop.running = false + // reset quickloop if applicable: + if (this.playlist.quickLoop && !this.playlist.quickLoop.locked) { + 
this.setQuickLoopMarker('start', null) + this.setQuickLoopMarker('end', null) + } this.#playlistHasChanged = true } @@ -631,6 +645,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou ...writePartInstancesAndPieceInstances(this.context, this.allPartInstances), writeAdlibTestingSegments(this.context, this.rundownsImpl), this.#baselineHelper.saveAllToDatabase(), + this.context.saveRouteSetChanges(), ]) this.#playlistHasChanged = false @@ -736,21 +751,21 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#playlistHasChanged = true } - setSegmentStartedPlayback(segmentId: SegmentId, timestamp: number): void { - const segmentIdsToKeep: string[] = [] + setSegmentStartedPlayback(segmentPlayoutId: SegmentPlayoutId, timestamp: number): void { + const segmentPlayoutIdsToKeep: string[] = [] if (this.previousPartInstance) { - segmentIdsToKeep.push(unprotectString(this.previousPartInstance.partInstance.segmentId)) + segmentPlayoutIdsToKeep.push(unprotectString(this.previousPartInstance.partInstance.segmentPlayoutId)) } if (this.currentPartInstance) { - segmentIdsToKeep.push(unprotectString(this.currentPartInstance.partInstance.segmentId)) + segmentPlayoutIdsToKeep.push(unprotectString(this.currentPartInstance.partInstance.segmentPlayoutId)) } this.playlistImpl.segmentsStartedPlayback = this.playlistImpl.segmentsStartedPlayback - ? _.pick(this.playlistImpl.segmentsStartedPlayback, segmentIdsToKeep) + ? _.pick(this.playlistImpl.segmentsStartedPlayback, segmentPlayoutIdsToKeep) : {} - const segmentIdStr = unprotectString(segmentId) - this.playlistImpl.segmentsStartedPlayback[segmentIdStr] = timestamp + const segmentPlayoutIdStr = unprotectString(segmentPlayoutId) + this.playlistImpl.segmentsStartedPlayback[segmentPlayoutIdStr] = timestamp this.#playlistHasChanged = true } @@ -788,6 +803,10 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#playlistHasChanged = true } + getSegmentsBetweenQuickLoopMarker(start: QuickLoopMarker, end: QuickLoopMarker): SegmentId[] { + return this.quickLoopService.getSegmentsBetweenMarkers(start, end) + } + /** Lifecycle */ /** @deprecated */ @@ -844,12 +863,30 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou if (this.rundownsImpl.find((rd) => rd.AdlibTestingSegmentHasChanged)) logOrThrowError(new Error(`Failed no changes in model assertion, an AdlibTesting Segment has been changed`)) - if ( - Array.from(this.allPartInstances.values()).find( - (part) => !part || part.partInstanceHasChanges || part.changedPieceInstanceIds().length > 0 - ) + const changedPartInstances = Array.from(this.allPartInstances.entries()).filter( + ([_, partInstance]) => + !partInstance || + partInstance.partInstanceHasChanges || + partInstance.changedPieceInstanceIds().length > 0 ) - logOrThrowError(new Error(`Failed no changes in model assertion, a PartInstance has been changed`)) + + if (changedPartInstances.length > 0) { + logOrThrowError( + new Error( + `Failed no changes in model assertion, PartInstances has been changed: ${JSON.stringify( + changedPartInstances.map( + ([id, pi]) => + `${id}: ` + + (!pi + ? 
'null' + : `partInstanceHasChanges: ${ + pi.partInstanceHasChanges + }, changedPieceInstanceIds: ${JSON.stringify(pi.changedPieceInstanceIds())}`) + ) + )}` + ) + ) + } if (span) span.end() } diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts index 1970195c1e..a97807e119 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts @@ -15,20 +15,19 @@ import { PartCalculatedTimings, } from '@sofie-automation/corelib/dist/playout/timings' import { PartNote } from '@sofie-automation/corelib/dist/dataModel/Notes' +import { IBlueprintPieceType, PieceLifespan, Time } from '@sofie-automation/blueprints-integration' import { - IBlueprintMutatablePart, - IBlueprintPieceType, - PieceLifespan, - Time, -} from '@sofie-automation/blueprints-integration' -import { PlayoutPartInstanceModel, PlayoutPartInstanceModelSnapshot } from '../PlayoutPartInstanceModel' + PlayoutMutatablePart, + PlayoutPartInstanceModel, + PlayoutPartInstanceModelSnapshot, +} from '../PlayoutPartInstanceModel' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel' import { PlayoutPieceInstanceModelImpl } from './PlayoutPieceInstanceModelImpl' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import _ = require('underscore') import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { IBlueprintMutatablePartSampleKeys } from '../../../blueprints/context/lib' +import { PlayoutMutatablePartSampleKeys } from '../../../blueprints/context/lib' import { QuickLoopService } from '../services/QuickLoopService' /** @@ -493,6 +492,19 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { } return false } + setReportedStoppedPlaybackWithPieceInstances(time: number): boolean { + if (!this.partInstance.timings?.reportedStartedPlayback) return false + + let setOnAll = this.setReportedStoppedPlayback(time) + + for (const model of this.pieceInstances) { + if (model.pieceInstance.reportedStartedPlayback) { + setOnAll &&= model.setReportedStoppedPlayback(time) + } + } + + return setOnAll + } setRank(rank: number): void { this.#compareAndSetPartValue('_rank', rank) @@ -525,11 +537,11 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { this.#setPartInstanceValue('previousPartEndState', previousPartEndState) } - updatePartProps(props: Partial): boolean { + updatePartProps(props: Partial): boolean { // Future: this could do some better validation // filter the submission to the allowed ones - const trimmedProps: Partial = filterPropsToAllowed(props) + const trimmedProps: Partial = filterPropsToAllowed(props) if (Object.keys(trimmedProps).length === 0) return false this.#compareAndSetPartInstanceValue( @@ -575,8 +587,6 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel { } } -function filterPropsToAllowed( - props: Partial> -): Partial> { - return _.pick(props, [...IBlueprintMutatablePartSampleKeys]) +function filterPropsToAllowed(props: Partial): Partial { + return _.pick(props, [...PlayoutMutatablePartSampleKeys]) } diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts 
b/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts index e5e37fe8ef..5483307d1f 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutRundownModelImpl.ts @@ -8,7 +8,6 @@ import { PlayoutSegmentModel } from '../PlayoutSegmentModel' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { getRandomId } from '@sofie-automation/corelib/dist/lib' -import { getCurrentTime } from '../../../lib' import { PlayoutSegmentModelImpl } from './PlayoutSegmentModelImpl' export class PlayoutRundownModelImpl implements PlayoutRundownModel { @@ -74,7 +73,6 @@ export class PlayoutRundownModelImpl implements PlayoutRundownModel { _id: segmentId, _rank: calculateRankForAdlibTestingSegment(this.#segments), externalId: '__adlib-testing__', - externalModified: getCurrentTime(), rundownId: this.rundown._id, orphaned: SegmentOrphanedReason.ADLIB_TESTING, name: '', diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/LoadPlayoutModel.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/LoadPlayoutModel.spec.ts new file mode 100644 index 0000000000..2766d1017f --- /dev/null +++ b/packages/job-worker/src/playout/model/implementation/__tests__/LoadPlayoutModel.spec.ts @@ -0,0 +1,159 @@ +import { + setupDefaultRundown, + setupDefaultRundownPlaylist, + setupMockShowStyleCompound, +} from '../../../../__mocks__/presetCollections' +import { MockJobContext, setupDefaultJobEnvironment } from '../../../../__mocks__/context' +import { ProcessedShowStyleCompound } from '../../../../jobs' +import { ReadonlyDeep } from 'type-fest' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { createPlayoutModelFromIngestModel, loadPlayoutModelPreInit } from '../LoadPlayoutModel' +import { runWithPlaylistLock } from '../../../../playout/lock' +import { loadIngestModelFromRundown } from '../../../../ingest/model/implementation/LoadIngestModel' +import { runWithRundownLock } from '../../../../ingest/lock' +import { IngestModelReadonly } from '../../../../ingest/model/IngestModel' + +describe('LoadPlayoutModel', () => { + let context: MockJobContext + let showStyleCompound: ReadonlyDeep + + beforeAll(async () => { + context = setupDefaultJobEnvironment() + + showStyleCompound = await setupMockShowStyleCompound(context) + }) + + describe('loadPlayoutModelPreInit', () => { + afterEach(async () => + Promise.all([ + context.mockCollections.RundownBaselineAdLibPieces.remove({}), + context.mockCollections.RundownBaselineAdLibActions.remove({}), + context.mockCollections.RundownBaselineObjects.remove({}), + context.mockCollections.AdLibActions.remove({}), + context.mockCollections.AdLibPieces.remove({}), + context.mockCollections.Pieces.remove({}), + context.mockCollections.Parts.remove({}), + context.mockCollections.Segments.remove({}), + context.mockCollections.Rundowns.remove({}), + context.mockCollections.RundownPlaylists.remove({}), + ]) + ) + + test('Rundowns are in order specified in RundownPlaylist', async () => { + // Set up a playlist: + const { rundownId: rundownId00, playlistId: playlistId0 } = await setupDefaultRundownPlaylist( + context, + showStyleCompound, + protectString('rundown00') + ) + const rundownId01 = protectString('rundown01') + await setupDefaultRundown(context, showStyleCompound, 
playlistId0, rundownId01) + const rundownId02 = protectString('rundown02') + await setupDefaultRundown(context, showStyleCompound, playlistId0, rundownId02) + + const playlist0 = await context.mockCollections.RundownPlaylists.findOne(playlistId0) + expect(playlist0).toBeTruthy() + + if (!playlist0) throw new Error(`Playlist "${playlistId0}" not found!`) + + const rundownIdsInOrder = [rundownId01, rundownId02, rundownId00] + + await context.mockCollections.RundownPlaylists.update(playlistId0, { + rundownIdsInOrder, + }) + + await runWithPlaylistLock(context, playlistId0, async (lock) => { + const model = await loadPlayoutModelPreInit(context, lock, playlist0) + expect(model.rundowns.map((r) => r._id)).toMatchObject([rundownId01, rundownId02, rundownId00]) + }) + }) + + test('Rundowns not ordered in RundownPlaylist are at the end', async () => { + // Set up a playlist: + const { rundownId: rundownId00, playlistId: playlistId0 } = await setupDefaultRundownPlaylist( + context, + showStyleCompound, + protectString('rundown00') + ) + const rundownId01 = protectString('rundown01') + await setupDefaultRundown(context, showStyleCompound, playlistId0, rundownId01) + const rundownId02 = protectString('rundown02') + await setupDefaultRundown(context, showStyleCompound, playlistId0, rundownId02) + + const playlist0 = await context.mockCollections.RundownPlaylists.findOne(playlistId0) + expect(playlist0).toBeTruthy() + + if (!playlist0) throw new Error(`Playlist "${playlistId0}" not found!`) + + const rundownIdsInOrder = [rundownId01] + + await context.mockCollections.RundownPlaylists.update(playlistId0, { + rundownIdsInOrder, + }) + + await runWithPlaylistLock(context, playlistId0, async (lock) => { + const model = await loadPlayoutModelPreInit(context, lock, playlist0) + expect(model.rundowns.map((r) => r._id)).toMatchObject([rundownId01, rundownId00, rundownId02]) + }) + }) + }) + + describe('createPlayoutModelFromIngestModel', () => { + afterEach(async () => + Promise.all([ + context.mockCollections.RundownBaselineAdLibPieces.remove({}), + context.mockCollections.RundownBaselineAdLibActions.remove({}), + context.mockCollections.RundownBaselineObjects.remove({}), + context.mockCollections.AdLibActions.remove({}), + context.mockCollections.AdLibPieces.remove({}), + context.mockCollections.Pieces.remove({}), + context.mockCollections.Parts.remove({}), + context.mockCollections.Segments.remove({}), + context.mockCollections.Rundowns.remove({}), + context.mockCollections.RundownPlaylists.remove({}), + ]) + ) + + test('Rundowns are in order specified in RundownPlaylist', async () => { + // Set up a playlist: + const { rundownId: rundownId00, playlistId: playlistId0 } = await setupDefaultRundownPlaylist( + context, + showStyleCompound, + protectString('rundown00') + ) + const rundownId01 = protectString('rundown01') + await setupDefaultRundown(context, showStyleCompound, playlistId0, rundownId01) + const rundownId02 = protectString('rundown02') + await setupDefaultRundown(context, showStyleCompound, playlistId0, rundownId02) + + const rundownIdsInOrder = [rundownId01, rundownId02, rundownId00] + + await context.mockCollections.RundownPlaylists.update(playlistId0, { + rundownIdsInOrder, + }) + + const playlist0 = await context.mockCollections.RundownPlaylists.findOne(playlistId0) + expect(playlist0).toBeTruthy() + + if (!playlist0) throw new Error(`Playlist "${playlistId0}" not found!`) + + let ingestModel: IngestModelReadonly | undefined + + await runWithRundownLock(context, rundownId01, async (rundown, 
lock) => { + if (!rundown) throw new Error(`Rundown "${rundownId01}" not found!`) + + ingestModel = await loadIngestModelFromRundown(context, lock, rundown) + }) + + await runWithPlaylistLock(context, playlistId0, async (lock) => { + if (!ingestModel) throw new Error('Ingest model could not be created!') + + const rundowns = await context.mockCollections.Rundowns.findFetch({}) + + const model = await createPlayoutModelFromIngestModel(context, lock, playlist0, rundowns, ingestModel) + + expect(model.rundowns.map((r) => r.rundown._id)).toMatchObject([rundownId01, rundownId02, rundownId00]) + }) + }) + }) +}) diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts index b49b9d0d2a..410e2ec179 100644 --- a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts +++ b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutRundownModelImpl.spec.ts @@ -34,7 +34,6 @@ describe('PlayoutRundownModelImpl', () => { _id: protectString(id), rundownId: protectString('rd0'), externalId: id, - externalModified: 100000, _rank: rank, name: `${id} segment`, } @@ -108,14 +107,12 @@ describe('PlayoutRundownModelImpl', () => { const fixedSegment: ReadonlyDeep = { ...createdSegment.segment, - externalModified: 0, } expect(fixedSegment).toEqual({ _id: expectedId, rundownId: protectString('rd0'), externalId: '__adlib-testing__', - externalModified: 0, _rank: -1, name: '', orphaned: SegmentOrphanedReason.ADLIB_TESTING, diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts index dca9722599..33e087c5ad 100644 --- a/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts +++ b/packages/job-worker/src/playout/model/implementation/__tests__/PlayoutSegmentModelImpl.spec.ts @@ -9,7 +9,6 @@ describe('PlayoutSegmentModelImpl', () => { _id: protectString('abc'), rundownId: protectString('rd0'), externalId: 'ext1', - externalModified: 100000, _rank: 1, name: 'test segment', } diff --git a/packages/job-worker/src/playout/model/services/QuickLoopService.ts b/packages/job-worker/src/playout/model/services/QuickLoopService.ts index 91a6245fc9..eb7bb0c6e1 100644 --- a/packages/job-worker/src/playout/model/services/QuickLoopService.ts +++ b/packages/job-worker/src/playout/model/services/QuickLoopService.ts @@ -8,7 +8,7 @@ import { } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { PlayoutPartInstanceModel } from '../PlayoutPartInstanceModel' import { JobContext } from '../../../jobs' @@ -149,6 +149,58 @@ export class QuickLoopService { return quickLoopProps } + getSegmentsBetweenMarkers(startMarker: QuickLoopMarker, endMarker: QuickLoopMarker): SegmentId[] { + const segments = this.playoutModel.getAllOrderedSegments() + const segmentIds: SegmentId[] = [] + + let passedStart = false + let seenLastRundown = false + + for (const s of segments) { + if ( + 
(!passedStart && + ((startMarker.type === QuickLoopMarkerType.PART && s.getPart(startMarker.id)) || + (startMarker.type === QuickLoopMarkerType.SEGMENT && s.segment._id === startMarker.id) || + (startMarker.type === QuickLoopMarkerType.RUNDOWN && + s.segment.rundownId === startMarker.id))) || + startMarker.type === QuickLoopMarkerType.PLAYLIST + ) { + // the start marker is inside this segment, is this segment, or this is the first segment that is in the loop + // segments from here on are included in the loop + passedStart = true + } + + if (endMarker.type === QuickLoopMarkerType.RUNDOWN) { + // last rundown needs to be inclusive so we need to break once the rundownId is not equal to segment's rundownId + if (s.segment.rundownId === endMarker.id) { + if (!passedStart) { + // we hit the end before the start so quit now: + break + } + seenLastRundown = true + } else if (seenLastRundown) { + // we have passed the last rundown + break + } + } + + if (passedStart) { + // passed the start but we have not seen the end yet + segmentIds.push(s.segment._id) + } + + if ( + (endMarker.type === QuickLoopMarkerType.PART && s.getPart(endMarker.id)) || + (endMarker.type === QuickLoopMarkerType.SEGMENT && s.segment._id === endMarker.id) + ) { + // the endMarker is in this segment or this segment is the end marker + break + } + } + + return segmentIds + } + private areMarkersFlipped(startPosition: MarkerPosition, endPosition: MarkerPosition) { return compareMarkerPositions(startPosition, endPosition) < 0 } diff --git a/packages/job-worker/src/playout/moveNextPart.ts b/packages/job-worker/src/playout/moveNextPart.ts index 14d64aaaae..ca6a3e4e9c 100644 --- a/packages/job-worker/src/playout/moveNextPart.ts +++ b/packages/job-worker/src/playout/moveNextPart.ts @@ -1,20 +1,18 @@ import { groupByToMap } from '@sofie-automation/corelib/dist/lib' import { DBPart, isPartPlayable } from '@sofie-automation/corelib/dist/dataModel/Part' import { JobContext } from '../jobs' -import { PartId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PlayoutModel } from './model/PlayoutModel' +import { PlayoutModelReadonly } from './model/PlayoutModel' import { sortPartsInSortedSegments } from '@sofie-automation/corelib/dist/playout/playlist' -import { setNextPartFromPart } from './setNext' import { logger } from '../logging' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { ReadonlyDeep } from 'type-fest' -export async function moveNextPart( - context: JobContext, - playoutModel: PlayoutModel, +export function selectNewPartWithOffsets( + _context: JobContext, + playoutModel: PlayoutModelReadonly, partDelta: number, segmentDelta: number -): Promise { +): ReadonlyDeep | null { const playlist = playoutModel.playlist const currentPartInstance = playoutModel.currentPartInstance?.partInstance @@ -69,8 +67,7 @@ export async function moveNextPart( // TODO - looping playlists if (selectedPart) { // Switch to that part - await setNextPartFromPart(context, playoutModel, selectedPart, true) - return selectedPart._id + return selectedPart } else { // Nothing looked valid so do nothing // Note: we should try and a smaller delta if it is not -1/1 @@ -101,8 +98,7 @@ export async function moveNextPart( if (targetPart) { // Switch to that part - await setNextPartFromPart(context, playoutModel, targetPart, true) - return targetPart._id + return targetPart } else { // Nothing looked valid so do nothing // Note: we should try and a smaller delta if it is not -1/1 diff --git 
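
To make the marker walk above concrete, here is a trimmed standalone model of the same rules, covering only SEGMENT and RUNDOWN markers (PART and PLAYLIST handling omitted) and using plain objects instead of PlayoutSegmentModel:

type SegmentRef = { _id: string; rundownId: string }
type MarkerRef = { type: 'segment' | 'rundown'; id: string }

// Segments are assumed to already be in playlist order.
function segmentsBetween(segments: SegmentRef[], start: MarkerRef, end: MarkerRef): string[] {
	const result: string[] = []
	let passedStart = false
	let seenLastRundown = false
	for (const s of segments) {
		if (
			!passedStart &&
			((start.type === 'segment' && s._id === start.id) || (start.type === 'rundown' && s.rundownId === start.id))
		) {
			passedStart = true // the loop starts at or inside this segment
		}
		if (end.type === 'rundown') {
			if (s.rundownId === end.id) {
				if (!passedStart) break // hit the end before the start: empty result
				seenLastRundown = true
			} else if (seenLastRundown) {
				break // first segment after the end rundown
			}
		}
		if (passedStart) result.push(s._id)
		if (end.type === 'segment' && s._id === end.id) break // end segment is inclusive
	}
	return result
}

segmentsBetween(
	[
		{ _id: 'seg0', rundownId: 'rd0' },
		{ _id: 'seg1', rundownId: 'rd0' },
		{ _id: 'seg2', rundownId: 'rd1' },
	],
	{ type: 'segment', id: 'seg1' },
	{ type: 'rundown', id: 'rd1' }
)
// => ['seg1', 'seg2'] — a RUNDOWN end marker includes that whole rundown
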
a/packages/job-worker/src/playout/quickLoopMarkers.ts b/packages/job-worker/src/playout/quickLoopMarkers.ts index 67e3558822..3803f59f93 100644 --- a/packages/job-worker/src/playout/quickLoopMarkers.ts +++ b/packages/job-worker/src/playout/quickLoopMarkers.ts @@ -1,10 +1,15 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' -import { SetQuickLoopMarkerProps } from '@sofie-automation/corelib/dist/worker/studio' +import { ClearQuickLoopMarkersProps, SetQuickLoopMarkerProps } from '@sofie-automation/corelib/dist/worker/studio' import { JobContext } from '../jobs' import { runJobWithPlayoutModel } from './lock' import { updateTimeline } from './timeline/generate' import { selectNextPart } from './selectNextPart' import { setNextPart } from './setNext' +import { resetPartInstancesWithPieceInstances } from './lib' +import { QuickLoopMarker, QuickLoopMarkerType } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { clone } from 'underscore' +import { PlayoutModel } from './model/PlayoutModel' export async function handleSetQuickLoopMarker(context: JobContext, data: SetQuickLoopMarkerProps): Promise { return runJobWithPlayoutModel( @@ -17,24 +22,145 @@ export async function handleSetQuickLoopMarker(context: JobContext, data: SetQui async (playoutModel) => { const playlist = playoutModel.playlist if (!playlist.activationId) throw new Error(`Playlist has no activationId!`) - const wasQuickLoopRunning = playoutModel.playlist.quickLoop?.running + const oldProps = clone(playoutModel.playlist.quickLoop) + const wasQuickLoopRunning = oldProps?.running + playoutModel.setQuickLoopMarker(data.type, data.marker) + const markerChanged = ( + markerA: QuickLoopMarker | undefined, + markerB: QuickLoopMarker | undefined + ): boolean => { + if (!markerA || !markerB) return false + + if ( + (markerA.type === QuickLoopMarkerType.RUNDOWN || + markerA.type === QuickLoopMarkerType.SEGMENT || + markerA.type === QuickLoopMarkerType.PART) && + (markerB.type === QuickLoopMarkerType.RUNDOWN || + markerB.type === QuickLoopMarkerType.SEGMENT || + markerB.type === QuickLoopMarkerType.PART) + ) { + return markerA.id !== markerB.id + } + + return false + } + + if (playlist.currentPartInfo) { + // rundown is on air + let segmentIdsToReset: SegmentId[] = [] + + if ( + playlist.quickLoop?.start && + oldProps?.start && + markerChanged(oldProps.start, playlist.quickLoop.start) + ) { + // start marker changed + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + oldProps.start + ) + } else if ( + playlist.quickLoop?.end && + oldProps?.end && + markerChanged(oldProps.end, playlist.quickLoop.end) + ) { + // end marker changed + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + oldProps.end, + playlist.quickLoop.end + ) + } else if (playlist.quickLoop?.start && playlist.quickLoop.end && !(oldProps?.start && oldProps.end)) { + // a new loop was created + segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + playlist.quickLoop.end + ) + } + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstancesForAffectedSegments(context, playoutModel, segmentIdsToReset) + } + if (wasQuickLoopRunning) { - const nextPart = selectNextPart( - context, - playoutModel.playlist, - playoutModel.currentPartInstance?.partInstance ?? 
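
The three reset branches above read as one decision table: if the start marker moved, reset the span between the new and old start; if the end marker moved, the span between the old and new end; if a loop was just created, the whole loop. Condensed into a pure function (types simplified; `between` stands in for getSegmentsBetweenQuickLoopMarker):

type LoopMarker = { type: 'part' | 'segment' | 'rundown'; id: string } | { type: 'playlist' }

interface LoopMarkers {
	start?: LoopMarker
	end?: LoopMarker
}

// Only markers that carry an id can meaningfully "move".
const markerIdChanged = (a: LoopMarker | undefined, b: LoopMarker | undefined): boolean =>
	!!a && !!b && a.type !== 'playlist' && b.type !== 'playlist' && a.id !== b.id

function segmentsNeedingReset(
	oldLoop: LoopMarkers | undefined,
	newLoop: LoopMarkers | undefined,
	between: (from: LoopMarker, to: LoopMarker) => string[]
): string[] {
	if (newLoop?.start && oldLoop?.start && markerIdChanged(oldLoop.start, newLoop.start)) {
		return between(newLoop.start, oldLoop.start) // start marker moved
	}
	if (newLoop?.end && oldLoop?.end && markerIdChanged(oldLoop.end, newLoop.end)) {
		return between(oldLoop.end, newLoop.end) // end marker moved
	}
	if (newLoop?.start && newLoop.end && !(oldLoop?.start && oldLoop.end)) {
		return between(newLoop.start, newLoop.end) // a new loop was created
	}
	return []
}
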
null, - playoutModel.nextPartInstance?.partInstance ?? null, - playoutModel.getAllOrderedSegments(), - playoutModel.getAllOrderedParts(), - { ignoreUnplayable: true, ignoreQuickLoop: false } + await updateNextedPartAfterQuickLoopMarkerChange(context, playoutModel) + } + await updateTimeline(context, playoutModel) + } + ) +} + +export async function handleClearQuickLoopMarkers( + context: JobContext, + data: ClearQuickLoopMarkersProps +): Promise { + return runJobWithPlayoutModel( + context, + data, + async (playoutModel) => { + const playlist = playoutModel.playlist + if (!playlist.activationId) throw UserError.create(UserErrorMessage.InactiveRundown) + }, + async (playoutModel) => { + const playlist = playoutModel.playlist + if (!playlist.activationId) throw new Error(`Playlist has no activationId!`) + + const wasQuickLoopRunning = playoutModel.playlist.quickLoop?.running + + // a new loop was created + if (playlist.quickLoop?.start && playlist.quickLoop.end) { + const segmentIdsToReset = playoutModel.getSegmentsBetweenQuickLoopMarker( + playlist.quickLoop.start, + playlist.quickLoop.end ) - if (nextPart?.part._id !== playoutModel.nextPartInstance?.partInstance.part._id) { - await setNextPart(context, playoutModel, nextPart, false) - } + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstancesForAffectedSegments(context, playoutModel, segmentIdsToReset) + } + + playoutModel.setQuickLoopMarker('start', null) + playoutModel.setQuickLoopMarker('end', null) + + if (wasQuickLoopRunning) { + await updateNextedPartAfterQuickLoopMarkerChange(context, playoutModel) } await updateTimeline(context, playoutModel) } ) } + +async function updateNextedPartAfterQuickLoopMarkerChange(context: JobContext, playoutModel: PlayoutModel) { + const nextPart = selectNextPart( + context, + playoutModel.playlist, + playoutModel.currentPartInstance?.partInstance ?? null, + playoutModel.nextPartInstance?.partInstance ?? null, + playoutModel.getAllOrderedSegments(), + playoutModel.getAllOrderedParts(), + { ignoreUnplayable: true, ignoreQuickLoop: false } + ) + if (nextPart?.part._id !== playoutModel.nextPartInstance?.partInstance.part._id) { + await setNextPart(context, playoutModel, nextPart, false) + } +} + +function resetPartInstancesWithPieceInstancesForAffectedSegments( + context: JobContext, + playoutModel: PlayoutModel, + segmentIdsToReset: SegmentId[] +) { + const segmentIdsExceptTheCurrent = segmentIdsToReset.filter( + (segmentId) => + segmentId !== playoutModel.currentPartInstance?.partInstance.segmentId && + segmentId !== playoutModel.nextPartInstance?.partInstance.segmentId + ) + if (segmentIdsExceptTheCurrent.length === 0) return + + // reset segments that have been added to the loop and are not on-air + resetPartInstancesWithPieceInstances(context, playoutModel, { + segmentId: { + $in: segmentIdsExceptTheCurrent, + }, + }) +} diff --git a/packages/job-worker/src/playout/resolvedPieces.ts b/packages/job-worker/src/playout/resolvedPieces.ts index 089ea5f83f..f13ce67c72 100644 --- a/packages/job-worker/src/playout/resolvedPieces.ts +++ b/packages/job-worker/src/playout/resolvedPieces.ts @@ -46,8 +46,10 @@ export function getResolvedPiecesForPartInstancesOnTimeline( if (!partInstancesInfo.current) return [] const currentPartStarted = partInstancesInfo.current.partStarted ?? 
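
resetPartInstancesWithPieceInstancesForAffectedSegments above is careful to exclude the segments hosting the current and next PartInstances before issuing a reset. The guard as a standalone predicate, with ids reduced to strings:

// Filter a reset candidate list down to segments that are safe to reset,
// i.e. not hosting the on-air or nexted PartInstance.
function safeToReset(
	segmentIdsToReset: string[],
	currentSegmentId: string | undefined,
	nextSegmentId: string | undefined
): string[] {
	return segmentIdsToReset.filter((id) => id !== currentSegmentId && id !== nextSegmentId)
}

safeToReset(['segA', 'segB', 'segC'], 'segB', undefined)
// => ['segA', 'segC'] — segB is on air and keeps its state
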
now + const nextPartStarted = partInstancesInfo.current.partInstance.part.autoNext && + partInstancesInfo.current.partInstance.part.expectedDuration !== 0 && partInstancesInfo.current.partInstance.part.expectedDuration !== undefined ? currentPartStarted + partInstancesInfo.current.partInstance.part.expectedDuration : null diff --git a/packages/job-worker/src/playout/selectNextPart.ts b/packages/job-worker/src/playout/selectNextPart.ts index d07abbf446..9b0ae14dd7 100644 --- a/packages/job-worker/src/playout/selectNextPart.ts +++ b/packages/job-worker/src/playout/selectNextPart.ts @@ -176,7 +176,7 @@ export function selectNextPart( if (rundownPlaylist.queuedSegmentId) { // No previous part, or segment has changed - if (!previousPartInstance || (nextPart && previousPartInstance.segmentId !== nextPart.part.segmentId)) { + if (!previousPartInstance || !nextPart || previousPartInstance.segmentId !== nextPart.part.segmentId) { // Find first in segment const newSegmentPart = findFirstPlayablePart( 0, diff --git a/packages/job-worker/src/playout/setNext.ts b/packages/job-worker/src/playout/setNext.ts index a6178d3347..55518a7010 100644 --- a/packages/job-worker/src/playout/setNext.ts +++ b/packages/job-worker/src/playout/setNext.ts @@ -47,10 +47,61 @@ export async function setNextPart( ): Promise { const span = context.startSpan('setNextPart') - const rundownIds = playoutModel.getRundownIds() - const currentPartInstance = playoutModel.currentPartInstance - const nextPartInstance = playoutModel.nextPartInstance + const attemptedPartIds = new Set() + if (rawNextPart && 'part' in rawNextPart) attemptedPartIds.add(rawNextPart.part._id) + let moveNextToPart = await setNextPartAndCheckForPendingMoveNextPart( + context, + playoutModel, + rawNextPart, + setManually, + nextTimeOffset + ) + while (moveNextToPart) { + // Ensure that we aren't stuck in an infinite loop. If this while loop is being run for a part twice, then the blueprints are behaving oddly and will likely get stuck + // Instead of throwing and causing a larger failure, we can stop processing here, and leave something as next + const nextPartId = moveNextToPart.selectedPart?._id ?? null + if (attemptedPartIds.has(nextPartId)) { + logger.error( + `Blueprint onSetAsNext callback moved the next part "${nextPartId}" (trace: ${JSON.stringify( + Array.from(attemptedPartIds.values()) + )}), forming a loop` + ) + break + } + attemptedPartIds.add(nextPartId) + + moveNextToPart = await setNextPartAndCheckForPendingMoveNextPart( + context, + playoutModel, + moveNextToPart.selectedPart + ? 
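
Back in resolvedPieces.ts above, the reworked nextPartStarted expression encodes a single rule: a projected start time for the next part exists only when the current part will autonext after a known, non-zero expectedDuration. Spelled out as an illustrative helper (field names as in DBPart):

interface PartTimingFields {
	autoNext?: boolean
	expectedDuration?: number
}

// Projected wall-clock start of the next part, or null when it cannot be
// known ahead of time (manual take, or no usable expectedDuration).
function projectedNextPartStart(part: PartTimingFields, currentPartStarted: number): number | null {
	return part.autoNext && part.expectedDuration !== undefined && part.expectedDuration !== 0
		? currentPartStarted + part.expectedDuration
		: null
}

projectedNextPartStart({ autoNext: true, expectedDuration: 4000 }, 1_700_000_000_000)
// => 1_700_000_004_000
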
{ + part: moveNextToPart.selectedPart, + consumesQueuedSegmentId: false, + } + : null, + true + ) + } + + playoutModel.removeUntakenPartInstances() + + resetPartInstancesWhenChangingSegment(context, playoutModel) + + playoutModel.updateQuickLoopState() + + await cleanupOrphanedItems(context, playoutModel) + + if (span) span.end() +} + +async function setNextPartAndCheckForPendingMoveNextPart( + context: JobContext, + playoutModel: PlayoutModel, + rawNextPart: ReadonlyDeep> | PlayoutPartInstanceModel | null, + setManually: boolean, + nextTimeOffset?: number | undefined +): Promise<{ selectedPart: ReadonlyDeep | null } | undefined> { if (rawNextPart) { if (!playoutModel.playlist.activationId) throw new Error(`RundownPlaylist "${playoutModel.playlist._id}" is not active`) @@ -64,7 +115,7 @@ export async function setNextPart( throw new Error('Part is marked as invalid, cannot set as next.') } - if (!rundownIds.includes(inputPartInstance.partInstance.rundownId)) { + if (!playoutModel.getRundown(inputPartInstance.partInstance.rundownId)) { throw new Error( `PartInstance "${inputPartInstance.partInstance._id}" of rundown "${inputPartInstance.partInstance.rundownId}" not part of RundownPlaylist "${playoutModel.playlist._id}"` ) @@ -78,7 +129,7 @@ export async function setNextPart( throw new Error('Part is marked as invalid, cannot set as next.') } - if (!rundownIds.includes(selectedPart.part.rundownId)) { + if (!playoutModel.getRundown(selectedPart.part.rundownId)) { throw new Error( `Part "${selectedPart.part._id}" of rundown "${selectedPart.part.rundownId}" not part of RundownPlaylist "${playoutModel.playlist._id}"` ) @@ -86,6 +137,9 @@ export async function setNextPart( consumesQueuedSegmentId = selectedPart.consumesQueuedSegmentId ?? false + const currentPartInstance = playoutModel.currentPartInstance + const nextPartInstance = playoutModel.nextPartInstance + if (nextPartInstance && nextPartInstance.partInstance.part._id === selectedPart.part._id) { // Re-use existing @@ -120,22 +174,13 @@ export async function setNextPart( playoutModel.setPartInstanceAsNext(newPartInstance, setManually, consumesQueuedSegmentId, nextTimeOffset) - await executeOnSetAsNextCallback(playoutModel, newPartInstance, context) + return executeOnSetAsNextCallback(playoutModel, newPartInstance, context) } else { // Set to null playoutModel.setPartInstanceAsNext(null, setManually, false, nextTimeOffset) + return undefined } - - playoutModel.removeUntakenPartInstances() - - resetPartInstancesWhenChangingSegment(context, playoutModel) - - playoutModel.updateQuickLoopState() - - await cleanupOrphanedItems(context, playoutModel) - - if (span) span.end() } async function executeOnSetAsNextCallback( @@ -144,38 +189,40 @@ async function executeOnSetAsNextCallback( context: JobContext ) { const rundownOfNextPart = playoutModel.getRundown(newPartInstance.partInstance.rundownId) - if (rundownOfNextPart) { - const blueprint = await context.getShowStyleBlueprint(rundownOfNextPart.rundown.showStyleBaseId) - if (blueprint.blueprint.onSetAsNext) { - const showStyle = await context.getShowStyleCompound( - rundownOfNextPart.rundown.showStyleVariantId, - rundownOfNextPart.rundown.showStyleBaseId - ) - const watchedPackagesHelper = WatchedPackagesHelper.empty(context) - const onSetAsNextContext = new OnSetAsNextContext( - { - name: `${rundownOfNextPart.rundown.name}(${playoutModel.playlist.name})`, - identifier: `playlist=${playoutModel.playlist._id},rundown=${ - rundownOfNextPart.rundown._id - },currentPartInstance=${ - 
playoutModel.playlist.currentPartInfo?.partInstanceId - },execution=${getRandomId()}`, - tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes - }, - context, - playoutModel, - showStyle, - watchedPackagesHelper, - new PartAndPieceInstanceActionService(context, playoutModel, showStyle, rundownOfNextPart) - ) - try { - await blueprint.blueprint.onSetAsNext(onSetAsNextContext) - await applyOnSetAsNextSideEffects(context, playoutModel, onSetAsNextContext) - } catch (err) { - logger.error(`Error in showStyleBlueprint.onSetAsNext: ${stringifyError(err)}`) - } - } + if (!rundownOfNextPart) return undefined + + const blueprint = await context.getShowStyleBlueprint(rundownOfNextPart.rundown.showStyleBaseId) + if (!blueprint.blueprint.onSetAsNext) return undefined + + const showStyle = await context.getShowStyleCompound( + rundownOfNextPart.rundown.showStyleVariantId, + rundownOfNextPart.rundown.showStyleBaseId + ) + + const watchedPackagesHelper = WatchedPackagesHelper.empty(context) + const onSetAsNextContext = new OnSetAsNextContext( + { + name: `${rundownOfNextPart.rundown.name}(${playoutModel.playlist.name})`, + identifier: `playlist=${playoutModel.playlist._id},rundown=${ + rundownOfNextPart.rundown._id + },currentPartInstance=${playoutModel.playlist.currentPartInfo?.partInstanceId},execution=${getRandomId()}`, + tempSendUserNotesIntoBlackHole: true, // TODO-CONTEXT store these notes + }, + context, + playoutModel, + showStyle, + watchedPackagesHelper, + new PartAndPieceInstanceActionService(context, playoutModel, showStyle, rundownOfNextPart) + ) + + try { + await blueprint.blueprint.onSetAsNext(onSetAsNextContext) + await applyOnSetAsNextSideEffects(context, playoutModel, onSetAsNextContext) + } catch (err) { + logger.error(`Error in showStyleBlueprint.onSetAsNext: ${stringifyError(err)}`) } + + return onSetAsNextContext.pendingMoveNextPart } async function applyOnSetAsNextSideEffects( diff --git a/packages/job-worker/src/playout/setNextJobs.ts b/packages/job-worker/src/playout/setNextJobs.ts index eaaec41949..fe297ae313 100644 --- a/packages/job-worker/src/playout/setNextJobs.ts +++ b/packages/job-worker/src/playout/setNextJobs.ts @@ -12,7 +12,7 @@ import { import { JobContext } from '../jobs' import { runJobWithPlayoutModel } from './lock' import { setNextPartFromPart, setNextSegment, queueNextSegment } from './setNext' -import { moveNextPart } from './moveNextPart' +import { selectNewPartWithOffsets } from './moveNextPart' import { updateTimeline } from './timeline/generate' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel' import { ReadonlyDeep } from 'type-fest' @@ -68,11 +68,13 @@ export async function handleMoveNextPart(context: JobContext, data: MoveNextPart } }, async (playoutModel) => { - const newPartId = await moveNextPart(context, playoutModel, data.partDelta, data.segmentDelta) + const selectedPart = selectNewPartWithOffsets(context, playoutModel, data.partDelta, data.segmentDelta) + if (!selectedPart) return null - if (newPartId) await updateTimeline(context, playoutModel) + await setNextPartFromPart(context, playoutModel, selectedPart, true) + await updateTimeline(context, playoutModel) - return newPartId + return selectedPart._id } ) } diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts index 9b479f0968..efa997ffb7 100644 --- a/packages/job-worker/src/playout/snapshot.ts +++ b/packages/job-worker/src/playout/snapshot.ts @@ -19,16 +19,10 @@ import { RestorePlaylistSnapshotResult, } from 
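
The attemptedPartIds set threaded through the rewritten setNextPart above is a generic defence: a blueprint's onSetAsNext may ask to move the next part again, and a misbehaving blueprint could do so forever. The guard pattern in isolation (a sketch of the shape, not the production signature):

// Run `step` until it stops requesting another target, bailing out quietly
// (leaving the last target in place) if any target is requested twice.
async function runUntilSettled<T>(
	first: T,
	step: (target: T) => Promise<T | undefined>,
	keyOf: (target: T) => string,
	onLoopDetected: (trace: string[]) => void
): Promise<void> {
	const attempted = new Set<string>([keyOf(first)])
	let next = await step(first)
	while (next !== undefined) {
		const key = keyOf(next)
		if (attempted.has(key)) {
			onLoopDetected(Array.from(attempted))
			return
		}
		attempted.add(key)
		next = await step(next)
	}
}
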
'@sofie-automation/corelib/dist/worker/studio' import { getCurrentTime, getSystemVersion } from '../lib' -import _ = require('underscore') import { JobContext } from '../jobs' import { runWithPlaylistLock } from './lock' import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snapshots' -import { - unprotectString, - ProtectedString, - protectStringArray, - protectString, -} from '@sofie-automation/corelib/dist/protectedString' +import { unprotectString, ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' import { saveIntoDb } from '../db/changes' import { getPartId, getSegmentId } from '../ingest/lib' import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' @@ -36,6 +30,7 @@ import { logger } from '../logging' import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' /** * Generate the Playlist owned portions of a Playlist snapshot @@ -53,10 +48,15 @@ export async function handleGeneratePlaylistSnapshot( const rundowns = await context.directCollections.Rundowns.findFetch({ playlistId: playlist._id }) const rundownIds = rundowns.map((i) => i._id) - const ingestData = await context.directCollections.IngestDataCache.findFetch( + const ingestData = await context.directCollections.NrcsIngestDataCache.findFetch( + { rundownId: { $in: rundownIds } }, + { sort: { modified: -1 } } + ) // @todo: check sorting order + const sofieIngestData = await context.directCollections.SofieIngestDataCache.findFetch( { rundownId: { $in: rundownIds } }, { sort: { modified: -1 } } ) // @todo: check sorting order + // const userActions = await context.directCollections.UserActionsLog.findFetch({ // args: { // $regex: @@ -121,6 +121,7 @@ export async function handleGeneratePlaylistSnapshot( playlist, rundowns, ingestData, + sofieIngestData, baselineObjs, baselineAdlibs, segments, @@ -242,12 +243,14 @@ export async function handleRestorePlaylistSnapshot( partIdMap.set(oldId, part._id) } + const partInstanceOldRundownIdMap = new Map() const partInstanceIdMap = new Map() for (const partInstance of snapshot.partInstances) { const oldId = partInstance._id partInstance._id = getRandomId() partInstanceIdMap.set(oldId, partInstance._id) + partInstance.part._id = partIdMap.get(partInstance.part._id) || getRandomId() partInstanceOldRundownIdMap.set(oldId, partInstance.rundownId) } @@ -279,7 +282,6 @@ export async function handleRestorePlaylistSnapshot( ...snapshot.baselineAdLibActions, ]) { const oldId = adlib._id - if (adlib.partId) adlib.partId = partIdMap.get(adlib.partId) adlib._id = getRandomId() pieceIdMap.set(oldId, adlib._id) } @@ -367,7 +369,7 @@ export async function handleRestorePlaylistSnapshot( piece?: unknown } >(objs: undefined | T[], updateId: boolean): T[] { - const updateIds = (obj: T) => { + const updateIds = (obj: T, updateOwnId: boolean) => { if (obj.rundownId) { obj.rundownId = getNewRundownId(obj.rundownId) } @@ -382,20 +384,20 @@ export async function handleRestorePlaylistSnapshot( obj.partInstanceId = partInstanceIdMap.get(obj.partInstanceId) || getRandomId() } - if (updateId) { + if (updateOwnId) { obj._id = getRandomId() } if (obj.part) { - updateIds(obj.part as any) + updateIds(obj.part as any, 
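
The updateOwnId flag threaded through updateIds above fixes a subtle restore bug: nested part and piece documents must have their references remapped, but only the top-level document may be given a fresh _id. The rule in miniature (hypothetical document shape):

interface RestorableDoc {
	_id: string
	partId?: string
	part?: RestorableDoc
	piece?: RestorableDoc
}

const knownPartIds = new Map<string, string>()
const freshId = (): string => Math.random().toString(36).slice(2)

// References are remapped at every level; the own _id is regenerated only
// when `updateOwnId` is set, i.e. never for the nested part/piece copies.
function remapIds(doc: RestorableDoc, updateOwnId: boolean): RestorableDoc {
	if (doc.partId) doc.partId = knownPartIds.get(doc.partId) ?? freshId()
	if (updateOwnId) doc._id = freshId()
	if (doc.part) remapIds(doc.part, false)
	if (doc.piece) remapIds(doc.piece, false)
	return doc
}
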
false) } if (obj.piece) { - updateIds(obj.piece as any) + updateIds(obj.piece as any, false) } return obj } - return (objs || []).map((obj) => updateIds(obj)) + return (objs || []).map((obj) => updateIds(obj, updateId)) } await Promise.all([ @@ -403,10 +405,16 @@ export async function handleRestorePlaylistSnapshot( saveIntoDb(context, context.directCollections.Rundowns, { playlistId }, snapshot.rundowns), saveIntoDb( context, - context.directCollections.IngestDataCache, + context.directCollections.NrcsIngestDataCache, { rundownId: { $in: rundownIds } }, updateItemIds(snapshot.ingestData, true) ), + saveIntoDb( + context, + context.directCollections.SofieIngestDataCache, + { rundownId: { $in: rundownIds } }, + updateItemIds(snapshot.sofieIngestData || (snapshot.ingestData as any as SofieIngestDataCacheObj[]), true) + ), saveIntoDb( context, context.directCollections.RundownBaselineObjects, @@ -417,13 +425,13 @@ export async function handleRestorePlaylistSnapshot( context, context.directCollections.RundownBaselineAdLibPieces, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.baselineAdlibs, true) + updateItemIds(snapshot.baselineAdlibs, false) ), saveIntoDb( context, context.directCollections.RundownBaselineAdLibActions, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.baselineAdLibActions, true) + updateItemIds(snapshot.baselineAdLibActions, false) ), saveIntoDb( context, @@ -459,31 +467,31 @@ export async function handleRestorePlaylistSnapshot( context, context.directCollections.AdLibPieces, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.adLibPieces, true) + updateItemIds(snapshot.adLibPieces, false) ), saveIntoDb( context, context.directCollections.AdLibActions, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.adLibActions, true) + updateItemIds(snapshot.adLibActions, false) ), saveIntoDb( context, context.directCollections.ExpectedMediaItems, - { partId: { $in: protectStringArray(_.keys(partIdMap)) } }, + { partId: { $in: Array.from(partIdMap.keys()) } }, updateItemIds(snapshot.expectedMediaItems, true) ), saveIntoDb( context, context.directCollections.ExpectedPlayoutItems, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.expectedPlayoutItems || [], false) + updateItemIds(snapshot.expectedPlayoutItems || [], true) ), saveIntoDb( context, context.directCollections.ExpectedPackages, { rundownId: { $in: rundownIds } }, - updateItemIds(snapshot.expectedPackages || [], false) + snapshot.expectedPackages || [] ), ]) diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 77856edf60..30011cd545 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -1,4 +1,4 @@ -import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BlueprintId, TimelineHash } from '@sofie-automation/corelib/dist/dataModel/Ids' import { JobContext } from '../../jobs' import { ReadonlyDeep } from 'type-fest' import { @@ -127,13 +127,13 @@ export async function updateStudioTimeline( logAnyRemainingNowTimes(context, baselineObjects) } - saveTimeline(context, playoutModel, baselineObjects, versions) + const timelineHash = saveTimeline(context, playoutModel, baselineObjects, versions) if (studioBaseline) { updateBaselineExpectedPackagesOnStudio(context, playoutModel, studioBaseline) } - logger.debug('updateStudioTimeline done!') + logger.verbose(`updateStudioTimeline done, hash: "${timelineHash}"`) if 
(span) span.end() } @@ -157,9 +157,8 @@ export async function updateTimeline(context: JobContext, playoutModel: PlayoutM logAnyRemainingNowTimes(context, timelineObjs) } - saveTimeline(context, playoutModel, timelineObjs, versions) - - logger.debug('updateTimeline done!') + const timelineHash = saveTimeline(context, playoutModel, timelineObjs, versions) + logger.verbose(`updateTimeline done, hash: "${timelineHash}"`) if (span) span.end() } @@ -198,32 +197,31 @@ function preserveOrReplaceNowTimesInObjects( } function logAnyRemainingNowTimes(_context: JobContext, timelineObjs: Array): void { - const ids: string[] = [] - - const hasNow = (obj: TimelineEnableExt | TimelineEnableExt[]) => { - let res = false - applyToArray(obj, (enable) => { - if (enable.start === 'now' || enable.end === 'now') res = true - }) - return res - } + const badTimelineObjs: any[] = [] for (const obj of timelineObjs) { if (hasNow(obj.enable)) { - ids.push(obj.id) + badTimelineObjs.push(obj) } for (const kf of obj.keyframes || []) { if (hasNow(kf.enable)) { - ids.push(kf.id) + badTimelineObjs.push(kf) } } } - if (ids.length) { - logger.error(`Some timeline objects have unexpected now times!: ${JSON.stringify(ids)}`) + if (badTimelineObjs.length) { + logger.error(`Some timeline objects have unexpected now times!: ${JSON.stringify(badTimelineObjs)}`) } } +function hasNow(obj: TimelineEnableExt | TimelineEnableExt[]) { + let res = false + applyToArray(obj, (enable) => { + if (enable.start === 'now' || enable.end === 'now') res = true + }) + return res +} /** Store the timelineobjects into the model, and perform any post-save actions */ export function saveTimeline( @@ -231,11 +229,13 @@ export function saveTimeline( studioPlayoutModel: StudioPlayoutModelBase, timelineObjs: TimelineObjGeneric[], generationVersions: TimelineCompleteGenerationVersions -): void { +): TimelineHash { const newTimeline = studioPlayoutModel.setTimeline(timelineObjs, generationVersions) // Also do a fast-track for the timeline to be published faster: context.hackPublishTimelineToFastTrack(newTimeline) + + return newTimeline.timelineHash } export interface SelectedPartInstancesTimelineInfo { diff --git a/packages/job-worker/src/playout/timeline/piece.ts b/packages/job-worker/src/playout/timeline/piece.ts index cea83bbcf7..bbaebb02b2 100644 --- a/packages/job-worker/src/playout/timeline/piece.ts +++ b/packages/job-worker/src/playout/timeline/piece.ts @@ -108,7 +108,7 @@ export function getPieceEnableInsidePart( if (partHasEndTime && partTimings.toPartPostroll) { if (!pieceEnable.duration) { // make sure that the control object is shortened correctly - pieceEnable.duration = `#${partGroupId} - ${partTimings.toPartPostroll}` + pieceEnable.end = `#${partGroupId} - ${partTimings.toPartPostroll}` } } diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index ec21446be0..941c0a80d9 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -146,7 +146,8 @@ export function buildTimelineObjsForRundown( // If there is a valid autonext out of the current part, then calculate the duration currentPartEnable.duration = partInstancesInfo.current.partInstance.part.expectedDuration + - partInstancesInfo.current.calculatedTimings.toPartDelay + partInstancesInfo.current.calculatedTimings.toPartDelay + + partInstancesInfo.current.calculatedTimings.toPartPostroll // autonext should have the postroll added to it to not confuse the timeline 
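
The one-line piece.ts fix above is easy to miss: `#${partGroupId} - ${toPartPostroll}` derives a point in time from the part group, so it belongs in the enable's end, not its duration (a span). Schematically, with hypothetical ids and values:

// A timeline enable distinguishes a point in time ("end") from a length
// ("duration"). The postroll shortening computes a point, so it caps `end`.
interface EnableSketch {
	start: number | string
	duration?: number | string // a span
	end?: number | string // a point in time
}

const pieceControlEnable: EnableSketch = {
	start: 0,
	end: '#part_group_1 - 500', // was wrongly assigned to `duration` before this fix
}
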
if ( typeof currentPartEnable.start === 'number' && @@ -327,8 +328,17 @@ function generateCurrentInfinitePieceObjects( infiniteGroup.enable.duration = infiniteInNextPart.piece.enable.duration } - // If this piece does not continue in the next part, then set it to end with the part it belongs to - if ( + const pieceInstanceWithUpdatedEndCap: PieceInstanceWithTimings = { ...pieceInstance } + // Give the infinite group and end cap when the end of the piece is known + if (pieceInstance.resolvedEndCap) { + // If the cap is a number, it is relative to the part, not the parent group so needs to be handled here + if (typeof pieceInstance.resolvedEndCap === 'number') { + infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` + delete infiniteGroup.enable.duration + delete pieceInstanceWithUpdatedEndCap.resolvedEndCap + } + } else if ( + // If this piece does not continue in the next part, then set it to end with the part it belongs to !infiniteInNextPart && currentPartInfo.partInstance.part.autoNext && infiniteGroup.enable.duration === undefined && @@ -354,7 +364,7 @@ function generateCurrentInfinitePieceObjects( activePlaylist._id, infiniteGroup, nowInParent, - pieceInstance, + pieceInstanceWithUpdatedEndCap, pieceEnable, 0, groupClasses, diff --git a/packages/job-worker/src/playout/timings/partPlayback.ts b/packages/job-worker/src/playout/timings/partPlayback.ts index 71aea9a162..d76db7910f 100644 --- a/packages/job-worker/src/playout/timings/partPlayback.ts +++ b/packages/job-worker/src/playout/timings/partPlayback.ts @@ -29,7 +29,7 @@ export async function onPartPlaybackStarted( const playingPartInstance = playoutModel.getPartInstance(data.partInstanceId) if (!playingPartInstance) throw new Error( - `PartInstance "${data.partInstanceId}" in RundownPlayst "${playoutModel.playlistId}" not found!` + `PartInstance "${data.partInstanceId}" in RundownPlaylist "${playoutModel.playlistId}" not found!` ) // make sure we don't run multiple times, even if TSR calls us multiple times @@ -178,30 +178,32 @@ export function reportPartInstanceHasStarted( partInstance: PlayoutPartInstanceModel, timestamp: Time ): void { - if (partInstance) { - const timestampUpdated = partInstance.setReportedStartedPlayback(timestamp) - if (timestamp && !playoutModel.isMultiGatewayMode) { + const timestampUpdated = partInstance.setReportedStartedPlayback(timestamp) + + if (!playoutModel.isMultiGatewayMode) { + if (timestamp) { partInstance.setPlannedStartedPlayback(timestamp) } - const previousPartInstance = playoutModel.previousPartInstance - if (timestampUpdated && !playoutModel.isMultiGatewayMode && previousPartInstance) { + if (timestampUpdated && previousPartInstance) { // Ensure the plannedStoppedPlayback is set for the previous partinstance too previousPartInstance.setPlannedStoppedPlayback(timestamp) } + } - // Update the playlist: - if (!partInstance.partInstance.part.untimed) { - playoutModel.setRundownStartedPlayback(partInstance.partInstance.rundownId, timestamp) - } + // Update the playlist: + if (!partInstance.partInstance.part.untimed) { + playoutModel.setRundownStartedPlayback(partInstance.partInstance.rundownId, timestamp) + } - if (partInstance.partInstance.segmentId !== playoutModel.previousPartInstance?.partInstance.segmentId) { - playoutModel.setSegmentStartedPlayback(partInstance.partInstance.segmentId, timestamp) - } + if ( + partInstance.partInstance.segmentPlayoutId !== playoutModel.previousPartInstance?.partInstance.segmentPlayoutId + ) { + 
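
The new resolvedEndCap branch in rundown.ts above deals with a unit mismatch: a numeric end cap is relative to the part, while the infinite group is positioned independently of it, so the cap has to be re-anchored onto the current part group's start. In isolation:

// Hypothetical reduction of the re-anchoring in generateCurrentInfinitePieceObjects.
function capInfiniteGroup(
	infiniteGroupEnable: { end?: string; duration?: number | string },
	currentPartGroupId: string,
	resolvedEndCap: number | string | undefined
): void {
	if (typeof resolvedEndCap === 'number') {
		// A plain number is an offset from the part start, not from the group itself:
		infiniteGroupEnable.end = `#${currentPartGroupId}.start + ${resolvedEndCap}`
		delete infiniteGroupEnable.duration
	}
}
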
playoutModel.setSegmentStartedPlayback(partInstance.partInstance.segmentPlayoutId, timestamp) + } - if (timestampUpdated) { - playoutModel.queuePartInstanceTimingEvent(partInstance.partInstance._id) - } + if (timestampUpdated) { + playoutModel.queuePartInstanceTimingEvent(partInstance.partInstance._id) } } diff --git a/packages/job-worker/src/playout/timings/timelineTriggerTime.ts b/packages/job-worker/src/playout/timings/timelineTriggerTime.ts index e3c585f0ca..eccac86df8 100644 --- a/packages/job-worker/src/playout/timings/timelineTriggerTime.ts +++ b/packages/job-worker/src/playout/timings/timelineTriggerTime.ts @@ -181,7 +181,9 @@ function timelineTriggerTimeInner( } } if (tlChanged) { - saveTimeline(context, studioPlayoutModel, timelineObjs, timeline.generationVersions) + const timelineHash = saveTimeline(context, studioPlayoutModel, timelineObjs, timeline.generationVersions) + + logger.verbose(`timelineTriggerTime: Updated Timeline, hash: "${timelineHash}"`) } } diff --git a/packages/job-worker/src/playout/upgrade.ts b/packages/job-worker/src/playout/upgrade.ts index b03dc1d286..66fba34cfa 100644 --- a/packages/job-worker/src/playout/upgrade.ts +++ b/packages/job-worker/src/playout/upgrade.ts @@ -1,9 +1,18 @@ -import { BlueprintMapping, BlueprintMappings, JSONBlobParse, TSR } from '@sofie-automation/blueprints-integration' +import { + BlueprintMapping, + BlueprintMappings, + JSONBlobParse, + StudioRouteBehavior, + TSR, +} from '@sofie-automation/blueprints-integration' import { MappingsExt, StudioIngestDevice, StudioInputDevice, + StudioPackageContainer, StudioPlayoutDevice, + StudioRouteSet, + StudioRouteSetExclusivityGroup, } from '@sofie-automation/corelib/dist/dataModel/Studio' import { Complete, clone, literal } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' @@ -67,6 +76,38 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data }), ]) ) + const routeSets = Object.fromEntries( + Object.entries>(result.routeSets ?? {}).map((dev) => [ + dev[0], + literal>({ + name: dev[1].name ?? '', + active: dev[1].active ?? false, + defaultActive: dev[1].defaultActive ?? false, + behavior: dev[1].behavior ?? StudioRouteBehavior.TOGGLE, + exclusivityGroup: dev[1].exclusivityGroup ?? undefined, + routes: dev[1].routes ?? [], + abPlayers: dev[1].abPlayers ?? [], + }), + ]) + ) + const routeSetExclusivityGroups = Object.fromEntries( + Object.entries>(result.routeSetExclusivityGroups ?? {}).map((dev) => [ + dev[0], + literal>({ + name: dev[1].name ?? '', + }), + ]) + ) + + const packageContainers = Object.fromEntries( + Object.entries>(result.packageContainers ?? {}).map((dev) => [ + dev[0], + literal>({ + deviceIds: dev[1].deviceIds ?? 
[], + container: dev[1].container as any, + }), + ]) + ) await context.directCollections.Studios.update(context.studioId, { $set: { @@ -74,6 +115,9 @@ export async function handleBlueprintUpgradeForStudio(context: JobContext, _data 'peripheralDeviceSettings.playoutDevices.defaults': playoutDevices, 'peripheralDeviceSettings.ingestDevices.defaults': ingestDevices, 'peripheralDeviceSettings.inputDevices.defaults': inputDevices, + 'routeSetsWithOverrides.defaults': routeSets, + 'routeSetExclusivityGroupsWithOverrides.defaults': routeSetExclusivityGroups, + 'packageContainersWithOverrides.defaults': packageContainers, lastBlueprintConfig: { blueprintHash: blueprint.blueprintDoc.blueprintHash, blueprintId: blueprint.blueprintId, diff --git a/packages/job-worker/src/rundownPlaylists.ts b/packages/job-worker/src/rundownPlaylists.ts index dce9f538dd..c5efcae563 100644 --- a/packages/job-worker/src/rundownPlaylists.ts +++ b/packages/job-worker/src/rundownPlaylists.ts @@ -5,7 +5,13 @@ import { ForceQuickLoopAutoNext, QuickLoopMarkerType, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { clone, getHash, getRandomString, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { + clone, + getHash, + getRandomString, + normalizeArrayToMap, + generateTranslation, +} from '@sofie-automation/corelib/dist/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { IngestJobs } from '@sofie-automation/corelib/dist/worker/ingest' @@ -16,7 +22,11 @@ import { RemovePlaylistProps, } from '@sofie-automation/corelib/dist/worker/studio' import { ReadonlyDeep } from 'type-fest' -import { BlueprintResultRundownPlaylist, IBlueprintRundown } from '@sofie-automation/blueprints-integration' +import { + BlueprintResultRundownPlaylist, + IBlueprintRundown, + NoteSeverity, +} from '@sofie-automation/blueprints-integration' import { JobContext } from './jobs' import { logger } from './logging' import { resetRundownPlaylist } from './playout/lib' @@ -38,6 +48,7 @@ import { RundownLock } from './jobs/lock' import { runWithRundownLock } from './ingest/lock' import { convertRundownToBlueprints } from './blueprints/context/lib' import { sortRundownIDsInPlaylist } from '@sofie-automation/corelib/dist/playout/playlist' +import { INoteBase } from '@sofie-automation/corelib/dist/dataModel/Notes' /** * Debug: Remove a Playlist and all its contents @@ -150,7 +161,8 @@ export async function removeRundownFromDb(context: JobContext, lock: RundownLock context.directCollections.ExpectedMediaItems.remove({ rundownId: rundownId }), context.directCollections.ExpectedPlayoutItems.remove({ rundownId: rundownId }), context.directCollections.ExpectedPackages.remove({ rundownId: rundownId }), - context.directCollections.IngestDataCache.remove({ rundownId: rundownId }), + context.directCollections.SofieIngestDataCache.remove({ rundownId: rundownId }), + context.directCollections.NrcsIngestDataCache.remove({ rundownId: rundownId }), context.directCollections.RundownBaselineAdLibPieces.remove({ rundownId: rundownId }), context.directCollections.Segments.remove({ rundownId: rundownId }), context.directCollections.Parts.remove({ rundownId: rundownId }), @@ -171,27 +183,38 @@ export function produceRundownPlaylistInfoFromRundown( rundowns: ReadonlyDeep> ): DBRundownPlaylist { let playlistInfo: BlueprintResultRundownPlaylist | null = null + + let notes: INoteBase[] = [] + try { 
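
The Object.fromEntries/Object.entries blocks above all follow one recipe: take whatever partial data the blueprint returned, fill each missing field with a safe default, and write the normalized result into the defaults side of the studio's overrides structures. The recipe in miniature (field set abbreviated; the real code wraps each entry in literal<Complete<...>> so the compiler enforces exhaustiveness):

interface RouteSetDefaults {
	name: string
	active: boolean
	defaultActive: boolean
}

// Normalize a partial blueprint result into complete defaults.
function normalizeRouteSets(
	fromBlueprint: Record<string, Partial<RouteSetDefaults>> | undefined
): Record<string, RouteSetDefaults> {
	return Object.fromEntries(
		Object.entries(fromBlueprint ?? {}).map(([id, rs]) => [
			id,
			{
				name: rs.name ?? '',
				active: rs.active ?? false,
				defaultActive: rs.defaultActive ?? false,
			},
		])
	)
}
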
if (studioBlueprint?.blueprint?.getRundownPlaylistInfo) { + const blueprintContext = new StudioUserContext( + { + name: 'produceRundownPlaylistInfoFromRundown', + identifier: `studioId=${context.studioId},playlistId=${playlistId},rundownIds=${rundowns + .map((r) => r._id) + .join(',')}`, + }, + context.studio, + context.getStudioBlueprintConfig() + ) + playlistInfo = studioBlueprint.blueprint.getRundownPlaylistInfo( - new StudioUserContext( - { - name: 'produceRundownPlaylistInfoFromRundown', - identifier: `studioId=${context.studioId},playlistId=${playlistId},rundownIds=${rundowns - .map((r) => r._id) - .join(',')}`, - tempSendUserNotesIntoBlackHole: true, - }, - context.studio, - context.getStudioBlueprintConfig() - ), + blueprintContext, rundowns.map(convertRundownToBlueprints), playlistExternalId ) + + notes = blueprintContext.notes } } catch (err) { logger.error(`Error in studioBlueprint.getRundownPlaylistInfo: ${stringifyError(err)}`) playlistInfo = null + + notes.push({ + type: NoteSeverity.ERROR, + message: generateTranslation(`Internal Error generating RundownPlaylist`), + }) } const rundownsInDefaultOrder = sortDefaultRundownInPlaylistOrder(rundowns) @@ -240,6 +263,14 @@ export function produceRundownPlaylistInfoFromRundown( } } + // Update the notes on the playlist + newPlaylist.notes = notes.map((note) => ({ + ...note, + origin: { + name: 'produceRundownPlaylistInfoFromRundown', + }, + })) + if (!newPlaylist.rundownRanksAreSetInSofie) { if (playlistInfo?.order) { // The blueprints gave us an order diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts index a4b22ed20a..3a7cdb60c0 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts @@ -68,4 +68,12 @@ export interface StudioPlayoutModel extends StudioPlayoutModelBase, BaseModel { * @param excludeRundownPlaylistId Ignore a given RundownPlaylist, useful to see if any other RundownPlaylists are active */ getActiveRundownPlaylists(excludeRundownPlaylistId?: RundownPlaylistId): ReadonlyDeep + + /** + * Update the active state of a RouteSet + * @param routeSetId The RouteSet to update + * @param isActive The new active state of the RouteSet + * @returns Whether the change may affect timeline generation + */ + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean } diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 06e9689f93..8abd587def 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -34,6 +34,7 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { #timelineHasChanged = false #timeline: TimelineComplete | null + public get timeline(): TimelineComplete | null { return this.#timeline } @@ -100,6 +101,10 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { return this.#timeline } + switchRouteSet(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.context.setRouteSetActive(routeSetId, isActive) + } + /** * Discards all documents in this model, and marks it as unusable */ @@ -120,7 +125,11 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { } this.#timelineHasChanged = false - await this.#baselineHelper.saveAllToDatabase() + await Promise.all([ + this.#baselineHelper.saveAllToDatabase(), + 
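
The note handling added around getRundownPlaylistInfo above replaces the old tempSendUserNotesIntoBlackHole behaviour: notes the blueprint raises are now kept, and a crash leaves a user-visible error note rather than only a log line. A reduced sketch of the flow (not line-for-line; the real code reads blueprintContext.notes and attaches an origin to each note afterwards):

interface UserNote {
	type: 'error' | 'warning'
	message: string
}

// Run a blueprint hook that may report notes as it goes; on a crash, keep any
// notes already raised and append a generic error note for visibility.
function runCollectingNotes<T>(hook: (notes: UserNote[]) => T): { result: T | null; notes: UserNote[] } {
	const notes: UserNote[] = []
	try {
		return { result: hook(notes), notes }
	} catch {
		notes.push({ type: 'error', message: 'Internal Error generating RundownPlaylist' })
		return { result: null, notes }
	}
}
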
this.context.saveRouteSetChanges(), + // + ]) if (span) span.end() } diff --git a/packages/job-worker/src/studio/routeSet.ts b/packages/job-worker/src/studio/routeSet.ts new file mode 100644 index 0000000000..eba9504062 --- /dev/null +++ b/packages/job-worker/src/studio/routeSet.ts @@ -0,0 +1,14 @@ +import { SwitchRouteSetProps } from '@sofie-automation/corelib/dist/worker/studio' +import { JobContext } from '../jobs' +import { runJobWithStudioPlayoutModel } from './lock' +import { updateTimelineFromStudioPlayoutModel } from '../playout/lib' + +export async function handleSwitchRouteSet(context: JobContext, data: SwitchRouteSetProps): Promise { + await runJobWithStudioPlayoutModel(context, async (studioPlayoutModel) => { + const routesetChangeMayAffectTimeline = studioPlayoutModel.switchRouteSet(data.routeSetId, data.state) + + if (routesetChangeMayAffectTimeline) { + await updateTimelineFromStudioPlayoutModel(context, studioPlayoutModel) + } + }) +} diff --git a/packages/job-worker/src/workers/caches.ts b/packages/job-worker/src/workers/caches.ts index f816a8f407..f252a9a0de 100644 --- a/packages/job-worker/src/workers/caches.ts +++ b/packages/job-worker/src/workers/caches.ts @@ -16,7 +16,7 @@ import { clone, deepFreeze } from '@sofie-automation/corelib/dist/lib' import { logger } from '../logging' import deepmerge = require('deepmerge') import { ProcessedShowStyleBase, ProcessedShowStyleVariant, StudioCacheContext } from '../jobs' -import { StudioCacheContextImpl } from './context' +import { StudioCacheContextImpl } from './context/StudioCacheContextImpl' /** * A Wrapper to maintain a cache and provide a context using the cache when appropriate diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts new file mode 100644 index 0000000000..7be35b55f2 --- /dev/null +++ b/packages/job-worker/src/workers/context/JobContextImpl.ts @@ -0,0 +1,164 @@ +import { IDirectCollections } from '../../db' +import { JobContext } from '../../jobs' +import { WorkerDataCache } from '../caches' +import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' +import { ApmSpan, ApmTransaction } from '../../profiler' +import { getRandomString } from '@sofie-automation/corelib/dist/lib' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { getStudioQueueName, StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio' +import { LockBase, PlaylistLock, RundownLock } from '../../jobs/lock' +import { logger } from '../../logging' +import { BaseModel } from '../../modelBase' +import { LocksManager } from '../locks' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' +import { EventsJobFunc, getEventsQueueName } from '@sofie-automation/corelib/dist/worker/events' +import { FastTrackTimelineFunc } from '../../main' +import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timeline' +import type { QueueJobFunc } from './util' +import { StudioCacheContextImpl } from './StudioCacheContextImpl' +import { PlaylistLockImpl, RundownLockImpl } from './Locks' +import { StudioRouteSetUpdater } from './StudioRouteSetUpdater' +import type { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' +import type { ReadonlyDeep } from 'type-fest' + +export class JobContextImpl extends StudioCacheContextImpl implements JobContext { 
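
StudioRouteSetUpdater itself is not part of this hunk, but its contract is visible in how it is used here: setRouteSetActive mutates an in-memory overlay of the studio (surfaced through studioWithChanges) and reports whether the change may affect timeline generation; saveRouteSetChanges persists the overlay, and discardRouteSetChanges drops it. A hypothetical minimal shape honouring that contract (the routes.length heuristic is an assumption for the sketch, not the real rule):

interface RouteSetState {
	active: boolean
	routes: unknown[]
}

class RouteSetUpdaterSketch {
	#pending: Record<string, RouteSetState> | undefined

	constructor(
		private readonly loaded: Record<string, RouteSetState>,
		private readonly persist: (routeSets: Record<string, RouteSetState>) => Promise<void>
	) {}

	get studioWithChanges(): Record<string, RouteSetState> | undefined {
		return this.#pending
	}

	setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean {
		const base = this.#pending ?? this.loaded
		const routeSet = base[routeSetId]
		if (!routeSet) throw new Error(`RouteSet "${routeSetId}" not found`)
		const active = isActive === 'toggle' ? !routeSet.active : isActive
		this.#pending = { ...base, [routeSetId]: { ...routeSet, active } }
		// Assumption: only a route set that actually routes something can
		// invalidate the timeline.
		return routeSet.routes.length > 0
	}

	async saveRouteSetChanges(): Promise<void> {
		if (!this.#pending) return
		await this.persist(this.#pending)
		this.#pending = undefined
	}

	discardRouteSetChanges(): void {
		this.#pending = undefined
	}
}
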
+ private readonly locks: Array = [] + private readonly caches: Array = [] + + private readonly studioRouteSetUpdater: StudioRouteSetUpdater + + constructor( + directCollections: Readonly, + cacheData: WorkerDataCache, + private readonly locksManager: LocksManager, + private readonly transaction: ApmTransaction | undefined, + private readonly queueJob: QueueJobFunc, + private readonly fastTrackTimeline: FastTrackTimelineFunc | null + ) { + super(directCollections, cacheData) + + this.studioRouteSetUpdater = new StudioRouteSetUpdater(directCollections, cacheData) + } + + get studio(): ReadonlyDeep { + return this.studioRouteSetUpdater.studioWithChanges ?? super.studio + } + + trackCache(cache: BaseModel): void { + this.caches.push(cache) + } + + async lockPlaylist(playlistId: RundownPlaylistId): Promise { + const span = this.startSpan('lockPlaylist') + if (span) span.setLabel('playlistId', unprotectString(playlistId)) + + const lockId = getRandomString() + logger.silly(`PlaylistLock: Locking "${playlistId}"`) + + const resourceId = `playlist:${playlistId}` + await this.locksManager.aquire(lockId, resourceId) + + const doRelease = async () => { + const span = this.startSpan('unlockPlaylist') + if (span) span.setLabel('playlistId', unprotectString(playlistId)) + + await this.locksManager.release(lockId, resourceId) + + if (span) span.end() + } + const lock = new PlaylistLockImpl(playlistId, doRelease) + this.locks.push(lock) + + logger.silly(`PlaylistLock: Locked "${playlistId}"`) + + if (span) span.end() + + return lock + } + + async lockRundown(rundownId: RundownId): Promise { + const span = this.startSpan('lockRundown') + if (span) span.setLabel('rundownId', unprotectString(rundownId)) + + const lockId = getRandomString() + logger.silly(`RundownLock: Locking "${rundownId}"`) + + const resourceId = `rundown:${rundownId}` + await this.locksManager.aquire(lockId, resourceId) + + const doRelease = async () => { + const span = this.startSpan('unlockRundown') + if (span) span.setLabel('rundownId', unprotectString(rundownId)) + + await this.locksManager.release(lockId, resourceId) + + if (span) span.end() + } + const lock = new RundownLockImpl(rundownId, doRelease) + this.locks.push(lock) + + logger.silly(`RundownLock: Locked "${rundownId}"`) + + if (span) span.end() + + return lock + } + + /** Ensure resources are cleaned up after the job completes */ + async cleanupResources(): Promise { + // Ensure all locks are freed + for (const lock of this.locks) { + if (lock.isLocked) { + logger.warn(`Lock never freed: ${lock}`) + await lock.release().catch((e) => { + logger.error(`Lock free failed: ${stringifyError(e)}`) + }) + } + } + + // Ensure all caches were saved/aborted + for (const cache of this.caches) { + try { + cache.assertNoChanges() + } catch (e) { + logger.warn(`${cache.displayName} has unsaved changes: ${stringifyError(e)}`) + } + } + } + + startSpan(spanName: string): ApmSpan | null { + if (this.transaction) return this.transaction.startSpan(spanName) + return null + } + + async queueIngestJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getIngestQueueName(this.studioId), name, data) + } + async queueStudioJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getStudioQueueName(this.studioId), name, data) + } + async queueEventJob(name: T, data: Parameters[0]): Promise { + await this.queueJob(getEventsQueueName(this.studioId), name, data) + } + + hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void { + if (this.fastTrackTimeline) { + 
this.fastTrackTimeline(newTimeline).catch((e) => { + logger.error(`Failed to publish timeline to fast track: ${stringifyError(e)}`) + }) + } + } + + setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean { + return this.studioRouteSetUpdater.setRouteSetActive(routeSetId, isActive) + } + + async saveRouteSetChanges(): Promise { + return this.studioRouteSetUpdater.saveRouteSetChanges() + } + + discardRouteSetChanges(): void { + return this.studioRouteSetUpdater.discardRouteSetChanges() + } +} diff --git a/packages/job-worker/src/workers/context/Locks.ts b/packages/job-worker/src/workers/context/Locks.ts new file mode 100644 index 0000000000..55cc72f36d --- /dev/null +++ b/packages/job-worker/src/workers/context/Locks.ts @@ -0,0 +1,67 @@ +import type { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PlaylistLock, RundownLock } from '../../jobs/lock' +import { logger } from '../../logging' + +export class PlaylistLockImpl extends PlaylistLock { + #isLocked = true + + public constructor(playlistId: RundownPlaylistId, private readonly doRelease: () => Promise) { + super(playlistId) + } + + get isLocked(): boolean { + return this.#isLocked + } + + async release(): Promise { + if (!this.#isLocked) { + logger.warn(`PlaylistLock: Already released "${this.playlistId}"`) + } else { + logger.silly(`PlaylistLock: Releasing "${this.playlistId}"`) + + this.#isLocked = false + + await this.doRelease() + + logger.silly(`PlaylistLock: Released "${this.playlistId}"`) + + if (this.deferedFunctions.length > 0) { + for (const fcn of this.deferedFunctions) { + await fcn() + } + } + } + } +} + +export class RundownLockImpl extends RundownLock { + #isLocked = true + + public constructor(rundownId: RundownId, private readonly doRelease: () => Promise) { + super(rundownId) + } + + get isLocked(): boolean { + return this.#isLocked + } + + async release(): Promise { + if (!this.#isLocked) { + logger.warn(`RundownLock: Already released "${this.rundownId}"`) + } else { + logger.silly(`RundownLock: Releasing "${this.rundownId}"`) + + this.#isLocked = false + + await this.doRelease() + + logger.silly(`RundownLock: Released "${this.rundownId}"`) + + if (this.deferedFunctions.length > 0) { + for (const fcn of this.deferedFunctions) { + await fcn() + } + } + } + } +} diff --git a/packages/job-worker/src/workers/context.ts b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts similarity index 57% rename from packages/job-worker/src/workers/context.ts rename to packages/job-worker/src/workers/context/StudioCacheContextImpl.ts index 383b7f41a9..dff38b6e88 100644 --- a/packages/job-worker/src/workers/context.ts +++ b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts @@ -1,48 +1,28 @@ -import { IDirectCollections } from '../db' +import { IDirectCollections } from '../../db' import { ProcessedShowStyleBase, ProcessedShowStyleVariant, - JobContext, ProcessedShowStyleCompound, StudioCacheContext, -} from '../jobs' +} from '../../jobs' import { ReadonlyDeep } from 'type-fest' -import { WorkerDataCache } from './caches' +import { WorkerDataCache } from '../caches' import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' -import { - RundownId, - RundownPlaylistId, - ShowStyleBaseId, - ShowStyleVariantId, - StudioId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' -import { parseBlueprintDocument, 
diff --git a/packages/job-worker/src/workers/context.ts b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts
similarity index 57%
rename from packages/job-worker/src/workers/context.ts
rename to packages/job-worker/src/workers/context/StudioCacheContextImpl.ts
index 383b7f41a9..dff38b6e88 100644
--- a/packages/job-worker/src/workers/context.ts
+++ b/packages/job-worker/src/workers/context/StudioCacheContextImpl.ts
@@ -1,48 +1,28 @@
-import { IDirectCollections } from '../db'
+import { IDirectCollections } from '../../db'
 import {
 	ProcessedShowStyleBase,
 	ProcessedShowStyleVariant,
-	JobContext,
 	ProcessedShowStyleCompound,
 	StudioCacheContext,
-} from '../jobs'
+} from '../../jobs'
 import { ReadonlyDeep } from 'type-fest'
-import { WorkerDataCache } from './caches'
+import { WorkerDataCache } from '../caches'
 import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio'
-import {
-	RundownId,
-	RundownPlaylistId,
-	ShowStyleBaseId,
-	ShowStyleVariantId,
-	StudioId,
-} from '@sofie-automation/corelib/dist/dataModel/Ids'
-import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest'
-import { parseBlueprintDocument, WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../blueprints/cache'
+import { ShowStyleBaseId, ShowStyleVariantId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { parseBlueprintDocument, WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../../blueprints/cache'
 import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep'
-import { ApmSpan, ApmTransaction } from '../profiler'
 import { DBShowStyleBase } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase'
-import { clone, deepFreeze, getRandomString } from '@sofie-automation/corelib/dist/lib'
-import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
-import { createShowStyleCompound } from '../showStyles'
+import { clone, deepFreeze } from '@sofie-automation/corelib/dist/lib'
+import { createShowStyleCompound } from '../../showStyles'
 import { BlueprintManifestType } from '@sofie-automation/blueprints-integration'
 import {
 	preprocessShowStyleConfig,
 	preprocessStudioConfig,
 	ProcessedShowStyleConfig,
 	ProcessedStudioConfig,
-} from '../blueprints/config'
-import { getStudioQueueName, StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio'
-import { LockBase, PlaylistLock, RundownLock } from '../jobs/lock'
-import { logger } from '../logging'
-import { BaseModel } from '../modelBase'
-import { LocksManager } from './locks'
-import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
-import { EventsJobFunc, getEventsQueueName } from '@sofie-automation/corelib/dist/worker/events'
-import { FastTrackTimelineFunc } from '../main'
-import { TimelineComplete } from '@sofie-automation/corelib/dist/dataModel/Timeline'
-import { processShowStyleBase, processShowStyleVariant } from '../jobs/showStyle'
-
-export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise<void>
+} from '../../blueprints/config'
+
+import { processShowStyleBase, processShowStyleVariant } from '../../jobs/showStyle'
 
 export class StudioCacheContextImpl implements StudioCacheContext {
 	constructor(
@@ -276,127 +256,6 @@ export class StudioCacheContextImpl implements StudioCacheContext
 	}
 }
 
-export class JobContextImpl extends StudioCacheContextImpl implements JobContext {
-	private readonly locks: Array<LockBase> = []
-	private readonly caches: Array<BaseModel> = []
-
-	constructor(
-		directCollections: Readonly<IDirectCollections>,
-		cacheData: WorkerDataCache,
-		private readonly locksManager: LocksManager,
-		private readonly transaction: ApmTransaction | undefined,
-		private readonly queueJob: QueueJobFunc,
-		private readonly fastTrackTimeline: FastTrackTimelineFunc | null
-	) {
-		super(directCollections, cacheData)
-	}
-
-	trackCache(cache: BaseModel): void {
-		this.caches.push(cache)
-	}
-
-	async lockPlaylist(playlistId: RundownPlaylistId): Promise<PlaylistLock> {
-		const span = this.startSpan('lockPlaylist')
-		if (span) span.setLabel('playlistId', unprotectString(playlistId))
-
-		const lockId = getRandomString()
-		logger.silly(`PlaylistLock: Locking "${playlistId}"`)
-
-		const resourceId = `playlist:${playlistId}`
-		await this.locksManager.aquire(lockId, resourceId)
-
-		const doRelease = async () => {
-			const span = this.startSpan('unlockPlaylist')
-			if (span) span.setLabel('playlistId', unprotectString(playlistId))
-
-			await this.locksManager.release(lockId, resourceId)
-
-			if (span) span.end()
-		}
-		const lock = new PlaylistLockImpl(playlistId, doRelease)
-		this.locks.push(lock)
-
-		logger.silly(`PlaylistLock: Locked "${playlistId}"`)
-
-		if (span) span.end()
-
-		return lock
-	}
-
-	async lockRundown(rundownId: RundownId): Promise<RundownLock> {
-		const span = this.startSpan('lockRundown')
-		if (span) span.setLabel('rundownId', unprotectString(rundownId))
-
-		const lockId = getRandomString()
-		logger.silly(`RundownLock: Locking "${rundownId}"`)
-
-		const resourceId = `rundown:${rundownId}`
-		await this.locksManager.aquire(lockId, resourceId)
-
-		const doRelease = async () => {
-			const span = this.startSpan('unlockRundown')
-			if (span) span.setLabel('rundownId', unprotectString(rundownId))
-
-			await this.locksManager.release(lockId, resourceId)
-
-			if (span) span.end()
-		}
-		const lock = new RundownLockImpl(rundownId, doRelease)
-		this.locks.push(lock)
-
-		logger.silly(`RundownLock: Locked "${rundownId}"`)
-
-		if (span) span.end()
-
-		return lock
-	}
-
-	/** Ensure resources are cleaned up after the job completes */
-	async cleanupResources(): Promise<void> {
-		// Ensure all locks are freed
-		for (const lock of this.locks) {
-			if (lock.isLocked) {
-				logger.warn(`Lock never freed: ${lock}`)
-				await lock.release().catch((e) => {
-					logger.error(`Lock free failed: ${stringifyError(e)}`)
-				})
-			}
-		}
-
-		// Ensure all caches were saved/aborted
-		for (const cache of this.caches) {
-			try {
-				cache.assertNoChanges()
-			} catch (e) {
-				logger.warn(`${cache.displayName} has unsaved changes: ${stringifyError(e)}`)
-			}
-		}
-	}
-
-	startSpan(spanName: string): ApmSpan | null {
-		if (this.transaction) return this.transaction.startSpan(spanName)
-		return null
-	}
-
-	async queueIngestJob<T extends keyof IngestJobFunc>(name: T, data: Parameters<IngestJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getIngestQueueName(this.studioId), name, data)
-	}
-	async queueStudioJob<T extends keyof StudioJobFunc>(name: T, data: Parameters<StudioJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getStudioQueueName(this.studioId), name, data)
-	}
-	async queueEventJob<T extends keyof EventsJobFunc>(name: T, data: Parameters<EventsJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getEventsQueueName(this.studioId), name, data)
-	}
-
-	hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void {
-		if (this.fastTrackTimeline) {
-			this.fastTrackTimeline(newTimeline).catch((e) => {
-				logger.error(`Failed to publish timeline to fast track: ${stringifyError(e)}`)
-			})
-		}
-	}
-}
-
 async function loadShowStyleBlueprint(
 	collections: IDirectCollections,
 	showStyleBase: Pick<DBShowStyleBase, '_id' | 'blueprintId'>
@@ -424,67 +283,3 @@ async function loadShowStyleBlueprint(
 		blueprint: blueprintManifest,
 	})
 }
-
-class PlaylistLockImpl extends PlaylistLock {
-	#isLocked = true
-
-	public constructor(playlistId: RundownPlaylistId, private readonly doRelease: () => Promise<void>) {
-		super(playlistId)
-	}
-
-	get isLocked(): boolean {
-		return this.#isLocked
-	}
-
-	async release(): Promise<void> {
-		if (!this.#isLocked) {
-			logger.warn(`PlaylistLock: Already released "${this.playlistId}"`)
-		} else {
-			logger.silly(`PlaylistLock: Releasing "${this.playlistId}"`)
-
-			this.#isLocked = false
-
-			await this.doRelease()
-
-			logger.silly(`PlaylistLock: Released "${this.playlistId}"`)
-
-			if (this.deferedFunctions.length > 0) {
-				for (const fcn of this.deferedFunctions) {
-					await fcn()
-				}
-			}
-		}
-	}
-}
-
-class RundownLockImpl extends RundownLock {
-	#isLocked = true
-
-	public constructor(rundownId: RundownId, private readonly doRelease: () => Promise<void>) {
-		super(rundownId)
-	}
-
-	get isLocked(): boolean {
-		return this.#isLocked
-	}
-
-	async release(): Promise<void> {
-		if (!this.#isLocked) {
-			logger.warn(`RundownLock: Already released "${this.rundownId}"`)
-		} else {
-			logger.silly(`RundownLock: Releasing "${this.rundownId}"`)
-
-			this.#isLocked = false
-
-			await this.doRelease()
-
-			logger.silly(`RundownLock: Released "${this.rundownId}"`)
-
-			if (this.deferedFunctions.length > 0) {
-				for (const fcn of this.deferedFunctions) {
-					await fcn()
-				}
-			}
-		}
-	}
-}
diff --git a/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts
new file mode 100644
index 0000000000..cea5c9e53b
--- /dev/null
+++ b/packages/job-worker/src/workers/context/StudioRouteSetUpdater.ts
@@ -0,0 +1,109 @@
+import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/blueprints-integration'
+import type { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio'
+import { deepFreeze } from '@sofie-automation/corelib/dist/lib'
+import {
+	getAllCurrentItemsFromOverrides,
+	OverrideOpHelperImpl,
+	WrappedOverridableItemNormal,
+} from '@sofie-automation/corelib/dist/overrideOpHelper'
+import { logger } from '../../logging'
+import type { ReadonlyDeep } from 'type-fest'
+import type { WorkerDataCache } from '../caches'
+import type { IDirectCollections } from '../../db'
+
+export class StudioRouteSetUpdater {
+	readonly #directCollections: Readonly<IDirectCollections>
+	readonly #cacheData: Pick<WorkerDataCache, 'studio'>
+
+	constructor(directCollections: Readonly<IDirectCollections>, cacheData: Pick<WorkerDataCache, 'studio'>) {
+		this.#directCollections = directCollections
+		this.#cacheData = cacheData
+	}
+
+	// Future: this could store a Map, if the context exposed a simplified view of DBStudio
+	#studioWithRouteSetChanges: ReadonlyDeep<DBStudio> | undefined = undefined
+
+	get studioWithChanges(): ReadonlyDeep<DBStudio> | undefined {
+		return this.#studioWithRouteSetChanges
+	}
+
+	setRouteSetActive(routeSetId: string, isActive: boolean | 'toggle'): boolean {
+		const currentStudio = this.#studioWithRouteSetChanges ?? this.#cacheData.studio
+		const currentRouteSets = getAllCurrentItemsFromOverrides(currentStudio.routeSetsWithOverrides, null)
+
+		const routeSet = currentRouteSets.find((routeSet) => routeSet.id === routeSetId)
+		if (!routeSet) throw new Error(`RouteSet "${routeSetId}" not found!`)
+
+		if (isActive === 'toggle') {
+			isActive = !routeSet.computed.active
+		}
+
+		if (routeSet.computed.behavior === StudioRouteBehavior.ACTIVATE_ONLY && !isActive)
+			throw new Error(`RouteSet "${routeSet.id}" is ACTIVATE_ONLY`)
+
+		const overrideHelper = new OverrideOpHelperImpl(null, currentStudio.routeSetsWithOverrides)
+
+		// Update the pending changes
+		logger.debug(`switchRouteSet "${this.#cacheData.studio._id}" "${routeSet.id}"=${isActive}`)
+		overrideHelper.setItemValue(routeSetId, 'active', isActive)
+
+		let mayAffectTimeline = couldRoutesetAffectTimelineGeneration(routeSet)
+
+		// Deactivate other routeSets in the same exclusivity group:
+		if (routeSet.computed.exclusivityGroup && isActive) {
+			for (const otherRouteSet of Object.values<WrappedOverridableItemNormal<StudioRouteSet>>(currentRouteSets)) {
+				if (otherRouteSet.id === routeSet.id) continue
+				if (otherRouteSet.computed?.exclusivityGroup === routeSet.computed.exclusivityGroup) {
+					logger.debug(`switchRouteSet Other ID "${this.#cacheData.studio._id}" "${otherRouteSet.id}"=false`)
+					overrideHelper.setItemValue(otherRouteSet.id, 'active', false)
+
+					mayAffectTimeline = mayAffectTimeline || couldRoutesetAffectTimelineGeneration(otherRouteSet)
+				}
+			}
+		}
+
+		const updatedOverrideOps = overrideHelper.getPendingOps()
+
+		// Update the cached studio
+		this.#studioWithRouteSetChanges = Object.freeze({
+			...currentStudio,
+			routeSetsWithOverrides: Object.freeze({
+				...currentStudio.routeSetsWithOverrides,
+				overrides: deepFreeze(updatedOverrideOps),
+			}),
+		})
+
+		return mayAffectTimeline
+	}
+
+	async saveRouteSetChanges(): Promise<void> {
+		if (!this.#studioWithRouteSetChanges) return
+
+		// Save the changes to the database.
+		// This is technically a slight race condition if someone is editing the Studio in the config pages at the same time, but no more so than the rest of the system
+		await this.#directCollections.Studios.update(
+			{
+				_id: this.#cacheData.studio._id,
+			},
+			{
+				$set: {
+					'routeSetsWithOverrides.overrides':
+						this.#studioWithRouteSetChanges.routeSetsWithOverrides.overrides,
+				},
+			}
+		)
+
+		// Pretend that the studio as reported by the database has changed; the ChangeStream firing after this job will bring it back in sync
+		this.#cacheData.studio = this.#studioWithRouteSetChanges
+		this.#studioWithRouteSetChanges = undefined
+	}
+
+	discardRouteSetChanges(): void {
+		// Discard any pending changes
+		this.#studioWithRouteSetChanges = undefined
+	}
+}
+
+function couldRoutesetAffectTimelineGeneration(routeSet: WrappedOverridableItemNormal<StudioRouteSet>): boolean {
+	return routeSet.computed.abPlayers.length > 0
+}
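A condensed sketch of the intended call pattern, based only on the methods above; the shouldPersist flag and the surrounding variables are assumed:

const updater = new StudioRouteSetUpdater(directCollections, cacheData)
// Returns true when any touched RouteSet has abPlayers, i.e. the timeline may need regenerating
const mayAffectTimeline = updater.setRouteSetActive('one', 'toggle')
if (shouldPersist) {
	await updater.saveRouteSetChanges() // one $set on routeSetsWithOverrides.overrides
} else {
	updater.discardRouteSetChanges() // the in-memory pending studio is simply dropped
}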
diff --git a/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts
new file mode 100644
index 0000000000..77692f4072
--- /dev/null
+++ b/packages/job-worker/src/workers/context/__tests__/StudioRouteSetUpdater.spec.ts
@@ -0,0 +1,403 @@
+import { StudioRouteBehavior, StudioRouteSet } from '@sofie-automation/blueprints-integration'
+import { setupDefaultJobEnvironment } from '../../../__mocks__/context'
+import { StudioRouteSetUpdater } from '../StudioRouteSetUpdater'
+import type { WorkerDataCache } from '../../caches'
+import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides'
+
+function setupTest(routeSets: Record<string, StudioRouteSet>) {
+	const context = setupDefaultJobEnvironment()
+	const mockCache: Pick<WorkerDataCache, 'studio'> = {
+		studio: {
+			...context.studio,
+			routeSetsWithOverrides: wrapDefaultObject(routeSets),
+		},
+	}
+	const mockCollection = context.mockCollections.Studios
+	const routeSetHelper = new StudioRouteSetUpdater(context.directCollections, mockCache)
+
+	return { context, mockCache, mockCollection, routeSetHelper }
+}
+
+const SINGLE_ROUTESET: Record<string, StudioRouteSet> = {
+	one: {
+		name: 'test',
+		active: false,
+		behavior: StudioRouteBehavior.TOGGLE,
+		routes: [],
+		abPlayers: [],
+	},
+}
+const SINGLE_ROUTESET_WITH_AB: Record<string, StudioRouteSet> = {
+	one: {
+		name: 'test',
+		active: false,
+		behavior: StudioRouteBehavior.TOGGLE,
+		routes: [],
+		abPlayers: [{ playerId: 'test', poolName: 'test' }],
+	},
+}
+const EXCLUSIVE_ROUTESETS: Record<string, StudioRouteSet> = {
+	one: {
+		name: 'test',
+		active: false,
+		behavior: StudioRouteBehavior.TOGGLE,
+		exclusivityGroup: 'main',
+		routes: [],
+		abPlayers: [{ playerId: 'test', poolName: 'test' }],
+	},
+	two: {
+		name: 'test',
+		active: true,
+		behavior: StudioRouteBehavior.TOGGLE,
+		exclusivityGroup: 'main',
+		routes: [],
+		abPlayers: [],
+	},
+	activate: {
+		name: 'test',
+		active: false,
+		behavior: StudioRouteBehavior.ACTIVATE_ONLY,
+		exclusivityGroup: 'main',
+		routes: [],
+		abPlayers: [],
+	},
+}
+
+describe('StudioRouteSetUpdater', () => {
+	it('no changes should not save', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+	})
+
+	it('no changes when setting missing routeset', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		expect(() => routeSetHelper.setRouteSetActive('missing', true)).toThrow(/not found/)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+	})
+
+	it('change when setting routeset - true', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		routeSetHelper.setRouteSetActive('one', true)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toEqual([
+			{
+				type: 'update',
+				args: [
+					{ _id: 'mockStudio0' },
+					{
+						$set: {
+							'routeSetsWithOverrides.overrides': [
+								{
+									op: 'set',
+									path: 'one.active',
+									value: true,
+								},
+							],
+						},
+					},
+				],
+			},
+		])
+	})
+	it('change when setting routeset - false', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		routeSetHelper.setRouteSetActive('one', false)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toEqual([
+			{
+				type: 'update',
+				args: [
+					{ _id: 'mockStudio0' },
+					{
+						$set: {
+							'routeSetsWithOverrides.overrides': [
+								{
+									op: 'set',
+									path: 'one.active',
+									value: false,
+								},
+							],
+						},
+					},
+				],
+			},
+		])
+	})
+	it('change when setting routeset - toggle', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		routeSetHelper.setRouteSetActive('one', 'toggle')
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toEqual([
+			{
+				type: 'update',
+				args: [
+					{ _id: 'mockStudio0' },
+					{
+						$set: {
+							'routeSetsWithOverrides.overrides': [
+								{
+									op: 'set',
+									path: 'one.active',
+									value: true,
+								},
+							],
+						},
+					},
+				],
+			},
+		])
+	})
+	it('change when setting routeset - toggle twice', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		routeSetHelper.setRouteSetActive('one', 'toggle')
+		routeSetHelper.setRouteSetActive('one', 'toggle')
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toEqual([
+			{
+				type: 'update',
+				args: [
+					{ _id: 'mockStudio0' },
+					{
+						$set: {
+							'routeSetsWithOverrides.overrides': [
+								{
+									op: 'set',
+									path: 'one.active',
+									value: false,
+								},
+							],
+						},
+					},
+				],
+			},
+		])
+	})
+
+	it('discard changes should not save', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		routeSetHelper.setRouteSetActive('one', true)
+
+		expect(routeSetHelper.studioWithChanges).toBeTruthy()
+
+		routeSetHelper.discardRouteSetChanges()
+
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+	})
+
+	it('save should update mockCache', async () => {
+		const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		const studioBefore = mockCache.studio
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+
+		routeSetHelper.setRouteSetActive('one', true)
+		expect(routeSetHelper.studioWithChanges).toBeTruthy()
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(1)
+
+		// Object should have changed
+		expect(mockCache.studio).not.toBe(studioBefore)
+		// Object should not be equal
+		expect(mockCache.studio).not.toEqual(studioBefore)
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+	})
+
+	it('no changes should not update mockCache', async () => {
+		const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		const studioBefore = mockCache.studio
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+
+		expect(mockCache.studio).toBe(studioBefore)
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+	})
+
+	it('discard changes should not update mockCache', async () => {
+		const { mockCache, mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+		const studioBefore = mockCache.studio
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+
+		routeSetHelper.setRouteSetActive('one', true)
+		expect(routeSetHelper.studioWithChanges).toBeTruthy()
+		routeSetHelper.discardRouteSetChanges()
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+
+		expect(mockCache.studio).toBe(studioBefore)
+		expect(routeSetHelper.studioWithChanges).toBeFalsy()
+	})
+
+	it('ACTIVATE_ONLY routeset can be activated', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+		routeSetHelper.setRouteSetActive('activate', true)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(1)
+	})
+
+	it('ACTIVATE_ONLY routeset cannot be deactivated', async () => {
+		const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+		expect(() => routeSetHelper.setRouteSetActive('activate', false)).toThrow(/ACTIVATE_ONLY/)
+
+		expect(mockCollection.operations).toHaveLength(0)
+		await routeSetHelper.saveRouteSetChanges()
+		expect(mockCollection.operations).toHaveLength(0)
+	})
+
+	describe('exclusive groups', () => {
+		it('deactivate member of exclusive group', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+			routeSetHelper.setRouteSetActive('one', false)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toEqual([
+				{
+					type: 'update',
+					args: [
+						{ _id: 'mockStudio0' },
+						{
+							$set: {
+								'routeSetsWithOverrides.overrides': [
+									{
+										op: 'set',
+										path: 'one.active',
+										value: false,
+									},
+								],
+							},
+						},
+					],
+				},
+			])
+		})
+
+		it('activate member of exclusive group', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+			routeSetHelper.setRouteSetActive('one', true)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toEqual([
+				{
+					type: 'update',
+					args: [
+						{ _id: 'mockStudio0' },
+						{
+							$set: {
+								'routeSetsWithOverrides.overrides': [
+									{
+										op: 'set',
+										path: 'one.active',
+										value: true,
+									},
+									{
+										op: 'set',
+										path: 'two.active',
+										value: false,
+									},
+									{
+										op: 'set',
+										path: 'activate.active',
+										value: false,
+									},
+								],
+							},
+						},
+					],
+				},
+			])
+		})
+	})
+
+	describe('Return value', () => {
+		it('update player with ab', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET_WITH_AB)
+
+			expect(routeSetHelper.setRouteSetActive('one', false)).toBe(true)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toHaveLength(1)
+		})
+
+		it('update player without ab', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(SINGLE_ROUTESET)
+
+			expect(routeSetHelper.setRouteSetActive('one', false)).toBe(false)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toHaveLength(1)
+		})
+
+		it('update exclusive group - disabling player without ab', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+			expect(routeSetHelper.setRouteSetActive('two', false)).toBe(false)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toHaveLength(1)
+		})
+
+		it('update exclusive group - disabling player with ab', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+			expect(routeSetHelper.setRouteSetActive('one', false)).toBe(true)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toHaveLength(1)
+		})
+
+		it('update exclusive group - enabling player without ab', async () => {
+			const { mockCollection, routeSetHelper } = setupTest(EXCLUSIVE_ROUTESETS)
+
+			expect(routeSetHelper.setRouteSetActive('two', true)).toBe(true)
+
+			expect(mockCollection.operations).toHaveLength(0)
+			await routeSetHelper.saveRouteSetChanges()
+			expect(mockCollection.operations).toHaveLength(1)
+		})
+	})
+})
diff --git a/packages/job-worker/src/workers/context/util.ts b/packages/job-worker/src/workers/context/util.ts
new file mode 100644
index 0000000000..38ac084220
--- /dev/null
+++ b/packages/job-worker/src/workers/context/util.ts
@@ -0,0 +1 @@
+export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise<void>
diff --git a/packages/job-worker/src/workers/events/child.ts b/packages/job-worker/src/workers/events/child.ts
index 55745e5331..76d95c4c31 100644
--- a/packages/job-worker/src/workers/events/child.ts
+++ b/packages/job-worker/src/workers/events/child.ts
@@ -11,7 +11,8 @@ import {
 	WorkerDataCache,
 	WorkerDataCacheWrapperImpl,
 } from '../caches'
-import { JobContextImpl, QueueJobFunc } from '../context'
+import { JobContextImpl } from '../context/JobContextImpl'
+import { QueueJobFunc } from '../context/util'
 import { AnyLockEvent, LocksManager } from '../locks'
 import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main'
 import { interceptLogging, logger } from '../../logging'
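QueueJobFunc is deliberately transport-agnostic. A sketch of a conforming implementation, assuming some jobManager object provided by the worker parent process:

const queueJob: QueueJobFunc = async (queueName, jobName, jobData) => {
	// Forward to whatever queue transport the worker parent exposes
	await jobManager.queueJob(queueName, jobName, jobData)
}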
diff --git a/packages/job-worker/src/workers/ingest/child.ts b/packages/job-worker/src/workers/ingest/child.ts
index d00f8a4ae8..86af4b8634 100644
--- a/packages/job-worker/src/workers/ingest/child.ts
+++ b/packages/job-worker/src/workers/ingest/child.ts
@@ -5,7 +5,8 @@ import { createMongoConnection, getMongoCollections, IDirectCollections } from '../../db'
 import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import { setupApmAgent, startTransaction } from '../../profiler'
 import { InvalidateWorkerDataCache, invalidateWorkerDataCache, loadWorkerDataCache, WorkerDataCache } from '../caches'
-import { JobContextImpl, QueueJobFunc } from '../context'
+import { JobContextImpl } from '../context/JobContextImpl'
+import { QueueJobFunc } from '../context/util'
 import { AnyLockEvent, LocksManager } from '../locks'
 import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main'
 import { interceptLogging, logger } from '../../logging'
diff --git a/packages/job-worker/src/workers/ingest/jobs.ts b/packages/job-worker/src/workers/ingest/jobs.ts
index 1fa5a5b922..2d2ef71a17 100644
--- a/packages/job-worker/src/workers/ingest/jobs.ts
+++ b/packages/job-worker/src/workers/ingest/jobs.ts
@@ -39,6 +39,13 @@ import {
 	handleBucketRemoveAdlibPiece,
 } from '../../ingest/bucket/bucketAdlibs'
 import { handleBucketItemImport, handleBucketItemRegenerate } from '../../ingest/bucket/import'
+import { handleUserExecuteChangeOperation } from '../../ingest/userOperation'
+import {
+	wrapCustomIngestJob,
+	wrapGenericIngestJob,
+	wrapGenericIngestJobWithPrecheck,
+	wrapMosIngestJob,
+} from '../../ingest/jobWrappers'
 import { handleCreateAdlibTestingRundownForShowStyleVariant } from '../../ingest/createAdlibTestingRundown'
 
 type ExecutableFunction<T extends keyof IngestJobFunc> = (
@@ -51,34 +58,35 @@ export type IngestJobHandlers = {
 }
 
 export const ingestJobHandlers: IngestJobHandlers = {
-	[IngestJobs.RemoveRundown]: handleRemovedRundown,
-	[IngestJobs.UpdateRundown]: handleUpdatedRundown,
-	[IngestJobs.UpdateRundownMetaData]: handleUpdatedRundownMetaData,
-	[IngestJobs.RemoveSegment]: handleRemovedSegment,
-	[IngestJobs.UpdateSegment]: handleUpdatedSegment,
-	[IngestJobs.UpdateSegmentRanks]: handleUpdatedSegmentRanks,
-	[IngestJobs.RemovePart]: handleRemovedPart,
-	[IngestJobs.UpdatePart]: handleUpdatedPart,
-	[IngestJobs.RegenerateRundown]: handleRegenerateRundown,
-	[IngestJobs.RegenerateSegment]: handleRegenerateSegment,
+	[IngestJobs.RemoveRundown]: wrapGenericIngestJob(handleRemovedRundown),
+	[IngestJobs.UpdateRundown]: wrapGenericIngestJob(handleUpdatedRundown),
+	[IngestJobs.UpdateRundownMetaData]: wrapGenericIngestJob(handleUpdatedRundownMetaData),
+	[IngestJobs.RemoveSegment]: wrapGenericIngestJob(handleRemovedSegment),
+	[IngestJobs.UpdateSegment]: wrapGenericIngestJobWithPrecheck(handleUpdatedSegment),
+	[IngestJobs.UpdateSegmentRanks]: wrapGenericIngestJob(handleUpdatedSegmentRanks),
+	[IngestJobs.RemovePart]: wrapGenericIngestJob(handleRemovedPart),
+	[IngestJobs.UpdatePart]: wrapGenericIngestJob(handleUpdatedPart),
+	[IngestJobs.RegenerateRundown]: wrapGenericIngestJob(handleRegenerateRundown),
+	[IngestJobs.RegenerateSegment]: wrapGenericIngestJob(handleRegenerateSegment),
 
-	[IngestJobs.RemoveOrphanedSegments]: handleRemoveOrphanedSegemnts,
+	[IngestJobs.RemoveOrphanedSegments]: wrapCustomIngestJob(handleRemoveOrphanedSegemnts),
 
-	[IngestJobs.MosRundown]: handleMosRundownData,
-	[IngestJobs.MosRundownMetadata]: handleMosRundownMetadata,
+	[IngestJobs.MosRundown]: wrapMosIngestJob(handleMosRundownData),
+	[IngestJobs.MosRundownMetadata]: wrapMosIngestJob(handleMosRundownMetadata),
 	[IngestJobs.MosRundownStatus]: handleMosRundownStatus,
-	[IngestJobs.MosRundownReadyToAir]: handleMosRundownReadyToAir,
-	[IngestJobs.MosFullStory]: handleMosFullStory,
-	[IngestJobs.MosDeleteStory]: handleMosDeleteStory,
-	[IngestJobs.MosInsertStory]: handleMosInsertStories,
-	[IngestJobs.MosMoveStory]: handleMosMoveStories,
-	[IngestJobs.MosSwapStory]: handleMosSwapStories,
+	[IngestJobs.MosRundownReadyToAir]: wrapCustomIngestJob(handleMosRundownReadyToAir),
+	[IngestJobs.MosFullStory]: wrapMosIngestJob(handleMosFullStory),
+	[IngestJobs.MosDeleteStory]: wrapMosIngestJob(handleMosDeleteStory),
+	[IngestJobs.MosInsertStory]: wrapMosIngestJob(handleMosInsertStories),
+	[IngestJobs.MosMoveStory]: wrapMosIngestJob(handleMosMoveStories),
+	[IngestJobs.MosSwapStory]: wrapMosIngestJob(handleMosSwapStories),
 
 	[IngestJobs.ExpectedPackagesRegenerate]: handleExpectedPackagesRegenerate,
 	[IngestJobs.PackageInfosUpdatedRundown]: handleUpdatedPackageInfoForRundown,
 
 	[IngestJobs.UserRemoveRundown]: handleUserRemoveRundown,
 	[IngestJobs.UserUnsyncRundown]: handleUserUnsyncRundown,
+	[IngestJobs.UserExecuteChangeOperation]: handleUserExecuteChangeOperation,
 
 	[IngestJobs.BucketItemImport]: handleBucketItemImport,
 	[IngestJobs.BucketItemRegenerate]: handleBucketItemRegenerate,
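The wrap* helpers come from '../../ingest/jobWrappers', which this diff does not show. Conceptually they are higher-order functions giving every table entry a uniform shape; a hypothetical sketch of the pattern only, not the real implementation:

// Illustration of the higher-order wrapping used above (names and body are assumptions)
function wrapHandlerSketch<TData>(
	handler: (context: JobContext, data: TData) => Promise<unknown>
): (context: JobContext, data: TData) => Promise<unknown> {
	return async (context, data) => {
		// A real wrapper would load the shared ingest model, run the handler
		// against it, and commit the resulting changes afterwards
		return handler(context, data)
	}
}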
diff --git a/packages/job-worker/src/workers/studio/child.ts b/packages/job-worker/src/workers/studio/child.ts
index d582e03e80..40903527c6 100644
--- a/packages/job-worker/src/workers/studio/child.ts
+++ b/packages/job-worker/src/workers/studio/child.ts
@@ -5,7 +5,8 @@ import { createMongoConnection, getMongoCollections, IDirectCollections } from '../../db'
 import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import { setupApmAgent, startTransaction } from '../../profiler'
 import { InvalidateWorkerDataCache, invalidateWorkerDataCache, loadWorkerDataCache, WorkerDataCache } from '../caches'
-import { QueueJobFunc, JobContextImpl } from '../context'
+import { JobContextImpl } from '../context/JobContextImpl'
+import { QueueJobFunc } from '../context/util'
 import { AnyLockEvent, LocksManager } from '../locks'
 import { FastTrackTimelineFunc, LogLineWithSourceFunc } from '../../main'
 import { interceptLogging, logger } from '../../logging'
diff --git a/packages/job-worker/src/workers/studio/jobs.ts b/packages/job-worker/src/workers/studio/jobs.ts
index a90a65d233..4cc371d5df 100644
--- a/packages/job-worker/src/workers/studio/jobs.ts
+++ b/packages/job-worker/src/workers/studio/jobs.ts
@@ -44,9 +44,10 @@ import {
 import { handleTimelineTriggerTime, handleOnPlayoutPlaybackChanged } from '../../playout/timings'
 import { handleExecuteAdlibAction } from '../../playout/adlibAction'
 import { handleTakeNextPart } from '../../playout/take'
-import { handleSetQuickLoopMarker } from '../../playout/quickLoopMarkers'
+import { handleClearQuickLoopMarkers, handleSetQuickLoopMarker } from '../../playout/quickLoopMarkers'
 import { handleActivateAdlibTesting } from '../../playout/adlibTesting'
 import { handleExecuteBucketAdLibOrAction } from '../../playout/bucketAdlibJobs'
+import { handleSwitchRouteSet } from '../../studio/routeSet'
 
 type ExecutableFunction<T extends keyof StudioJobFunc> = (
 	context: JobContext,
@@ -106,4 +107,7 @@ export const studioJobHandlers: StudioJobHandlers = {
 	[StudioJobs.ActivateAdlibTesting]: handleActivateAdlibTesting,
 
 	[StudioJobs.SetQuickLoopMarker]: handleSetQuickLoopMarker,
+	[StudioJobs.ClearQuickLoopMarkers]: handleClearQuickLoopMarkers,
+
+	[StudioJobs.SwitchRouteSet]: handleSwitchRouteSet,
 }
diff --git a/packages/live-status-gateway/Dockerfile b/packages/live-status-gateway/Dockerfile
index c9b3548b45..bf5043af45 100644
--- a/packages/live-status-gateway/Dockerfile
+++ b/packages/live-status-gateway/Dockerfile
@@ -27,4 +27,4 @@ COPY --from=0 /opt/shared-lib /opt/shared-lib
 COPY --from=0 /opt/corelib /opt/corelib
 WORKDIR /opt/live-status-gateway
 
-CMD ["yarn", "start"]
+CMD ["node", "dist/index.js"]
diff --git a/packages/live-status-gateway/Dockerfile.circle b/packages/live-status-gateway/Dockerfile.circle
index 4c01a0391e..974fead09d 100644
--- a/packages/live-status-gateway/Dockerfile.circle
+++ b/packages/live-status-gateway/Dockerfile.circle
@@ -10,4 +10,4 @@ COPY shared-lib /opt/shared-lib
 COPY corelib /opt/corelib
 WORKDIR /opt/live-status-gateway
 
-CMD ["yarn", "start"]
+CMD ["node", "dist/index.js"]
"dist/index.js"] diff --git a/packages/live-status-gateway/api/schemas/activePlaylist.yaml b/packages/live-status-gateway/api/schemas/activePlaylist.yaml index 5b373bfcc3..2624e4e684 100644 --- a/packages/live-status-gateway/api/schemas/activePlaylist.yaml +++ b/packages/live-status-gateway/api/schemas/activePlaylist.yaml @@ -29,7 +29,49 @@ $defs: $ref: '#/$defs/part' publicData: description: Optional arbitrary data - required: [event, id, name, rundownIds, currentPart, currentSegment, nextPart] + timing: + description: Timing information about the active playlist + type: object + properties: + timingMode: + description: 'Timing mode for the playlist.' + type: string + enum: + - none + - forward-time + - back-time + startedPlayback: + description: Unix timestamp of when the playlist started (milliseconds) + type: number + expectedStart: + description: Unix timestamp of when the playlist is expected to start (milliseconds). Required when the timingMode is set to forward-time. + type: number + expectedDurationMs: + description: Duration of the playlist in ms + type: number + expectedEnd: + description: Unix timestamp of when the playlist is expected to end (milliseconds) Required when the timingMode is set to back-time. + type: number + required: [timingMode] + additionalProperties: false + quickLoop: + description: Information about the current quickLoop, if any + type: object + properties: + locked: + description: Whether the user is allowed to make alterations to the Start/End markers + type: boolean + running: + description: Whether the loop has two valid markers and is currently running + type: boolean + start: + description: The start of the loop + $ref: '#/$defs/quickLoopMarker' + end: + description: The end of the loop + $ref: '#/$defs/quickLoopMarker' + required: [locked, running] + required: [event, id, name, rundownIds, currentPart, currentSegment, nextPart, timing] additionalProperties: false examples: - event: activePlaylist @@ -44,6 +86,10 @@ $defs: $ref: '#/$defs/part/examples/0' publicData: category: 'Evening News' + timing: + timingMode: 'forward-time' + expectedStart: 1728895750727 + expectedDurationMs: 180000 partBase: type: object properties: @@ -143,6 +189,12 @@ $defs: projectedEndTime: description: Unix timestamp of when the segment is projected to end (milliseconds). The time this segment started, offset by its budget duration, if the segment has a defined budget duration. Otherwise, the time the current part started, offset by the difference between expected durations of all parts in this segment and the as-played durations of the parts that already stopped. type: number + countdownType: + description: 'Countdown type within the segment. Default: `part_expected_duration`' + type: string + enum: + - part_expected_duration + - segment_budget_duration required: [expectedDurationMs, projectedEndTime] required: [id, timing] additionalProperties: false @@ -152,6 +204,7 @@ $defs: expectedDurationMs: 15000 budgetDurationMs: 20000 projectedEndTime: 1600000075000 + countdownType: segment_budget_duration piece: type: object properties: @@ -184,3 +237,24 @@ $defs: tags: ['camera'] publicData: switcherSource: 1 + quickLoopMarker: + type: object + properties: + markerType: + description: The type of entity the marker is locked to + type: string + enum: + - playlist + - rundown + - segment + - part + rundownId: + description: The rundown that this marker references. 
diff --git a/packages/live-status-gateway/api/schemas/adLibs.yaml b/packages/live-status-gateway/api/schemas/adLibs.yaml
index b69918ae1f..97b90dcf47 100644
--- a/packages/live-status-gateway/api/schemas/adLibs.yaml
+++ b/packages/live-status-gateway/api/schemas/adLibs.yaml
@@ -92,6 +92,9 @@ $defs:
        type: string
      publicData:
        description: Optional arbitrary data
+     optionsSchema:
+       description: JSON schema definition of the adLib properties that can be modified using the adLibOptions property in executeAdLib
+       type: string
    required: [id, name, sourceLayer, actionType]
    examples:
      - id: 'C6K_yIMuGFUk8X_L9A9_jRT6aq4_'
@@ -103,3 +106,4 @@ $defs:
        tags: ['music_video']
        publicData:
          fileName: MV000123.mxf
+       optionsSchema: '{"$schema":"https://json-schema.org/draft/2020-12/schema","title":"Play Video Clip","type":"object","properties":{"type":"adlib_action_video_clip","label":{"type":"string"},"clipId":{"type":"string"},"vo":{"type":"boolean"},"target":{"$schema":"https://json-schema.org/draft/2020-12/schema","title":"Object Id","description":"Id of an object sent to Sofie","type":"string"},"duration":{"type":"number","exclusiveMinimum":0},"takeType":{"$schema":"https://json-schema.org/draft/2020-12/schema","title":"AdLib Action Take Type","type":"string","enum":["take_immediate","queue"]},"transition":{"$schema":"https://json-schema.org/draft/2020-12/schema","title":"AdLib Action Transition Type","oneOf":[{"type":"object","properties":{"type":"cut"},"required":["type"],"additionalProperties":false},{"type":"object","properties":{"type":"mix","duration":{"type":"number","exclusiveMinimum":0,"description":"Duration in ms"}},"required":["type","duration"],"additionalProperties":false},{"type":"object","properties":{"type":"wipe","duration":{"type":"number","exclusiveMinimum":0,"description":"Duration in ms"},"patternId":{"type":"string","description":"Type of wipe to use"}},"required":["type","duration","patternId"],"additionalProperties":false},{"type":"object","properties":{"type":"macro","macroId":{"type":"string","description":"Macro template to recall"}},"required":["type","macroId"],"additionalProperties":false}]}},"required":["type","clipId","vo","target"],"additionalProperties":false}'
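Note that optionsSchema is transported as a JSON string rather than an embedded object, so consumers have to parse it before feeding it to a form generator or validator. A one-line sketch, with adLib assumed to be an entry from the adLibs topic:

const schema = adLib.optionsSchema ? JSON.parse(adLib.optionsSchema) : undefined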
diff --git a/packages/live-status-gateway/api/schemas/segments.yaml b/packages/live-status-gateway/api/schemas/segments.yaml
index 29180418ea..ca87639614 100644
--- a/packages/live-status-gateway/api/schemas/segments.yaml
+++ b/packages/live-status-gateway/api/schemas/segments.yaml
@@ -48,6 +48,12 @@ $defs:
        budgetDurationMs:
          description: Budget duration of the segment (milliseconds)
          type: number
+       countdownType:
+         description: 'Countdown type within the segment. Default: `part_expected_duration`'
+         type: string
+         enum:
+           - part_expected_duration
+           - segment_budget_duration
      required: [expectedDurationMs]
      publicData:
        description: Optional arbitrary data
@@ -60,5 +66,6 @@ $defs:
      timing:
        expectedDurationMs: 15000
        budgetDurationMs: 20000
+       countdownType: segment_budget_duration
      publicData:
        containsLiveSource: true
diff --git a/packages/live-status-gateway/src/collections/segmentHandler.ts b/packages/live-status-gateway/src/collections/segmentHandler.ts
index da4c85bcff..830af41e0b 100644
--- a/packages/live-status-gateway/src/collections/segmentHandler.ts
+++ b/packages/live-status-gateway/src/collections/segmentHandler.ts
@@ -31,14 +31,19 @@ export class SegmentHandler
 		if (!collection) throw new Error(`collection '${this._collectionName}' not found!`)
 		const allSegments = collection.find(undefined)
 		await this._segmentsHandler.setSegments(allSegments)
-		if (this._currentSegmentId) {
-			this._collectionData = collection.findOne(this._currentSegmentId)
+		await this.updateAndNotify()
+	}
+
+	private async updateAndNotify() {
+		const collection = this._core.getCollection(this._collectionName)
+		const newData = this._currentSegmentId ? collection.findOne(this._currentSegmentId) : undefined
+		if (this._collectionData !== newData) {
+			this._collectionData = newData
 			await this.notify(this._collectionData)
 		}
 	}
 
 	async update(source: string, data: SelectedPartInstances | DBRundownPlaylist | undefined): Promise<void> {
-		const previousSegmentId = this._currentSegmentId
 		const previousRundownIds = this._rundownIds
 
 		switch (source) {
@@ -91,11 +96,6 @@ export class SegmentHandler
 			const allSegments = collection.find(undefined)
 			await this._segmentsHandler.setSegments(allSegments)
 		}
-		if (previousSegmentId !== this._currentSegmentId) {
-			if (this._currentSegmentId) {
-				this._collectionData = collection.findOne(this._currentSegmentId)
-				await this.notify(this._collectionData)
-			}
-		}
+		await this.updateAndNotify()
 	}
 }
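The refactored updateAndNotify() compares object references from the reactive collection, so subscribers are now also notified when the selected segment becomes undefined, a case the removed branch skipped. Condensed, the guard behaves like this sketch:

const newData = currentSegmentId ? collection.findOne(currentSegmentId) : undefined
if (previousData !== newData) await notify(newData) // fires on change and on clearing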
diff --git a/packages/live-status-gateway/src/liveStatusServer.ts b/packages/live-status-gateway/src/liveStatusServer.ts
index 70590e22b0..90bd64c471 100644
--- a/packages/live-status-gateway/src/liveStatusServer.ts
+++ b/packages/live-status-gateway/src/liveStatusServer.ts
@@ -108,6 +108,8 @@ export class LiveStatusServer {
 		await partInstancesHandler.subscribe(activePlaylistTopic)
 		await partsHandler.subscribe(activePlaylistTopic)
 		await pieceInstancesHandler.subscribe(activePlaylistTopic)
+		await segmentHandler.subscribe(activePlaylistTopic)
+		await segmentsHandler.subscribe(activePlaylistTopic)
 
 		await playlistHandler.subscribe(activePiecesTopic)
 		await showStyleBaseHandler.subscribe(activePiecesTopic)
diff --git a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts
index 2c31b1cf3b..b9b1af95b3 100644
--- a/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts
+++ b/packages/live-status-gateway/src/topics/__tests__/activePlaylist.spec.ts
@@ -9,6 +9,10 @@ import { literal } from '@sofie-automation/corelib/dist/lib'
 import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'
 import { PartsHandler } from '../../collections/partsHandler'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
+import { SegmentHandler } from '../../collections/segmentHandler'
+import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
+import { CountdownType } from '@sofie-automation/blueprints-integration'
+import { PlaylistTimingType } from '@sofie-automation/blueprints-integration'
 
 function makeEmptyTestPartInstances(): SelectedPartInstances {
 	return {
@@ -46,6 +50,10 @@
 		currentSegment: null,
 		rundownIds: unprotectStringArray(playlist.rundownIdsInOrder),
 		publicData: undefined,
+		timing: {
+			timingMode: PlaylistTimingType.None,
+		},
+		quickLoop: undefined,
 	}
 
 	// eslint-disable-next-line @typescript-eslint/unbound-method
@@ -73,10 +81,12 @@
 		const testShowStyleBase = makeTestShowStyleBase()
 		await topic.update(ShowStyleBaseHandler.name, testShowStyleBase as ShowStyleBaseExt)
+
+		const segment1id = protectString('SEGMENT_1')
 		const part1: Partial<DBPart> = {
 			_id: protectString('PART_1'),
 			title: 'Test Part',
-			segmentId: protectString('SEGMENT_1'),
+			segmentId: segment1id,
 			expectedDurationWithTransition: 10000,
 			expectedDuration: 10000,
 			publicData: { b: 'c' },
@@ -86,6 +96,7 @@
 				_id: currentPartInstanceId,
 				part: part1,
 				timings: { plannedStartedPlayback: 1600000060000 },
+				segmentId: segment1id,
 			},
 			firstInSegmentPlayout: {},
 			inCurrentSegment: [
@@ -100,6 +111,10 @@
 
 		await topic.update(PartsHandler.name, [part1] as DBPart[])
 
+		await topic.update(SegmentHandler.name, {
+			_id: segment1id,
+		} as DBSegment)
+
 		topic.addSubscriber(mockSubscriber)
 
 		const expectedStatus: ActivePlaylistStatus = {
@@ -125,6 +140,104 @@
 			},
 			rundownIds: unprotectStringArray(playlist.rundownIdsInOrder),
 			publicData: { a: 'b' },
+			timing: {
+				timingMode: PlaylistTimingType.None,
+			},
+			quickLoop: undefined,
 		}
 
 		// eslint-disable-next-line @typescript-eslint/unbound-method
 		expect(mockSubscriber.send).toHaveBeenCalledTimes(1)
 		expect(JSON.parse(mockSubscriber.send.mock.calls[0][0] as string)).toMatchObject(
 			JSON.parse(JSON.stringify(expectedStatus))
 		)
 	})
+
+	it('provides segment and part with segment timing', async () => {
+		const topic = new ActivePlaylistTopic(makeMockLogger())
+		const mockSubscriber = makeMockSubscriber()
+
+		const currentPartInstanceId = 'CURRENT_PART_INSTANCE_ID'
+
+		const playlist = makeTestPlaylist()
+		playlist.activationId = protectString('somethingRandom')
+		playlist.currentPartInfo = {
+			consumesQueuedSegmentId: false,
+			manuallySelected: false,
+			partInstanceId: protectString(currentPartInstanceId),
+			rundownId: playlist.rundownIdsInOrder[0],
+		}
+		await topic.update(PlaylistHandler.name, playlist)
+
+		const testShowStyleBase = makeTestShowStyleBase()
+		await topic.update(ShowStyleBaseHandler.name, testShowStyleBase as ShowStyleBaseExt)
+
+		const segment1id = protectString('SEGMENT_1')
+		const part1: Partial<DBPart> = {
+			_id: protectString('PART_1'),
+			title: 'Test Part',
+			segmentId: protectString('SEGMENT_1'),
+			expectedDurationWithTransition: 10000,
+			expectedDuration: 10000,
+			publicData: { b: 'c' },
+		}
+		const currentPartInstance = {
+			_id: currentPartInstanceId,
+			part: part1,
+			timings: { plannedStartedPlayback: 1600000060000 },
+			segmentId: segment1id,
+		}
+		const testPartInstances: PartialDeep<SelectedPartInstances> = {
+			current: currentPartInstance,
+			firstInSegmentPlayout: currentPartInstance,
+			inCurrentSegment: [
+				literal<PartialDeep<DBPartInstance>>({
+					_id: protectString(currentPartInstanceId),
+					part: part1,
+					timings: { plannedStartedPlayback: 1600000060000 },
+				}),
+			] as DBPartInstance[],
+		}
+		await topic.update(PartInstancesHandler.name, testPartInstances as SelectedPartInstances)
+
+		await topic.update(PartsHandler.name, [part1] as DBPart[])
+
+		await topic.update(SegmentHandler.name, {
+			_id: segment1id,
+			segmentTiming: { budgetDuration: 12300, countdownType: CountdownType.SEGMENT_BUDGET_DURATION },
+		} as DBSegment)
+
+		topic.addSubscriber(mockSubscriber)
+
+		const expectedStatus: ActivePlaylistStatus = {
+			event: 'activePlaylist',
+			name: playlist.name,
+			id: unprotectString(playlist._id),
+			currentPart: {
+				id: 'PART_1',
+				name: 'Test Part',
+				segmentId: 'SEGMENT_1',
+				timing: { startTime: 1600000060000, expectedDurationMs: 10000, projectedEndTime: 1600000070000 },
+				pieces: [],
+				autoNext: undefined,
+				publicData: { b: 'c' },
+			},
+			nextPart: null,
+			currentSegment: {
+				id: 'SEGMENT_1',
+				timing: {
+					expectedDurationMs: 10000,
+					budgetDurationMs: 12300,
+					projectedEndTime: 1600000072300,
+					countdownType: 'segment_budget_duration',
+				},
+			},
+			rundownIds: unprotectStringArray(playlist.rundownIdsInOrder),
+			publicData: { a: 'b' },
+			timing: {
+				timingMode: PlaylistTimingType.None,
+			},
+			quickLoop: undefined,
+		}
+
+		// eslint-disable-next-line @typescript-eslint/unbound-method
+		expect(mockSubscriber.send).toHaveBeenCalledTimes(1)
+		expect(JSON.parse(mockSubscriber.send.mock.calls[0][0] as string)).toMatchObject(
+			JSON.parse(JSON.stringify(expectedStatus))
+		)
+	})
diff --git a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts
index 9fe1029f8b..c26f1f1763 100644
--- a/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts
+++ b/packages/live-status-gateway/src/topics/__tests__/segmentsTopic.spec.ts
@@ -11,14 +11,14 @@ const RUNDOWN_1_ID = 'RUNDOWN_1'
 const RUNDOWN_2_ID = 'RUNDOWN_2'
 const THROTTLE_PERIOD_MS = 205
 
-function makeTestSegment(id: string, rank: number, rundownId: string): DBSegment {
+function makeTestSegment(id: string, rank: number, rundownId: string, segmentProps?: Partial<DBSegment>): DBSegment {
 	return {
 		_id: protectString(id),
 		externalId: `NCS_SEGMENT_${id}`,
 		name: `Segment ${id}`,
 		_rank: rank,
 		rundownId: protectString(rundownId),
-		externalModified: 1695799420147,
+		...segmentProps,
 	}
 }
 
@@ -27,7 +27,7 @@ function makeTestPart(
 	rank: number,
 	rundownId: string,
 	segmentId: string,
-	partProps: Partial<DBPart>
+	partProps?: Partial<DBPart>
 ): DBPart {
 	return {
 		_id: protectString(id),
@@ -260,33 +260,19 @@ describe('SegmentsTopic', () => {
 			const segment_2_2_id = '2_2'
 			await topic.update(SegmentsHandler.name, [
 				makeTestSegment('2_1', 1, RUNDOWN_2_ID),
-				makeTestSegment(segment_2_2_id, 2, RUNDOWN_2_ID),
-				makeTestSegment(segment_1_2_id, 2, RUNDOWN_1_ID),
-				makeTestSegment(segment_1_1_id, 1, RUNDOWN_1_ID),
+				makeTestSegment(segment_2_2_id, 2, RUNDOWN_2_ID, { segmentTiming: { budgetDuration: 51000 } }),
+				makeTestSegment(segment_1_2_id, 2, RUNDOWN_1_ID, { segmentTiming: { budgetDuration: 15000 } }),
+				makeTestSegment(segment_1_1_id, 1, RUNDOWN_1_ID, { segmentTiming: { budgetDuration: 5000 } }),
 			])
 			mockSubscriber.send.mockClear()
 			await topic.update(PartsHandler.name, [
-				makeTestPart('1_2_1', 1, RUNDOWN_1_ID, segment_1_2_id, {
-					budgetDuration: 10000,
-				}),
-				makeTestPart('2_2_1', 1, RUNDOWN_1_ID, segment_2_2_id, {
-					budgetDuration: 40000,
-				}),
-				makeTestPart('1_2_2', 2, RUNDOWN_1_ID, segment_1_2_id, {
-					budgetDuration: 5000,
-				}),
-				makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id, {
-					budgetDuration: 1000,
-				}),
-				makeTestPart('1_1_1', 1, RUNDOWN_1_ID, segment_1_1_id, {
-					budgetDuration: 3000,
-				}),
-				makeTestPart('2_2_2', 2, RUNDOWN_1_ID, segment_2_2_id, {
-					budgetDuration: 11000,
-				}),
-				makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id, {
-					budgetDuration: 1000,
-				}),
+				makeTestPart('1_2_1', 1, RUNDOWN_1_ID, segment_1_2_id),
+				makeTestPart('2_2_1', 1, RUNDOWN_1_ID, segment_2_2_id),
+				makeTestPart('1_2_2', 2, RUNDOWN_1_ID, segment_1_2_id),
+				makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id),
+				makeTestPart('1_1_1', 1, RUNDOWN_1_ID, segment_1_1_id),
+				makeTestPart('2_2_2', 2, RUNDOWN_1_ID, segment_2_2_id),
+				makeTestPart('1_1_2', 2, RUNDOWN_1_ID, segment_1_1_id),
 			])
 
 			jest.advanceTimersByTime(THROTTLE_PERIOD_MS)
diff --git a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts
index a9a9bb5d2d..b557a4e1d9 100644
--- a/packages/live-status-gateway/src/topics/activePlaylistTopic.ts
+++ b/packages/live-status-gateway/src/topics/activePlaylistTopic.ts
@@ -1,9 +1,13 @@
 import { Logger } from 'winston'
 import { WebSocket } from 'ws'
 import { unprotectString } from '@sofie-automation/shared-lib/dist/lib/protectedString'
-import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+import {
+	DBRundownPlaylist,
+	QuickLoopMarker,
+	QuickLoopMarkerType,
+} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
 import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'
-import { literal } from '@sofie-automation/shared-lib/dist/lib/lib'
+import { assertNever, literal } from '@sofie-automation/shared-lib/dist/lib/lib'
 import { WebSocketTopicBase, WebSocketTopic, CollectionObserver } from '../wsHandler'
 import { SelectedPartInstances, PartInstancesHandler } from '../collections/partInstancesHandler'
 import { PlaylistHandler } from '../collections/playlistHandler'
@@ -15,6 +19,11 @@ import _ = require('underscore')
 import { PartTiming, calculateCurrentPartTiming } from './helpers/partTiming'
 import { SelectedPieceInstances, PieceInstancesHandler, PieceInstanceMin } from '../collections/pieceInstancesHandler'
 import { PieceStatus, toPieceStatus } from './helpers/pieceStatus'
+import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
+import { SegmentHandler } from '../collections/segmentHandler'
+import { PlaylistTimingType } from '@sofie-automation/blueprints-integration'
+import { SegmentsHandler } from '../collections/segmentsHandler'
+import { normalizeArray } from '@sofie-automation/corelib/dist/lib'
 
 const THROTTLE_PERIOD_MS = 100
 
@@ -36,6 +45,20 @@ interface CurrentSegmentStatus {
 	timing: CurrentSegmentTiming
 }
 
+interface ActivePlaylistQuickLoopMarker {
+	type: 'playlist' | 'rundown' | 'segment' | 'part'
+	rundownId: string | undefined
+	segmentId: string | undefined
+	partId: string | undefined
+}
+
+interface ActivePlaylistQuickLoopStatus {
+	locked: boolean
+	running: boolean
+	start: ActivePlaylistQuickLoopMarker | undefined
+	end: ActivePlaylistQuickLoopMarker | undefined
+}
+
 export interface ActivePlaylistStatus {
 	event: string
 	id: string | null
@@ -44,7 +67,15 @@ export interface ActivePlaylistStatus {
 	currentPart: CurrentPartStatus | null
 	currentSegment: CurrentSegmentStatus | null
 	nextPart: PartStatus | null
+	quickLoop: ActivePlaylistQuickLoopStatus | undefined
 	publicData: unknown
+	timing: {
+		timingMode: PlaylistTimingType
+		startedPlayback?: number
+		expectedStart?: number
+		expectedDurationMs?: number
+		expectedEnd?: number
+	}
 }
 
 export class ActivePlaylistTopic
@@ -55,7 +86,8 @@ export class ActivePlaylistTopic
 		CollectionObserver<ShowStyleBaseExt>,
 		CollectionObserver<SelectedPartInstances>,
 		CollectionObserver<DBPart[]>,
-		CollectionObserver<SelectedPieceInstances>
+		CollectionObserver<SelectedPieceInstances>,
+		CollectionObserver<DBSegment>
 {
 	public observerName = ActivePlaylistTopic.name
 	private _activePlaylist: DBRundownPlaylist | undefined
 	private _firstInstanceInSegmentPlayout: DBPartInstance | undefined
 	private _partInstancesInCurrentSegment: DBPartInstance[] = []
 	private _partsBySegmentId: Record<string, DBPart[]> = {}
+	private _partsById: Record<string, DBPart> = {}
+	private _segmentsById: Record<string, DBSegment> = {}
 	private _pieceInstancesInCurrentPartInstance: PieceInstanceMin[] | undefined
 	private _pieceInstancesInNextPartInstance: PieceInstanceMin[] | undefined
 	private _showStyleBaseExt: ShowStyleBaseExt | undefined
+	private _currentSegment: DBSegment | undefined
 	private throttledSendStatusToAll: () => void
 
 	constructor(logger: Logger) {
@@ -116,10 +151,11 @@
 				  })
 				: null,
 			currentSegment:
-				this._currentPartInstance && currentPart
+				this._currentPartInstance && currentPart && this._currentSegment
 					? literal<CurrentSegmentStatus>({
 							id: unprotectString(currentPart.segmentId),
 							timing: calculateCurrentSegmentTiming(
+								this._currentSegment,
 								this._currentPartInstance,
 								this._firstInstanceInSegmentPlayout,
 								this._partInstancesInCurrentSegment,
@@ -140,7 +176,21 @@
 							publicData: nextPart.publicData,
 					  })
 					: null,
+			quickLoop: this.transformQuickLoopStatus(),
 			publicData: this._activePlaylist.publicData,
+			timing: {
+				timingMode: this._activePlaylist.timing.type,
+				startedPlayback: this._activePlaylist.startedPlayback,
+				expectedDurationMs: this._activePlaylist.timing.expectedDuration,
+				expectedStart:
+					this._activePlaylist.timing.type !== PlaylistTimingType.None
+						? this._activePlaylist.timing.expectedStart
+						: undefined,
+				expectedEnd:
+					this._activePlaylist.timing.type !== PlaylistTimingType.None
+						? this._activePlaylist.timing.expectedEnd
+						: undefined,
+			},
 		  })
 		: literal<ActivePlaylistStatus>({
 				event: 'activePlaylist',
@@ -150,15 +200,80 @@
 				currentPart: null,
 				currentSegment: null,
 				nextPart: null,
+				quickLoop: undefined,
 				publicData: undefined,
+				timing: {
+					timingMode: PlaylistTimingType.None,
+				},
 		  })
 
 		this.sendMessage(subscribers, message)
 	}
 
+	private transformQuickLoopStatus(): ActivePlaylistQuickLoopStatus | undefined {
+		if (!this._activePlaylist) return
+
+		const quickLoopProps = this._activePlaylist.quickLoop
+		if (!quickLoopProps) return undefined
+
+		return {
+			locked: quickLoopProps.locked,
+			running: quickLoopProps.running,
+			start: this.transformQuickLoopMarkerStatus(quickLoopProps.start),
+			end: this.transformQuickLoopMarkerStatus(quickLoopProps.end),
+		}
+	}
+
+	private transformQuickLoopMarkerStatus(
+		marker: QuickLoopMarker | undefined
+	): ActivePlaylistQuickLoopMarker | undefined {
+		if (!marker) return undefined
+
+		switch (marker.type) {
+			case QuickLoopMarkerType.PLAYLIST:
+				return {
+					type: 'playlist',
+					rundownId: undefined,
+					segmentId: undefined,
+					partId: undefined,
+				}
+			case QuickLoopMarkerType.RUNDOWN:
+				return {
+					type: 'rundown',
+					rundownId: unprotectString(marker.id),
+					segmentId: undefined,
+					partId: undefined,
+				}
+			case QuickLoopMarkerType.SEGMENT: {
+				const segment = this._segmentsById[unprotectString(marker.id)]
+
+				return {
+					type: 'segment',
+					rundownId: unprotectString(segment?.rundownId),
+					segmentId: unprotectString(marker.id),
+					partId: undefined,
+				}
+			}
+			case QuickLoopMarkerType.PART: {
+				const part = this._partsById[unprotectString(marker.id)]
+
+				return {
+					type: 'part',
+					rundownId: unprotectString(part?.rundownId),
+					segmentId: unprotectString(part?.segmentId),
+					partId: unprotectString(marker.id),
+				}
+			}
+			default:
+				assertNever(marker)
+				return undefined
+		}
+	}
+
 	private isDataInconsistent() {
 		return (
 			this._currentPartInstance?._id !== this._activePlaylist?.currentPartInfo?.partInstanceId ||
+			this._currentPartInstance?.segmentId !== this._currentSegment?._id ||
 			this._nextPartInstance?._id !== this._activePlaylist?.nextPartInfo?.partInstanceId ||
 			(this._pieceInstancesInCurrentPartInstance?.[0] &&
 				this._pieceInstancesInCurrentPartInstance?.[0].partInstanceId !== this._currentPartInstance?._id) ||
@@ -175,6 +290,8 @@
 			| SelectedPartInstances
 			| DBPart[]
 			| SelectedPieceInstances
+			| DBSegment
+			| DBSegment[]
 			| undefined
 	): Promise<void> {
 		let hasAnythingChanged = false
@@ -212,6 +329,7 @@
 				break
 			}
 			case PartsHandler.name: {
+				this._partsById = normalizeArray(data as DBPart[], '_id')
 				this._partsBySegmentId = _.groupBy(data as DBPart[], 'segmentId')
 				this.logUpdateReceived('parts', source)
 				hasAnythingChanged = true // TODO: can this be smarter?
@@ -230,6 +348,18 @@
 				this._pieceInstancesInNextPartInstance = pieceInstances.nextPartInstance
 				break
 			}
+			case SegmentHandler.name: {
+				this._currentSegment = data as DBSegment
+				this.logUpdateReceived('segment', source)
+				hasAnythingChanged = true
+				break
+			}
+			case SegmentsHandler.name: {
+				this._segmentsById = normalizeArray(data as DBSegment[], '_id')
+				this.logUpdateReceived('segments', source)
+				hasAnythingChanged = true // TODO: can this be smarter?
+				break
+			}
 			default:
 				throw new Error(`${this._name} received unsupported update from ${source}}`)
 		}
diff --git a/packages/live-status-gateway/src/topics/adLibsTopic.ts b/packages/live-status-gateway/src/topics/adLibsTopic.ts
index e5de8fd7d4..f3f07b8557 100644
--- a/packages/live-status-gateway/src/topics/adLibsTopic.ts
+++ b/packages/live-status-gateway/src/topics/adLibsTopic.ts
@@ -54,6 +54,7 @@ interface AdLibStatusBase {
 	actionType: AdLibActionType[]
 	tags?: string[]
 	publicData: unknown
+	optionsSchema?: any
 }
 
 export class AdLibsTopic
@@ -125,6 +126,7 @@ export class AdLibsTopic
 						actionType: triggerModes,
 						tags: action.display.tags,
 						publicData: action.publicData,
+						optionsSchema: action.userDataManifest.optionsSchema,
 					},
 					id: unprotectString(action._id),
 					label: name,
@@ -193,6 +195,7 @@ export class AdLibsTopic
 						actionType: triggerModes,
 						tags: action.display.tags,
 						publicData: action.publicData,
+						optionsSchema: action.userDataManifest.optionsSchema,
 					},
 					id: unprotectString(action._id),
 					label: name,
diff --git a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts
index bf6c58c0eb..693dff9555 100644
--- a/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts
+++ b/packages/live-status-gateway/src/topics/helpers/segmentTiming.ts
@@ -1,9 +1,11 @@
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'
+import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
 
 export interface SegmentTiming {
 	budgetDurationMs?: number
 	expectedDurationMs: number
+	countdownType?: 'part_expected_duration' | 'segment_budget_duration'
 }
 
 export interface CurrentSegmentTiming extends SegmentTiming {
@@ -11,12 +13,13 @@ export interface CurrentSegmentTiming extends SegmentTiming {
 }
 
 export function calculateCurrentSegmentTiming(
+	segment: DBSegment,
 	currentPartInstance: DBPartInstance,
 	firstInstanceInSegmentPlayout: DBPartInstance | undefined,
 	segmentPartInstances: DBPartInstance[],
 	segmentParts: DBPart[]
 ): CurrentSegmentTiming {
-	const segmentTiming = calculateSegmentTiming(segmentParts)
+	const segmentTiming = calculateSegmentTiming(segment, segmentParts)
 	const playedDurations = segmentPartInstances.reduce((sum, partInstance) => {
 		return (partInstance.timings?.duration ?? 0) + sum
 	}, 0)
@@ -29,22 +32,21 @@ export function calculateCurrentSegmentTiming(
 	const projectedBudgetEndTime =
 		(firstInstanceInSegmentPlayout?.timings?.reportedStartedPlayback ??
 			firstInstanceInSegmentPlayout?.timings?.plannedStartedPlayback ??
-			0) + (segmentTiming.budgetDurationMs ?? 0)
+			Date.now()) + (segmentTiming.budgetDurationMs ?? 0)
 	return {
 		...segmentTiming,
 		projectedEndTime: segmentTiming.budgetDurationMs != null ? projectedBudgetEndTime : projectedEndTime,
 	}
 }
 
-export function calculateSegmentTiming(segmentParts: DBPart[]): SegmentTiming {
+export function calculateSegmentTiming(segment: DBSegment, segmentParts: DBPart[]): SegmentTiming {
 	return {
-		budgetDurationMs: segmentParts.reduce((sum, part): number | undefined => {
-			return part.budgetDuration != null && !part.untimed ? (sum ?? 0) + part.budgetDuration : sum
-		}, undefined),
+		budgetDurationMs: segment.segmentTiming?.budgetDuration,
 		expectedDurationMs: segmentParts.reduce((sum, part): number => {
 			return part.expectedDurationWithTransition != null && !part.untimed
 				? sum + part.expectedDurationWithTransition
 				: sum
 		}, 0),
+		countdownType: segment.segmentTiming?.countdownType,
 	}
 }
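A worked example consistent with the updated spec above: for a segment whose segmentTiming.budgetDuration is 12300 and whose first part instance started playback at 1600000060000, the projected end is 1600000060000 + 12300 = 1600000072300; when nothing has started playback yet, the budget projection now counts from Date.now() instead of the epoch. The surrounding variables are assumed to be in scope:

const timing = calculateCurrentSegmentTiming(
	segment, // DBSegment with segmentTiming: { budgetDuration: 12300, countdownType: CountdownType.SEGMENT_BUDGET_DURATION }
	currentPartInstance,
	firstInstanceInSegmentPlayout,
	segmentPartInstances,
	segmentParts
)
// timing.budgetDurationMs === 12300, timing.projectedEndTime === 1600000072300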
calculateSegmentTiming(segmentParts) + const segmentTiming = calculateSegmentTiming(segment, segmentParts) const playedDurations = segmentPartInstances.reduce((sum, partInstance) => { return (partInstance.timings?.duration ?? 0) + sum }, 0) @@ -29,22 +32,21 @@ export function calculateCurrentSegmentTiming( const projectedBudgetEndTime = (firstInstanceInSegmentPlayout?.timings?.reportedStartedPlayback ?? firstInstanceInSegmentPlayout?.timings?.plannedStartedPlayback ?? - 0) + (segmentTiming.budgetDurationMs ?? 0) + Date.now()) + (segmentTiming.budgetDurationMs ?? 0) return { ...segmentTiming, projectedEndTime: segmentTiming.budgetDurationMs != null ? projectedBudgetEndTime : projectedEndTime, } } -export function calculateSegmentTiming(segmentParts: DBPart[]): SegmentTiming { +export function calculateSegmentTiming(segment: DBSegment, segmentParts: DBPart[]): SegmentTiming { return { - budgetDurationMs: segmentParts.reduce((sum, part): number | undefined => { - return part.budgetDuration != null && !part.untimed ? (sum ?? 0) + part.budgetDuration : sum - }, undefined), + budgetDurationMs: segment.segmentTiming?.budgetDuration, expectedDurationMs: segmentParts.reduce((sum, part): number => { return part.expectedDurationWithTransition != null && !part.untimed ? sum + part.expectedDurationWithTransition : sum }, 0), + countdownType: segment.segmentTiming?.countdownType, } } diff --git a/packages/live-status-gateway/src/topics/segmentsTopic.ts b/packages/live-status-gateway/src/topics/segmentsTopic.ts index a03a81c72e..3f9e17247a 100644 --- a/packages/live-status-gateway/src/topics/segmentsTopic.ts +++ b/packages/live-status-gateway/src/topics/segmentsTopic.ts @@ -68,7 +68,7 @@ export class SegmentsTopic id: segmentId, rundownId: unprotectString(segment.rundownId), name: segment.name, - timing: calculateSegmentTiming(this._partsBySegment[segmentId] ?? []), + timing: calculateSegmentTiming(segment, this._partsBySegment[segmentId] ?? []), identifier: segment.identifier, publicData: segment.publicData, } diff --git a/packages/meteor-lib/src/KeyboardLayout.ts b/packages/meteor-lib/src/KeyboardLayout.ts deleted file mode 100644 index 14644d819b..0000000000 --- a/packages/meteor-lib/src/KeyboardLayout.ts +++ /dev/null @@ -1,99 +0,0 @@ -import * as _ from 'underscore' - -/** - * Convert an array of strings into a PhysicalLayout. - * See https://w3c.github.io/uievents-code/#keyboard-sections for rows and sections - * - * @param {string[]} shortForm Order of keys is: Alphanum Row E...A, Function Section Row K, Control Pad E, - * Control Pad D, Arrow Pad B, Arrow Pad A, Numpad Row E...A. - * @returns {PhysicalLayout} - */ -function createPhysicalLayout(shortForm: string[]): PhysicalLayout { - return shortForm.map((row) => { - return _.compact( - row.split(',').map((keyPosition) => { - const args = keyPosition.split(':') - return args[0] - ? { - code: args[1] ? args[1] : args[0], - width: args[1] ? (args[0] === 'X' ? -1 : parseFloat(args[0])) : 3, - } - : undefined - }) - ) - }) -} - -export interface KeyPositon { - code: string - width: number - space?: true -} - -/** - * Order of keys is: Alphanum Row E...A, Function Section Row K, Control Pad E, - * Control Pad D, Arrow Pad B, Arrow Pad A, Numpad Row E...A. Not all rows need to be specified. 
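The new `calculateSegmentTiming(segment, segmentParts)` contract above can be sketched in isolation. A minimal sketch, assuming local stand-in types rather than the real `DBSegment`/`DBPart` imports: the budget now comes from `segment.segmentTiming`, and `countdownType` is passed through so consumers can choose a countdown source.

```ts
// Stand-in types; only the fields that segmentTiming.ts above actually reads.
interface PartLike {
	expectedDurationWithTransition?: number
	untimed?: boolean
}

interface SegmentLike {
	segmentTiming?: {
		budgetDuration?: number
		countdownType?: 'part_expected_duration' | 'segment_budget_duration'
	}
}

function sketchSegmentTiming(segment: SegmentLike, parts: PartLike[]) {
	return {
		// Budget is taken from the segment itself, no longer summed from part budgets
		budgetDurationMs: segment.segmentTiming?.budgetDuration,
		expectedDurationMs: parts.reduce(
			(sum, part) =>
				part.expectedDurationWithTransition != null && !part.untimed
					? sum + part.expectedDurationWithTransition
					: sum,
			0
		),
		countdownType: segment.segmentTiming?.countdownType,
	}
}

// A 90s segment budget wins over the 80s sum of part durations:
console.log(
	sketchSegmentTiming({ segmentTiming: { budgetDuration: 90_000, countdownType: 'segment_budget_duration' } }, [
		{ expectedDurationWithTransition: 50_000 },
		{ expectedDurationWithTransition: 30_000 },
	])
)
```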
- */ -export type PhysicalLayout = KeyPositon[][] - -const STANDARD_102_TKL_TEMPLATE = [ - // Row E - 'Backquote,Digit1,Digit2,Digit3,Digit4,Digit5,Digit6,Digit7,Digit8,Digit9,Digit0,Minus,Equal,X:Backspace', - // Row D - '4:Tab,KeyQ,KeyW,KeyE,KeyR,KeyT,KeyY,KeyU,KeyI,KeyO,KeyP,BracketLeft,BracketRight', - // Row C - '5:CapsLock,KeyA,KeyS,KeyD,KeyF,KeyG,KeyH,KeyJ,KeyK,KeyL,Semicolon,Quote,Backslash,X:Enter', - // Row B - '3.5:ShiftLeft,IntlBackslash,KeyZ,KeyX,KeyC,KeyV,KeyB,KeyN,KeyM,Comma,Period,Slash,X:ShiftRight', - // Row A - '4:ControlLeft,MetaLeft,AltLeft,21:Space,AltRight,MetaRight,ContextMenu,X:ControlRight', - - // Row K - 'Escape,-1:$space,F1,F2,F3,F4,-1:$space,F5,F6,F7,F8,-1:$space,F9,F10,F11,F12', - - // Control Pad E - 'Insert,Home,PageUp', - // Control Pad D - 'Delete,End,PageDown', - - // Arrow Pad B - '$space,ArrowUp,$space', - // Arrow Pad A - 'ArrowLeft,ArrowDown,ArrowRight', -] - -const STANDARD_102_EXTENDED_TEMPLATE = [ - ...STANDARD_102_TKL_TEMPLATE, - // Row E - 'NumLock,NumpadDivide,NumpadMultiply,NumpadSubtract', - // Row D - 'Numpad7,Numpad8,Numpad9,NumpadAdd', - // Row C - 'Numpad4,Numpad5,Numpad6', - // Row B - 'Numpad1,Numpad2,Numpad3,NumpadEnter', - // Row A - '6.16:Numpad0,NumpadDecimal', -] - -// eslint-disable-next-line @typescript-eslint/no-namespace -export namespace KeyboardLayouts { - // This is a small keyboard layout: 102-Standard keybord, without the Numpad - export const STANDARD_102_TKL: PhysicalLayout = createPhysicalLayout(STANDARD_102_TKL_TEMPLATE) - export const STANDARD_102_EXTENDED: PhysicalLayout = createPhysicalLayout(STANDARD_102_EXTENDED_TEMPLATE) - - export function nameToPhysicalLayout(name: Names): PhysicalLayout { - switch (name) { - case Names.STANDARD_102_EXTENDED: - return STANDARD_102_EXTENDED - case Names.STANDARD_102_TKL: - default: - return STANDARD_102_TKL - } - } - - export enum Names { - STANDARD_102_TKL = 'STANDARD_102_TKL', - STANDARD_102_EXTENDED = 'STANDARD_102_EXTENDED', - } -} diff --git a/packages/meteor-lib/src/Settings.ts b/packages/meteor-lib/src/Settings.ts index 11c26a3a47..82965e4aa9 100644 --- a/packages/meteor-lib/src/Settings.ts +++ b/packages/meteor-lib/src/Settings.ts @@ -1,5 +1,3 @@ -import { KeyboardLayouts } from './KeyboardLayout' - /** * This is an object specifying installation-wide, User Interface settings. * There are default values for these settings that will be used, unless overriden @@ -21,8 +19,6 @@ export interface ISettings { enableUserAccounts: boolean /** Default duration to use to render parts when no duration is provided */ defaultDisplayDuration: number - /** If true, allows creation of new playlists in the Lobby Gui (rundown list). If false; only pre-existing playlists are allowed. */ - allowMultiplePlaylistsInGUI: boolean /** How many segments of history to show when scrolling back in time (0 = show current segment only) */ followOnAirSegmentsHistory: number /** Clean up stuff that are older than this [ms] */ @@ -38,12 +34,6 @@ export interface ISettings { /** Default value used to toggle Shelf options when the 'display' URL argument is not provided. */ defaultShelfDisplayOptions: string - /** The KeyboardPreview is a feature that is not implemented in the main Fork, and is kept here for compatibility */ - enableKeyboardPreview: boolean - - /** Keyboard map layout (what physical layout to use for the keyboard) */ - keyboardMapLayout: KeyboardLayouts.Names - /** * CSS class applied to the body of the page. Used to include custom implementations that differ from the main Fork. * I.e. 
custom CSS etc. Leave undefined if no custom implementation is needed @@ -71,14 +61,11 @@ export const DEFAULT_SETTINGS = Object.freeze({ allowGrabbingTimeline: true, enableUserAccounts: false, defaultDisplayDuration: 3000, - allowMultiplePlaylistsInGUI: false, poisonKey: 'Escape', followOnAirSegmentsHistory: 0, maximumDataAge: 1000 * 60 * 60 * 24 * 100, // 100 days enableNTPTimeChecker: null, defaultShelfDisplayOptions: 'buckets,layout,shelfLayout,inspector', - enableKeyboardPreview: false, - keyboardMapLayout: KeyboardLayouts.Names.STANDARD_102_TKL, useCountdownToFreezeFrame: true, confirmKeyCode: 'Enter', }) diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index e7e3fb6382..9ef1d945e6 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -2,7 +2,6 @@ import { BucketId, OrganizationId, PartId, - RundownId, RundownPlaylistActivationId, RundownPlaylistId, ShowStyleBaseId, @@ -247,9 +246,8 @@ export interface MeteorPubSubTypes { bucketId: BucketId ) => CustomCollectionName.UIBucketContentStatuses [MeteorPubSub.uiBlueprintUpgradeStatuses]: () => CustomCollectionName.UIBlueprintUpgradeStatuses - [MeteorPubSub.uiParts]: (playlistId: RundownPlaylistId) => CustomCollectionName.UIParts + [MeteorPubSub.uiParts]: (playlistId: RundownPlaylistId | null) => CustomCollectionName.UIParts [MeteorPubSub.uiPartInstances]: ( - rundownIds: RundownId[], playlistActivationId: RundownPlaylistActivationId | null ) => CustomCollectionName.UIPartInstances } diff --git a/packages/meteor-lib/src/api/userActions.ts b/packages/meteor-lib/src/api/userActions.ts index 55cc2e2bfe..91f521b617 100644 --- a/packages/meteor-lib/src/api/userActions.ts +++ b/packages/meteor-lib/src/api/userActions.ts @@ -1,7 +1,7 @@ import { ClientAPI } from './client' import { EvaluationBase } from '../collections/Evaluations' import { Bucket } from '../collections/Buckets' -import { IngestAdlib, ActionUserData } from '@sofie-automation/blueprints-integration' +import { IngestAdlib, ActionUserData, UserOperationTarget } from '@sofie-automation/blueprints-integration' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { AdLibActionCommon } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' @@ -301,7 +301,7 @@ export interface NewUserActionAPI { eventTime: Time, studioId: StudioId, routeSetId: string, - state: boolean + state: boolean | 'toggle' ): Promise> moveRundown( userEvent: string, @@ -328,6 +328,13 @@ export interface NewUserActionAPI { playlistId: RundownPlaylistId, rundownId: RundownId ): Promise> + executeUserChangeOperation( + userEvent: string, + eventTime: Time, + rundownId: RundownId, + operationTarget: UserOperationTarget, + operation: { id: string; [key: string]: any } + ): Promise> createAdlibTestingRundownForShowStyleVariant( userEvent: string, @@ -348,6 +355,11 @@ export interface NewUserActionAPI { rundownPlaylistId: RundownPlaylistId, marker: QuickLoopMarker | null ): Promise> + clearQuickLoop( + userEvent: string, + eventTime: Time, + rundownPlaylistId: RundownPlaylistId + ): Promise> } export enum UserActionAPIMethods { @@ -427,12 +439,14 @@ export enum UserActionAPIMethods { 'disablePeripheralSubDevice' = 'userAction.system.disablePeripheralSubDevice', + 'executeUserChangeOperation' = 'userAction.executeUserChangeOperation', 'activateAdlibTestingMode' = 
'userAction.activateAdlibTestingMode', 'createAdlibTestingRundownForShowStyleVariant' = 'userAction.createAdlibTestingRundownForShowStyleVariant', 'setQuickLoopStart' = 'userAction.setQuickLoopStart', 'setQuickLoopEnd' = 'userAction.setQuickLoopEnd', + 'clearQuickLoop' = 'userAction.clearQuickLoop', } export interface ReloadRundownPlaylistResponse { diff --git a/packages/meteor-lib/src/collections/RundownLayouts.ts b/packages/meteor-lib/src/collections/RundownLayouts.ts index 1a171fc49a..3d26dcbaba 100644 --- a/packages/meteor-lib/src/collections/RundownLayouts.ts +++ b/packages/meteor-lib/src/collections/RundownLayouts.ts @@ -42,7 +42,6 @@ export enum RundownLayoutElementType { FILTER = 'filter', EXTERNAL_FRAME = 'external_frame', ADLIB_REGION = 'adlib_region', - KEYBOARD_PREVIEW = 'keyboard_preview', // This is used by TV2 PIECE_COUNTDOWN = 'piece_countdown', NEXT_INFO = 'next_info', PLAYLIST_START_TIMER = 'playlist_start_timer', @@ -256,10 +255,6 @@ export interface RundownLayoutFilter extends RundownLayoutFilterBase { default: boolean } -export interface RundownLayoutKeyboardPreview extends RundownLayoutElementBase { - type: RundownLayoutElementType.KEYBOARD_PREVIEW -} - export enum DashboardPanelUnit { /** Dashboard panels are defined in absolute (em) units */ EM = 'em', @@ -304,7 +299,6 @@ export type DashboardLayoutShowStyleDisplay = DashboardPanel export type DashboardLayoutPartName = DashboardPanel export type DashboardLayoutColoredBox = DashboardPanel -export type DashboardLayoutKeyboardPreview = DashboardPanel export type DashboardLayoutMiniRundown = DashboardPanel export type DashboardLayoutFilter = DashboardPanel< RundownLayoutFilterBase & { diff --git a/packages/meteor-lib/src/collections/Studios.ts b/packages/meteor-lib/src/collections/Studios.ts index b5f8238944..d7caa2e49f 100644 --- a/packages/meteor-lib/src/collections/Studios.ts +++ b/packages/meteor-lib/src/collections/Studios.ts @@ -4,20 +4,19 @@ import { MappingExt, StudioRouteType, StudioRouteSet, - RouteMapping, } from '@sofie-automation/corelib/dist/dataModel/Studio' import { omit } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' -import { ReadonlyDeep } from 'type-fest' -export function getActiveRoutes(routeSets: ReadonlyDeep>): ResultingMappingRoutes { +export function getActiveRoutes(routeSets: Record): ResultingMappingRoutes { const routes: ResultingMappingRoutes = { existing: {}, inserted: [], } const exclusivityGroups: { [groupId: string]: true } = {} - for (const routeSet of Object.values>(routeSets)) { + + for (const routeSet of Object.values(routeSets)) { if (routeSet.active) { let useRoute = true if (routeSet.exclusivityGroup) { @@ -28,7 +27,7 @@ export function getActiveRoutes(routeSets: ReadonlyDeep>(routeSet.routes)) { + for (const routeMapping of routeSet.routes || []) { if (routeMapping.outputMappedLayer) { if (routeMapping.mappedLayer) { // Route an existing layer diff --git a/packages/meteor-lib/src/collections/lib.ts b/packages/meteor-lib/src/collections/lib.ts index 636a56578f..093ba0bd43 100644 --- a/packages/meteor-lib/src/collections/lib.ts +++ b/packages/meteor-lib/src/collections/lib.ts @@ -169,11 +169,25 @@ export interface MongoCursor }> * @param callbacks Functions to call to deliver the result set as it changes */ observe(callbacks: ObserveCallbacks): MongoLiveQueryHandle + /** + * Watch a query. Receive callbacks as the result set changes. 
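A sketch of how a consumer might use the `observeChangesAsync` method being added to `MongoCursor` here: unlike `observeChanges`, the live-query handle is resolved asynchronously and must be awaited before it can be stopped. The `WidgetDoc` type and the calling code are illustrative assumptions; the method name and the `nonMutatingCallbacks` option come from the interface itself.

```ts
interface WidgetDoc {
	_id: string
	name: string
}

type Handle = { stop(): void }

interface WidgetCursor {
	// Shape mirrors MongoCursor.observeChangesAsync, simplified for this sketch
	observeChangesAsync(
		callbacks: {
			added?(id: string, fields: Partial<WidgetDoc>): void
			removed?(id: string): void
		},
		options?: { nonMutatingCallbacks?: boolean }
	): Promise<Handle>
}

async function watchWidgets(cursor: WidgetCursor): Promise<Handle> {
	// The handle only exists once the promise resolves
	return cursor.observeChangesAsync(
		{
			added: (id, fields) => console.log('added', id, fields),
			removed: (id) => console.log('removed', id),
		},
		{ nonMutatingCallbacks: true } // callbacks promise not to mutate the documents
	)
}
```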
+ * @param callbacks Functions to call to deliver the result set as it changes + */ + observeAsync(callbacks: ObserveCallbacks): Promise /** * Watch a query. Receive callbacks as the result set changes. Only the differences between the old and new documents are passed to the callbacks. * @param callbacks Functions to call to deliver the result set as it changes */ observeChanges(callbacks: ObserveChangesCallbacks): MongoLiveQueryHandle + /** + * Watch a query. Receive callbacks as the result set changes. Only the differences between the old and new documents are passed to the callbacks. + * @param callbacks Functions to call to deliver the result set as it changes + * @param options { nonMutatingCallbacks: boolean } + */ + observeChangesAsync( + callbacks: ObserveChangesCallbacks, + options?: { nonMutatingCallbacks?: boolean | undefined } + ): Promise } export interface ObserveCallbacks { added?(document: DBInterface): void diff --git a/packages/meteor-lib/src/lib.ts b/packages/meteor-lib/src/lib.ts index c2af24b1f4..bbe5a9daa6 100644 --- a/packages/meteor-lib/src/lib.ts +++ b/packages/meteor-lib/src/lib.ts @@ -1,5 +1,3 @@ -import { ITranslatableMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' - export enum LogLevel { SILLY = 'silly', DEBUG = 'debug', @@ -9,16 +7,3 @@ export enum LogLevel { ERROR = 'error', NONE = 'crit', } - -/** Generate the translation for a string, to be applied later when it gets rendered */ -export function generateTranslation( - key: string, - args?: { [k: string]: any }, - namespaces?: string[] -): ITranslatableMessage { - return { - key, - args, - namespaces, - } -} diff --git a/packages/meteor-lib/src/triggers/actionFactory.ts b/packages/meteor-lib/src/triggers/actionFactory.ts index 26b944a591..7796716ffc 100644 --- a/packages/meteor-lib/src/triggers/actionFactory.ts +++ b/packages/meteor-lib/src/triggers/actionFactory.ts @@ -16,12 +16,18 @@ import RundownViewEventBus, { RundownViewEvents } from '../triggers/RundownViewE import { UserAction } from '../userAction' import { AdLibFilterChainLink, compileAdLibFilter, IWrappedAdLib } from './actionFilterChainCompilers' import { ClientAPI } from '../api/client' -import { PartId, PartInstanceId, RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + PartId, + PartInstanceId, + RundownId, + RundownPlaylistId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { DeviceActions } from '@sofie-automation/shared-lib/dist/core/model/ShowStyle' import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error' import { MountedAdLibTriggerType } from '../api/MountedTriggers' -import { DummyReactiveVar, ReactiveVar } from './reactive-var' -import { TriggersContext } from './triggersContext' +import { DummyReactiveVar, TriggerReactiveVar } from './reactive-var' +import { TriggersContext, TriggerTrackerComputation } from './triggersContext' import { assertNever } from '@sofie-automation/corelib/dist/lib' // as described in this issue: https://github.com/Microsoft/TypeScript/issues/14094 @@ -30,17 +36,18 @@ type Without = { [P in Exclude]?: never } type XOR = T | U extends object ? 
(Without & U) | (Without & T) : T | U export interface ReactivePlaylistActionContext { - rundownPlaylistId: ReactiveVar - rundownPlaylist: ReactiveVar< + studioId: TriggerReactiveVar + rundownPlaylistId: TriggerReactiveVar + rundownPlaylist: TriggerReactiveVar< Pick > - currentRundownId: ReactiveVar - currentSegmentPartIds: ReactiveVar - nextSegmentPartIds: ReactiveVar - currentPartInstanceId: ReactiveVar - currentPartId: ReactiveVar - nextPartId: ReactiveVar + currentRundownId: TriggerReactiveVar + currentSegmentPartIds: TriggerReactiveVar + nextSegmentPartIds: TriggerReactiveVar + currentPartInstanceId: TriggerReactiveVar + currentPartId: TriggerReactiveVar + nextPartId: TriggerReactiveVar } interface PlainPlaylistContext { @@ -57,11 +64,11 @@ interface PlainStudioContext { showStyleBase: DBShowStyleBase } -type PlainActionContext = XOR +export type PlainActionContext = XOR export type ActionContext = XOR -type ActionExecutor = (t: TFunction, e: any, ctx: ActionContext) => void +type ActionExecutor = (t: TFunction, e: any, ctx: ActionContext) => Promise | void /** * An action compiled down to a single function that can be executed @@ -82,7 +89,7 @@ export interface ExecutableAction { * @extends {ExecutableAction} */ interface PreviewableAction extends ExecutableAction { - preview: (ctx: ReactivePlaylistActionContext) => IWrappedAdLib[] + preview: (ctx: ActionContext, computation: TriggerTrackerComputation | null) => Promise } interface ExecutableAdLibAction extends PreviewableAction { @@ -92,30 +99,35 @@ interface ExecutableAdLibAction extends PreviewableAction { export function isPreviewableAction(action: ExecutableAction): action is PreviewableAction { return action.action && 'preview' in action && typeof action['preview'] === 'function' } -function createRundownPlaylistContext( +async function createRundownPlaylistContext( + computation: TriggerTrackerComputation | null, triggersContext: TriggersContext, context: ActionContext, filterChain: IBaseFilterLink[] -): ReactivePlaylistActionContext | undefined { +): Promise { if (filterChain.length < 1) { return undefined } else if (filterChain[0].object === 'view' && context.rundownPlaylistId) { return context as ReactivePlaylistActionContext } else if (filterChain[0].object === 'view' && context.rundownPlaylist) { const playlistContext = context as PlainPlaylistContext - return { - rundownPlaylistId: new DummyReactiveVar(playlistContext.rundownPlaylist._id), - rundownPlaylist: new DummyReactiveVar(playlistContext.rundownPlaylist), - currentRundownId: new DummyReactiveVar(playlistContext.currentRundownId), - currentPartId: new DummyReactiveVar(playlistContext.currentPartId), - nextPartId: new DummyReactiveVar(playlistContext.nextPartId), - currentSegmentPartIds: new DummyReactiveVar(playlistContext.currentSegmentPartIds), - nextSegmentPartIds: new DummyReactiveVar(playlistContext.nextSegmentPartIds), - currentPartInstanceId: new DummyReactiveVar( - playlistContext.rundownPlaylist.currentPartInfo?.partInstanceId ?? 
null - ), - } + return triggersContext.withComputation(computation, async () => { + return { + studioId: new DummyReactiveVar(playlistContext.rundownPlaylist.studioId), + rundownPlaylistId: new DummyReactiveVar(playlistContext.rundownPlaylist._id), + rundownPlaylist: new DummyReactiveVar(playlistContext.rundownPlaylist), + currentRundownId: new DummyReactiveVar(playlistContext.currentRundownId), + currentPartId: new DummyReactiveVar(playlistContext.currentPartId), + nextPartId: new DummyReactiveVar(playlistContext.nextPartId), + currentSegmentPartIds: new DummyReactiveVar(playlistContext.currentSegmentPartIds), + nextSegmentPartIds: new DummyReactiveVar(playlistContext.nextSegmentPartIds), + currentPartInstanceId: new DummyReactiveVar( + playlistContext.rundownPlaylist.currentPartInfo?.partInstanceId ?? null + ), + } + }) } else if (filterChain[0].object === 'rundownPlaylist' && context.studio) { + // Note: this is only implemented on the server return triggersContext.createContextForRundownPlaylistChain(context.studio._id, filterChain) } else { throw new Error('Invalid filter combination') @@ -140,12 +152,12 @@ function createAdLibAction( return { action: PlayoutActions.adlib, - preview: (ctx) => { - const innerCtx = createRundownPlaylistContext(triggersContext, ctx, filterChain) + preview: async (ctx, computation) => { + const innerCtx = await createRundownPlaylistContext(computation, triggersContext, ctx, filterChain) if (innerCtx) { try { - return compiledAdLibFilter(innerCtx) + return compiledAdLibFilter(innerCtx, computation) } catch (e) { triggersContext.logger.error(e) return [] @@ -154,8 +166,8 @@ function createAdLibAction( return [] } }, - execute: (t, e, ctx) => { - const innerCtx = createRundownPlaylistContext(triggersContext, ctx, filterChain) + execute: async (t, e, ctx) => { + const innerCtx = await createRundownPlaylistContext(null, triggersContext, ctx, filterChain) if (!innerCtx) { triggersContext.logger.warn( @@ -164,93 +176,97 @@ function createAdLibAction( ) return } - const currentPartInstanceId = innerCtx.rundownPlaylist.get().currentPartInfo?.partInstanceId + const currentPartInstanceId = innerCtx.rundownPlaylist.get(null).currentPartInfo?.partInstanceId const sourceLayerIdsToClear: string[] = [] - triggersContext - .nonreactiveTracker(() => compiledAdLibFilter(innerCtx)) - .forEach((wrappedAdLib) => { - switch (wrappedAdLib.type) { - case MountedAdLibTriggerType.adLibPiece: - triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => - currentPartInstanceId - ? triggersContext.MeteorCall.userAction.segmentAdLibPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - currentPartInstanceId, - wrappedAdLib.item._id, - false - ) - : ClientAPI.responseSuccess(undefined) - ) - break - case MountedAdLibTriggerType.rundownBaselineAdLibItem: - triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => - currentPartInstanceId - ? 
triggersContext.MeteorCall.userAction.baselineAdLibPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - currentPartInstanceId, - wrappedAdLib.item._id, - false - ) - : ClientAPI.responseSuccess(undefined) - ) - break - case MountedAdLibTriggerType.adLibAction: - triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => - triggersContext.MeteorCall.userAction.executeAction( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib._id, - wrappedAdLib.item.actionId, - wrappedAdLib.item.userData, - (actionArguments && actionArguments.triggerMode) || undefined - ) + + // This withComputation is probably not needed, but it ensures there is no accidental reactivity + const wrappedAdLibs = await triggersContext.withComputation(null, async () => + compiledAdLibFilter(innerCtx, null) + ) + + wrappedAdLibs.forEach((wrappedAdLib) => { + switch (wrappedAdLib.type) { + case MountedAdLibTriggerType.adLibPiece: + triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => + currentPartInstanceId + ? triggersContext.MeteorCall.userAction.segmentAdLibPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + currentPartInstanceId, + wrappedAdLib.item._id, + false + ) + : ClientAPI.responseSuccess(undefined) + ) + break + case MountedAdLibTriggerType.rundownBaselineAdLibItem: + triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => + currentPartInstanceId + ? triggersContext.MeteorCall.userAction.baselineAdLibPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + currentPartInstanceId, + wrappedAdLib.item._id, + false + ) + : ClientAPI.responseSuccess(undefined) + ) + break + case MountedAdLibTriggerType.adLibAction: + triggersContext.doUserAction(t, e, UserAction.START_ADLIB, async (e, ts) => + triggersContext.MeteorCall.userAction.executeAction( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib._id, + wrappedAdLib.item.actionId, + wrappedAdLib.item.userData, + (actionArguments && actionArguments.triggerMode) || undefined ) - break - case MountedAdLibTriggerType.rundownBaselineAdLibAction: - triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => - triggersContext.MeteorCall.userAction.executeAction( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib._id, - wrappedAdLib.item.actionId, - wrappedAdLib.item.userData, - (actionArguments && actionArguments.triggerMode) || undefined - ) + ) + break + case MountedAdLibTriggerType.rundownBaselineAdLibAction: + triggersContext.doUserAction(t, e, UserAction.START_GLOBAL_ADLIB, async (e, ts) => + triggersContext.MeteorCall.userAction.executeAction( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib._id, + wrappedAdLib.item.actionId, + wrappedAdLib.item.userData, + (actionArguments && actionArguments.triggerMode) || undefined ) - break - case MountedAdLibTriggerType.clearSourceLayer: - // defer this action to send a single clear action all at once - sourceLayerIdsToClear.push(wrappedAdLib.sourceLayerId) - break - case MountedAdLibTriggerType.sticky: - triggersContext.doUserAction(t, e, UserAction.START_STICKY_PIECE, async (e, ts) => - triggersContext.MeteorCall.userAction.sourceLayerStickyPieceStart( - e, - ts, - innerCtx.rundownPlaylistId.get(), - wrappedAdLib.sourceLayerId // - ) + ) + break + case MountedAdLibTriggerType.clearSourceLayer: + // defer this action to send a single clear action all at once + sourceLayerIdsToClear.push(wrappedAdLib.sourceLayerId) + break + case MountedAdLibTriggerType.sticky: + 
triggersContext.doUserAction(t, e, UserAction.START_STICKY_PIECE, async (e, ts) => + triggersContext.MeteorCall.userAction.sourceLayerStickyPieceStart( + e, + ts, + innerCtx.rundownPlaylistId.get(null), + wrappedAdLib.sourceLayerId // ) - break - default: - assertNever(wrappedAdLib) - return - } - }) + ) + break + default: + assertNever(wrappedAdLib) + return + } + }) if (currentPartInstanceId && sourceLayerIdsToClear.length > 0) { triggersContext.doUserAction(t, e, UserAction.CLEAR_SOURCELAYER, async (e, ts) => triggersContext.MeteorCall.userAction.sourceLayerOnPartStop( e, ts, - innerCtx.rundownPlaylistId.get(), + innerCtx.rundownPlaylistId.get(null), currentPartInstanceId, sourceLayerIdsToClear ) @@ -401,9 +417,10 @@ function createUserActionWithCtx( ): ExecutableAction { return { action: action.action, - execute: (t, e, ctx) => { - const innerCtx = triggersContext.nonreactiveTracker(() => - createRundownPlaylistContext(triggersContext, ctx, action.filterChain) + execute: async (t, e, ctx) => { + // This outer withComputation is probably not needed, but it ensures there is no accidental reactivity + const innerCtx = await triggersContext.withComputation(null, async () => + createRundownPlaylistContext(null, triggersContext, ctx, action.filterChain) ) if (innerCtx) { triggersContext.doUserAction(t, e, userAction, async (e, ts) => userActionExec(e, ts, innerCtx)) @@ -442,7 +459,7 @@ export function createAction( triggersContext.MeteorCall.userAction.forceResetAndActivate( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), !!action.rehearsal || false ) ) @@ -461,7 +478,7 @@ export function createAction( triggersContext.MeteorCall.userAction.activate( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), !!action.rehearsal || false ) ) @@ -476,7 +493,7 @@ export function createAction( action, UserAction.DEACTIVATE_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.deactivate(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.deactivate(e, ts, ctx.rundownPlaylistId.get(null)) ) case PlayoutActions.activateAdlibTestingMode: return createUserActionWithCtx( @@ -484,12 +501,12 @@ export function createAction( action, UserAction.ACTIVATE_ADLIB_TESTING, async (e, ts, ctx) => { - const rundownId = ctx.currentRundownId.get() + const rundownId = ctx.currentRundownId.get(null) if (rundownId) { return triggersContext.MeteorCall.userAction.activateAdlibTestingMode( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), rundownId ) } else { @@ -505,21 +522,26 @@ export function createAction( triggersContext.MeteorCall.userAction.take( e, ts, - ctx.rundownPlaylistId.get(), - ctx.currentPartInstanceId.get() + ctx.rundownPlaylistId.get(null), + ctx.currentPartInstanceId.get(null) ) ) } case PlayoutActions.hold: return createUserActionWithCtx(triggersContext, action, UserAction.ACTIVATE_HOLD, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.activateHold(e, ts, ctx.rundownPlaylistId.get(), !!action.undo) + triggersContext.MeteorCall.userAction.activateHold( + e, + ts, + ctx.rundownPlaylistId.get(null), + !!action.undo + ) ) case PlayoutActions.disableNextPiece: return createUserActionWithCtx(triggersContext, action, UserAction.DISABLE_NEXT_PIECE, async (e, ts, ctx) => triggersContext.MeteorCall.userAction.disableNextPiece( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), !!action.undo ) ) @@ -538,7 +560,7 @@ export function createAction( e, ts, 
triggersContext.hashSingleUseToken(tokenResult.result), - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), `action`, false ) @@ -550,7 +572,7 @@ export function createAction( triggersContext.MeteorCall.userAction.moveNext( e, ts, - ctx.rundownPlaylistId.get(), + ctx.rundownPlaylistId.get(null), action.parts ?? 0, action.segments ?? 0 ) @@ -566,7 +588,11 @@ export function createAction( async (e, ts, ctx) => // TODO: Needs some handling of the response. Perhaps this should switch to // an event on the RundownViewEventBus, if ran on the client? - triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resyncRundownPlaylist( + e, + ts, + ctx.rundownPlaylistId.get(null) + ) ) } case PlayoutActions.resetRundownPlaylist: @@ -578,7 +604,11 @@ export function createAction( action, UserAction.RESET_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.resetRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resetRundownPlaylist( + e, + ts, + ctx.rundownPlaylistId.get(null) + ) ) } case PlayoutActions.resyncRundownPlaylist: @@ -587,7 +617,17 @@ export function createAction( action, UserAction.RESYNC_RUNDOWN_PLAYLIST, async (e, ts, ctx) => - triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get()) + triggersContext.MeteorCall.userAction.resyncRundownPlaylist(e, ts, ctx.rundownPlaylistId.get(null)) + ) + case PlayoutActions.switchRouteSet: + return createUserActionWithCtx(triggersContext, action, UserAction.SWITCH_ROUTE_SET, async (e, ts, ctx) => + triggersContext.MeteorCall.userAction.switchRouteSet( + e, + ts, + ctx.studioId.get(null), + action.routeSetId, + action.state + ) ) case ClientActions.showEntireCurrentSegment: return createShowEntireCurrentSegmentAction(action.filterChain, action.on) diff --git a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts index e63e456f05..d8afa19d94 100644 --- a/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts +++ b/packages/meteor-lib/src/triggers/actionFilterChainCompilers.ts @@ -23,10 +23,9 @@ import { ReactivePlaylistActionContext } from './actionFactory' import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { IWrappedAdLibBase } from '@sofie-automation/shared-lib/dist/input-gateway/deviceTriggerPreviews' import { MountedAdLibTriggerType } from '../api/MountedTriggers' -import { assertNever } from '@sofie-automation/corelib/dist/lib' -import { generateTranslation } from '../lib' +import { assertNever, generateTranslation } from '@sofie-automation/corelib/dist/lib' import { FindOptions } from '../collections/lib' -import { TriggersContext } from './triggersContext' +import { TriggersContext, TriggerTrackerComputation } from './triggersContext' export type AdLibFilterChainLink = IRundownPlaylistFilterLink | IGUIContextFilterLink | IAdLibFilterLink @@ -491,7 +490,7 @@ export function compileAdLibFilter( triggersContext: TriggersContext, filterChain: AdLibFilterChainLink[], sourceLayers: SourceLayers -): (context: ReactivePlaylistActionContext) => IWrappedAdLib[] { +): (context: ReactivePlaylistActionContext, computation: TriggerTrackerComputation | null) => Promise { const onlyAdLibLinks = filterChain.filter((link) => link.object === 'adLib') as IAdLibFilterLink[] const adLibPieceTypeFilter = compileAdLibPieceFilter(onlyAdLibLinks, 
sourceLayers) const adLibActionTypeFilter = compileAdLibActionFilter(onlyAdLibLinks, sourceLayers) @@ -499,23 +498,23 @@ export function compileAdLibFilter( const clearAdLibs = compileAndRunClearFilter(onlyAdLibLinks, sourceLayers) const stickyAdLibs = compileAndRunStickyFilter(onlyAdLibLinks, sourceLayers) - return (context: ReactivePlaylistActionContext) => { + return async (context: ReactivePlaylistActionContext, computation: TriggerTrackerComputation | null) => { let rundownBaselineAdLibItems: IWrappedAdLib[] = [] let adLibPieces: IWrappedAdLib[] = [] let rundownBaselineAdLibActions: IWrappedAdLib[] = [] let adLibActions: IWrappedAdLib[] = [] const segmentPartIds = adLibPieceTypeFilter.segment === 'current' - ? context.currentSegmentPartIds.get() + ? context.currentSegmentPartIds.get(computation) : adLibPieceTypeFilter.segment === 'next' - ? context.nextSegmentPartIds.get() + ? context.nextSegmentPartIds.get(computation) : undefined const singlePartId = adLibPieceTypeFilter.part === 'current' - ? context.currentPartId.get() + ? context.currentPartId.get(computation) : adLibPieceTypeFilter.part === 'next' - ? context.nextPartId.get() + ? context.nextPartId.get(computation) : undefined /** Note: undefined means that all parts are to be considered */ @@ -555,25 +554,31 @@ export function compileAdLibFilter( } } - const currentRundownId = context.currentRundownId.get() + const currentRundownId = context.currentRundownId.get(computation) if (!skip && currentRundownId) { if (adLibPieceTypeFilter.global === undefined || adLibPieceTypeFilter.global === true) - rundownBaselineAdLibItems = triggersContext.RundownBaselineAdLibPieces.find( - { - ...adLibPieceTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibPieceTypeFilter.options + rundownBaselineAdLibItems = ( + await triggersContext.RundownBaselineAdLibPieces.findFetchAsync( + computation, + { + ...adLibPieceTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibPieceTypeFilter.options + ) ).map((item) => wrapAdLibPiece(item, MountedAdLibTriggerType.rundownBaselineAdLibItem)) if (adLibPieceTypeFilter.global === undefined || adLibPieceTypeFilter.global === false) - adLibPieces = triggersContext.AdLibPieces.find( - { - ...adLibPieceTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibPieceTypeFilter.options + adLibPieces = ( + await triggersContext.AdLibPieces.findFetchAsync( + computation, + { + ...adLibPieceTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibPieceTypeFilter.options + ) ).map((item) => wrapAdLibPiece(item, MountedAdLibTriggerType.adLibPiece)) } } @@ -592,27 +597,33 @@ export function compileAdLibFilter( } } - const currentRundownId = context.currentRundownId.get() + const currentRundownId = context.currentRundownId.get(computation) if (!skip && currentRundownId) { if (adLibActionTypeFilter.global === undefined || adLibActionTypeFilter.global === true) - rundownBaselineAdLibActions = triggersContext.RundownBaselineAdLibActions.find( - { - ...adLibActionTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibActionTypeFilter.options + rundownBaselineAdLibActions = ( + await triggersContext.RundownBaselineAdLibActions.findFetchAsync( + computation, + { + ...adLibActionTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + 
adLibActionTypeFilter.options + ) ).map((item) => wrapRundownBaselineAdLibAction(item, MountedAdLibTriggerType.rundownBaselineAdLibAction) ) if (adLibActionTypeFilter.global === undefined || adLibActionTypeFilter.global === false) - adLibActions = triggersContext.AdLibActions.find( - { - ...adLibActionTypeFilter.selector, - ...currentNextOverride, - rundownId: currentRundownId, - } as MongoQuery, - adLibActionTypeFilter.options + adLibActions = ( + await triggersContext.AdLibActions.findFetchAsync( + computation, + { + ...adLibActionTypeFilter.selector, + ...currentNextOverride, + rundownId: currentRundownId, + } as MongoQuery, + adLibActionTypeFilter.options + ) ).map((item) => wrapAdLibAction(item, MountedAdLibTriggerType.adLibAction)) } } @@ -625,38 +636,49 @@ export function compileAdLibFilter( // Note: We need to return an array from within memoizedIsolatedAutorun, // because _.isEqual (used in memoizedIsolatedAutorun) doesn't work with Maps.. - const rundownPlaylistId = context.rundownPlaylistId.get() - const rundownRanks = triggersContext.memoizedIsolatedAutorun(() => { - const playlist = triggersContext.RundownPlaylists.findOne(rundownPlaylistId, { - projection: { - rundownIdsInOrder: 1, - }, - }) as Pick | undefined - - if (playlist?.rundownIdsInOrder) { - return playlist.rundownIdsInOrder - } else { - const rundowns = triggersContext.Rundowns.find( + const rundownPlaylistId = context.rundownPlaylistId.get(computation) + const rundownRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => { + const playlist = (await triggersContext.RundownPlaylists.findOneAsync( + computation, + rundownPlaylistId, { - playlistId: rundownPlaylistId, - }, - { - fields: { - _id: 1, + projection: { + rundownIdsInOrder: 1, }, } - ).fetch() as Pick[] - - return rundowns.map((r) => r._id) - } - }, `rundownsRanksForPlaylist_${rundownPlaylistId}`) + )) as Pick | undefined + + if (playlist?.rundownIdsInOrder) { + return playlist.rundownIdsInOrder + } else { + const rundowns = (await triggersContext.Rundowns.findFetchAsync( + computation, + { + playlistId: rundownPlaylistId, + }, + { + fields: { + _id: 1, + }, + } + )) as Pick[] + + return rundowns.map((r) => r._id) + } + }, + `rundownsRanksForPlaylist_${rundownPlaylistId}` + ) rundownRanks.forEach((id, index) => { rundownRankMap.set(id, index) }) - const segmentRanks = triggersContext.memoizedIsolatedAutorun( - () => - triggersContext.Segments.find( + const segmentRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => + (await triggersContext.Segments.findFetchAsync( + computation, { rundownId: { $in: Array.from(rundownRankMap.keys()) }, }, @@ -666,42 +688,48 @@ export function compileAdLibFilter( _rank: 1, }, } - ).fetch() as Pick[], + )) as Pick[], `segmentRanksForRundowns_${Array.from(rundownRankMap.keys()).join(',')}` ) segmentRanks.forEach((segment) => { segmentRankMap.set(segment._id, segment._rank) }) - const partRanks = triggersContext.memoizedIsolatedAutorun(() => { - if (!partFilter) { - return triggersContext.Parts.find( - { - rundownId: { $in: Array.from(rundownRankMap.keys()) }, - }, - { - fields: { - _id: 1, - segmentId: 1, - rundownId: 1, - _rank: 1, - }, - } - ).fetch() as Pick[] - } else { - return triggersContext.Parts.find( - { _id: { $in: partFilter } }, - { - fields: { - _id: 1, - segmentId: 1, - rundownId: 1, - _rank: 1, + const partRanks = await triggersContext.memoizedIsolatedAutorun( + computation, + async (computation) => { + if (!partFilter) { + return 
(await triggersContext.Parts.findFetchAsync(
+					computation,
+					{
+						rundownId: { $in: Array.from(rundownRankMap.keys()) },
+					},
+					{
+						fields: {
+							_id: 1,
+							segmentId: 1,
+							rundownId: 1,
+							_rank: 1,
+						},
+					}
+				)) as Pick<DBPart, '_id' | 'segmentId' | 'rundownId' | '_rank'>[]
+			} else {
+				return (await triggersContext.Parts.findFetchAsync(
+					computation,
+					{ _id: { $in: partFilter } },
+					{
+						fields: {
+							_id: 1,
+							segmentId: 1,
+							rundownId: 1,
+							_rank: 1,
+						},
+					}
+				)) as Pick<DBPart, '_id' | 'segmentId' | 'rundownId' | '_rank'>[]
+			}
+		},
+		`partRanks_${JSON.stringify(partFilter ?? rundownRankMap.keys())}`
+	)

 	partRanks.forEach((part) => {
 		partRankMap.set(part._id, part)
diff --git a/packages/meteor-lib/src/triggers/reactive-var.ts b/packages/meteor-lib/src/triggers/reactive-var.ts
index f9d7d58758..765174dd7f 100644
--- a/packages/meteor-lib/src/triggers/reactive-var.ts
+++ b/packages/meteor-lib/src/triggers/reactive-var.ts
@@ -1,9 +1,11 @@
+import type { TriggerTrackerComputation } from './triggersContext'
+
 // Copied from Meteor
-export interface ReactiveVar<T> {
+export interface TriggerReactiveVar<T> {
 	/**
 	 * Returns the current value of the ReactiveVar, establishing a reactive dependency.
 	 */
-	get(): T
+	get(computation: TriggerTrackerComputation | null): T
 	/**
 	 * Sets the current value of the ReactiveVar, invalidating the Computations that called `get` if `newValue` is different from the old value.
 	 */
@@ -14,7 +16,7 @@
  * This just looks like a ReactiveVar, but is not reactive.
  * It's used to use the same interface/typings, but when code is run on both client and server side.
  * */
-export class DummyReactiveVar<T> implements ReactiveVar<T> {
+export class DummyReactiveVar<T> implements TriggerReactiveVar<T> {
 	constructor(private value: T) {}
 	public get(): T {
 		return this.value
diff --git a/packages/meteor-lib/src/triggers/triggersContext.ts b/packages/meteor-lib/src/triggers/triggersContext.ts
index 4c94b4ac52..94b179cb77 100644
--- a/packages/meteor-lib/src/triggers/triggersContext.ts
+++ b/packages/meteor-lib/src/triggers/triggersContext.ts
@@ -2,7 +2,7 @@ import { UserAction } from '../userAction'
 import { IMeteorCall } from '../api/methods'
 import { Time } from '@sofie-automation/shared-lib/dist/lib/lib'
 import { ClientAPI } from '../api/client'
-import { MongoReadOnlyCollection } from '../collections/lib'
+import { FindOneOptions, FindOptions } from '../collections/lib'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
@@ -16,6 +16,37 @@ import { IBaseFilterLink } from '@sofie-automation/blueprints-integration'
 import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { ReactivePlaylistActionContext } from './actionFactory'
 import { TFunction } from 'i18next'
+import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString'
+import { MongoQuery } from '@sofie-automation/corelib/dist/mongo'
+
+/**
+ * An opaque type that is used in the meteor-lib api instead of implementation specific computations.
+ * This should be treated as equivalent to the Meteor `Tracker.Computation` type.
+ */
+export type TriggerTrackerComputation = { __internal: true }
+
+export interface TriggersAsyncCollection<DBInterface extends { _id: ProtectedString<any> }> {
+	/**
+	 * Find and return multiple documents
+	 * @param selector A query describing the documents to find
+	 * @param options Options for the operation
+	 */
+	findFetchAsync(
+		computation: TriggerTrackerComputation | null,
+		selector: MongoQuery<DBInterface>,
+		options?: FindOptions<DBInterface>
+	): Promise<Array<DBInterface>>
+
+	/**
+	 * Finds the first document that matches the selector, as ordered by sort and skip options. Returns `undefined` if no matching document is found.
+	 * @param selector A query describing the documents to find
+	 */
+	findOneAsync(
+		computation: TriggerTrackerComputation | null,
+		selector: MongoQuery<DBInterface> | DBInterface['_id'],
+		options?: FindOneOptions<DBInterface>
+	): Promise<DBInterface | undefined>
+}

 export interface TriggersContext {
 	readonly MeteorCall: IMeteorCall
@@ -24,14 +55,14 @@ export interface TriggersContext {

 	readonly isClient: boolean

-	readonly AdLibActions: MongoReadOnlyCollection<AdLibAction>
-	readonly AdLibPieces: MongoReadOnlyCollection<AdLibPiece>
-	readonly Parts: MongoReadOnlyCollection<DBPart>
-	readonly RundownBaselineAdLibActions: MongoReadOnlyCollection<RundownBaselineAdLibAction>
-	readonly RundownBaselineAdLibPieces: MongoReadOnlyCollection<RundownBaselineAdLibItem>
-	readonly RundownPlaylists: MongoReadOnlyCollection<DBRundownPlaylist>
-	readonly Rundowns: MongoReadOnlyCollection<DBRundown>
-	readonly Segments: MongoReadOnlyCollection<DBSegment>
+	readonly AdLibActions: TriggersAsyncCollection<AdLibAction>
+	readonly AdLibPieces: TriggersAsyncCollection<AdLibPiece>
+	readonly Parts: TriggersAsyncCollection<DBPart>
+	readonly RundownBaselineAdLibActions: TriggersAsyncCollection<RundownBaselineAdLibAction>
+	readonly RundownBaselineAdLibPieces: TriggersAsyncCollection<RundownBaselineAdLibItem>
+	readonly RundownPlaylists: TriggersAsyncCollection<DBRundownPlaylist>
+	readonly Rundowns: TriggersAsyncCollection<DBRundown>
+	readonly Segments: TriggersAsyncCollection<DBSegment>

 	hashSingleUseToken(token: string): string

@@ -44,16 +75,28 @@ export interface TriggersContext {
 		_okMessage?: string
 	): void

-	nonreactiveTracker<T>(func: () => T): T
+	/**
+	 * Equivalent to the Meteor `Tracker.withComputation` function, but implementation specific.
+	 * Use this to ensure that a function is run as part of the provided computation.
+	 */
+	withComputation<T>(computation: TriggerTrackerComputation | null, func: () => Promise<T>): Promise<T>

-	memoizedIsolatedAutorun<T extends (...args: any) => any>(
-		fnc: T,
+	/**
+	 * Create a reactive computation that will be run independently of the outer one. If the same function (using the same
+	 * name and parameters) will be used again, this computation will only be computed once on invalidation and its
+	 * result will be memoized and reused on every other call.
+	 *
+	 * This will be run as part of the provided computation, and passes the inner computation to the function.
+	 */
+	memoizedIsolatedAutorun<TArgs extends any[], TRes>(
+		computation: TriggerTrackerComputation | null,
+		fnc: (computation: TriggerTrackerComputation | null, ...args: TArgs) => Promise<TRes>,
 		functionName: string,
-		...params: Parameters<T>
-	): ReturnType<T>
+		...params: TArgs
+	): Promise<TRes>

 	createContextForRundownPlaylistChain(
 		_studioId: StudioId,
 		_filterChain: IBaseFilterLink[]
-	): ReactivePlaylistActionContext | undefined
+	): Promise<ReactivePlaylistActionContext | undefined>
 }
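A sketch of how the async `TriggersContext` surface above might be consumed, with the computation threaded through explicitly: a server-side caller passes `null`, a client caller passes its Tracker computation. The helper itself, its loose `rundownId` typing, and the memoization key are illustrative assumptions; the method names and signatures come from the interfaces above.

```ts
import type { TriggersContext, TriggerTrackerComputation } from './triggersContext'

// Count the timed parts of a rundown. Runs reactively when given a client
// computation, or as a one-shot async lookup when computation is null.
async function countTimedParts(
	ctx: TriggersContext,
	computation: TriggerTrackerComputation | null,
	rundownId: unknown // RundownId in the real code; kept loose in this sketch
): Promise<number> {
	return ctx.memoizedIsolatedAutorun(
		computation,
		async (innerComputation) => {
			const parts = await ctx.Parts.findFetchAsync(innerComputation, { rundownId } as any, {
				fields: { _id: 1, untimed: 1 } as any,
			})
			return parts.filter((part) => !part.untimed).length
		},
		`countTimedParts_${rundownId}` // same name + params: the memoized result is reused
	)
}
```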
diff --git a/packages/meteor-lib/src/tv2/AHKkeyboardMap.ts b/packages/meteor-lib/src/tv2/AHKkeyboardMap.ts
deleted file mode 100644
index 71af9f8486..0000000000
--- a/packages/meteor-lib/src/tv2/AHKkeyboardMap.ts
+++ /dev/null
@@ -1,75 +0,0 @@
-import * as _ from 'underscore'
-
-export const AHKKeyboardMap: Record<string, string | string[]> = {
-	'½': ['SC029', '{vkDCsc029}'],
-	f1: ['F1', '{F1}'],
-	f2: ['F2', '{F2}'],
-	f3: ['F3', '{F3}'],
-	f4: ['F4', '{F4}'],
-	f5: ['F5', '{F5}'],
-	f6: ['F6', '{F6}'],
-	f7: ['F7', '{F7}'],
-	f8: ['F8', '{F8}'],
-	f9: ['F9', '{F9}'],
-	f10: ['F10', '{F10}'],
-	f11: ['F11', '{F11}'],
-	f12: ['F12', '{F12}'],
-	'!': ['!', '{!}'],
-	'#': ['#', '{#}'],
-	add: ['+', '{+}'],
-	comma: ['SC033', '{,}'],
-	period: '.',
-	'^': ['^', '{^}'],
-	'{': ['{', '{{}'],
-	'}': ['}', '{}}'],
-	enter: ['Enter', '{Enter}'],
-	esc: ['Escape', '{Escape}'],
-	space: ['Space', '{Space}'],
-	tab: ['Tab', '{Tab}'],
-	backspace: ['Backspace', '{Backspace}'],
-	del: ['Delete', '{Delete}'],
-	ins: ['Insert', '{Insert}'],
-	up: ['Up', '{Up}'],
-	down: ['Down', '{Down}'],
-	left: ['Left', '{Left}'],
-	right: ['Right', '{Right}'],
-	home: ['Home', '{Home}'],
-	end: ['End', '{End}'],
-	pageup: ['PgUp', '{PgUp}'],
-	pagedown: ['PgDn', '{PgDn}'],
-	capslock: ['CapsLock', '{CapsLock}'],
-	numlock: ['NumLock', '{NumLock}'],
-	scrolllock: ['ScrollLock', '{ScrollLock}'],
-	num0: ['Numpad0', '{Numpad0}'],
-	num1: ['Numpad1', '{Numpad1}'],
-	num2: ['Numpad0', '{Numpad0}'],
-	num3: ['Numpad0', '{Numpad0}'],
-	num4: ['Numpad0', '{Numpad0}'],
-	num5: ['Numpad0', '{Numpad0}'],
-	num6: ['Numpad0', '{Numpad0}'],
-	num7: ['Numpad0', '{Numpad0}'],
-	num8: ['Numpad0', '{Numpad0}'],
-	num9: ['Numpad0', '{Numpad0}'],
-	numadd: ['NumpadAdd', '{NumpadAdd}'],
-	numsub: ['NumpadSub', '{NumpadSub}'],
-	nummul: ['NumpadMult', '{NumpadMult}'],
-	numdiv: ['NumpadDiv', '{NumpadDiv}'],
-}
-
-export const AHKModifierMap: Record<string, string> = {
-	ctrl: '^',
-	shift: '+',
-	alt: '!',
-	cmd: '#',
-}
-
-export const AHKBaseHeader = [
-	'#NoEnv',
-	'SendMode Input',
-	'SetWorkingDir %A_ScriptDir%',
-	'',
-	'#IfWinActive, ahk_class Chrome_WidgetWin_1',
-	'',
-]
-
-export const useAHKComboTemplate = _.template('<%=platformKeyCombo%> up:: send <%=browserKeyCombo%>')
diff --git a/packages/meteor-lib/src/userAction.ts b/packages/meteor-lib/src/userAction.ts
index b9eb3dc262..43b54388a2 100644
--- a/packages/meteor-lib/src/userAction.ts
+++ b/packages/meteor-lib/src/userAction.ts
@@ -50,7 +50,9 @@ export enum UserAction {
 	PERIPHERAL_DEVICE_REFRESH_DEBUG_STATES,
 	ACTIVATE_ADLIB_TESTING,
 	QUEUE_NEXT_SEGMENT,
+	EXECUTE_USER_OPERATION,
 	CREATE_ADLIB_TESTING_RUNDOWN,
 	SET_QUICK_LOOP_START,
 	SET_QUICK_LOOP_END,
+	CLEAR_QUICK_LOOP,
 }
diff --git a/packages/mos-gateway/CHANGELOG.md b/packages/mos-gateway/CHANGELOG.md
index bd46ea33b2..9a91703d98 100644
--- a/packages/mos-gateway/CHANGELOG.md
+++ b/packages/mos-gateway/CHANGELOG.md
@@ -3,6 +3,81 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
+## [1.51.3](https://github.com/nrkno/sofie-core/compare/v1.51.2...v1.51.3) (2024-11-21)
+
+
+### Bug Fixes
+
+* change CMD in GHA Dockerfile ([b13ba2d](https://github.com/nrkno/sofie-core/commit/b13ba2d60afb5a95314fcd217b4e63b4b5f241fa))
+
+
+
+
+
+## [1.51.2](https://github.com/nrkno/sofie-core/compare/v1.51.1...v1.51.2) (2024-11-21)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+## [1.51.1](https://github.com/nrkno/sofie-core/compare/v1.51.1-2...v1.51.1) (2024-11-13)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+## [1.51.1-2](https://github.com/nrkno/sofie-core/compare/v1.51.1-1...v1.51.1-2) (2024-10-24)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+## [1.51.1-1](https://github.com/nrkno/sofie-core/compare/v1.51.1-0...v1.51.1-1) (2024-10-18)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+## [1.51.1-0](https://github.com/nrkno/sofie-core/compare/v1.51.0...v1.51.1-0) (2024-10-18)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+# [1.51.0](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.3...v1.51.0) (2024-10-07)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+# [1.51.0-in-testing.3](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.2...v1.51.0-in-testing.3) (2024-09-25)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
+# [1.51.0-in-testing.2](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.1...v1.51.0-in-testing.2) (2024-09-24)
+
+**Note:** Version bump only for package mos-gateway
+
+
+
+
+
 # [1.51.0-in-testing.1](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.0...v1.51.0-in-testing.1) (2024-09-06)
diff --git a/packages/mos-gateway/Dockerfile.circle b/packages/mos-gateway/Dockerfile.circle
index e6789a9abc..8f2d445450 100644
--- a/packages/mos-gateway/Dockerfile.circle
+++ b/packages/mos-gateway/Dockerfile.circle
@@ -8,4 +8,4 @@ COPY server-core-integration /opt/server-core-integration
 COPY shared-lib /opt/shared-lib

 WORKDIR /opt/mos-gateway
-CMD ["yarn", "start"]
+CMD ["node", "dist/index.js"]
diff --git a/packages/openapi/api/actions.yaml b/packages/openapi/api/actions.yaml
index eb12981019..26ddddabd8 100644
--- a/packages/openapi/api/actions.yaml
+++ b/packages/openapi/api/actions.yaml
@@ -74,6 +74,8 @@ paths:
     $ref: 'definitions/studios.yaml#/resources/studios'
   /studios/{studioId}:
     $ref: 'definitions/studios.yaml#/resources/studio'
+  /studios/{studioId}/config:
+    $ref: 'definitions/studios.yaml#/resources/config'
   /studios/{studioId}/switch-route-set:
     $ref: 'definitions/studios.yaml#/resources/switchRouteSet'
   /studios/{studioId}/devices:
@@ -87,6 +89,8 @@ paths:
     $ref: 'definitions/showstyles.yaml#/resources/showStyleBases'
   /showstyles/{showStyleBaseId}:
     $ref: 'definitions/showstyles.yaml#/resources/showStyleBase'
+  /showstyles/{showStyleBaseId}/config:
+    $ref: 'definitions/showstyles.yaml#/resources/config'
   /showstyles/{showStyleBaseId}/variants:
     $ref: 'definitions/showstyles.yaml#/resources/showStyleVariants'
/showstyles/{showStyleBaseId}/variants/{showStyleVariantId}: diff --git a/packages/openapi/api/definitions/playlists.yaml b/packages/openapi/api/definitions/playlists.yaml index 8a4b050414..6ce6e478ed 100644 --- a/packages/openapi/api/definitions/playlists.yaml +++ b/packages/openapi/api/definitions/playlists.yaml @@ -109,15 +109,19 @@ resources: schema: type: object properties: - actionType: - type: string - description: An actionType string to specify a particular variation for the AdLibAction, valid strings are to be read from the status API adLibId: type: string description: AdLib to execute + actionType: + type: string + description: An actionType string to specify a particular variation for the AdLibAction, valid strings are to be read from the status API + adLibOptions: + type: object + description: AdLibAction options object defined according to the optionsSchema provided in the adLib status API required: - adLibId example: + adLibId: adlib_action_camera actionType: pvw responses: 200: diff --git a/packages/openapi/api/definitions/showstyles.yaml b/packages/openapi/api/definitions/showstyles.yaml index c2dc3f0af1..f1f66fdc60 100644 --- a/packages/openapi/api/definitions/showstyles.yaml +++ b/packages/openapi/api/definitions/showstyles.yaml @@ -224,6 +224,117 @@ resources: additionalProperties: false 500: $ref: '#/components/responses/internalServerError' + config: + get: + operationId: getShowStyleConfig + tags: + - showstyles + summary: Returns the requested ShowStyle config + parameters: + - name: showStyleBaseId + in: path + description: Id of ShowStyle to retrieve the config from + required: true + schema: + type: string + responses: + 200: + description: ShowStyle config found. + content: + application/json: + schema: + type: object + properties: + status: + type: number + example: 200 + result: + type: object + description: Blueprint config. + properties: + developerMode: + type: boolean + example: true + additionalProperties: true + 404: + $ref: '#/components/responses/showStyleBaseNotFound' + 500: + $ref: '#/components/responses/internalServerError' + put: + operationId: updateShowStyleConfig + tags: + - showstyles + summary: Updates an existing ShowStyle config. + parameters: + - name: showStyleBaseId + in: path + description: Id of ShowStyle to update the config for. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + type: object + description: Blueprint config. + properties: + developerMode: + type: boolean + example: true + additionalProperties: true + responses: + 200: + description: Operation successful. + content: + application/json: + schema: + type: object + properties: + status: + type: number + example: 200 + 404: + $ref: '#/components/responses/showStyleBaseNotFound' + 409: + description: The specified ShowStyle config has failed validation. + content: + application/json: + schema: + type: object + properties: + status: + type: number + example: 409 + message: + type: string + details: + type: array + items: + type: string + example: 'Invalid Union' + required: + - status + - message + additionalProperties: false + 412: + description: The specified ShowStyleBase is in use in an on-air Rundown. 
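For the new `adLibOptions` request body introduced in playlists.yaml above, a hedged fetch sketch: the `/api/v1.0` mount point and the `execute-adlib` route name are assumptions not shown in this diff, while the body fields come from the spec, and the options object must match the `optionsSchema` that the adLibs status topic now publishes per adLib.

```ts
// Sketch only: URL prefix and route name are assumed, not taken from this diff.
async function executeAdLibWithOptions(playlistId: string): Promise<void> {
	const res = await fetch(`/api/v1.0/playlists/${playlistId}/execute-adlib`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({
			adLibId: 'adlib_action_camera', // example id from the spec above
			actionType: 'pvw',
			// Hypothetical options; must validate against this adLib's optionsSchema
			adLibOptions: { camera: 2 },
		}),
	})
	if (!res.ok) throw new Error(`execute-adlib failed: ${res.status}`)
}
```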
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                status:
+                  type: number
+                  example: 412
+                message:
+                  type: string
+              required:
+                - status
+                - message
+              additionalProperties: false
+      500:
+        $ref: '#/components/responses/internalServerError'
   showStyleVariants:
     get:
       operationId: getShowStyleVariants
diff --git a/packages/openapi/api/definitions/studios.yaml b/packages/openapi/api/definitions/studios.yaml
index 8b70c2e7e5..2c27bbd1cb 100644
--- a/packages/openapi/api/definitions/studios.yaml
+++ b/packages/openapi/api/definitions/studios.yaml
@@ -157,6 +157,76 @@ resources:
         $ref: '#/components/responses/studioInUse'
       500:
         $ref: '#/components/responses/internalServerError'
+  config:
+    get:
+      operationId: getStudioConfig
+      tags:
+        - studios
+      summary: Gets a Studio blueprint configuration.
+      parameters:
+        - name: studioId
+          in: path
+          description: Id of the Studio to retrieve the config from.
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: Configuration found.
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  status:
+                    type: number
+                    example: 200
+                  result:
+                    type: object
+                    description: Blueprint configuration.
+                    properties:
+                      developerMode:
+                        type: boolean
+                        example: true
+                    additionalProperties: true
+        404:
+          $ref: '#/components/responses/studioNotFound'
+        500:
+          $ref: '#/components/responses/internalServerError'
+    put:
+      operationId: updateStudioConfig
+      tags:
+        - studios
+      summary: Updates an existing Studio blueprint configuration.
+      parameters:
+        - name: studioId
+          in: path
+          description: Id of the Studio to update the config for.
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              type: object
+              description: Blueprint configuration.
+              properties:
+                developerMode:
+                  type: boolean
+                  example: true
+              additionalProperties: true
+      responses:
+        200:
+          $ref: '#/components/responses/putSuccess'
+        404:
+          $ref: '#/components/responses/studioNotFound'
+        409:
+          $ref: '#/components/responses/studioNotValid'
+        412:
+          $ref: '#/components/responses/studioInUse'
+        500:
+          $ref: '#/components/responses/internalServerError'
   switchRouteSet:
     put:
       operationId: switchRouteSet
@@ -476,6 +546,15 @@ components:
         fallbackPartDuration:
           type: number
           description: The duration to apply on too short Parts Within QuickLoop when forceQuickLoopAutoNext is set to `enabled_forcing_min_duration`
+        allowAdlibTestingSegment:
+          type: boolean
+          description: Whether to allow AdLib testing mode before a Part is playing in a Playlist
+        allowHold:
+          type: boolean
+          description: Whether to allow hold operations for Rundowns in this Studio
+        allowPieceDirectPlay:
+          type: boolean
+          description: Whether to allow direct playing of a Piece in the Rundown
       required:
         - frameRate
diff --git a/packages/openapi/src/__tests__/showstyles.spec.ts b/packages/openapi/src/__tests__/showstyles.spec.ts
index d510848272..c4a6d803da 100644
--- a/packages/openapi/src/__tests__/showstyles.spec.ts
+++ b/packages/openapi/src/__tests__/showstyles.spec.ts
@@ -1,5 +1,11 @@
 // eslint-disable-next-line node/no-missing-import
-import { Configuration, ShowStyleBase, ShowstylesApi, ShowStyleVariant } from '../../client/ts'
+import {
+	Configuration,
+	GetShowStyleConfig200ResponseResult,
+	ShowStyleBase,
+	ShowstylesApi,
+	ShowStyleVariant,
+} from '../../client/ts'
 import { checkServer } from '../checkServer'
 import Logging from '../httpLogging'
 
@@ -72,6 +78,26 @@ describe('Network client', () => {
 		expect(showStyle.status).toBe(200)
 	})
 
+	let showStyleConfig: GetShowStyleConfig200ResponseResult | undefined
+	test('can request a ShowStyle config by id', async () => {
+		const showStyle = await showStylesApi.getShowStyleConfig({
+			showStyleBaseId: showStyleBaseIds[0],
+		})
+		expect(showStyle.status).toBe(200)
+		expect(showStyle).toHaveProperty('result')
+		expect(showStyle.result).toHaveProperty('developerMode')
+		showStyleConfig = JSON.parse(JSON.stringify(showStyle.result))
+	})
+
+	test('can update a ShowStyle config', async () => {
+		if (!showStyleConfig) throw new Error('showStyleConfig was not set by the previous test')
+		showStyleConfig.developerMode = !showStyleConfig.developerMode
+		const showStyle = await showStylesApi.updateShowStyleConfig({
+			showStyleBaseId: showStyleBaseIds[0],
+			requestBody: showStyleConfig,
+		})
+		expect(showStyle.status).toBe(200)
+	})
+
 	const showStyleVariantIds: string[] = []
 	test('can request all ShowStyleVariants', async () => {
 		const showStyleVariants = await showStylesApi.getShowStyleVariants({
diff --git a/packages/openapi/src/__tests__/studios.spec.ts b/packages/openapi/src/__tests__/studios.spec.ts
index e81e4a7694..f0c601ffc4 100644
--- a/packages/openapi/src/__tests__/studios.spec.ts
+++ b/packages/openapi/src/__tests__/studios.spec.ts
@@ -1,5 +1,5 @@
 // eslint-disable-next-line node/no-missing-import
-import { Configuration, Studio, StudiosApi } from '../../client/ts'
+import { Configuration, GetStudioConfig200ResponseResult, Studio, StudiosApi } from '../../client/ts'
 import { checkServer } from '../checkServer'
 import Logging from '../httpLogging'
 
@@ -58,6 +58,26 @@ describe('Network client', () => {
 		expect(studio.status).toBe(200)
 	})
 
+	let studioConfig: GetStudioConfig200ResponseResult | undefined
+	test('can request a Studio config by id', async () => {
+		const studio = await studiosApi.getStudioConfig({
+			studioId: studioIds[0],
+		})
+		expect(studio.status).toBe(200)
+		expect(studio).toHaveProperty('result')
+		expect(studio.result).toHaveProperty('developerMode')
+		studioConfig = JSON.parse(JSON.stringify(studio.result))
+	})
+
+	test('can update a Studio config', async () => {
+		if (!studioConfig) throw new Error('studioConfig was not set by the previous test')
+		studioConfig.developerMode = !studioConfig.developerMode
+		const studio = await studiosApi.updateStudioConfig({
+			studioId: studioIds[0],
+			requestBody: studioConfig,
+		})
+		expect(studio.status).toBe(200)
+	})
+
 	const studioDevices: string[] = []
 	test('can request a list of devices for a studio', async () => {
 		const devices = await studiosApi.devices({ studioId: studioIds[0] })
diff --git a/packages/package.json b/packages/package.json
index a9e4603c2b..13435f2a65 100644
--- a/packages/package.json
+++ b/packages/package.json
@@ -68,5 +68,8 @@
 		"typescript": "~4.9.5"
 	},
 	"name": "packages",
-	"packageManager": "yarn@3.5.0"
+	"packageManager": "yarn@3.5.0",
+	"resolutions": {
+		"timecode@0.0.4": "patch:timecode@npm%3A0.0.4#./.yarn/patches/timecode-npm-0.0.4-82bde9e6fe.patch"
+	}
 }
diff --git a/packages/playout-gateway/CHANGELOG.md b/packages/playout-gateway/CHANGELOG.md
index d68d91d33d..560b526971 100644
--- a/packages/playout-gateway/CHANGELOG.md
+++ b/packages/playout-gateway/CHANGELOG.md
@@ -3,6 +3,105 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
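An editorial aside on the config endpoints and client tests above: the same round-trip can be sketched over plain HTTP. This is a minimal sketch, assuming a locally reachable Sofie instance; the host, port, and `/api/v1.0` base path are illustrative assumptions, not confirmed values:

```typescript
// Sketch: toggle a Studio's developerMode flag via the new config endpoints.
// The base URL is an assumption; adjust it for the actual deployment.
const BASE = 'http://localhost:3000/api/v1.0'

async function toggleStudioDeveloperMode(studioId: string): Promise<void> {
	// GET /studios/{studioId}/config responds with { status: 200, result: <blueprint config> }
	const getRes = await fetch(`${BASE}/studios/${studioId}/config`)
	if (!getRes.ok) throw new Error(`GET config failed: ${getRes.status}`)
	const { result } = await getRes.json()

	// PUT replaces the whole blueprint config. Per the schema above, 409 means the
	// config failed validation and 412 means the Studio is in use in an on-air Rundown.
	const putRes = await fetch(`${BASE}/studios/${studioId}/config`, {
		method: 'PUT',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ ...result, developerMode: !result.developerMode }),
	})
	if (!putRes.ok) throw new Error(`PUT config failed: ${putRes.status}`)
}
```

The specs above exercise the same endpoints through the generated typescript-fetch client, which is the supported path.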
+## [1.51.3](https://github.com/nrkno/sofie-core/compare/v1.51.2...v1.51.3) (2024-11-21)
+
+### Bug Fixes
+
+* change CMD in GHA Dockerfile ([b13ba2d](https://github.com/nrkno/sofie-core/commit/b13ba2d60afb5a95314fcd217b4e63b4b5f241fa))
+
+## [1.51.2](https://github.com/nrkno/sofie-core/compare/v1.51.1...v1.51.2) (2024-11-21)
+
+**Note:** Version bump only for package playout-gateway
+
+## [1.51.1](https://github.com/nrkno/sofie-core/compare/v1.51.1-2...v1.51.1) (2024-11-13)
+
+**Note:** Version bump only for package playout-gateway
+
+## [1.51.1-2](https://github.com/nrkno/sofie-core/compare/v1.51.1-1...v1.51.1-2) (2024-10-24)
+
+**Note:** Version bump only for package playout-gateway
+
+## [1.51.1-1](https://github.com/nrkno/sofie-core/compare/v1.51.1-0...v1.51.1-1) (2024-10-18)
+
+**Note:** Version bump only for package playout-gateway
+
+## [1.51.1-0](https://github.com/nrkno/sofie-core/compare/v1.51.0...v1.51.1-0) (2024-10-18)
+
+**Note:** Version bump only for package playout-gateway
+
+# [1.51.0](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.3...v1.51.0) (2024-10-07)
+
+**Note:** Version bump only for package playout-gateway
+
+# [1.51.0-in-testing.3](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.2...v1.51.0-in-testing.3) (2024-09-25)
+
+**Note:** Version bump only for package playout-gateway
+
+# [1.51.0-in-testing.2](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.1...v1.51.0-in-testing.2) (2024-09-24)
+
+**Note:** Version bump only for package playout-gateway
+
 # [1.51.0-in-testing.1](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.0...v1.51.0-in-testing.1) (2024-09-06)
diff --git a/packages/playout-gateway/Dockerfile.circle b/packages/playout-gateway/Dockerfile.circle
index 89e32da5d8..3dc8d9470e 100644
--- a/packages/playout-gateway/Dockerfile.circle
+++ b/packages/playout-gateway/Dockerfile.circle
@@ -8,4 +8,4 @@ COPY server-core-integration /opt/server-core-integration
 COPY shared-lib /opt/shared-lib
 
 WORKDIR /opt/playout-gateway
-CMD ["yarn", "start"]
+CMD ["node", "dist/index.js"]
diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json
index d884d638d1..cb6bfdc773 100644
--- a/packages/playout-gateway/package.json
+++ b/packages/playout-gateway/package.json
@@ -60,7 +60,7 @@
 		"@sofie-automation/shared-lib": "1.52.0-in-development",
 		"debug": "^4.3.4",
 		"influx": "^5.9.3",
-		"timeline-state-resolver": "9.2.0-nightly-release52-20240909-111856-517f0ee37.0",
+		"timeline-state-resolver": "9.2.0-nightly-release52-20240923-122840-58cfbb259.0",
 		"tslib": "^2.6.2",
 		"underscore": "^1.13.6",
 		"winston": "^3.11.0"
diff --git a/packages/playout-gateway/src/coreHandler.ts b/packages/playout-gateway/src/coreHandler.ts
index dd31642400..1f73222973 100644
--- a/packages/playout-gateway/src/coreHandler.ts
+++ b/packages/playout-gateway/src/coreHandler.ts
@@ -346,7 +346,7 @@ export class CoreHandler {
 		const devices: any[] = []
 		if (this._tsrHandler) {
-			for (const device of this._tsrHandler.tsr.getDevices()) {
+			for (const device of this._tsrHandler.tsr.connectionManager.getConnections()) {
 				devices.push({
 					instanceId: device.instanceId,
 					deviceId: device.deviceId,
@@ -416,7 +416,6 @@ export class CoreTSRDeviceHandler {
 	public _deviceId: string
 	public _device!: BaseRemoteDeviceIntegration<DeviceOptionsAny>
 	private _coreParentHandler: CoreHandler
-	private _tsrHandler: TSRHandler
 	private _hasGottenStatusChange = false
 	private _deviceStatus: PeripheralDeviceAPI.PeripheralDeviceStatusObject = {
 		statusCode: StatusCode.BAD,
@@ -424,16 +423,10 @@ export class CoreTSRDeviceHandler {
 	}
 	private disposed = false
-	constructor(
-		parent: CoreHandler,
-		device: Promise<BaseRemoteDeviceIntegration<DeviceOptionsAny>>,
-		deviceId: string,
-		tsrHandler: TSRHandler
-	) {
+	constructor(parent: CoreHandler, device: Promise<BaseRemoteDeviceIntegration<DeviceOptionsAny>>, deviceId: string) {
 		this._coreParentHandler = parent
 		this._devicePr = device
 		this._deviceId = deviceId
-		this._tsrHandler = tsrHandler
 	}
 	async init(): Promise<void> {
 		this._device = await this._devicePr
@@ -455,10 +448,11 @@ export class CoreTSRDeviceHandler {
 			)
 		})
 
+		this._coreParentHandler.logger.debug('has got status? ' + this._hasGottenStatusChange)
 		if (!this._hasGottenStatusChange) {
 			this._deviceStatus = await this._device.device.getStatus()
-			this.sendStatus()
 		}
+		this.sendStatus()
 		if (this.disposed) throw new Error('CoreTSRDeviceHandler cant init, is disposed')
 		await this.setupSubscriptionsAndObservers()
 		if (this.disposed) throw new Error('CoreTSRDeviceHandler cant init, is disposed')
@@ -490,8 +484,9 @@ export class CoreTSRDeviceHandler {
 		// setup observers
 		this._coreParentHandler.setupObserverForPeripheralDeviceCommands(this)
 	}
-	statusChanged(deviceStatus: Partial<PeripheralDeviceAPI.PeripheralDeviceStatusObject>): void {
-		this._hasGottenStatusChange = true
+	statusChanged(deviceStatus: Partial<PeripheralDeviceAPI.PeripheralDeviceStatusObject>, fromDevice = true): void {
+		this._coreParentHandler.logger.debug('device ' + this._deviceId + ' status set to ' + deviceStatus.statusCode)
+		if (fromDevice) this._hasGottenStatusChange = true
 
 		this._deviceStatus = {
 			...this._deviceStatus,
@@ -545,7 +540,8 @@ export class CoreTSRDeviceHandler {
 	async dispose(subdevice: 'keepSubDevice' | 'removeSubDevice' = 'keepSubDevice'): Promise<void> {
 		this._observers.forEach((obs) => obs.stop())
 
-		await this._tsrHandler.tsr.removeDevice(this._deviceId)
+		if (!this.core) return
+
 		await this.core.setStatus({
 			statusCode: StatusCode.BAD,
 			messages: ['Uninitialized'],
diff --git a/packages/playout-gateway/src/tsrHandler.ts b/packages/playout-gateway/src/tsrHandler.ts
index d3245bd383..40def38103 100644
--- a/packages/playout-gateway/src/tsrHandler.ts
+++ b/packages/playout-gateway/src/tsrHandler.ts
@@ -7,15 +7,10 @@ import {
 	TSRTimelineObj,
 	TSRTimeline,
 	TSRTimelineContent,
-	CommandReport,
 	DeviceOptionsAtem,
 	AtemMediaPoolAsset,
-	MediaObject,
 	ExpectedPlayoutItem,
 	ExpectedPlayoutItemContent,
-	SlowSentCommandInfo,
-	SlowFulfilledCommandInfo,
-	DeviceStatus,
 	StatusCode,
 	Datastore,
 } from 'timeline-state-resolver'
@@ -55,8 +50,6 @@ import {
 	unprotectString,
 } from '@sofie-automation/server-core-integration'
 import { BaseRemoteDeviceIntegration } from 'timeline-state-resolver/dist/service/remoteDeviceInstance'
-import { DeviceEvents } from 'timeline-state-resolver/dist/service/device'
-import EventEmitter = require('eventemitter3')
 
 const debug = Debug('playout-gateway')
 
@@ -69,31 +62,6 @@ export interface TimelineContentObjectTmp {
 	inGroup?: string
 }
 
-/** Max time for initializing devices */
-const INIT_TIMEOUT = 10000
-
-enum DeviceAction {
-	ADD = 'add',
-	READD = 'readd',
-	REMOVE = 'remove',
-}
-
-type DeviceActionResult = {
-	success: boolean
-	deviceId: string
-	action: DeviceAction
-}
-
-type UpdateDeviceOperationsResult =
-	| {
-			success: true
-			results: DeviceActionResult[]
-	  }
-	| {
-			success: false
-			reason: 'timeout' | 'error'
-			details: string[]
-	  }
 
 /**
  * Represents a connection between Gateway and TSR
@@ -223,6 +191,8 @@ export class TSRHandler {
 		})
 		this.tsr.on('timeTrace', (trace: FinishedTrace) => sendTrace(trace))
 
+		this.attachTSRConnectionEvents()
+
 		this.logger.debug('tsr init')
 		await this.tsr.init()
 
@@ -234,6 +204,195 @@ export class TSRHandler {
 		this.logger.debug('tsr init done')
 	}
 
+	private attachTSRConnectionEvents() {
+		this.tsr.connectionManager.on('info', (info) => this.logger.info('TSR ConnectionManager: ' + info))
+		this.tsr.connectionManager.on('warning', (warning) => this.logger.warn('TSR ConnectionManager: ' + warning))
+		this.tsr.connectionManager.on('debug', (...args) => {
+			if (!this._coreHandler.logDebug) {
+				return
+			}
+			const data = args.map((arg) => (typeof arg === 'object' ? JSON.stringify(arg) : arg))
+			this.logger.debug(`TSR ConnectionManager debug (${args.length})`, { data })
+		})
+
+		this.tsr.connectionManager.on('connectionAdded', (id, container) => {
+			const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, Promise.resolve(container), id)
+			this._coreTsrHandlers[id] = coreTsrHandler
+
+			// set the status to uninitialized for now:
+			coreTsrHandler.statusChanged(
+				{
+					statusCode: StatusCode.BAD,
+					messages: ['Device initialising...'],
+				},
+				false
+			)
+
+			this._triggerupdateExpectedPlayoutItems() // So that any recently created devices will get all the ExpectedPlayoutItems
+		})
+
+		this.tsr.connectionManager.on('connectionInitialised', (id) => {
+			const coreTsrHandler = this._coreTsrHandlers[id]
+
+			if (!coreTsrHandler) {
+				this.logger.error('TSR Connection was initialised, but there was no CoreTSRHandler for it')
+				return
+			}
+
+			coreTsrHandler.init().catch((e) => this.logger.error('CoreTSRHandler failed to initialise', e)) // todo - is this the right way to log this?
+		})
+
+		this.tsr.connectionManager.on('connectionRemoved', (id) => {
+			const coreTsrHandler = this._coreTsrHandlers[id]
+
+			if (!coreTsrHandler) {
+				this.logger.error('TSR Connection was removed, but there was no CoreTSRHandler to handle that')
+				return
+			}
+
+			coreTsrHandler.dispose('removeSubDevice').catch((e) => {
+				this.logger.error('Failed to dispose of coreTsrHandler for ' + id + ': ' + e)
+			})
+			delete this._coreTsrHandlers[id]
+		})
+
+		const fixLog = (id: string, e: string): string => {
+			const device = this._coreTsrHandlers[id]?._device
+
+			return `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 'instance unknown'}): ` + e
+		}
+		const fixError = (id: string, e: Error): any => {
+			const device = this._coreTsrHandlers[id]?._device
+			const name = `Device "${device?.deviceName ?? id}" (${device?.instanceId ??
'instance unknown'})` + + return { + message: e.message && name + ': ' + e.message, + name: e.name && name + ': ' + e.name, + stack: e.stack && e.stack + '\nAt device' + name, + } + } + const fixContext = (...context: any[]): any => { + return { + context, + } + } + + this.tsr.connectionManager.on('connectionEvent:connectionChanged', (id, status) => { + const coreTsrHandler = this._coreTsrHandlers[id] + if (!coreTsrHandler) return + + coreTsrHandler.statusChanged(status) + + // When the status has changed, the deviceName might have changed: + coreTsrHandler._device.reloadProps().catch((err) => { + this.logger.error(`Error in reloadProps: ${stringifyError(err)}`) + }) + // hack to make sure atem has media after restart + if ( + (status.statusCode === StatusCode.GOOD || + status.statusCode === StatusCode.WARNING_MINOR || + status.statusCode === StatusCode.WARNING_MAJOR) && + coreTsrHandler._device.deviceType === DeviceType.ATEM && + !disableAtemUpload + ) { + const assets = (coreTsrHandler._device.deviceOptions as DeviceOptionsAtem).options?.mediaPoolAssets + if (assets && assets.length > 0) { + try { + this.uploadFilesToAtem( + coreTsrHandler._device, + assets.filter((asset) => _.isNumber(asset.position) && asset.path) + ) + } catch (e) { + // don't worry about it. + } + } + } + }) + this.tsr.connectionManager.on('connectionEvent:slowSentCommand', (id, info) => { + // If the internalDelay is too large, it should be logged as an error, + // since something took too long internally. + + if (info.internalDelay > 100) { + this.logger.error('slowSentCommand', { + id, + ...info, + }) + } else { + this.logger.warn('slowSentCommand', { + id, + ...info, + }) + } + }) + this.tsr.connectionManager.on('connectionEvent:slowFulfilledCommand', (id, info) => { + // Note: we don't emit slow fulfilled commands as error, since + // the fulfillment of them lies on the device being controlled, not on us. 
+ + this.logger.warn('slowFulfilledCommand', { + id, + ...info, + }) + }) + this.tsr.connectionManager.on('connectionEvent:commandError', (id, error, context) => { + // todo: handle this better + this.logger.error(fixError(id, error), { context }) + }) + this.tsr.connectionManager.on('connectionEvent:commandReport', (_id, commandReport) => { + if (this._reportAllCommands) { + // Todo: send these to Core + this.logger.info('commandReport', { + commandReport: commandReport, + }) + } + }) + this.tsr.connectionManager.on('connectionEvent:updateMediaObject', (id, collectionId, docId, doc) => { + const coreTsrHandler = this._coreTsrHandlers[id] + if (!coreTsrHandler) return + + coreTsrHandler.onUpdateMediaObject(collectionId, docId, doc) + }) + this.tsr.connectionManager.on('connectionEvent:clearMediaObjects', (id, collectionId) => { + const coreTsrHandler = this._coreTsrHandlers[id] + if (!coreTsrHandler) return + + coreTsrHandler.onClearMediaObjectCollection(collectionId) + }) + this.tsr.connectionManager.on('connectionEvent:info', (id, info) => { + this.logger.info(fixLog(id, info)) + }) + this.tsr.connectionManager.on('connectionEvent:warning', (id, warning) => { + this.logger.warn(fixLog(id, warning)) + }) + this.tsr.connectionManager.on('connectionEvent:error', (id, context, error) => { + this.logger.error(fixError(id, error), fixContext(context)) + }) + this.tsr.connectionManager.on('connectionEvent:debug', (id, ...args) => { + const device = this._coreTsrHandlers[id]?._device + + if (!device?.debugLogging && !this._coreHandler.logDebug) { + return + } + if (args.length === 0) { + this.logger.debug('>empty message<') + return + } + const data = args.map((arg) => (typeof arg === 'object' ? JSON.stringify(arg) : arg)) + this.logger.debug(`Device "${device?.deviceName || id}" (${device?.instanceId})`, { data }) + }) + this.tsr.connectionManager.on('connectionEvent:debugState', (id, state) => { + const device = this._coreTsrHandlers[id]?._device + + if (device?.debugState && this._coreHandler.logDebug) { + // Fetch the Id that core knows this device by + const coreId = this._coreTsrHandlers[device.deviceId].core.deviceId + this._debugStates.set(unprotectString(coreId), state) + } + }) + this.tsr.connectionManager.on('connectionEvent:timeTrace', (_id, trace) => { + sendTrace(trace) + }) + } + private loadSubdeviceConfigurations(): { [deviceType: string]: Record } { const defaultDeviceOptions: { [deviceType: string]: Record } = {} @@ -496,40 +655,14 @@ export class TSRHandler { } private async _updateDevices(): Promise { - this.logger.debug('updateDevices start') - const peripheralDevice = this._getPeripheralDevice() - const ps: Promise[] = [] - const promiseOperations: { [id: string]: { deviceId: string; operation: DeviceAction } } = {} - const keepTrack = async (p: Promise, deviceId: string, operation: DeviceAction) => { - const name = `${operation}_${deviceId}` - promiseOperations[name] = { - deviceId, - operation, - } - return p.then((result) => { - delete promiseOperations[name] - return result - }) - } - const deviceOptions = new Map() - if (peripheralDevice) { + const connections: Record = {} const devices = peripheralDevice.playoutDevices for (const [deviceId, device0] of Object.entries(devices)) { - const device = device0 - if (!device.disable) { - deviceOptions.set(deviceId, device) - } - } - - for (const [deviceId, orgDeviceOptions] of deviceOptions.entries()) { - const oldDevice: BaseRemoteDeviceIntegration | undefined = this.tsr.getDevice( - deviceId, - true - ) + if 
(device0.disable) continue const deviceOptions = _.extend( { @@ -538,9 +671,8 @@ export class TSRHandler { limitSlowFulfilledCommand: 100, options: {}, }, - this.populateDefaultValuesIfMissing(orgDeviceOptions) + this.populateDefaultValuesIfMissing(device0) ) - if (this._multiThreaded !== null && deviceOptions.isMultiThreaded === undefined) { deviceOptions.isMultiThreaded = this._multiThreaded } @@ -548,130 +680,11 @@ export class TSRHandler { deviceOptions.reportAllCommands = this._reportAllCommands } - if (!oldDevice) { - if (deviceOptions.options) { - this.logger.info('Initializing device: ' + deviceId) - this.logger.info('new', deviceOptions) - ps.push(keepTrack(this._addDevice(deviceId, deviceOptions), deviceId, DeviceAction.ADD)) - } - } else { - if (deviceOptions.options) { - let anyChanged = false - - if ( - // Changing the debug flag shouldn't restart the device: - !_.isEqual(_.omit(oldDevice.deviceOptions, 'debug'), _.omit(deviceOptions, 'debug')) - ) { - anyChanged = true - } - - if (anyChanged) { - deviceOptions.debug = this.getDeviceDebug(orgDeviceOptions) - - this.logger.info('Re-initializing device: ' + deviceId) - this.logger.info('old', oldDevice.deviceOptions) - this.logger.info('new', deviceOptions) - ps.push( - keepTrack(this._removeDevice(deviceId), deviceId, DeviceAction.REMOVE).then(async () => - keepTrack(this._addDevice(deviceId, deviceOptions), deviceId, DeviceAction.READD) - ) - ) - } - } - } - } - - for (const oldDevice of this.tsr.getDevices()) { - const deviceId = oldDevice.deviceId - if (!deviceOptions.has(deviceId)) { - this.logger.info('Un-initializing device: ' + deviceId) - ps.push(keepTrack(this._removeDevice(deviceId), deviceId, DeviceAction.REMOVE)) - } - } - } - - const resultsOrTimeout = await Promise.race([ - Promise.all(ps).then((results) => ({ - success: true, - results, - })), - new Promise((resolve) => - setTimeout(() => { - const keys = Object.keys(promiseOperations) - if (keys.length) { - this.logger.warn( - `Timeout in _updateDevices: ${Object.values<{ deviceId: string; operation: DeviceAction }>( - promiseOperations - ) - .map((op) => op.deviceId) - .join(',')}` - ) - } - - Promise.all( - // At this point in time, promiseOperations contains the promises that have timed out. - // If we tried to add or re-add a device, that apparently failed so we should remove the device in order to - // give it another chance next time _updateDevices() is called. 
- Object.values<{ deviceId: string; operation: DeviceAction }>(promiseOperations) - .filter((op) => op.operation === DeviceAction.ADD || op.operation === DeviceAction.READD) - .map(async (op) => - // the device was never added, should retry next round - this._removeDevice(op.deviceId) - ) - ) - .catch((e) => { - this.logger.error( - `Error when trying to remove unsuccessfully initialized devices: ${stringifyIds( - Object.values<{ deviceId: string; operation: DeviceAction }>(promiseOperations).map( - (op) => op.deviceId - ) - )}`, - e - ) - }) - .finally(() => { - resolve({ - success: false, - reason: 'error', - details: keys, - }) - }) - }, INIT_TIMEOUT) - ), // Timeout if not all are resolved within INIT_TIMEOUT - ]) - - await this._reportResult(resultsOrTimeout) - - const debugLoggingPs: Promise[] = [] - // Set logDebug on the devices: - for (const device of this.tsr.getDevices()) { - const options: DeviceOptionsAny | undefined = deviceOptions.get(device.deviceId) - if (!options) { - continue - } - const debug: boolean = this.getDeviceDebug(options) - if (device.debugLogging !== debug) { - this.logger.info(`Setting logDebug of device ${device.deviceId} to ${debug}`) - debugLoggingPs.push(device.setDebugLogging(debug)) - } - } - // Set debugState on devices: - for (const device of this.tsr.getDevices()) { - const options: DeviceOptionsAny | undefined = deviceOptions.get(device.deviceId) - if (!options) { - continue + connections[deviceId] = deviceOptions } - const debug: boolean = this.getDeviceDebugState(options) - if (device.debugState !== debug) { - this.logger.info(`Setting debugState of device ${device.deviceId} to ${debug}`) - debugLoggingPs.push(device.setDebugState(debug)) - } + this.tsr.connectionManager.setConnections(connections) } - await Promise.all(debugLoggingPs) - - this._triggerupdateExpectedPlayoutItems() // So that any recently created devices will get all the ExpectedPlayoutItems - this.logger.debug('updateDevices end') } private populateDefaultValuesIfMissing(deviceOptions: DeviceOptionsAny): DeviceOptionsAny { @@ -681,303 +694,6 @@ export class TSRHandler { deviceOptions.options = { ...this.defaultDeviceOptions[deviceOptions.type], ...options } return deviceOptions } - - private getDeviceDebug(deviceOptions: DeviceOptionsAny): boolean { - return deviceOptions.debug || this._coreHandler.logDebug || false - } - private getDeviceDebugState(deviceOptions: DeviceOptionsAny): boolean { - return (deviceOptions.debugState && this._coreHandler.logState) || false - } - private async _reportResult(resultsOrTimeout: UpdateDeviceOperationsResult): Promise { - this.logger.warn(JSON.stringify(resultsOrTimeout)) - // Check if the updateDevice operation failed before completing - if (!resultsOrTimeout.success) { - // It failed because there was a global timeout (not a device-specific failure) - if (resultsOrTimeout.reason === 'timeout') { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.FATAL, - messages: [ - `Time-out during device update. Timed-out on devices: ${stringifyIds( - resultsOrTimeout.details - )}`, - ], - }) - // It failed for an unknown reason - } else { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.BAD, - messages: [ - `Unknown error during device update: ${resultsOrTimeout.reason}. 
Devices: ${stringifyIds( - resultsOrTimeout.details - )}`, - ], - }) - } - - return - } - - // updateDevice finished successfully, let's see if any of the individual devices failed - const failures = resultsOrTimeout.results.filter((result) => !result.success) - // Group the failures according to what sort of an operation was executed - const addFailureDeviceIds = failures - .filter((failure) => failure.action === DeviceAction.ADD) - .map((failure) => failure.deviceId) - const removeFailureDeviceIds = failures - .filter((failure) => failure.action === DeviceAction.REMOVE) - .map((failure) => failure.deviceId) - - // There were no failures, good - if (failures.length === 0) { - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.GOOD, - messages: [], - }) - return - } - // Something did fail, let's report it as the status - await this._coreHandler.core.setStatus({ - statusCode: StatusCode.BAD, - messages: [ - addFailureDeviceIds.length > 0 - ? `Unable to initialize devices, check configuration: ${stringifyIds(addFailureDeviceIds)}` - : null, - removeFailureDeviceIds.length > 0 - ? `Failed to remove devices: ${stringifyIds(removeFailureDeviceIds)}` - : null, - ].filter(Boolean) as string[], - }) - } - - private async _addDevice(deviceId: string, options: DeviceOptionsAny): Promise { - this.logger.debug('Adding device ' + deviceId) - - try { - if (this._coreTsrHandlers[deviceId]) { - throw new Error(`There is already a _coreTsrHandlers for deviceId "${deviceId}"!`) - } - - const devicePr: Promise> = this.tsr.createDevice( - deviceId, - options - ) - - const coreTsrHandler = new CoreTSRDeviceHandler(this._coreHandler, devicePr, deviceId, this) - - this._coreTsrHandlers[deviceId] = coreTsrHandler - - // set the status to uninitialized for now: - coreTsrHandler.statusChanged({ - statusCode: StatusCode.BAD, - messages: ['Device initialising...'], - }) - - const device = await devicePr - - // Set up device status - const deviceType = device.deviceType - - const onDeviceStatusChanged = (connectedOrStatus: Partial) => { - let deviceStatus: Partial - if (_.isBoolean(connectedOrStatus)) { - // for backwards compability, to be removed later - if (connectedOrStatus) { - deviceStatus = { - statusCode: StatusCode.GOOD, - } - } else { - deviceStatus = { - statusCode: StatusCode.BAD, - messages: ['Disconnected'], - } - } - } else { - deviceStatus = connectedOrStatus - } - coreTsrHandler.statusChanged(deviceStatus) - - // When the status has changed, the deviceName might have changed: - device.reloadProps().catch((err) => { - this.logger.error(`Error in reloadProps: ${stringifyError(err)}`) - }) - // hack to make sure atem has media after restart - if ( - (deviceStatus.statusCode === StatusCode.GOOD || - deviceStatus.statusCode === StatusCode.WARNING_MINOR || - deviceStatus.statusCode === StatusCode.WARNING_MAJOR) && - deviceType === DeviceType.ATEM && - !disableAtemUpload - ) { - const assets = (options as DeviceOptionsAtem).options?.mediaPoolAssets - if (assets && assets.length > 0) { - try { - this.uploadFilesToAtem( - device, - assets.filter((asset) => _.isNumber(asset.position) && asset.path) - ) - } catch (e) { - // don't worry about it. - } - } - } - } - const onSlowSentCommand = (info: SlowSentCommandInfo) => { - // If the internalDelay is too large, it should be logged as an error, - // since something took too long internally. 
- - if (info.internalDelay > 100) { - this.logger.error('slowSentCommand', { - deviceName: device.deviceName, - ...info, - }) - } else { - this.logger.warn('slowSentCommand', { - deviceName: device.deviceName, - ...info, - }) - } - } - const onSlowFulfilledCommand = (info: SlowFulfilledCommandInfo) => { - // Note: we don't emit slow fulfilled commands as error, since - // the fulfillment of them lies on the device being controlled, not on us. - - this.logger.warn('slowFulfilledCommand', { - deviceName: device.deviceName, - ...info, - }) - } - const onCommandReport = (commandReport: CommandReport) => { - if (this._reportAllCommands) { - // Todo: send these to Core - this.logger.info('commandReport', { - commandReport: commandReport, - }) - } - } - const onCommandError = (error: any, context: any) => { - // todo: handle this better - this.logger.error(fixError(error), { context }) - } - const onUpdateMediaObject = (collectionId: string, docId: string, doc: MediaObject | null) => { - coreTsrHandler.onUpdateMediaObject(collectionId, docId, doc) - } - const onClearMediaObjectCollection = (collectionId: string) => { - coreTsrHandler.onClearMediaObjectCollection(collectionId) - } - const fixLog = (e: string): string => `Device "${device.deviceName || deviceId}" (${device.instanceId})` + e - const fixError = (e: Error): any => { - const name = `Device "${device.deviceName || deviceId}" (${device.instanceId})` - - return { - message: e.message && name + ': ' + e.message, - name: e.name && name + ': ' + e.name, - stack: e.stack && e.stack + '\nAt device' + name, - } - } - const fixContext = (...context: any[]): any => { - return { - context, - } - } - await coreTsrHandler.init() - - device.onChildClose = () => { - // Called if a child is closed / crashed - this.logger.warn(`Child of device ${deviceId} closed/crashed`) - debug(`Trigger update devices because "${deviceId}" process closed`) - - onDeviceStatusChanged({ - statusCode: StatusCode.BAD, - messages: ['Child process closed'], - }) - - this._removeDevice(deviceId).then( - () => { - this._triggerUpdateDevices() - }, - () => { - this._triggerUpdateDevices() - } - ) - } - - await addListenerToDevice(device, 'connectionChanged', onDeviceStatusChanged) - // await addListenerToDevice(device, 'slowCommand', onSlowCommand) - await addListenerToDevice(device, 'slowSentCommand', onSlowSentCommand) - await addListenerToDevice(device, 'slowFulfilledCommand', onSlowFulfilledCommand) - await addListenerToDevice(device, 'commandError', onCommandError) - await addListenerToDevice(device, 'commandReport', onCommandReport) - await addListenerToDevice(device, 'updateMediaObject', onUpdateMediaObject) - await addListenerToDevice(device, 'clearMediaObjects', onClearMediaObjectCollection) - - await addListenerToDevice(device, 'info', (info) => { - this.logger.info(fixLog(info)) - }) - await addListenerToDevice(device, 'warning', (warning: string) => { - this.logger.warn(fixLog(warning)) - }) - await addListenerToDevice(device, 'error', (context, error) => { - this.logger.error(fixError(error), fixContext(context)) - }) - - await addListenerToDevice(device, 'debug', (...args) => { - if (!device.debugLogging && !this._coreHandler.logDebug) { - return - } - if (args.length === 0) { - this.logger.debug('>empty message<') - return - } - const data = args.map((arg) => (typeof arg === 'object' ? 
JSON.stringify(arg) : arg)) - this.logger.debug(`Device "${device.deviceName || deviceId}" (${device.instanceId})`, { data }) - }) - - await addListenerToDevice(device, 'debugState', (...args) => { - if (device.debugState && this._coreHandler.logDebug) { - // Fetch the Id that core knows this device by - const coreId = this._coreTsrHandlers[device.deviceId].core.deviceId - this._debugStates.set(unprotectString(coreId), args[0]) - } - }) - - await addListenerToDevice(device, 'timeTrace', (trace) => sendTrace(trace)) - /* eslint-enable @typescript-eslint/await-thenable */ - - // now initialize it - await this.tsr.initDevice(deviceId, options) - - // also ask for the status now, and update: - onDeviceStatusChanged(await device.device.getStatus()) - return { - action: DeviceAction.ADD, - deviceId, - success: true, - } - } catch (error) { - // Initialization failed, clean up any artifacts and see if we can try again later: - this.logger.error(`Error when adding device "${deviceId}"`, { error }) - debug(`Error when adding device "${deviceId}"`) - try { - await this._removeDevice(deviceId) - } catch (error) { - this.logger.error(`Error when cleaning up after adding device "${deviceId}" error...`, error) - } - - if (!this._triggerUpdateDevicesTimeout) { - this._triggerUpdateDevicesTimeout = setTimeout(() => { - debug(`Trigger updateDevices from failure "${deviceId}"`) - // try again later: - this._triggerUpdateDevices() - }, 10 * 1000) - } - - return { - action: DeviceAction.ADD, - deviceId, - success: false, - } - } - } /** * This function is a quick and dirty solution to load a still to the atem mixers. * This does not serve as a proper implementation! And need to be refactor @@ -1000,25 +716,6 @@ export class TSRHandler { process.stderr.on('data', (data) => this.logger.info(data.toString())) process.on('close', () => process.removeAllListeners()) } - private async _removeDevice(deviceId: string): Promise { - let success = false - if (this._coreTsrHandlers[deviceId]) { - try { - await this._coreTsrHandlers[deviceId].dispose('removeSubDevice') - this.logger.debug('Disposed device ' + deviceId) - success = true - } catch (error) { - this.logger.error(`Error when removing device "${deviceId}"`, error) - } - } - delete this._coreTsrHandlers[deviceId] - - return { - deviceId, - action: DeviceAction.REMOVE, - success, - } - } private _triggerupdateExpectedPlayoutItems() { if (!this._initialized) return if (this._triggerupdateExpectedPlayoutItemsTimeout) { @@ -1049,7 +746,7 @@ export class TSRHandler { } await Promise.all( - _.map(this.tsr.getDevices(), async (container) => { + _.map(this.tsr.connectionManager.getConnections(), async (container) => { if (!container.details.supportsExpectedPlayoutItems) { return } @@ -1241,19 +938,3 @@ export function getHash(str: string): string { export function stringifyIds(ids: string[]): string { return ids.map((id) => `"${id}"`).join(', ') } - -async function addListenerToDevice( - device: BaseRemoteDeviceIntegration, - eventName: T, - fcn: EventEmitter.EventListener -): Promise { - // Note for the future: - // It is important that the callbacks returns void, - // otherwise there might be problems with threadedclass! - // Also, it is critical that all of these `.on` calls be `await`ed. - // They aren't typed as promises due to limitations of TypeScript, - // but due to threadedclass they _are_ promises. 
-
-	const emitterHack = device.device as unknown as EventEmitter<DeviceEvents>
-	await Promise.resolve(emitterHack.on(eventName, fcn))
-}
diff --git a/packages/server-core-integration/CHANGELOG.md b/packages/server-core-integration/CHANGELOG.md
index b0f83d985d..7dfd8d5ce6 100644
--- a/packages/server-core-integration/CHANGELOG.md
+++ b/packages/server-core-integration/CHANGELOG.md
@@ -3,6 +3,102 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [1.51.3](https://github.com/nrkno/sofie-core/compare/v1.51.2...v1.51.3) (2024-11-21)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+## [1.51.2](https://github.com/nrkno/sofie-core/compare/v1.51.1...v1.51.2) (2024-11-21)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+## [1.51.1](https://github.com/nrkno/sofie-core/compare/v1.51.1-2...v1.51.1) (2024-11-13)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+## [1.51.1-2](https://github.com/nrkno/sofie-core/compare/v1.51.1-1...v1.51.1-2) (2024-10-24)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+## [1.51.1-1](https://github.com/nrkno/sofie-core/compare/v1.51.1-0...v1.51.1-1) (2024-10-18)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+## [1.51.1-0](https://github.com/nrkno/sofie-core/compare/v1.51.0...v1.51.1-0) (2024-10-18)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+# [1.51.0](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.3...v1.51.0) (2024-10-07)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+# [1.51.0-in-testing.3](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.2...v1.51.0-in-testing.3) (2024-09-25)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
+# [1.51.0-in-testing.2](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.1...v1.51.0-in-testing.2) (2024-09-24)
+
+**Note:** Version bump only for package @sofie-automation/server-core-integration
+
 # [1.51.0-in-testing.1](https://github.com/nrkno/sofie-core/compare/v1.51.0-in-testing.0...v1.51.0-in-testing.1) (2024-09-06)
 
 **Note:** Version bump only for package @sofie-automation/server-core-integration
diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json
index f0640fd264..40c506ca0c 100644
--- a/packages/shared-lib/package.json
+++ b/packages/shared-lib/package.json
@@ -39,7 +39,7 @@
 	],
 	"dependencies": {
 		"@mos-connection/model": "v4.2.0-alpha.1",
-		"timeline-state-resolver-types": "9.2.0-nightly-release52-20240909-111856-517f0ee37.0",
+		"timeline-state-resolver-types": "9.2.0-nightly-release52-20240923-122840-58cfbb259.0",
"^2.6.2", "type-fest": "^3.13.1" }, diff --git a/packages/shared-lib/src/core/model/PackageContainer.ts b/packages/shared-lib/src/core/model/PackageContainer.ts new file mode 100644 index 0000000000..f2eeeab894 --- /dev/null +++ b/packages/shared-lib/src/core/model/PackageContainer.ts @@ -0,0 +1,7 @@ +import { PackageContainer } from '../../package-manager/package' + +export interface StudioPackageContainer { + /** List of which peripheraldevices uses this packageContainer */ + deviceIds: string[] + container: PackageContainer +} diff --git a/packages/shared-lib/src/core/model/ShowStyle.ts b/packages/shared-lib/src/core/model/ShowStyle.ts index 4a7a2ec8da..9c1825ff28 100644 --- a/packages/shared-lib/src/core/model/ShowStyle.ts +++ b/packages/shared-lib/src/core/model/ShowStyle.ts @@ -95,6 +95,7 @@ export enum PlayoutActions { reloadRundownPlaylistData = 'reloadRundownPlaylistData', disableNextPiece = 'disableNextPiece', activateAdlibTestingMode = 'activateAdlibTestingMode', + switchRouteSet = 'switchRouteSet', } export enum ClientActions { diff --git a/packages/shared-lib/src/core/model/StudioRouteSet.ts b/packages/shared-lib/src/core/model/StudioRouteSet.ts new file mode 100644 index 0000000000..4928eb0192 --- /dev/null +++ b/packages/shared-lib/src/core/model/StudioRouteSet.ts @@ -0,0 +1,67 @@ +import { BlueprintMapping } from './Timeline' +import { TSR } from '../../tsr' + +export type AbPlayerId = number | string + +export interface StudioRouteSetExclusivityGroup { + name: string +} + +export interface StudioRouteSet { + /** User-presentable name */ + name: string + /** Whether this group is active or not */ + active: boolean + /** Default state of this group */ + defaultActive?: boolean + /** Only one Route can be active at the same time in the exclusivity-group */ + exclusivityGroup?: string + /** If true, should be displayed and toggleable by user */ + behavior: StudioRouteBehavior + + routes: RouteMapping[] + /** + * AB Pool members + * An AB player will be active if either no routesets reference it, or any active routset references it. + * Specify the players here which this routeset should enable + */ + abPlayers: StudioAbPlayerDisabling[] +} + +export enum StudioRouteBehavior { + HIDDEN = 0, + TOGGLE = 1, + ACTIVATE_ONLY = 2, +} + +export enum StudioRouteType { + /** Default */ + REROUTE = 0, + /** Replace all properties with a new mapping */ + REMAP = 1, +} + +export interface RouteMapping extends ResultingMappingRoute { + /** Which original layer to route. 
If false, a "new" layer will be inserted during routing */ + mappedLayer: string | undefined +} + +export interface StudioAbPlayerDisabling { + poolName: string + playerId: AbPlayerId +} + +export interface ResultingMappingRoutes { + /** Routes that route existing layers */ + existing: { + [mappedLayer: string]: ResultingMappingRoute[] + } + /** Routes that create new layers, from nothing */ + inserted: ResultingMappingRoute[] +} +export interface ResultingMappingRoute { + outputMappedLayer: string + deviceType?: TSR.DeviceType + remapping?: Partial + routeType: StudioRouteType +} diff --git a/packages/shared-lib/src/package-manager/packageInfo.ts b/packages/shared-lib/src/package-manager/packageInfo.ts index ca036caa2b..54f0f6abbc 100644 --- a/packages/shared-lib/src/package-manager/packageInfo.ts +++ b/packages/shared-lib/src/package-manager/packageInfo.ts @@ -3,10 +3,11 @@ export namespace PackageInfo { export enum Type { SCAN = 'scan', DEEPSCAN = 'deepScan', + JSON = 'json', OTHER = 'other', } - export type Any = FFProbeScan | FFProbeDeepScan | FFOther + export type Any = FFProbeScan | FFProbeDeepScan | JSONDocument | FFOther export interface Base { type: Type payload: any @@ -20,6 +21,12 @@ export namespace PackageInfo { type: Type.DEEPSCAN payload: FFProbeDeepScanInfo } + + export interface JSONDocument extends Base { + type: Type.JSON + payload: unknown + } + export interface FFOther extends Base { // placeholder type: Type.OTHER @@ -44,6 +51,7 @@ export namespace PackageInfo { display_aspect_ratio?: string // Example: '16:9' pix_fmt?: string // Example: 'yuv420p' bits_per_raw_sample?: string // Example: '8' + field_order?: FieldOrder // audio sample_fmt?: string diff --git a/packages/shared-lib/src/peripheralDevice/ingest.ts b/packages/shared-lib/src/peripheralDevice/ingest.ts index 5e38b10d0e..c53739f87e 100644 --- a/packages/shared-lib/src/peripheralDevice/ingest.ts +++ b/packages/shared-lib/src/peripheralDevice/ingest.ts @@ -1,10 +1,10 @@ -export interface IngestPlaylist { +export interface IngestPlaylist { /** Id of the playlist. */ externalId: string /** Ingest cache of rundowns in this playlist. */ - rundowns: IngestRundown[] + rundowns: IngestRundown[] } -export interface IngestRundown { +export interface IngestRundown { /** Id of the rundown as reported by the ingest gateway. Must be unique for each rundown owned by the gateway */ externalId: string /** Name of the rundown */ @@ -14,37 +14,38 @@ export interface IngestRundown { type: string /** Raw payload of rundown metadata. Only used by the blueprints */ - payload?: any + payload: TRundownPayload - /** Array of segmsnts in this rundown */ - segments: IngestSegment[] + /** Array of segments in this rundown */ + segments: IngestSegment[] } -export interface IngestSegment { +export interface IngestSegment { /** Id of the segment as reported by the ingest gateway. Must be unique for each segment in the rundown */ externalId: string /** Name of the segment */ name: string + /** Rank of the segment within the rundown */ rank: number /** Raw payload of segment metadata. Only used by the blueprints */ - payload?: any + payload: TSegmentPayload /** Array of parts in this segment */ - parts: IngestPart[] + parts: IngestPart[] } -export interface IngestPart { +export interface IngestPart { /** Id of the part as reported by the ingest gateway. 
 	externalId: string
 	/** Name of the part */
 	name: string
-	/** Rank of the part within the segmetn */
+	/** Rank of the part within the segment */
 	rank: number
 
 	/** Raw payload of the part. Only used by the blueprints */
-	payload?: any
+	payload: TPartPayload
 }
-export interface IngestAdlib {
+export interface IngestAdlib<TPayload = unknown> {
 	/** Id of the adlib as reported by the ingest source. Must be unique for each adlib */
 	externalId: string
 	/** Name of the adlib */
@@ -53,5 +54,5 @@
 	/** Type of the raw payload. Only used by the blueprints */
 	payloadType: string
 	/** Raw payload of the adlib. Only used by the blueprints */
-	payload?: any
+	payload: TPayload
 }
diff --git a/packages/webui/.eslintrc.cjs b/packages/webui/.eslintrc.cjs
index f282269d27..17d7af8b27 100644
--- a/packages/webui/.eslintrc.cjs
+++ b/packages/webui/.eslintrc.cjs
@@ -42,7 +42,7 @@ const tsBase = {
 	extends: [...tsExtends],
 	plugins: tsPlugins,
 	...tsParser,
-	parserOptions: { project: './packages/webui/tsconfig.eslint.json' },
+	parserOptions: { project: './tsconfig.eslint.json' },
 	settings: {
 		node: {
 			tryExtensions: ['.js', '.json', '.node', '.ts', '.tsx', '.d.ts'],
diff --git a/packages/webui/index.html b/packages/webui/index.html
index 5e02e90cfe..efd4095594 100644
--- a/packages/webui/index.html
+++ b/packages/webui/index.html
@@ -1,3 +1,4 @@
+
 		Sofie
diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts
index 2ac60d54a7..89c7749232 100644
--- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts
+++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts
@@ -105,11 +105,14 @@ export function defaultStudio(_id: StudioId): DBStudio {
 			frameRate: 25,
 			mediaPreviewsUrl: '',
 			minimumTakeSpan: DEFAULT_MINIMUM_TAKE_SPAN,
+			allowHold: true,
+			allowPieceDirectPlay: true,
+			enableBuckets: true,
 		},
 		_rundownVersionHash: '',
-		routeSets: {},
-		routeSetExclusivityGroups: {},
-		packageContainers: {},
+		routeSetsWithOverrides: wrapDefaultObject({}),
+		routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}),
+		packageContainersWithOverrides: wrapDefaultObject({}),
 		previewContainerIds: [],
 		thumbnailContainerIds: [],
 		peripheralDeviceSettings: {
@@ -129,7 +132,6 @@ export function defaultSegment(_id: SegmentId, rundownId: RundownId): DBSegment
 		externalId: unprotectString(_id),
 		rundownId: rundownId,
 		name: 'Default Segment',
-		externalModified: 1,
 	}
 }
diff --git a/packages/webui/src/__mocks__/helpers/database.ts b/packages/webui/src/__mocks__/helpers/database.ts
index 382e789aa0..fbc094fa42 100644
--- a/packages/webui/src/__mocks__/helpers/database.ts
+++ b/packages/webui/src/__mocks__/helpers/database.ts
@@ -343,7 +343,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_0',
 		rundownId: rundown._id,
 		name: 'Segment 0',
-		externalModified: 1,
 	}
 	MongoMock.getInnerMockCollection(Segments).insert(segment0)
 	/* tslint:disable:ter-indent*/
@@ -453,7 +452,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_2',
 		rundownId: rundown._id,
 		name: 'Segment 1',
-		externalModified: 1,
 	}
 	MongoMock.getInnerMockCollection(Segments).insert(segment1)
 
@@ -496,7 +494,6 @@ export async function setupDefaultRundown(
 		externalId: 'MOCK_SEGMENT_2',
 		rundownId: rundown._id,
 		name: 'Segment 2',
-		externalModified: 1,
 	}
 	MongoMock.getInnerMockCollection(Segments).insert(segment2)
 
diff --git a/packages/webui/src/client/lib/Components/Checkbox.tsx b/packages/webui/src/client/lib/Components/Checkbox.tsx
index c357624805..6932ef5e6d 100644
--- a/packages/webui/src/client/lib/Components/Checkbox.tsx
+++ b/packages/webui/src/client/lib/Components/Checkbox.tsx
@@ -6,6 +6,7 @@ import ClassNames from 'classnames'
 interface ICheckboxControlProps {
 	classNames?: string
 	disabled?: boolean
+	title?: string
 
 	value: boolean
 	handleUpdate: (value: boolean) => void
@@ -15,6 +16,7 @@ export function CheckboxControl({
 	value,
 	disabled,
 	handleUpdate,
+	title,
 }: Readonly<ICheckboxControlProps>): JSX.Element {
 	const handleChange = useCallback(
 		(e: React.ChangeEvent<HTMLInputElement>) => {
@@ -25,11 +27,18 @@
 	return (
 		[JSX hunk not recoverable from the extracted source; the checkbox markup gains the title attribute]
 	)
diff --git a/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx b/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx
index 25cb48894a..be6c4d39f2 100644
--- a/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx
+++ b/packages/webui/src/client/lib/Components/LabelAndOverrides.tsx
@@ -16,6 +16,7 @@ export interface LabelAndOverridesProps<TValue, TObj> {
 	opPrefix: string
 	overrideHelper: OverrideOpHelperForItemContents
 
+	showClearButton?: boolean
 	formatDefaultValue?: (value: any) => JSX.Element | string | null
 
 	children: (value: TValue, setValue: (value: TValue) => void) => React.ReactNode
@@ -33,6 +34,7 @@ export function LabelAndOverrides<TValue, TObj>({
 	itemKey,
 	opPrefix,
 	overrideHelper,
+	showClearButton,
 	formatDefaultValue,
 }: Readonly<LabelAndOverridesProps<TValue, TObj>>): JSX.Element {
 	const { t } = useTranslation()
@@ -51,7 +53,7 @@
 	let displayValue: JSX.Element | string | null = '""'
 	if (item.defaults) {
-		const defaultValue: any = item.defaults[itemKey]
+		const defaultValue: any = objectPathGet(item.defaults, String(itemKey))
 		// Special cases for formatting of the default
 		if (formatDefaultValue) {
 			displayValue = formatDefaultValue(defaultValue)
@@ -75,7 +77,16 @@
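A closing note on the `objectPathGet` change in the last hunk: `item.defaults[itemKey]` only resolves top-level keys, whereas a path-aware getter also resolves dotted keys into nested default objects. The helper below is a stand-in written for illustration, not the actual utility the webui imports, and the data shape is likewise hypothetical:

```typescript
// Stand-in path-aware getter: resolves dot-separated keys into nested objects.
function objectPathGet(obj: unknown, path: string): unknown {
	return path.split('.').reduce<any>((o, key) => (o == null ? undefined : o[key]), obj)
}

// Hypothetical defaults object with a nested value:
const defaults = { mappings: { cam1: { deviceId: 'atem0' } } }

// Plain indexing treats the whole path as one literal key and misses:
console.log((defaults as Record<string, unknown>)['mappings.cam1.deviceId']) // undefined

// Path-aware lookup resolves the nested default:
console.log(objectPathGet(defaults, 'mappings.cam1.deviceId')) // 'atem0'
```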