diff --git a/.github/workflows/merge-next-notify.yml b/.github/workflows/merge-next-notify.yml
index b8187d9173ce..f3fa58033066 100644
--- a/.github/workflows/merge-next-notify.yml
+++ b/.github/workflows/merge-next-notify.yml
@@ -19,4 +19,4 @@ jobs:
       with:
         only_create: true
         message: |
-          Please review this PR: @sonalideshpandemsft @tylerbutler @scottn12
\ No newline at end of file
+          This PR is ready to merge! Please review it and squash merge into `next`: @sonalideshpandemsft @tylerbutler @scottn12
\ No newline at end of file
diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml
index 56344c5c2bb2..c77d18623c7c 100644
--- a/.github/workflows/pr-labeler.yml
+++ b/.github/workflows/pr-labeler.yml
@@ -1,12 +1,17 @@
 name: "Pull Request Labeler"
 on:
   pull_request_target:
-    types: [ opened, synchronize, reopened ]
+    types: [ opened, synchronize, reopened, labeled, unlabeled ]
+    branches: [ main, next, release/* ]

 jobs:
   paths_label:
     runs-on: ubuntu-latest
     name: Label based on file paths
+    # Skip labeling main-next merge PRs. The area labels are noisy and distracting for main-next PRs because they
+    # can contain many commits and thus touch nearly the whole repo in a single PR. Skipping these labels makes it
+    # easier to focus on the more relevant main-next labels.
+    if: "!contains(github.event.pull_request.labels.*.name, 'main-next-integrate')"
     steps:
       - uses: actions/labeler@5c7539237e04b714afd8ad9b4aed733815b9fab4 # ratchet:actions/labeler@v4.0.2
         with:
diff --git a/api-report/container-runtime.api.md b/api-report/container-runtime.api.md
index 680e63a23fb2..31a8811bd345 100644
--- a/api-report/container-runtime.api.md
+++ b/api-report/container-runtime.api.md
@@ -224,7 +224,7 @@ export class ContainerRuntime extends TypedEventEmitter
     updateUnusedRoutes(unusedRoutes: string[]): void;
     updateUsedRoutes(usedRoutes: string[]): void;
     // (undocumented)
-    uploadBlob(blob: ArrayBufferLike): Promise<IFluidHandle<ArrayBufferLike>>;
+    uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise<IFluidHandle<ArrayBufferLike>>;
 }

 // @public (undocumented)
diff --git a/api-report/datastore-definitions.api.md b/api-report/datastore-definitions.api.md
index e2d9cf37f9fc..29c1c50851e1 100644
--- a/api-report/datastore-definitions.api.md
+++ b/api-report/datastore-definitions.api.md
@@ -122,7 +122,7 @@ export interface IFluidDataStoreRuntime extends IFluidRouter, IEventProvider
-    uploadBlob(blob: ArrayBufferLike): Promise<IFluidHandle<ArrayBufferLike>>;
+    uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise<IFluidHandle<ArrayBufferLike>>;
     waitAttached(): Promise<void>;
 }
diff --git a/api-report/datastore.api.md b/api-report/datastore.api.md
index 0ce0baa544d8..72e09d1d17a5 100644
--- a/api-report/datastore.api.md
+++ b/api-report/datastore.api.md
@@ -125,7 +125,7 @@ export class FluidDataStoreRuntime extends TypedEventEmitter
     updateUsedRoutes(usedRoutes: string[]): void;
     // (undocumented)
-    uploadBlob(blob: ArrayBufferLike): Promise<IFluidHandle<ArrayBufferLike>>;
+    uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise<IFluidHandle<ArrayBufferLike>>;
     // (undocumented)
     visibilityState: VisibilityState_2;
     waitAttached(): Promise<void>;
diff --git a/api-report/runtime-definitions.api.md b/api-report/runtime-definitions.api.md
index 916b42d0f516..cb3c46aeb1b4 100644
--- a/api-report/runtime-definitions.api.md
+++ b/api-report/runtime-definitions.api.md
@@ -140,7 +140,7 @@ export interface IContainerRuntimeBase extends IEventProvider
     submitSignal(type: string, content: any): void;
     // (undocumented)
-    uploadBlob(blob: ArrayBufferLike): Promise<IFluidHandle<ArrayBufferLike>>;
+    uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise<IFluidHandle<ArrayBufferLike>>;
 }

 // @public (undocumented)
@@ -280,7 +280,7 @@ export interface IFluidDataStoreContext
extends IEventProvider>; + uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise>; } // @public (undocumented) diff --git a/api-report/tree2.api.md b/api-report/tree2.api.md index f84d583ce90d..9282d8f91c13 100644 --- a/api-report/tree2.api.md +++ b/api-report/tree2.api.md @@ -1805,7 +1805,6 @@ export interface SequenceFieldEditBuilder { delete(index: number, count: number): void; insert(index: number, newContent: ITreeCursor | readonly ITreeCursor[]): void; move(sourceIndex: number, count: number, destIndex: number): void; - revive(index: number, count: number, detachedBy: RevisionTag, detachId: ChangesetLocalId, reviver: NodeReviver, isIntention?: true): void; } // @alpha diff --git a/common/lib/common-definitions/pnpm-lock.yaml b/common/lib/common-definitions/pnpm-lock.yaml index 97adc0a64334..e7938b26abda 100644 --- a/common/lib/common-definitions/pnpm-lock.yaml +++ b/common/lib/common-definitions/pnpm-lock.yaml @@ -6842,7 +6842,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /ora/5.4.1: @@ -9085,8 +9085,8 @@ packages: execa: 1.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/common/lib/common-utils/pnpm-lock.yaml b/common/lib/common-utils/pnpm-lock.yaml index a1fa40b03d48..81b9931ab0db 100644 --- a/common/lib/common-utils/pnpm-lock.yaml +++ b/common/lib/common-utils/pnpm-lock.yaml @@ -9831,7 +9831,7 @@ packages: levn: 0.3.0 prelude-ls: 1.1.2 type-check: 0.3.2 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /optionator/0.9.1: @@ -9843,7 +9843,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /ora/5.4.1: @@ -12871,8 +12871,8 @@ packages: execa: 1.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/experimental/dds/tree2/src/feature-libraries/default-field-kinds/defaultChangeFamily.ts b/experimental/dds/tree2/src/feature-libraries/default-field-kinds/defaultChangeFamily.ts index 2128877d1438..6fbddb0a2d78 100644 --- a/experimental/dds/tree2/src/feature-libraries/default-field-kinds/defaultChangeFamily.ts +++ b/experimental/dds/tree2/src/feature-libraries/default-field-kinds/defaultChangeFamily.ts @@ -11,7 +11,6 @@ import { Delta, UpPath, ITreeCursor, - RevisionTag, ChangeFamilyEditor, FieldUpPath, } from "../../core"; @@ -21,8 +20,6 @@ import { ModularEditBuilder, FieldChangeset, ModularChangeset, - NodeReviver, - ChangesetLocalId, } from "../modular-schema"; import { fieldKinds, optional, sequence, value as valueFieldKind } from "./defaultFieldKinds"; @@ -243,26 +240,6 @@ export class DefaultEditBuilder implements ChangeFamilyEditor, IDefaultEditBuild moveId, ); }, - revive: ( - index: number, - count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, - reviver: NodeReviver, - isIntention?: true, - ): void => { - const change: FieldChangeset = brand( - sequence.changeHandler.editor.revive( - index, - count, - detachedBy, - detachId, - reviver, - 
isIntention, - ), - ); - this.modularBuilder.submitChange(field, sequence.identifier, change); - }, }; } } @@ -315,23 +292,4 @@ export interface SequenceFieldEditBuilder { * @param destIndex - the index the elements are moved to, interpreted after removing the moving elements. */ move(sourceIndex: number, count: number, destIndex: number): void; - - /** - * Revives a contiguous range of deleted nodes. - * @param index - The index at which to revive the node (this will become the index of the first revived node). - * @param count - The number of nodes to revive. - * @param detachedBy - The revision of the edit that deleted the nodes. - * @param reviver - The NodeReviver used to retrieve repair data. - * @param detachIndex - The index of the first node to revive in the input context of edit `detachedBy`. - * @param isIntention - If true, the node will be revived even if edit `detachedBy` did not ultimately - * delete them. If false, only those nodes that were deleted by `detachedBy` (and not revived) will be revived. - */ - revive( - index: number, - count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, - reviver: NodeReviver, - isIntention?: true, - ): void; } diff --git a/experimental/dds/tree2/src/feature-libraries/index.ts b/experimental/dds/tree2/src/feature-libraries/index.ts index 0ceed44cb14d..79a320918a12 100644 --- a/experimental/dds/tree2/src/feature-libraries/index.ts +++ b/experimental/dds/tree2/src/feature-libraries/index.ts @@ -142,6 +142,7 @@ export { NodeExistsConstraint, NodeExistenceState, BrandedFieldKind, + ChangeAtomId, } from "./modular-schema"; export { diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/compose.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/compose.ts index a9ef2d516974..6b6e108cf50b 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/compose.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/compose.ts @@ -22,6 +22,7 @@ import { Modify, MoveId, NoopMarkType, + CellId, } from "./format"; import { MarkListFactory } from "./markListFactory"; import { MarkQueue } from "./markQueue"; @@ -192,10 +193,7 @@ function composeMarks( // Modify and Placeholder marks must be muted because the node they target has been deleted. // Detach marks must be muted because the cell is empty. 
if (newMark.type === "Modify" || newMark.type === "Placeholder" || isDetachMark(newMark)) { - assert( - newMark.detachEvent !== undefined, - "Invalid node-targeting mark after transient", - ); + assert(newMark.cellId !== undefined, "Invalid node-targeting mark after transient"); return baseMark; } if (newMark.type === "ReturnTo") { @@ -363,7 +361,7 @@ function createModifyMark( assert(length === 1, 0x692 /* A mark with a node change must have length one */); const mark: Modify = { type: "Modify", changes: nodeChange }; if (cellId !== undefined) { - mark.detachEvent = cellId; + mark.cellId = cellId; } return mark; } @@ -585,7 +583,7 @@ export class ComposeQueue { isExistingCellMark(baseMark) && areInputCellsEmpty(baseMark), 0x696 /* Mark with empty output must either be a detach or also have input empty */, ); - baseCellId = baseMark.detachEvent; + baseCellId = baseMark.cellId; } const cmp = compareCellPositions( baseCellId, @@ -805,11 +803,11 @@ function areInverseMovesAtIntermediateLocation( 0x6d0 /* baseMark should be an attach and newMark should be a detach */, ); - if (baseMark.type === "ReturnTo" && baseMark.detachEvent?.revision === newIntention) { + if (baseMark.type === "ReturnTo" && baseMark.cellId?.revision === newIntention) { return true; } - if (newMark.type === "ReturnFrom" && newMark.detachEvent?.revision === baseIntention) { + if (newMark.type === "ReturnFrom" && newMark.cellId?.revision === baseIntention) { return true; } @@ -826,14 +824,15 @@ function areInverseMovesAtIntermediateLocation( * are before the first cell of `newMark`. */ function compareCellPositions( - baseCellId: ChangeAtomId, + baseCellId: CellId, baseMark: Mark, newMark: EmptyInputCellMark, newIntention: RevisionTag | undefined, cancelledInserts: Set, ): number { const newCellId = getCellId(newMark, newIntention); - if (newCellId !== undefined && baseCellId.revision === newCellId.revision) { + assert(newCellId !== undefined, "Should have cell ID"); + if (baseCellId.revision === newCellId.revision) { if (isNewAttach(newMark)) { // There is some change foo that is being cancelled out as part of a rebase sandwich. // The marks that make up this change (and its inverse) may be broken up differently between the base @@ -859,31 +858,27 @@ function compareCellPositions( } } - if (newCellId !== undefined) { - const offset = getOffsetInCellRange( - baseMark.lineage, - newCellId.revision, - newCellId.localId, - getMarkLength(newMark), - ); - if (offset !== undefined) { - return offset > 0 ? offset : -Infinity; - } + const offsetInBase = getOffsetInCellRange( + baseCellId.lineage, + newCellId.revision, + newCellId.localId, + getMarkLength(newMark), + ); + if (offsetInBase !== undefined) { + return offsetInBase > 0 ? offsetInBase : -Infinity; } - { - const offset = getOffsetInCellRange( - newMark.lineage, - baseCellId.revision, - baseCellId.localId, - getMarkLength(baseMark), - ); - if (offset !== undefined) { - return offset > 0 ? -offset : Infinity; - } + const offsetInNew = getOffsetInCellRange( + newCellId.lineage, + baseCellId.revision, + baseCellId.localId, + getMarkLength(baseMark), + ); + if (offsetInNew !== undefined) { + return offsetInNew > 0 ? 
-offsetInNew : Infinity; } - const cmp = compareLineages(baseMark.lineage, newMark.lineage); + const cmp = compareLineages(baseCellId.lineage, newCellId.lineage); if (cmp !== 0) { return Math.sign(cmp) * Infinity; } diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/format.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/format.ts index 3ffaa8703206..81efa81b61fe 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/format.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/format.ts @@ -74,6 +74,7 @@ export enum Effects { * Note that `LineageEvent`s with the same revision are not necessarily referring to the same detach. * `LineageEvent`s for a given revision can only be meaningfully compared if it is known that they must refer to the * same detach. + * @alpha */ export interface LineageEvent { readonly revision: RevisionTag; @@ -93,13 +94,32 @@ export const LineageEvent = Type.Object( noAdditionalProps, ); +/** + * @alpha + */ +export interface HasLineage { + /** + * History of detaches adjacent to the cells described by this `ChangeAtomId`. + */ + lineage?: LineageEvent[]; +} + +export const HasLineage = Type.Object({ lineage: Type.Optional(Type.Array(LineageEvent)) }); + +/** + * @alpha + */ +export interface CellId extends ChangeAtomId, HasLineage {} + +export const CellId = Type.Composite([EncodedChangeAtomId, HasLineage]); + export interface HasChanges { changes?: TNodeChange; } export const HasChanges = (tNodeChange: TNodeChange) => Type.Object({ changes: Type.Optional(tNodeChange) }); -export interface HasPlaceFields { +export interface HasPlaceFields extends HasLineage { /** * Describes which kinds of concurrent slice operations should affect the target place. * @@ -116,32 +136,20 @@ export interface HasPlaceFields { heed?: Effects | [Effects, Effects]; /** - * Record of relevant information about changes this mark has been rebased over. - * Events are stored in the order in which they were rebased over. + * Omit if `Tiebreak.Right` for terseness. */ - lineage?: LineageEvent[]; + tiebreak?: Tiebreak; } const EffectsSchema = Type.Enum(Effects); -export const HasPlaceFields = Type.Object({ - heed: Type.Optional(Type.Union([EffectsSchema, Type.Tuple([EffectsSchema, EffectsSchema])])), - lineage: Type.Optional(Type.Array(LineageEvent)), -}); - -export interface HasReattachFields extends HasPlaceFields { - /** - * The revision this mark is inverting a detach from. - * If defined this mark is a revert-only inverse, - * meaning that it will only reattach nodes if those nodes were last detached by `inverseOf`. - * If `inverseOf` is undefined, this mark will reattach nodes regardless of when they were last detached. - */ - inverseOf?: RevisionTag; -} -export const HasReattachFields = Type.Composite([ - HasPlaceFields, +export const HasPlaceFields = Type.Composite([ Type.Object({ - inverseOf: Type.Optional(RevisionTagSchema), + heed: Type.Optional( + Type.Union([EffectsSchema, Type.Tuple([EffectsSchema, EffectsSchema])]), + ), + tiebreak: Type.Optional(Type.Enum(Tiebreak)), }), + HasLineage, ]); /** @@ -149,21 +157,31 @@ export const HasReattachFields = Type.Composite([ */ export interface CellTargetingMark { /** - * Describes the detach which last emptied target cells. + * Describes the detach which last emptied the target cells, + * or the attach which allocated the cells if the cells have never been filled. * Undefined if the target cells are not empty in this mark's input context. 
*/ - detachEvent?: ChangeAtomId; + cellId?: CellId; +} +export const CellTargetingMark = Type.Object({ + cellId: Type.Optional(CellId), +}); +export interface HasReattachFields extends CellTargetingMark { /** - * Lineage of detaches adjacent to the cells since `detachEvent`. - * Should be empty if the cells are full in this mark's input context. + * The revision this mark is inverting a detach from. + * If defined this mark is a revert-only inverse, + * meaning that it will only reattach nodes if those nodes were last detached by `inverseOf`. + * If `inverseOf` is undefined, this mark will reattach nodes regardless of when they were last detached. */ - lineage?: LineageEvent[]; + inverseOf?: RevisionTag; } -export const CellTargetingMark = Type.Object({ - detachEvent: Type.Optional(EncodedChangeAtomId), - lineage: Type.Optional(Type.Array(LineageEvent)), -}); +export const HasReattachFields = Type.Composite([ + Type.Object({ + inverseOf: Type.Optional(RevisionTagSchema), + }), + CellTargetingMark, +]); export interface NoopMark extends CellTargetingMark { /** @@ -183,24 +201,11 @@ export const NoopMark = Type.Composite( ); export interface DetachedCellMark extends CellTargetingMark { - detachEvent: ChangeAtomId; + cellId: CellId; } export const DetachedCellMark = Type.Composite([ CellTargetingMark, - Type.Object({ detachEvent: EncodedChangeAtomId }), -]); - -export interface HasTiebreakPolicy extends HasPlaceFields { - /** - * Omit if `Tiebreak.Right` for terseness. - */ - tiebreak?: Tiebreak; -} -export const HasTiebreakPolicy = Type.Composite([ - HasPlaceFields, - Type.Object({ - tiebreak: Type.Optional(Type.Enum(Tiebreak)), - }), + Type.Object({ cellId: CellId }), ]); export enum RangeType { @@ -229,7 +234,7 @@ export type CanBeTransient = Partial; export const CanBeTransient = Type.Partial(Transient); export interface Insert - extends HasTiebreakPolicy, + extends HasPlaceFields, HasRevisionTag, CanBeTransient, HasChanges { @@ -245,9 +250,8 @@ export interface Insert export const Insert = (tNodeChange: Schema) => Type.Composite( [ - HasTiebreakPolicy, + HasPlaceFields, HasRevisionTag, - CanBeTransient, HasChanges(tNodeChange), Type.Object({ type: Type.Literal("Insert"), @@ -337,8 +341,7 @@ export interface Revive extends HasReattachFields, HasRevisionTag, CanBeTransient, - HasChanges, - CellTargetingMark { + HasChanges { type: "Revive"; content: ITreeCursorSynchronous[]; count: NodeCount; @@ -350,7 +353,6 @@ export const Revive = (tNodeChange: Schema) => HasRevisionTag, CanBeTransient, HasChanges(tNodeChange), - CellTargetingMark, Type.Object({ type: Type.Literal("Revive"), content: Type.Array(ProtoNode), @@ -360,7 +362,7 @@ export const Revive = (tNodeChange: Schema) => noAdditionalProps, ); -export interface ReturnTo extends HasReattachFields, HasRevisionTag, HasMoveId, CellTargetingMark { +export interface ReturnTo extends HasReattachFields, HasRevisionTag, HasMoveId { type: "ReturnTo"; count: NodeCount; @@ -375,7 +377,6 @@ export const ReturnTo = Type.Composite( HasReattachFields, HasRevisionTag, HasMoveId, - CellTargetingMark, Type.Object({ type: Type.Literal("ReturnTo"), count: NodeCount, diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/index.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/index.ts index e7c40f25df7b..cdd7e3bb54fb 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/index.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/index.ts @@ -14,7 +14,6 @@ export { HasMoveId, HasPlaceFields, 
HasRevisionTag, - HasTiebreakPolicy, Insert, Mark, MarkList, @@ -35,6 +34,8 @@ export { LineageEvent, HasReattachFields, CellSpanningMark, + CellId, + HasLineage, } from "./format"; export { SequenceFieldChangeHandler, diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/invert.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/invert.ts index bbedec286659..f3511eb36b75 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/invert.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/invert.ts @@ -112,7 +112,7 @@ function invertMark( withNodeChange( { type: "Revive", - detachEvent: { + cellId: { revision: mark.transientDetach.revision ?? revision, localId: mark.transientDetach.localId, }, @@ -137,11 +137,11 @@ function invertMark( } case "Delete": { assert(revision !== undefined, 0x5a1 /* Unable to revert to undefined revision */); - if (mark.detachEvent === undefined) { + if (mark.cellId === undefined) { const inverse = withNodeChange( { type: "Revive", - detachEvent: { revision: mark.revision ?? revision, localId: mark.id }, + cellId: { revision: mark.revision ?? revision, localId: mark.id }, content: reviver(revision, inputIndex, mark.count), count: mark.count, inverseOf: mark.revision ?? revision, @@ -157,7 +157,7 @@ function invertMark( case "Revive": { if (!isReattachConflicted(mark)) { assert( - mark.detachEvent !== undefined, + mark.cellId !== undefined, 0x707 /* Active reattach should have a detach event */, ); if (mark.transientDetach !== undefined) { @@ -166,7 +166,7 @@ function invertMark( withNodeChange( { type: "Revive", - detachEvent: { + cellId: { revision: mark.transientDetach.revision ?? revision, localId: mark.transientDetach.localId, }, @@ -175,7 +175,7 @@ function invertMark( inverseOf: mark.revision ?? revision, transientDetach: { revision: mark.revision ?? revision, - localId: mark.detachEvent.localId, + localId: mark.cellId.localId, }, }, invertNodeChange(mark.changes, inputIndex, invertChild), @@ -186,7 +186,7 @@ function invertMark( { type: "Delete", count: mark.count, - id: mark.detachEvent.localId, + id: mark.cellId.localId, }, invertNodeChange(mark.changes, inputIndex, invertChild), ); @@ -213,12 +213,12 @@ function invertMark( mark.changes, inputIndex, invertChild, - mark.detachEvent, + mark.cellId, ), ]; } case "Modify": { - if (mark.detachEvent === undefined) { + if (mark.cellId === undefined) { return [ { type: "Modify", @@ -258,7 +258,7 @@ function invertMark( type: "ReturnTo", id: mark.id, count: mark.count, - detachEvent: { + cellId: { revision: mark.revision ?? revision ?? fail("Revision must be defined"), localId: mark.id, }, @@ -268,12 +268,12 @@ function invertMark( case "MoveIn": case "ReturnTo": { if (mark.isSrcConflicted) { - return mark.type === "ReturnTo" && mark.detachEvent === undefined + return mark.type === "ReturnTo" && mark.cellId === undefined ? [{ count: mark.count }] : []; } if (mark.type === "ReturnTo") { - if (mark.detachEvent === undefined) { + if (mark.cellId === undefined) { // The nodes were already attached, so the mark did not affect them. 
return [{ count: mark.count }]; } else if (isConflictedReattach(mark)) { @@ -355,7 +355,7 @@ function invertModifyOrSkip( assert(length === 1, 0x66c /* A modify mark must have length equal to one */); const modify: Modify = { type: "Modify", changes: inverter(changes, index) }; if (detachEvent !== undefined) { - modify.detachEvent = detachEvent; + modify.cellId = detachEvent; } return modify; } diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/rebase.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/rebase.ts index 304b54b6e391..bda684d23f4c 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/rebase.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/rebase.ts @@ -45,6 +45,7 @@ import { Modify, EmptyInputCellMark, NoopMarkType, + HasLineage, } from "./format"; import { MarkListFactory } from "./markListFactory"; import { ComposeQueue } from "./compose"; @@ -401,11 +402,11 @@ function rebaseMark( ); } } - rebasedMark = withoutDetachEvent(rebasedMark); + rebasedMark = withoutCellId(rebasedMark); } else if ( nodeExistenceState === NodeExistenceState.Alive && (rebasedMark.type === "MoveOut" || rebasedMark.type === "ReturnFrom") && - rebasedMark.detachEvent === undefined + rebasedMark.cellId === undefined ) { setPairedMarkStatus( moveEffects, @@ -547,14 +548,13 @@ function makeDetachedMark( return { count: 0 }; } - assert(mark.detachEvent === undefined, 0x69f /* Expected mark to be attached */); - return { ...mark, detachEvent: { revision: detachIntention, localId: detachId } }; + assert(mark.cellId === undefined, 0x69f /* Expected mark to be attached */); + return { ...mark, cellId: { revision: detachIntention, localId: detachId } }; } -function withoutDetachEvent>(mark: TMark): TMark { +function withoutCellId>(mark: TMark): TMark { const newMark = { ...mark }; - delete newMark.detachEvent; - delete newMark.lineage; + delete newMark.cellId; return newMark; } @@ -684,10 +684,11 @@ function handleLineage( // TODO: Handle cases where the base changeset is a composition of multiple revisions. // TODO: Don't remove the lineage event in cases where the event isn't actually inverted by the base changeset, // e.g., if the inverse of the lineage event is muted after rebasing. 
- tryRemoveLineageEvents(rebasedMark, baseIntention); + const lineageHolder = getLineageHolder(rebasedMark); + tryRemoveLineageEvents(lineageHolder, baseIntention); for (const entry of lineageEntries) { - addLineageEntry(rebasedMark, baseIntention, entry.id, entry.count, entry.count); + addLineageEntry(lineageHolder, baseIntention, entry.id, entry.count, entry.count); } lineageRecipients.push(rebasedMark); @@ -700,33 +701,33 @@ function addLineageToRecipients( count: number, ) { for (const mark of recipients) { - addLineageEntry(mark, revision, id, count, 0); + addLineageEntry(getLineageHolder(mark), revision, id, count, 0); } } function addLineageEntry( - mark: Mark, + lineageHolder: HasLineage, revision: RevisionTag, id: ChangesetLocalId, count: number, offset: number, ) { - if (mark.lineage === undefined) { - mark.lineage = []; + if (lineageHolder.lineage === undefined) { + lineageHolder.lineage = []; } - if (mark.lineage.length > 0) { - const lastEntry = mark.lineage[mark.lineage.length - 1]; + if (lineageHolder.lineage.length > 0) { + const lastEntry = lineageHolder.lineage[lineageHolder.lineage.length - 1]; if (lastEntry.revision === revision && (lastEntry.id as number) + lastEntry.count === id) { if (lastEntry.offset === lastEntry.count) { - mark.lineage[mark.lineage.length - 1] = { + lineageHolder.lineage[lineageHolder.lineage.length - 1] = { ...lastEntry, count: lastEntry.count + count, offset: lastEntry.offset + offset, }; return; } else if (offset === 0) { - mark.lineage[mark.lineage.length - 1] = { + lineageHolder.lineage[lineageHolder.lineage.length - 1] = { ...lastEntry, count: lastEntry.count + count, }; @@ -735,18 +736,29 @@ function addLineageEntry( } } - mark.lineage.push({ revision, id, count, offset }); + lineageHolder.lineage.push({ revision, id, count, offset }); } -function tryRemoveLineageEvents(mark: Mark, revisionToRemove: RevisionTag) { - if (mark.lineage === undefined) { +function tryRemoveLineageEvents(lineageHolder: HasLineage, revisionToRemove: RevisionTag) { + if (lineageHolder.lineage === undefined) { return; } - mark.lineage = mark.lineage.filter((event) => event.revision !== revisionToRemove); - if (mark.lineage.length === 0) { - delete mark.lineage; + lineageHolder.lineage = lineageHolder.lineage.filter( + (event) => event.revision !== revisionToRemove, + ); + if (lineageHolder.lineage.length === 0) { + delete lineageHolder.lineage; + } +} + +function getLineageHolder(mark: Mark): HasLineage { + if (isNewAttach(mark)) { + return mark; } + + assert(mark.cellId !== undefined, "Attached cells cannot have lineage"); + return mark.cellId; } /** @@ -775,7 +787,7 @@ function compareCellPositions( if (newId !== undefined) { const offset = getOffsetInCellRange( - baseMark.lineage, + baseId.lineage, newId.revision, newId.localId, newLength, @@ -783,21 +795,23 @@ function compareCellPositions( if (offset !== undefined) { return offset > 0 ? offset : -Infinity; } - } - const newOffset = getOffsetInCellRange( - newMark.lineage, - baseId.revision, - baseId.localId, - baseLength, - ); - if (newOffset !== undefined) { - return newOffset > 0 ? -newOffset : Infinity; + const newOffset = getOffsetInCellRange( + newId.lineage, + baseId.revision, + baseId.localId, + baseLength, + ); + if (newOffset !== undefined) { + return newOffset > 0 ? 
-newOffset : Infinity; + } } - const cmp = compareLineages(baseMark.lineage, newMark.lineage); - if (cmp !== 0) { - return Math.sign(cmp) * Infinity; + if (newId !== undefined) { + const cmp = compareLineages(baseId.lineage, newId.lineage); + if (cmp !== 0) { + return Math.sign(cmp) * Infinity; + } } if (isNewAttach(newMark)) { diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/sequenceFieldEditor.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/sequenceFieldEditor.ts index 0ccba525a706..8310dae3758f 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/sequenceFieldEditor.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/sequenceFieldEditor.ts @@ -3,14 +3,15 @@ * Licensed under the MIT License. */ +import { assert } from "@fluidframework/common-utils"; import { jsonableTreeFromCursor } from "../treeTextCursor"; -import { ITreeCursor, RevisionTag } from "../../core"; +import { ITreeCursor } from "../../core"; import { ChangesetLocalId, FieldEditor, NodeReviver } from "../modular-schema"; import { brand } from "../../util"; import { + CellId, Changeset, Insert, - LineageEvent, Mark, MoveId, NodeChangeType, @@ -26,8 +27,7 @@ export interface SequenceFieldEditor extends FieldEditor { revive( index: number, count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, + detachEvent: CellId, reviver: NodeReviver, isIntention?: true, ): Changeset; @@ -49,8 +49,7 @@ export interface SequenceFieldEditor extends FieldEditor { sourceIndex: number, count: number, destIndex: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, + detachEvent: CellId, ): Changeset; } @@ -73,23 +72,23 @@ export const sequenceFieldEditor = { }, delete: (index: number, count: number, id: ChangesetLocalId): Changeset => count === 0 ? [] : markAtIndex(index, { type: "Delete", count, id }), + revive: ( index: number, count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, + detachEvent: CellId, reviver: NodeReviver, isIntention: boolean = false, ): Changeset => { - const detachEvent = { revision: detachedBy, localId: detachId }; + assert(detachEvent.revision !== undefined, "Detach event must have a revision"); const mark: Reattach = { type: "Revive", - content: reviver(detachedBy, detachId, count), + content: reviver(detachEvent.revision, detachEvent.localId, count), count, - detachEvent, + cellId: detachEvent, }; if (!isIntention) { - mark.inverseOf = detachedBy; + mark.inverseOf = detachEvent.revision; } return count === 0 ? 
[] : markAtIndex(index, mark); }, @@ -119,15 +118,12 @@ export const sequenceFieldEditor = { sourceIndex: number, count: number, destIndex: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, - lineage?: LineageEvent[], + detachEvent: CellId, ): Changeset { if (count === 0) { return []; } - const detachEvent = { revision: detachedBy, localId: detachId }; const id = brand(0); const returnFrom: ReturnFrom = { type: "ReturnFrom", @@ -139,13 +135,9 @@ export const sequenceFieldEditor = { type: "ReturnTo", id, count, - detachEvent, + cellId: detachEvent, }; - if (lineage !== undefined) { - returnTo.lineage = lineage; - } - const factory = new MarkListFactory(); if (sourceIndex < destIndex) { factory.pushOffset(sourceIndex); diff --git a/experimental/dds/tree2/src/feature-libraries/sequence-field/utils.ts b/experimental/dds/tree2/src/feature-libraries/sequence-field/utils.ts index 8be5475972cb..cdfe330c38d9 100644 --- a/experimental/dds/tree2/src/feature-libraries/sequence-field/utils.ts +++ b/experimental/dds/tree2/src/feature-libraries/sequence-field/utils.ts @@ -20,7 +20,6 @@ import { Detach, HasChanges, HasRevisionTag, - HasTiebreakPolicy, Insert, LineageEvent, Mark, @@ -42,6 +41,9 @@ import { Transient, DetachedCellMark, CellTargetingMark, + CellId, + HasPlaceFields, + HasReattachFields, } from "./format"; import { MarkListFactory } from "./markListFactory"; import { isMoveMark, MoveEffectTable } from "./moveEffectTable"; @@ -91,8 +93,8 @@ export function isConflictedReattach( // TODO: Name is misleading export function isReattachConflicted(mark: Reattach): boolean { return ( - mark.detachEvent === undefined || - (mark.inverseOf !== undefined && mark.inverseOf !== mark.detachEvent.revision) + mark.cellId === undefined || + (mark.inverseOf !== undefined && mark.inverseOf !== mark.cellId.revision) ); } @@ -100,23 +102,25 @@ export function isReturnMuted(mark: ReturnTo): boolean { return mark.isSrcConflicted ?? isReattachConflicted(mark); } -export function areEqualDetachEvents(a: ChangeAtomId, b: ChangeAtomId): boolean { - return a.localId === b.localId && a.revision === b.revision; +export function areEqualCellIds(a: CellId | undefined, b: CellId | undefined): boolean { + if (a === undefined || b === undefined) { + return a === b; + } + return ( + a.localId === b.localId && a.revision === b.revision && areSameLineage(a.lineage, b.lineage) + ); } export function getCellId( mark: Mark, revision: RevisionTag | undefined, -): ChangeAtomId | undefined { +): CellId | undefined { if (isNewAttach(mark)) { const rev = mark.revision ?? revision; - if (rev !== undefined) { - return { revision: rev, localId: mark.id }; - } - return undefined; + return { revision: rev, localId: mark.id, lineage: mark.lineage }; } - return mark.detachEvent; + return mark.cellId; } export function cloneMark, TNodeChange>(mark: TMark): TMark { @@ -124,27 +128,52 @@ export function cloneMark, TNodeChange>(mark: TM if (clone.type === "Insert" || clone.type === "Revive") { clone.content = [...clone.content]; } - if (isAttach(clone) && clone.lineage !== undefined) { - clone.lineage = [...clone.lineage]; + if (isNewAttach(clone)) { + if (clone.lineage !== undefined) { + clone.lineage = [...clone.lineage]; + } + } else if (clone.cellId !== undefined) { + clone.cellId = { ...clone.cellId }; + if (clone.cellId.lineage !== undefined) { + clone.cellId.lineage = [...clone.cellId.lineage]; + } } return clone; } /** - * @returns `true` iff `lhs` and `rhs`'s `HasTiebreakPolicy` fields are structurally equal. 
+ * @returns `true` iff `lhs` and `rhs`'s `HasPlaceFields` fields are structurally equal.
  */
 export function isEqualPlace(
-	lhs: Readonly<HasTiebreakPolicy>,
-	rhs: Readonly<HasTiebreakPolicy>,
+	lhs: Readonly<HasPlaceFields>,
+	rhs: Readonly<HasPlaceFields>,
 ): boolean {
 	return (
 		lhs.heed === rhs.heed &&
 		lhs.tiebreak === rhs.tiebreak &&
-		areSameLineage(lhs.lineage ?? [], rhs.lineage ?? [])
+		areSameLineage(lhs.lineage, rhs.lineage)
 	);
 }

-function areSameLineage(lineage1: LineageEvent[], lineage2: LineageEvent[]): boolean {
+function haveEqualReattachFields(
+	lhs: Readonly<HasReattachFields>,
+	rhs: Readonly<HasReattachFields>,
+): boolean {
+	return lhs.inverseOf === rhs.inverseOf && areEqualCellIds(lhs.cellId, rhs.cellId);
+}
+
+function areSameLineage(
+	lineage1: LineageEvent[] | undefined,
+	lineage2: LineageEvent[] | undefined,
+): boolean {
+	if (lineage1 === undefined && lineage2 === undefined) {
+		return true;
+	}
+
+	if (lineage1 === undefined || lineage2 === undefined) {
+		return false;
+	}
+
 	if (lineage1.length !== lineage2.length) {
 		return false;
 	}
@@ -219,7 +248,7 @@ export function areInputCellsEmpty(mark: Mark): mark is EmptyInputCellMark
 		return true;
 	}

-	return mark.detachEvent !== undefined;
+	return mark.cellId !== undefined;
 }

 export function areOutputCellsEmpty(mark: Mark<unknown>): boolean {
@@ -236,17 +265,17 @@
 			return true;
 		case "Modify":
 		case "Placeholder":
-			return mark.detachEvent !== undefined;
+			return mark.cellId !== undefined;
 		case "ReturnFrom":
-			return mark.detachEvent !== undefined || !mark.isDstConflicted;
+			return mark.cellId !== undefined || !mark.isDstConflicted;
 		case "ReturnTo":
 			return (
-				mark.detachEvent !== undefined &&
+				mark.cellId !== undefined &&
 				((mark.isSrcConflicted ?? false) || isReattachConflicted(mark))
 			);
 		case "Revive":
 			return (
-				(mark.detachEvent !== undefined && isReattachConflicted(mark)) ||
+				(mark.cellId !== undefined && isReattachConflicted(mark)) ||
 				mark.transientDetach !== undefined
 			);
 		default:
@@ -369,13 +398,13 @@ export function tryExtendMark(lhs: Mark, rhs: Readonly>): boolean
 	if (isExistingCellMark(lhs)) {
 		assert(isExistingCellMark(rhs), 0x6a6 /* Should be existing cell mark */);
-		if (lhs.detachEvent?.revision !== rhs.detachEvent?.revision) {
+		if (lhs.cellId?.revision !== rhs.cellId?.revision) {
 			return false;
 		}

 		if (
-			lhs.detachEvent !== undefined &&
-			(lhs.detachEvent.localId as number) + getMarkLength(lhs) !== rhs.detachEvent?.localId
+			lhs.cellId !== undefined &&
+			(lhs.cellId.localId as number) + getMarkLength(lhs) !== rhs.cellId?.localId
 		) {
 			return false;
 		}
@@ -398,9 +427,8 @@ export function tryExtendMark(lhs: Mark, rhs: Readonly>): boolean
 			}
 			break;
 		}
-		case "MoveIn":
-		case "ReturnTo": {
-			const lhsMoveIn = lhs as MoveIn | ReturnTo;
+		case "MoveIn": {
+			const lhsMoveIn = lhs as MoveIn;
 			if (
 				isEqualPlace(lhsMoveIn, rhs) &&
 				lhsMoveIn.isSrcConflicted === rhs.isSrcConflicted &&
 				(lhsMoveIn.id as number) + lhsMoveIn.count === rhs.id
 			) {
 				lhsMoveIn.count += rhs.count;
 				return true;
 			}
 			break;
 		}
+		case "ReturnTo": {
+			const lhsReturnTo = lhs as ReturnTo;
+			if (
+				haveEqualReattachFields(lhsReturnTo, rhs) &&
+				lhsReturnTo.isSrcConflicted === rhs.isSrcConflicted &&
+				(lhsReturnTo.id as number) + lhsReturnTo.count === rhs.id
+			) {
+				lhsReturnTo.count += rhs.count;
+				return true;
+			}
+			break;
+		}
 		case "Delete": {
 			const lhsDetach = lhs as Detach;
 			if ((lhsDetach.id as number) + lhsDetach.count === rhs.id) {
@@ -461,8 +500,8 @@ export function tryExtendMark(lhs: Mark, rhs: Readonly>): boolean
  */
 export class DetachedNodeTracker {
 	// Maps the index for a node to its last characterization as a reattached node.
-	private nodes: Map<number, ChangeAtomId> = new Map();
-	private readonly equivalences: { old: ChangeAtomId; new: ChangeAtomId }[] = [];
+	private nodes: Map<number, CellId> = new Map();
+	private readonly equivalences: { old: CellId; new: CellId }[] = [];

 	public constructor() {}

@@ -477,7 +516,7 @@ export class DetachedNodeTracker {
 			const inputLength: number = getInputLength(mark);
 			if (markEmptiesCells(mark)) {
 				assert(isDetachMark(mark), 0x70d /* Only detach marks should empty cells */);
-				const newNodes: Map<number, ChangeAtomId> = new Map();
+				const newNodes: Map<number, CellId> = new Map();
 				const after = index + inputLength;
 				for (const [k, v] of this.nodes) {
 					if (k >= index) {
@@ -509,7 +548,7 @@ export class DetachedNodeTracker {
 		for (const mark of change.change) {
 			const inputLength: number = getInputLength(mark);
 			if (isActiveReattach(mark)) {
-				const newNodes: Map<number, ChangeAtomId> = new Map();
+				const newNodes: Map<number, CellId> = new Map();
 				for (const [k, v] of this.nodes) {
 					if (k >= index) {
 						newNodes.set(k + inputLength, v);
@@ -517,7 +556,7 @@ export class DetachedNodeTracker {
 						newNodes.set(k, v);
 					}
 				}
-				const detachEvent = mark.detachEvent ?? fail("Unable to track detached nodes");
+				const detachEvent = mark.cellId ?? fail("Unable to track detached nodes");
 				for (let i = 0; i < mark.count; ++i) {
 					newNodes.set(index + i, {
 						revision: detachEvent.revision,
@@ -541,11 +580,11 @@ export class DetachedNodeTracker {
 	public isApplicable(change: Changeset): boolean {
 		for (const mark of change) {
 			if (isActiveReattach(mark)) {
-				const detachEvent = mark.detachEvent ?? fail("Unable to track detached nodes");
+				const detachEvent = mark.cellId ?? fail("Unable to track detached nodes");
 				const revision = detachEvent.revision;
 				for (let i = 0; i < mark.count; ++i) {
 					const localId = brand((detachEvent.localId as number) + i);
-					const original: ChangeAtomId = { revision, localId };
+					const original: CellId = { revision, localId };
 					const updated = this.getUpdatedDetach(original);
 					for (const detached of this.nodes.values()) {
 						if (
@@ -598,15 +637,15 @@ export class DetachedNodeTracker {
 	}

 	private updateMark(mark: CellTargetingMark & DetachedCellMark): void {
-		const detachEvent = mark.detachEvent;
+		const detachEvent = mark.cellId;
 		const original = { revision: detachEvent.revision, localId: detachEvent.localId };
 		const updated = this.getUpdatedDetach(original);
 		if (updated.revision !== original.revision || updated.localId !== original.localId) {
-			mark.detachEvent = { ...updated };
+			mark.cellId = { ...updated };
 		}
 	}

-	private getUpdatedDetach(detach: ChangeAtomId): ChangeAtomId {
+	private getUpdatedDetach(detach: CellId): CellId {
 		let curr = detach;
 		for (const eq of this.equivalences) {
 			if (curr.revision === eq.old.revision && curr.localId === eq.old.localId) {
@@ -637,8 +676,8 @@ export function areRebasable(branch: Changeset, target: Changeset>(mark: TMark, length: number)
 	const mark1: TMark = { ...mark, count: length };
 	const mark2: TMark = { ...mark, id: (mark.id as number) + length, count: remainder };
 	if (mark.type === "ReturnTo") {
-		if (mark.detachEvent !== undefined) {
-			(mark2 as ReturnTo).detachEvent = splitDetachEvent(mark.detachEvent, length);
+		if (mark.cellId !== undefined) {
+			(mark2 as ReturnTo).cellId = splitDetachEvent(mark.cellId, length);
 		}

 		return [mark1, mark2];
@@ -853,8 +892,8 @@ export function splitMark>(mark: TMark, length: number)
 			count: remainder,
 		};

-		if (mark.detachEvent !== undefined) {
-			(mark2 as Revive).detachEvent = splitDetachEvent(mark.detachEvent, length);
+		if (mark.cellId !== undefined) {
+			(mark2 as Revive).cellId = splitDetachEvent(mark.cellId,
length); } if (mark.transientDetach !== undefined) { (mark2 as Transient).transientDetach = { @@ -868,12 +907,12 @@ export function splitMark>(mark: TMark, length: number) const mark1 = { ...mark, count: length }; const id2: ChangesetLocalId = brand((mark.id as number) + length); const mark2 = - mark.detachEvent !== undefined + mark.cellId !== undefined ? { ...mark, id: id2, count: remainder, - detachEvent: splitDetachEvent(mark.detachEvent, length), + cellId: splitDetachEvent(mark.cellId, length), } : { ...mark, @@ -892,8 +931,8 @@ export function splitMark>(mark: TMark, length: number) id: (mark.id as number) + length, count: remainder, }; - if (mark.detachEvent !== undefined) { - (mark2 as Detach).detachEvent = splitDetachEvent(mark.detachEvent, length); + if (mark.cellId !== undefined) { + (mark2 as Detach).cellId = splitDetachEvent(mark.cellId, length); } return [mark1, mark2]; } @@ -904,7 +943,7 @@ export function splitMark>(mark: TMark, length: number) } } -function splitDetachEvent(detachEvent: ChangeAtomId, length: number): ChangeAtomId { +function splitDetachEvent(detachEvent: CellId, length: number): CellId { return { ...detachEvent, localId: brand((detachEvent.localId as number) + length) }; } diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/compose.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/compose.spec.ts index 3dad043f0cb0..7d87ece742c8 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/compose.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/compose.spec.ts @@ -103,7 +103,11 @@ describe("SequenceField - Compose", () => { it("delete ○ revive => Noop", () => { const deletion = tagChange(Change.delete(0, 1), tag1); - const insertion = tagRollbackInverse(Change.revive(0, 1, tag1), tag2, tag1); + const insertion = tagRollbackInverse( + Change.revive(0, 1, { revision: tag1, localId: brand(0) }), + tag2, + tag1, + ); const actual = shallowCompose([deletion, insertion]); assert.deepEqual(actual, cases.no_change); }); @@ -141,7 +145,7 @@ describe("SequenceField - Compose", () => { const modify: SF.Modify = { type: "Modify", changes: TestChange.mint([], 42), - detachEvent: detach, + cellId: detach, }; const actual = compose([makeAnonChange([insert]), makeAnonChange([modify])], revInfos); assert.deepEqual(actual, [insert]); @@ -154,7 +158,7 @@ describe("SequenceField - Compose", () => { }; const revive: SF.Revive = { type: "Revive", - detachEvent: { + cellId: { revision: tag1, localId: brand(0), }, @@ -165,7 +169,7 @@ describe("SequenceField - Compose", () => { const modify: SF.Modify = { type: "Modify", changes: TestChange.mint([], 42), - detachEvent: detach, + cellId: detach, }; const actual = compose([makeAnonChange([revive]), makeAnonChange([modify])], revInfos); assert.deepEqual(actual, [revive]); @@ -185,7 +189,7 @@ describe("SequenceField - Compose", () => { const revive: SF.Revive = { type: "Revive", changes: TestChange.mint([], 42), - detachEvent: detach, + cellId: detach, count: 1, content: fakeRepair(tag2, 0, 1), }; @@ -245,7 +249,7 @@ describe("SequenceField - Compose", () => { }); it("revive ○ modify", () => { - const revive = Change.revive(0, 3, tag1, brand(0)); + const revive = Change.revive(0, 3, { revision: tag1, localId: brand(0) }); const childChange = TestChange.mint([0, 1], 2); const modify = Change.modify(0, childChange); const expected: TestChangeset = [ @@ -253,7 +257,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: 
fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, changes: childChange, inverseOf: tag1, }, @@ -261,7 +265,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 1, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(1) }, + cellId: { revision: tag1, localId: brand(1) }, inverseOf: tag1, }, ]; @@ -281,7 +285,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, changes: childChangeA, }, ]; @@ -296,7 +300,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, changes: childChangeAB, }, ]; @@ -557,7 +561,7 @@ describe("SequenceField - Compose", () => { }); it("revive ○ delete", () => { - const revive = Change.revive(0, 5, tag1, brand(0)); + const revive = Change.revive(0, 5, { revision: tag1, localId: brand(0) }); const deletion: SF.Changeset = [ { count: 1 }, { type: "Delete", id: brand(0), count: 1 }, @@ -570,14 +574,14 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, inverseOf: tag1, }, { type: "Revive", content: fakeRepair(tag1, 1, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(1) }, + cellId: { revision: tag1, localId: brand(1) }, inverseOf: tag1, transientDetach: { revision: tag2, localId: brand(0) }, }, @@ -585,14 +589,14 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 2, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(2) }, + cellId: { revision: tag1, localId: brand(2) }, inverseOf: tag1, }, { type: "Revive", content: fakeRepair(tag1, 3, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(3) }, + cellId: { revision: tag1, localId: brand(3) }, inverseOf: tag1, transientDetach: { revision: tag2, localId: brand(1) }, }, @@ -609,7 +613,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent, + cellId: detachEvent, changes: childChange, }, ]; @@ -620,7 +624,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent, + cellId: detachEvent, changes: childChange, revision: tag2, transientDetach: { revision: tag3, localId: brand(0) }, @@ -658,7 +662,7 @@ describe("SequenceField - Compose", () => { }); it("revive ○ insert", () => { - const revive = Change.revive(0, 5, tag1, brand(0)); + const revive = Change.revive(0, 5, { revision: tag1, localId: brand(0) }); const insert = Change.insert(0, 1, 2); // TODO: test with merge-right policy as well const expected: SF.Changeset = [ @@ -667,7 +671,7 @@ describe("SequenceField - Compose", () => { type: "Revive", content: fakeRepair(tag1, 0, 5), count: 5, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, inverseOf: tag1, }, ]; @@ -710,13 +714,13 @@ describe("SequenceField - Compose", () => { it("modify ○ revive", () => { const childChange = TestChange.mint([0, 1], 2); const modify = Change.modify(0, childChange); - const revive = Change.revive(0, 2, tag1, brand(0)); + const revive = Change.revive(0, 
2, { revision: tag1, localId: brand(0) }); const expected: TestChangeset = [ { type: "Revive", content: fakeRepair(tag1, 0, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, inverseOf: tag1, }, { @@ -731,15 +735,16 @@ describe("SequenceField - Compose", () => { it("delete ○ revive (different earlier nodes)", () => { const deletion = tagChange(Change.delete(0, 2), tag1); const lineage: SF.LineageEvent[] = [{ revision: tag1, id: brand(0), count: 2, offset: 0 }]; - const revive = makeAnonChange(Change.revive(0, 2, tag2, brand(0), undefined, lineage)); + const revive = makeAnonChange( + Change.revive(0, 2, { revision: tag2, localId: brand(0), lineage }), + ); const expected: SF.Changeset = [ { type: "Revive", content: fakeRepair(tag2, 0, 2), count: 2, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0), lineage }, inverseOf: tag2, - lineage, }, { type: "Delete", id: brand(0), count: 2, revision: tag1 }, ]; @@ -750,16 +755,17 @@ describe("SequenceField - Compose", () => { it("delete ○ revive (different in-between nodes)", () => { const deletion = tagChange(Change.delete(0, 2), tag1); const lineage: SF.LineageEvent[] = [{ revision: tag1, id: brand(0), count: 2, offset: 1 }]; - const revive = makeAnonChange(Change.revive(0, 2, tag2, brand(0), undefined, lineage)); + const revive = makeAnonChange( + Change.revive(0, 2, { revision: tag2, localId: brand(0), lineage }), + ); const expected: SF.Changeset = [ { type: "Delete", id: brand(0), count: 1, revision: tag1 }, { type: "Revive", content: fakeRepair(tag2, 0, 2), count: 2, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0), lineage }, inverseOf: tag2, - lineage, }, { type: "Delete", id: brand(1), count: 1, revision: tag1 }, ]; @@ -770,16 +776,17 @@ describe("SequenceField - Compose", () => { it("delete ○ revive (different later nodes)", () => { const deletion = tagChange(Change.delete(0, 2), tag1); const lineage: SF.LineageEvent[] = [{ revision: tag1, id: brand(0), count: 2, offset: 2 }]; - const revive = makeAnonChange(Change.revive(0, 2, tag2, brand(0), undefined, lineage)); + const revive = makeAnonChange( + Change.revive(0, 2, { revision: tag2, localId: brand(0), lineage }), + ); const expected: SF.Changeset = [ { type: "Delete", id: brand(0), count: 2, revision: tag1 }, { type: "Revive", content: fakeRepair(tag2, 0, 2), count: 2, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0), lineage }, inverseOf: tag2, - lineage, }, ]; const actual = shallowCompose([deletion, revive]); @@ -791,9 +798,11 @@ describe("SequenceField - Compose", () => { const delete2 = Change.delete(0, 2); // The revive needs lineage to describe the precise gap in which it is reviving the nodes. // Such lineage would normally be acquired by rebasing the revive over the second delete. 
- const revive = Change.revive(0, 1, tag1, brand(1), undefined, [ - { revision: tag2, id: brand(0), count: 2, offset: 1 }, - ]); + const revive = Change.revive(0, 1, { + revision: tag1, + localId: brand(1), + lineage: [{ revision: tag2, id: brand(0), count: 2, offset: 1 }], + }); const expected: SF.Changeset = [ { type: "Delete", id: brand(0), count: 1, revision: tag2 }, { type: "Delete", id: brand(0), count: 1, revision: tag1 }, @@ -812,7 +821,7 @@ describe("SequenceField - Compose", () => { it("delete1 ○ delete2 ○ revive (delete2)", () => { const delete1 = Change.delete(1, 3); const delete2 = Change.delete(0, 2); - const revive = Change.revive(0, 2, tag2, brand(0)); + const revive = Change.revive(0, 2, { revision: tag2, localId: brand(0) }); const expected: SF.Changeset = [ { count: 1 }, { type: "Delete", id: brand(0), count: 3, revision: tag1 }, @@ -827,23 +836,22 @@ describe("SequenceField - Compose", () => { it("reviveAA ○ reviveB => BAA", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 1 }]; - const reviveAA = Change.revive(0, 2, tag1, brand(1), undefined, lineage); - const reviveB = Change.revive(0, 1, tag2, brand(0)); + const reviveAA = Change.revive(0, 2, { revision: tag1, localId: brand(1), lineage }); + const reviveB = Change.revive(0, 1, { revision: tag2, localId: brand(0) }); const expected: SF.Changeset = [ { type: "Revive", content: fakeRepair(tag2, 0, 1), count: 1, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, inverseOf: tag2, }, { type: "Revive", content: fakeRepair(tag1, 1, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(1) }, + cellId: { revision: tag1, localId: brand(1), lineage }, inverseOf: tag1, - lineage, }, ]; const actual = shallowCompose([makeAnonChange(reviveAA), makeAnonChange(reviveB)]); @@ -852,30 +860,29 @@ describe("SequenceField - Compose", () => { it("reviveA ○ reviveBB => BAB", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 2, offset: 1 }]; - const reviveA = Change.revive(0, 1, tag1, brand(1), undefined, lineage); - const reviveB1 = Change.revive(0, 1, tag2, brand(0)); - const reviveB2 = Change.revive(2, 1, tag2, brand(1)); + const reviveA = Change.revive(0, 1, { revision: tag1, localId: brand(1), lineage }); + const reviveB1 = Change.revive(0, 1, { revision: tag2, localId: brand(0) }); + const reviveB2 = Change.revive(2, 1, { revision: tag2, localId: brand(1) }); const expected: SF.Changeset = [ { type: "Revive", content: fakeRepair(tag2, 0, 1), count: 1, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, inverseOf: tag2, }, { type: "Revive", content: fakeRepair(tag1, 1, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(1) }, + cellId: { revision: tag1, localId: brand(1), lineage }, inverseOf: tag1, - lineage, }, { type: "Revive", content: fakeRepair(tag2, 1, 1), count: 1, - detachEvent: { revision: tag2, localId: brand(1) }, + cellId: { revision: tag2, localId: brand(1) }, inverseOf: tag2, }, ]; @@ -889,22 +896,21 @@ describe("SequenceField - Compose", () => { it("reviveAA ○ reviveB => AAB", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 0 }]; - const reviveA = Change.revive(0, 2, tag1, brand(0), undefined, lineage); - const reviveB = Change.revive(2, 1, tag2, brand(0)); + const reviveA = Change.revive(0, 2, { revision: tag1, localId: brand(0), lineage }); + const reviveB = Change.revive(2, 1, 
{ revision: tag2, localId: brand(0) }); const expected: SF.Changeset = [ { type: "Revive", content: fakeRepair(tag1, 0, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0), lineage }, inverseOf: tag1, - lineage, }, { type: "Revive", content: fakeRepair(tag2, 0, 1), count: 1, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, inverseOf: tag2, }, ]; @@ -913,14 +919,14 @@ describe("SequenceField - Compose", () => { }); it("revive ○ redundant revive", () => { - const reviveA = Change.revive(0, 2, tag1, brand(0)); - const reviveB = Change.redundantRevive(0, 2, tag1, brand(0)); + const reviveA = Change.revive(0, 2, { revision: tag1, localId: brand(0) }); + const reviveB = Change.redundantRevive(0, 2, { revision: tag1, localId: brand(0) }); const expected: SF.Changeset = [ { type: "Revive", content: fakeRepair(tag1, 0, 2), count: 2, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, inverseOf: tag1, revision: tag2, }, @@ -949,7 +955,7 @@ describe("SequenceField - Compose", () => { revision: tag3, content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, }, { count: 4 }, { @@ -957,7 +963,7 @@ describe("SequenceField - Compose", () => { revision: tag4, content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, }, ]; const actual = shallowCompose([makeAnonChange(insert), makeAnonChange(revive)], revInfos); @@ -967,7 +973,7 @@ describe("SequenceField - Compose", () => { revision: tag3, count: 1, content: fakeRepair(tag1, 0, 1), - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, }, { type: "Insert", revision: tag1, content: [{ type, value: 1 }], id: brand(1) }, { count: 2 }, @@ -977,7 +983,7 @@ describe("SequenceField - Compose", () => { revision: tag4, content: fakeRepair(tag1, 0, 1), count: 1, - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, }, { type: "Insert", revision: tag2, content: [{ type, value: 3 }], id: brand(3) }, ]; @@ -1006,8 +1012,14 @@ describe("SequenceField - Compose", () => { }); it("return ○ return", () => { - const return1 = tagChange(Change.return(0, 1, 3, tag2, brand(0)), tag3); - const return2 = tagChange(Change.return(3, 1, 0, tag3, brand(0)), tag4); + const return1 = tagChange( + Change.return(0, 1, 3, { revision: tag2, localId: brand(0) }), + tag3, + ); + const return2 = tagChange( + Change.return(3, 1, 0, { revision: tag3, localId: brand(0) }), + tag4, + ); const actual = shallowCompose([return1, return2]); assert.deepEqual(actual, []); }); @@ -1058,17 +1070,17 @@ describe("SequenceField - Compose", () => { // Revision 4 modifies B const nodeChange1 = "Change1"; const nodeChange2 = "Change2"; - const detach1: ChangeAtomId = { revision: tag1, localId: brand(0) }; - const detach2: ChangeAtomId = { revision: tag2, localId: brand(0) }; - const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 0 }]; - const modify1 = Change.modifyDetached(0, nodeChange1, detach1, lineage); + const detach1: SF.CellId = { revision: tag1, localId: brand(0), lineage }; + const detach2: SF.CellId = { revision: tag2, localId: brand(0) }; + + const modify1 = Change.modifyDetached(0, nodeChange1, detach1); const modify2 = 
Change.modifyDetached(0, nodeChange2, detach2); const actual = shallowCompose([tagChange(modify1, tag3), tagChange(modify2, tag4)]); const expected: SF.Changeset = [ - { type: "Modify", changes: nodeChange1, detachEvent: detach1, lineage }, - { type: "Modify", changes: nodeChange2, detachEvent: detach2 }, + { type: "Modify", changes: nodeChange1, cellId: detach1 }, + { type: "Modify", changes: nodeChange2, cellId: detach2 }, ]; assert.deepEqual(actual, expected); @@ -1082,17 +1094,17 @@ describe("SequenceField - Compose", () => { // Revision 4 modifies A const nodeChange1 = "Change1"; const nodeChange2 = "Change2"; - const detach1: ChangeAtomId = { revision: tag1, localId: brand(1) }; - const detach2: ChangeAtomId = { revision: tag2, localId: brand(0) }; - const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 1 }]; - const modify1 = Change.modifyDetached(0, nodeChange1, detach1, lineage); + const detach1: SF.CellId = { revision: tag1, localId: brand(1), lineage }; + const detach2: SF.CellId = { revision: tag2, localId: brand(0) }; + + const modify1 = Change.modifyDetached(0, nodeChange1, detach1); const modify2 = Change.modifyDetached(0, nodeChange2, detach2); const actual = shallowCompose([tagChange(modify1, tag3), tagChange(modify2, tag4)]); const expected: SF.Changeset = [ - { type: "Modify", changes: nodeChange2, detachEvent: detach2 }, - { type: "Modify", changes: nodeChange1, detachEvent: detach1, lineage }, + { type: "Modify", changes: nodeChange2, cellId: detach2 }, + { type: "Modify", changes: nodeChange1, cellId: detach1 }, ]; assert.deepEqual(actual, expected); @@ -1106,17 +1118,17 @@ describe("SequenceField - Compose", () => { // Revision 4 modifies A const nodeChange1 = "Change1"; const nodeChange2 = "Change2"; - const detach1: ChangeAtomId = { revision: tag1, localId: brand(0) }; - const detach2: ChangeAtomId = { revision: tag2, localId: brand(0) }; - const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 0 }]; + const detach1: SF.CellId = { revision: tag1, localId: brand(0), lineage }; + const detach2: SF.CellId = { revision: tag2, localId: brand(0) }; + const modify1 = Change.modifyDetached(0, nodeChange1, detach2); - const modify2 = Change.modifyDetached(0, nodeChange2, detach1, lineage); + const modify2 = Change.modifyDetached(0, nodeChange2, detach1); const actual = shallowCompose([tagChange(modify1, tag3), tagChange(modify2, tag4)]); const expected: SF.Changeset = [ - { type: "Modify", changes: nodeChange2, detachEvent: detach1, lineage }, - { type: "Modify", changes: nodeChange1, detachEvent: detach2 }, + { type: "Modify", changes: nodeChange2, cellId: detach1 }, + { type: "Modify", changes: nodeChange1, cellId: detach2 }, ]; assert.deepEqual(actual, expected); @@ -1130,17 +1142,18 @@ describe("SequenceField - Compose", () => { // Revision 4 modifies B const nodeChange1 = "Change1"; const nodeChange2 = "Change2"; - const detach1: ChangeAtomId = { revision: tag1, localId: brand(1) }; - const detach2: ChangeAtomId = { revision: tag2, localId: brand(0) }; const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 1 }]; + const detach1: SF.CellId = { revision: tag1, localId: brand(1), lineage }; + const detach2: SF.CellId = { revision: tag2, localId: brand(0) }; + const modify1 = Change.modifyDetached(0, nodeChange1, detach2); - const modify2 = Change.modifyDetached(0, nodeChange2, detach1, lineage); + const modify2 = Change.modifyDetached(0, nodeChange2, detach1); const actual = 
shallowCompose([tagChange(modify1, tag3), tagChange(modify2, tag4)]); const expected: SF.Changeset = [ - { type: "Modify", changes: nodeChange1, detachEvent: detach2 }, - { type: "Modify", changes: nodeChange2, detachEvent: detach1, lineage }, + { type: "Modify", changes: nodeChange1, cellId: detach2 }, + { type: "Modify", changes: nodeChange2, cellId: detach1 }, ]; assert.deepEqual(actual, expected); @@ -1148,8 +1161,22 @@ describe("SequenceField - Compose", () => { it("adjacent blocked revives", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 1 }]; - const revive1 = Change.blockedRevive(0, 5, tag1, tag2, brand(0)); - const revive2 = Change.blockedRevive(0, 4, tag3, tag4, brand(0), undefined, lineage); + const revive1 = Change.blockedRevive( + 0, + 5, + { revision: tag1, localId: brand(0) }, + { revision: tag2, localId: brand(0) }, + ); + const revive2 = Change.blockedRevive( + 0, + 4, + { revision: tag3, localId: brand(0) }, + { + revision: tag4, + localId: brand(0), + lineage, + }, + ); const actual = shallowCompose([tagChange(revive1, tag5), tagChange(revive2, tag6)]); const expected: SF.Changeset = [ @@ -1158,7 +1185,7 @@ describe("SequenceField - Compose", () => { revision: tag5, count: 5, content: fakeRepair(tag1, 0, 5), - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, inverseOf: tag1, }, { @@ -1166,9 +1193,8 @@ describe("SequenceField - Compose", () => { revision: tag6, count: 4, content: fakeRepair(tag3, 0, 4), - detachEvent: { revision: tag4, localId: brand(0) }, + cellId: { revision: tag4, localId: brand(0), lineage }, inverseOf: tag3, - lineage, }, ]; diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/invert.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/invert.spec.ts index c2a08998f5e3..592e04bde0f6 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/invert.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/invert.spec.ts @@ -73,7 +73,7 @@ describe("SequenceField - Invert", () => { it("delete => revive", () => { const input = composeAnonChanges([Change.modify(0, childChange1), Change.delete(0, 2)]); const expected = composeAnonChanges([ - Change.revive(0, 2, tag1, brand(0)), + Change.revive(0, 2, { revision: tag1, localId: brand(0) }), Change.modify(0, inverseChildChange1), ]); const actual = invert(input); @@ -81,7 +81,7 @@ describe("SequenceField - Invert", () => { }); it("revert-only active revive => delete", () => { - const revive = Change.revive(0, 2, tag1, brand(0)); + const revive = Change.revive(0, 2, { revision: tag1, localId: brand(0) }); const modify = Change.modify(0, childChange1); const input = composeAnonChanges([revive, modify]); const expected = composeAnonChanges([ @@ -93,7 +93,7 @@ describe("SequenceField - Invert", () => { }); it("intentional active revive => delete", () => { - const input = Change.intentionalRevive(0, 2, tag1, brand(0)); + const input = Change.intentionalRevive(0, 2, { revision: tag1, localId: brand(0) }); const expected = Change.delete(0, 2); const actual = invert(input); assert.deepEqual(actual, expected); @@ -103,7 +103,7 @@ describe("SequenceField - Invert", () => { const input = composeAnonChanges([Change.modify(0, childChange1), Change.move(0, 2, 3)]); const expected = composeAnonChanges([ Change.modify(3, inverseChildChange1), - Change.return(3, 2, 0, tag1, brand(0)), + Change.return(3, 2, 0, { revision: tag1, localId: 
brand(0) }), ]); const actual = invert(input); assert.deepEqual(actual, expected); @@ -113,7 +113,7 @@ describe("SequenceField - Invert", () => { const input = composeAnonChanges([Change.modify(3, childChange1), Change.move(2, 2, 0)]); const expected = composeAnonChanges([ Change.modify(1, inverseChildChange1), - Change.return(0, 2, 2, tag1, brand(0)), + Change.return(0, 2, 2, { revision: tag1, localId: brand(0) }), ]); const actual = invert(input); assert.deepEqual(actual, expected); @@ -122,11 +122,11 @@ describe("SequenceField - Invert", () => { it("return => return", () => { const input = composeAnonChanges([ Change.modify(0, childChange1), - Change.return(0, 2, 3, tag1, brand(0)), + Change.return(0, 2, 3, { revision: tag1, localId: brand(0) }), ]); const expected = composeAnonChanges([ Change.modify(3, inverseChildChange1), - Change.return(3, 2, 0, tag1, brand(0)), + Change.return(3, 2, 0, { revision: tag1, localId: brand(0) }), ]); const actual = invert(input); assert.deepEqual(actual, expected); @@ -141,7 +141,7 @@ describe("SequenceField - Invert", () => { id: brand(0), count: 1, changes: childChange1, - detachEvent, + cellId: detachEvent, }, ]; @@ -159,7 +159,7 @@ describe("SequenceField - Invert", () => { count: 1, id: brand(0), changes: childChange1, - detachEvent, + cellId: detachEvent, }, { type: "MoveIn", @@ -205,7 +205,12 @@ describe("SequenceField - Invert", () => { it("revert-only blocked revive => no-op", () => { const input = composeAnonChanges([ Change.modify(0, childChange1), - Change.blockedRevive(1, 2, tag1, tag2, brand(0)), + Change.blockedRevive( + 1, + 2, + { revision: tag1, localId: brand(0) }, + { revision: tag2, localId: brand(0) }, + ), Change.modify(1, childChange2), ]); const expected = composeAnonChanges([ @@ -219,7 +224,13 @@ describe("SequenceField - Invert", () => { it("intentional redundant revive => skip", () => { const input = composeAnonChanges([ Change.modify(0, childChange1), - Change.redundantRevive(1, 1, tag1, brand(0), undefined, true), + Change.redundantRevive( + 1, + 1, + { revision: tag1, localId: brand(0) }, + undefined, + true, + ), Change.modify(2, childChange2), ]); const expected = composeAnonChanges([ @@ -243,7 +254,7 @@ describe("SequenceField - Invert", () => { type: "ReturnTo", count: 1, id: brand(0), - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, inverseOf: tag1, }, { @@ -265,7 +276,7 @@ describe("SequenceField - Invert", () => { type: "MoveOut", count: 1, id: brand(0), - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, }, { type: "Modify", @@ -296,7 +307,7 @@ describe("SequenceField - Invert", () => { type: "ReturnFrom", count: 1, id: brand(0), - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, }, { type: "Modify", @@ -306,7 +317,7 @@ describe("SequenceField - Invert", () => { type: "ReturnTo", count: 1, id: brand(0), - detachEvent: { revision: tag1, localId: brand(0) }, + cellId: { revision: tag1, localId: brand(0) }, isSrcConflicted: true, }, { @@ -328,7 +339,7 @@ describe("SequenceField - Invert", () => { type: "ReturnFrom", count: 1, id: brand(0), - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, isDstConflicted: true, }, { diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/markListFactory.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/markListFactory.spec.ts index 
0468e9a93a01..4a1b5540b97c 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/markListFactory.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/markListFactory.spec.ts @@ -119,13 +119,13 @@ describe("SequenceField - MarkListFactory", () => { const factory = new SF.MarkListFactory(); const revive1: SF.Reattach = { type: "Revive", - detachEvent: { revision: detachedBy, localId: brand(0) }, + cellId: { revision: detachedBy, localId: brand(0) }, content: fakeRepair(detachedBy, 0, 1), count: 1, }; const revive2: SF.Reattach = { type: "Revive", - detachEvent: { revision: detachedBy, localId: brand(1) }, + cellId: { revision: detachedBy, localId: brand(1) }, content: fakeRepair(detachedBy, 1, 1), count: 1, }; @@ -133,7 +133,7 @@ describe("SequenceField - MarkListFactory", () => { factory.pushContent(revive2); const expected: SF.Reattach = { type: "Revive", - detachEvent: { revision: detachedBy, localId: brand(0) }, + cellId: { revision: detachedBy, localId: brand(0) }, content: fakeRepair(detachedBy, 0, 2), count: 2, }; @@ -144,13 +144,13 @@ describe("SequenceField - MarkListFactory", () => { const factory = new SF.MarkListFactory(); const revive1: SF.Reattach = { type: "Revive", - detachEvent: { revision: detachedBy, localId: brand(0) }, + cellId: { revision: detachedBy, localId: brand(0) }, content: fakeRepair(detachedBy, 0, 1), count: 1, }; const revive2: SF.Reattach = { type: "Revive", - detachEvent: { revision: detachedBy, localId: brand(2) }, + cellId: { revision: detachedBy, localId: brand(2) }, content: fakeRepair(detachedBy, 2, 1), count: 1, }; diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/rebase.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/rebase.spec.ts index c5d094ac6383..f2caebb4ba03 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/rebase.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/rebase.spec.ts @@ -67,9 +67,9 @@ describe("SequenceField - Rebase", () => { it("revive ↷ modify", () => { const revive = composeAnonChanges([ - Change.revive(0, 2, tag1, brand(0), rebaseRepair), - Change.revive(4, 2, tag1, brand(2), rebaseRepair), - Change.revive(10, 2, tag1, brand(4), rebaseRepair), + Change.revive(0, 2, { revision: tag1, localId: brand(0) }, rebaseRepair), + Change.revive(4, 2, { revision: tag1, localId: brand(2) }, rebaseRepair), + Change.revive(10, 2, { revision: tag1, localId: brand(4) }, rebaseRepair), ]); const mods = composeAnonChanges([ Change.modify(0, TestChange.mint([0], 1)), @@ -119,43 +119,60 @@ describe("SequenceField - Rebase", () => { it("revive ↷ delete", () => { const revive = composeAnonChanges([ - Change.revive(0, 1, tag1, brand(0), rebaseRepair), - Change.revive(3, 1, tag1, brand(1), rebaseRepair), - Change.revive(8, 1, tag1, brand(2), rebaseRepair), + Change.revive(0, 1, { revision: tag1, localId: brand(0) }, rebaseRepair), + Change.revive(3, 1, { revision: tag1, localId: brand(1) }, rebaseRepair), + Change.revive(8, 1, { revision: tag1, localId: brand(2) }, rebaseRepair), ]); const deletion = Change.delete(1, 3); const actual = rebase(revive, deletion, tag2); const expected = composeAnonChanges([ // Rebase does not affect the stored repair data - Change.revive(0, 1, tag1, brand(0), rebaseRepair), - Change.revive(2, 1, tag1, brand(1), rebaseRepair, [ - { revision: tag2, id: brand(0), count: 3, offset: 1 }, - ]), - Change.revive(5, 1, tag1, brand(2), rebaseRepair), + Change.revive(0, 1, { 
revision: tag1, localId: brand(0) }, rebaseRepair), + Change.revive( + 2, + 1, + { + revision: tag1, + localId: brand(1), + lineage: [{ revision: tag2, id: brand(0), count: 3, offset: 1 }], + }, + rebaseRepair, + ), + Change.revive(5, 1, { revision: tag1, localId: brand(2) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); it("redundant revive ↷ related delete", () => { - const revive = Change.redundantRevive(0, 3, tag1, brand(1), rebaseRepair); + const revive = Change.redundantRevive( + 0, + 3, + { revision: tag1, localId: brand(1) }, + rebaseRepair, + ); const deletion = Change.delete(1, 1); const actual = rebase(revive, deletion, tag2); const expected = composeAnonChanges([ // Earlier revive is unaffected - Change.redundantRevive(0, 1, tag1, brand(1), rebaseRepair), + Change.redundantRevive(0, 1, { revision: tag1, localId: brand(1) }, rebaseRepair), // Overlapping revive is no longer redundant - Change.revive(1, 1, tag1, brand(1), rebaseRepair, undefined, { + Change.revive(1, 1, { revision: tag1, localId: brand(1) }, rebaseRepair, { revision: tag2, localId: brand(0), }), // Later revive is unaffected - Change.redundantRevive(1, 1, tag1, brand(3), rebaseRepair), + Change.redundantRevive(1, 1, { revision: tag1, localId: brand(3) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); it("redundant revive ↷ unrelated delete", () => { - const revive = Change.redundantRevive(0, 3, tag1, brand(1), fakeRepair); + const revive = Change.redundantRevive( + 0, + 3, + { revision: tag1, localId: brand(1) }, + fakeRepair, + ); const deletion = Change.delete(1, 1); const actual = rebase(revive, deletion, tag3); const expected: SF.Changeset = [ @@ -170,7 +187,7 @@ describe("SequenceField - Rebase", () => { content: fakeRepair(tag1, 2, 1), count: 1, inverseOf: tag1, - detachEvent: { revision: tag3, localId: brand(0) }, + cellId: { revision: tag3, localId: brand(0) }, }, { type: "Revive", @@ -183,62 +200,80 @@ describe("SequenceField - Rebase", () => { }); it("blocked revive ↷ revive", () => { - const revive1 = Change.blockedRevive(0, 3, tag1, tag2, brand(1), fakeRepair); - const revive2 = Change.revive(0, 1, tag2, brand(2), fakeRepair); + const revive1 = Change.blockedRevive( + 0, + 3, + { revision: tag1, localId: brand(0) }, + { revision: tag2, localId: brand(1) }, + fakeRepair, + ); + const revive2 = Change.revive(0, 1, { revision: tag2, localId: brand(2) }, fakeRepair); const actual = rebase(revive1, revive2, tag2); const expected: SF.Changeset = [ { type: "Revive", - content: fakeRepair(tag1, 1, 1), + content: fakeRepair(tag1, 0, 1), count: 1, inverseOf: tag1, - detachEvent: { revision: tag2, localId: brand(1) }, + cellId: { revision: tag2, localId: brand(1) }, }, { type: "Revive", - content: fakeRepair(tag1, 2, 1), + content: fakeRepair(tag1, 1, 1), count: 1, inverseOf: tag1, }, { type: "Revive", - content: fakeRepair(tag1, 3, 1), + content: fakeRepair(tag1, 2, 1), count: 1, inverseOf: tag1, - detachEvent: { revision: tag2, localId: brand(3) }, + cellId: { revision: tag2, localId: brand(3) }, }, ]; assert.deepEqual(actual, expected); }); it("redundant intentional revive ↷ related delete", () => { - const revive = Change.redundantRevive(0, 3, tag1, brand(1), rebaseRepair, true); + const revive = Change.redundantRevive( + 0, + 3, + { revision: tag1, localId: brand(1) }, + rebaseRepair, + true, + ); const deletion = Change.delete(1, 1); const actual = rebase(revive, deletion, tag2); const expected = composeAnonChanges([ // Earlier revive is unaffected - Change.redundantRevive(0, 1, 
tag1, brand(1), rebaseRepair, true), + Change.redundantRevive(0, 1, { revision: tag1, localId: brand(1) }, rebaseRepair, true), // Overlapping revive is no longer conflicted. // It now references the target node to revive using the latest delete. - Change.intentionalRevive(1, 1, tag2, brand(0), rebaseRepair), + Change.intentionalRevive(1, 1, { revision: tag2, localId: brand(0) }, rebaseRepair), // Later revive is unaffected - Change.redundantRevive(2, 1, tag1, brand(3), rebaseRepair, true), + Change.redundantRevive(2, 1, { revision: tag1, localId: brand(3) }, rebaseRepair, true), ]); assert.deepEqual(actual, expected); }); it("redundant intentional revive ↷ unrelated delete", () => { - const revive = Change.redundantRevive(0, 3, tag1, brand(1), rebaseRepair, true); + const revive = Change.redundantRevive( + 0, + 3, + { revision: tag1, localId: brand(1) }, + rebaseRepair, + true, + ); const deletion = Change.delete(1, 1); const actual = rebase(revive, deletion, tag3); const expected = composeAnonChanges([ // Earlier revive is unaffected - Change.redundantRevive(0, 1, tag1, brand(1), rebaseRepair, true), + Change.redundantRevive(0, 1, { revision: tag1, localId: brand(1) }, rebaseRepair, true), // Overlapping revive is no longer conflicted. // It now references the target node to revive using the latest delete. - Change.intentionalRevive(1, 1, tag3, brand(0), rebaseRepair), + Change.intentionalRevive(1, 1, { revision: tag3, localId: brand(0) }, rebaseRepair), // Later revive gets linage - Change.redundantRevive(2, 1, tag1, brand(3), rebaseRepair, true), + Change.redundantRevive(2, 1, { revision: tag1, localId: brand(3) }, rebaseRepair, true), ]); assert.deepEqual(actual, expected); }); @@ -260,21 +295,21 @@ describe("SequenceField - Rebase", () => { type: "Delete", id: brand(0), count: 1, - detachEvent: { revision: tag1, localId: brand(1) }, + cellId: { revision: tag1, localId: brand(1) }, }, { type: "Delete", id: brand(1), count: 1 }, { type: "Delete", id: brand(2), count: 1, - detachEvent: { revision: tag1, localId: brand(2) }, + cellId: { revision: tag1, localId: brand(2) }, }, { type: "Delete", id: brand(3), count: 1 }, { type: "Delete", id: brand(4), count: 1, - detachEvent: { revision: tag1, localId: brand(3) }, + cellId: { revision: tag1, localId: brand(3) }, }, ]; checkDeltaEquality(actual, expected); @@ -323,23 +358,29 @@ describe("SequenceField - Rebase", () => { type: "MoveOut", count: 1, id: brand(0), - detachEvent: { revision: tag1, localId: brand(1) }, - lineage: [{ revision: tag1, id: brand(0), count: 1, offset: 1 }], + cellId: { + revision: tag1, + localId: brand(1), + lineage: [{ revision: tag1, id: brand(0), count: 1, offset: 1 }], + }, }, { type: "MoveOut", count: 1, id: brand(1) }, { type: "MoveOut", count: 1, id: brand(2), - detachEvent: { revision: tag1, localId: brand(2) }, + cellId: { revision: tag1, localId: brand(2) }, }, { type: "MoveOut", count: 1, id: brand(3) }, { type: "MoveOut", count: 1, id: brand(4), - detachEvent: { revision: tag1, localId: brand(3) }, - lineage: [{ revision: tag1, id: brand(4), count: 1, offset: 0 }], + cellId: { + revision: tag1, + localId: brand(3), + lineage: [{ revision: tag1, id: brand(4), count: 1, offset: 0 }], + }, }, ]; assert.deepEqual(actual, expected); @@ -393,28 +434,33 @@ describe("SequenceField - Rebase", () => { it("revive ↷ insert", () => { const revive = composeAnonChanges([ - Change.revive(0, 1, tag1, brand(0), rebaseRepair), - Change.revive(3, 2, tag1, brand(1), rebaseRepair), - Change.revive(7, 1, tag1, brand(3), 
rebaseRepair), + Change.revive(0, 1, { revision: tag1, localId: brand(0) }, rebaseRepair), + Change.revive(3, 2, { revision: tag1, localId: brand(1) }, rebaseRepair), + Change.revive(7, 1, { revision: tag1, localId: brand(3) }, rebaseRepair), ]); // TODO: test both tiebreak policies const insert = Change.insert(2, 1); const actual = rebase(revive, insert); const expected = composeAnonChanges([ - Change.revive(0, 1, tag1, brand(0), rebaseRepair), - Change.revive(4, 2, tag1, brand(1), rebaseRepair), - Change.revive(8, 1, tag1, brand(3), rebaseRepair), + Change.revive(0, 1, { revision: tag1, localId: brand(0) }, rebaseRepair), + Change.revive(4, 2, { revision: tag1, localId: brand(1) }, rebaseRepair), + Change.revive(8, 1, { revision: tag1, localId: brand(3) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); it("redundant revive ↷ insert", () => { - const revive = Change.redundantRevive(0, 3, tag1, brand(0), rebaseRepair); + const revive = Change.redundantRevive( + 0, + 3, + { revision: tag1, localId: brand(0) }, + rebaseRepair, + ); const insert = Change.insert(1, 1); const actual = rebase(revive, insert); const expected = composeAnonChanges([ - Change.redundantRevive(0, 1, tag1, brand(0), rebaseRepair), - Change.redundantRevive(2, 2, tag1, brand(1), rebaseRepair), + Change.redundantRevive(0, 1, { revision: tag1, localId: brand(0) }, rebaseRepair), + Change.redundantRevive(2, 2, { revision: tag1, localId: brand(1) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); @@ -424,7 +470,7 @@ describe("SequenceField - Rebase", () => { Change.modify(0, TestChange.mint([0], 1)), Change.modify(3, TestChange.mint([0], 2)), ]); - const revive = Change.revive(2, 1, tag1, brand(0), rebaseRepair); + const revive = Change.revive(2, 1, { revision: tag1, localId: brand(0) }, rebaseRepair); const expected = composeAnonChanges([ // Modify at earlier index is unaffected Change.modify(0, TestChange.mint([0], 1)), @@ -443,7 +489,7 @@ describe("SequenceField - Rebase", () => { Change.delete(2, 1, brand(3)), ]); // Revives content between C and D - const revive = Change.revive(3, 1, tag1, brand(0), rebaseRepair); + const revive = Change.revive(3, 1, { revision: tag1, localId: brand(0) }, rebaseRepair); const expected = composeAnonChanges([ // Delete with earlier index is unaffected Change.delete(0, 1, brand(0)), @@ -459,7 +505,7 @@ describe("SequenceField - Rebase", () => { it("insert ↷ revive", () => { const insert = composeAnonChanges([Change.insert(0, 1, 1), Change.insert(3, 1, 2)]); - const revive = Change.revive(1, 1, tag1, brand(0), rebaseRepair); + const revive = Change.revive(1, 1, { revision: tag1, localId: brand(0) }, rebaseRepair); const actual = rebase(insert, revive); const expected = composeAnonChanges([Change.insert(0, 1, 1), Change.insert(4, 1, 2)]); assert.deepEqual(actual, expected); @@ -467,47 +513,91 @@ describe("SequenceField - Rebase", () => { it("reviveAA ↷ reviveB => BAA", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 1 }]; - const reviveAA = Change.revive(0, 2, tag1, brand(0), rebaseRepair, lineage); - const reviveB = Change.revive(0, 1, tag2, brand(0), rebaseRepair); - const expected = Change.revive(1, 2, tag1, brand(0), rebaseRepair, lineage); + const reviveAA = Change.revive( + 0, + 2, + { revision: tag1, localId: brand(0), lineage }, + rebaseRepair, + ); + const reviveB = Change.revive(0, 1, { revision: tag2, localId: brand(0) }, rebaseRepair); + const expected = Change.revive( + 1, + 2, + { revision: tag1, localId: 
brand(0), lineage }, + rebaseRepair, + ); const actual = rebase(reviveAA, reviveB); assert.deepEqual(actual, expected); }); it("reviveAA ↷ reviveB => AAB", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(0), count: 1, offset: 0 }]; - const reviveAA = Change.revive(0, 2, tag1, brand(0), rebaseRepair, lineage); - const reviveB = Change.revive(0, 1, tag2, brand(0), rebaseRepair); - const expected = Change.revive(0, 2, tag1, brand(0), rebaseRepair, lineage); + const reviveAA = Change.revive( + 0, + 2, + { revision: tag1, localId: brand(0), lineage }, + rebaseRepair, + ); + const reviveB = Change.revive(0, 1, { revision: tag2, localId: brand(0) }, rebaseRepair); + const expected = Change.revive( + 0, + 2, + { revision: tag1, localId: brand(0), lineage }, + rebaseRepair, + ); const actual = rebase(reviveAA, reviveB); assert.deepEqual(actual, expected); }); it("reviveBB ↷ reviveA => BBA", () => { - const reviveBB = Change.revive(0, 2, tag2, brand(0), rebaseRepair); - const reviveA = Change.revive(0, 1, tag1, brand(1), rebaseRepair, [ - { revision: tag2, id: brand(0), count: 2, offset: 2 }, - ]); - const expected = Change.revive(0, 2, tag2, brand(0), rebaseRepair); + const reviveBB = Change.revive(0, 2, { revision: tag2, localId: brand(0) }, rebaseRepair); + const reviveA = Change.revive( + 0, + 1, + { + revision: tag1, + localId: brand(1), + lineage: [{ revision: tag2, id: brand(0), count: 2, offset: 2 }], + }, + rebaseRepair, + ); + const expected = Change.revive(0, 2, { revision: tag2, localId: brand(0) }, rebaseRepair); const actual = rebase(reviveBB, reviveA); assert.deepEqual(actual, expected); }); it("reviveBB ↷ reviveA => ABB", () => { - const reviveBB = Change.revive(5, 2, tag2, brand(0), rebaseRepair); - const reviveA = Change.revive(5, 1, tag1, brand(0), rebaseRepair, [ - { revision: tag2, id: brand(0), count: 2, offset: 0 }, - ]); - const expected = Change.revive(6, 2, tag2, brand(0), rebaseRepair); + const reviveBB = Change.revive(5, 2, { revision: tag2, localId: brand(0) }, rebaseRepair); + const reviveA = Change.revive( + 5, + 1, + { + revision: tag1, + localId: brand(0), + lineage: [{ revision: tag2, id: brand(0), count: 2, offset: 0 }], + }, + rebaseRepair, + ); + const expected = Change.revive(6, 2, { revision: tag2, localId: brand(0) }, rebaseRepair); const actual = rebase(reviveBB, reviveA); assert.deepEqual(actual, expected); }); it("reviveA ↷ reviveBB => BAB", () => { const lineage: SF.LineageEvent[] = [{ revision: tag2, id: brand(5), count: 2, offset: 1 }]; - const reviveA = Change.revive(5, 1, tag1, brand(6), rebaseRepair, lineage); - const reviveBB = Change.revive(5, 2, tag2, brand(5), rebaseRepair); - const expected = Change.revive(6, 1, tag1, brand(6), rebaseRepair, lineage); + const reviveA = Change.revive( + 5, + 1, + { revision: tag1, localId: brand(6), lineage }, + rebaseRepair, + ); + const reviveBB = Change.revive(5, 2, { revision: tag2, localId: brand(5) }, rebaseRepair); + const expected = Change.revive( + 6, + 1, + { revision: tag1, localId: brand(6), lineage }, + rebaseRepair, + ); const actual = rebase(reviveA, reviveBB); assert.deepEqual(actual, expected); }); @@ -517,45 +607,65 @@ describe("SequenceField - Rebase", () => { { revision: tag2, id: brand(0), count: 1, offset: 1 }, { revision: tag3, id: brand(0), count: 1, offset: 1 }, ]; - const reviveAA = Change.revive(0, 2, tag1, brand(0), rebaseRepair, lineage); + const reviveAA = Change.revive( + 0, + 2, + { revision: tag1, localId: brand(0), lineage }, + rebaseRepair, + ); const 
reviveB = composeAnonChanges([ - Change.revive(0, 1, tag2, brand(0), rebaseRepair), - Change.revive(0, 1, tag3, brand(0), rebaseRepair), + Change.revive(0, 1, { revision: tag2, localId: brand(0) }, rebaseRepair), + Change.revive(0, 1, { revision: tag3, localId: brand(0) }, rebaseRepair), ]); - const expected = Change.revive(2, 2, tag1, brand(0), rebaseRepair, lineage); + const expected = Change.revive( + 2, + 2, + { revision: tag1, localId: brand(0), lineage }, + rebaseRepair, + ); const actual = rebase(reviveAA, reviveB); assert.deepEqual(actual, expected); }); it("intentional revive ↷ same revive", () => { - const reviveA = Change.intentionalRevive(0, 3, tag1, brand(1), rebaseRepair); - const reviveB = Change.revive(0, 1, tag1, brand(2), rebaseRepair); + const reviveA = Change.intentionalRevive( + 0, + 3, + { revision: tag1, localId: brand(1) }, + rebaseRepair, + ); + const reviveB = Change.revive(0, 1, { revision: tag1, localId: brand(2) }, rebaseRepair); const actual = rebase(reviveA, reviveB, tag2); const expected = composeAnonChanges([ - Change.intentionalRevive(0, 1, tag1, brand(1), rebaseRepair), - Change.redundantRevive(1, 1, tag1, brand(2), rebaseRepair, true), - Change.intentionalRevive(2, 1, tag1, brand(3), rebaseRepair), + Change.intentionalRevive(0, 1, { revision: tag1, localId: brand(1) }, rebaseRepair), + Change.redundantRevive(1, 1, { revision: tag1, localId: brand(2) }, rebaseRepair, true), + Change.intentionalRevive(2, 1, { revision: tag1, localId: brand(3) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); it("revive ↷ same revive (base within curr)", () => { - const reviveA = Change.revive(0, 3, tag1, brand(1), rebaseRepair); - const reviveB = Change.revive(0, 1, tag1, brand(2), rebaseRepair); + const reviveA = Change.revive(0, 3, { revision: tag1, localId: brand(1) }, rebaseRepair); + const reviveB = Change.revive(0, 1, { revision: tag1, localId: brand(2) }, rebaseRepair); const actual = rebase(reviveA, reviveB, tag2); const expected = composeAnonChanges([ - Change.revive(0, 1, tag1, brand(1), rebaseRepair), - Change.redundantRevive(1, 1, tag1, brand(2), rebaseRepair), - Change.revive(2, 1, tag1, brand(3), rebaseRepair), + Change.revive(0, 1, { revision: tag1, localId: brand(1) }, rebaseRepair), + Change.redundantRevive(1, 1, { revision: tag1, localId: brand(2) }, rebaseRepair), + Change.revive(2, 1, { revision: tag1, localId: brand(3) }, rebaseRepair), ]); assert.deepEqual(actual, expected); }); it("revive ↷ same revive (curr within base)", () => { - const reviveA = Change.revive(0, 1, tag1, brand(2), rebaseRepair); - const reviveB = Change.revive(0, 3, tag1, brand(1), rebaseRepair); + const reviveA = Change.revive(0, 1, { revision: tag1, localId: brand(2) }, rebaseRepair); + const reviveB = Change.revive(0, 3, { revision: tag1, localId: brand(1) }, rebaseRepair); const actual = rebase(reviveA, reviveB, tag2); - const expected = Change.redundantRevive(1, 1, tag1, brand(2), rebaseRepair); + const expected = Change.redundantRevive( + 1, + 1, + { revision: tag1, localId: brand(2) }, + rebaseRepair, + ); assert.deepEqual(actual, expected); }); diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceChangeRebaser.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceChangeRebaser.spec.ts index cd3544f8a45e..2e3d9b3fefaf 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceChangeRebaser.spec.ts +++ 
b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceChangeRebaser.spec.ts @@ -89,7 +89,11 @@ const testChanges: [string, (index: number, maxIndex: number) => SF.Changeset - Change.revive(2, 2, tag1, brand(i), undefined, generateLineage(tag1, brand(i), 2, max)), + Change.revive(2, 2, { + revision: tag1, + localId: brand(i), + lineage: generateLineage(tag1, brand(i), 2, max), + }), ], [ "TransientRevive", @@ -98,22 +102,35 @@ const testChanges: [string, (index: number, maxIndex: number) => SF.Changeset Change.redundantRevive(2, 2, tag2, brand(i), undefined)], + [ + "ConflictedRevive", + (i) => Change.redundantRevive(2, 2, { revision: tag2, localId: brand(i) }), + ], ["MoveOut", (i) => Change.move(i, 2, 1)], ["MoveIn", (i) => Change.move(1, 2, i)], [ "ReturnFrom", - (i, max) => Change.return(i, 2, 1, tag4, brand(i), generateLineage(tag4, brand(i), 2, max)), + (i, max) => + Change.return(i, 2, 1, { + revision: tag4, + localId: brand(i), + lineage: generateLineage(tag4, brand(i), 2, max), + }), ], [ "ReturnTo", - (i, max) => Change.return(1, 2, i, tag4, brand(i), generateLineage(tag4, brand(1), 2, max)), + (i, max) => + Change.return(1, 2, i, { + revision: tag4, + localId: brand(i), + lineage: generateLineage(tag4, brand(1), 2, max), + }), ], ]; deepFreeze(testChanges); @@ -340,7 +357,7 @@ describe("SequenceField - Sandwich Rebasing", () => { it("[Delete ABC, Revive ABC] ↷ Delete B", () => { const delB = tagChange(Change.delete(1, 1), tag1); const delABC = tagChange(Change.delete(0, 3), tag2); - const revABC = tagChange(Change.revive(0, 3, tag2, id0), tag4); + const revABC = tagChange(Change.revive(0, 3, { revision: tag2, localId: id0 }), tag4); const delABC2 = rebaseTagged(delABC, delB); const invDelABC = tagRollbackInverse(invert(delABC), tag3, delABC2.revision); const revABC2 = rebaseTagged(revABC, invDelABC); @@ -355,7 +372,7 @@ describe("SequenceField - Sandwich Rebasing", () => { it("[Move ABC, Return ABC] ↷ Delete B", () => { const delB = tagChange(Change.delete(1, 1), tag1); const movABC = tagChange(Change.move(0, 3, 1), tag2); - const retABC = tagChange(Change.return(1, 3, 0, tag2, id0), tag4); + const retABC = tagChange(Change.return(1, 3, 0, { revision: tag2, localId: id0 }), tag4); const movABC2 = rebaseTagged(movABC, delB); const invMovABC = invert(movABC); const retABC2 = rebaseTagged(retABC, tagRollbackInverse(invMovABC, tag3, movABC2.revision)); @@ -370,7 +387,7 @@ describe("SequenceField - Sandwich Rebasing", () => { it("[Delete AC, Revive AC] ↷ Insert B", () => { const addB = tagChange(Change.insert(1, 1), tag1); const delAC = tagChange(Change.delete(0, 2), tag2); - const revAC = tagChange(Change.revive(0, 2, tag2, id0), tag4); + const revAC = tagChange(Change.revive(0, 2, { revision: tag2, localId: id0 }), tag4); const delAC2 = rebaseTagged(delAC, addB); const invDelAC = invert(delAC); const revAC2 = rebaseTagged(revAC, tagRollbackInverse(invDelAC, tag3, delAC2.revision)); diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldEncoder.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldEncoder.spec.ts index 02db20d32062..66783d7a5a9d 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldEncoder.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldEncoder.spec.ts @@ -16,7 +16,10 @@ const encodingTestData: EncodingTestData, unknown> = { successes: [ ["with child change", Change.modify(1, TestChange.mint([], 
1))], ["without child change", Change.delete(2, 2)], - ["with repair data", Change.revive(0, 1, mintRevisionTag(), brand(10), fakeRepair)], + [ + "with repair data", + Change.revive(0, 1, { revision: mintRevisionTag(), localId: brand(10) }, fakeRepair), + ], // TODO: Include revive case here or in other encode/decode tests in this file. // It's likely we need a different notion of equality, as revive involves a ProtoNode type // and deep equality of that test case fails on comparing two `StackCursor`s. diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldToDelta.spec.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldToDelta.spec.ts index f9b048e4729e..366edc8b1f89 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldToDelta.spec.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/sequenceFieldToDelta.spec.ts @@ -93,7 +93,7 @@ describe("SequenceField - toDelta", () => { assert.equal(count, 1); return contentCursor; } - const changeset = Change.revive(0, 1, tag, brand(0), reviver); + const changeset = Change.revive(0, 1, { revision: tag, localId: brand(0) }, reviver); const actual = toDelta(changeset); const expected: Delta.MarkList = [ { @@ -117,7 +117,7 @@ describe("SequenceField - toDelta", () => { type: "Revive", content: contentCursor, count: 1, - detachEvent: { revision: tag, localId: brand(0) }, + cellId: { revision: tag, localId: brand(0) }, changes: nodeChange, }, ]; @@ -357,7 +357,7 @@ describe("SequenceField - toDelta", () => { type: "Delete", id: brand(0), count: 2, - detachEvent, + cellId: detachEvent, }, ]; @@ -367,7 +367,9 @@ describe("SequenceField - toDelta", () => { }); it("modify", () => { - const modify: TestChangeset = [{ type: "Modify", changes: childChange1, detachEvent }]; + const modify: TestChangeset = [ + { type: "Modify", changes: childChange1, cellId: detachEvent }, + ]; const actual = toDelta(modify); const expected: Delta.MarkList = []; @@ -378,7 +380,7 @@ describe("SequenceField - toDelta", () => { const move: TestChangeset = [ { type: "MoveIn", id: brand(0), count: 1, isSrcConflicted: true }, { count: 1 }, - { type: "MoveOut", id: brand(0), count: 1, detachEvent }, + { type: "MoveOut", id: brand(0), count: 1, cellId: detachEvent }, ]; const actual = toDelta(move); @@ -408,7 +410,7 @@ describe("SequenceField - toDelta", () => { count: 1, content: fakeRepairData(tag, 0, 1), inverseOf: tag1, - detachEvent: { revision: tag2, localId: brand(0) }, + cellId: { revision: tag2, localId: brand(0) }, }, { type: "Revive", @@ -416,7 +418,7 @@ describe("SequenceField - toDelta", () => { changes: childChange1, content: fakeRepairData(tag, 1, 1), inverseOf: tag1, - detachEvent: { revision: tag2, localId: brand(1) }, + cellId: { revision: tag2, localId: brand(1) }, }, ]; const actual = toDelta(changeset); diff --git a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/testEdits.ts b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/testEdits.ts index bc2af76571ee..6678900e0eae 100644 --- a/experimental/dds/tree2/src/test/feature-libraries/sequence-field/testEdits.ts +++ b/experimental/dds/tree2/src/test/feature-libraries/sequence-field/testEdits.ts @@ -39,9 +39,9 @@ export const cases: { createModifyChangeset(1, TestChange.mint([], 2)), ]), delete: createDeleteChangeset(1, 3), - revive: createReviveChangeset(2, 2, tag, brand(0)), + revive: createReviveChangeset(2, 2, { revision: tag, localId: brand(0) }), move: 
createMoveChangeset(1, 2, 2), - return: createReturnChangeset(1, 3, 0, tag, brand(0)), + return: createReturnChangeset(1, 3, 0, { revision: tag, localId: brand(0) }), }; function createInsertChangeset( @@ -75,32 +75,21 @@ function createRedundantRemoveChangeset( detachEvent: ChangeAtomId, ): SF.Changeset { const changeset = createDeleteChangeset(index, size); - (changeset[changeset.length - 1] as SF.Delete).detachEvent = detachEvent; + (changeset[changeset.length - 1] as SF.Delete).cellId = detachEvent; return changeset; } function createReviveChangeset( startIndex: number, count: number, - detachedBy: RevisionTag, - detachId?: ChangesetLocalId, + detachEvent: SF.CellId, reviver = fakeRepair, - lineage?: SF.LineageEvent[], - lastDetach?: ChangeAtomId, + lastDetach?: SF.CellId, ): SF.Changeset { - const markList = SF.sequenceFieldEditor.revive( - startIndex, - count, - detachedBy, - detachId ?? brand(0), - reviver, - ); + const markList = SF.sequenceFieldEditor.revive(startIndex, count, detachEvent, reviver); const mark = markList[markList.length - 1] as SF.Reattach; if (lastDetach !== undefined) { - mark.detachEvent = lastDetach; - } - if (lineage !== undefined) { - mark.lineage = lineage; + mark.cellId = lastDetach; } return markList; } @@ -108,74 +97,49 @@ function createReviveChangeset( function createRedundantReviveChangeset( startIndex: number, count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, + detachEvent: SF.CellId, reviver = fakeRepair, isIntention?: boolean, ): SF.Changeset { const markList = SF.sequenceFieldEditor.revive( startIndex, count, - detachedBy, - detachId, + detachEvent, reviver, isIntention, ); const mark = markList[markList.length - 1] as SF.Reattach; - delete mark.detachEvent; + delete mark.cellId; return markList; } function createBlockedReviveChangeset( startIndex: number, count: number, - inverseOf: RevisionTag, - lastDetachedBy: RevisionTag, - lastDetachId: ChangesetLocalId, + detachEvent: SF.CellId, + lastDetach: SF.CellId, reviver = fakeRepair, - lineage?: SF.LineageEvent[], ): SF.Changeset { - const markList = SF.sequenceFieldEditor.revive( - startIndex, - count, - inverseOf, - lastDetachId, - reviver, - ); + const markList = SF.sequenceFieldEditor.revive(startIndex, count, detachEvent, reviver); const mark = markList[markList.length - 1] as SF.Reattach; - mark.detachEvent = { revision: lastDetachedBy, localId: lastDetachId }; - if (lineage !== undefined) { - mark.lineage = lineage; - } + mark.cellId = lastDetach; return markList; } function createIntentionalReviveChangeset( startIndex: number, count: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, + detachEvent: SF.CellId, reviver = fakeRepair, - lineage?: SF.LineageEvent[], - lastDetach?: ChangeAtomId, + lastDetach?: SF.CellId, ): SF.Changeset { - const markList = SF.sequenceFieldEditor.revive( - startIndex, - count, - detachedBy, - detachId, - reviver, - true, - ); + const markList = SF.sequenceFieldEditor.revive(startIndex, count, detachEvent, reviver, true); const mark = markList[markList.length - 1] as SF.Reattach; if (lastDetach !== undefined) { - mark.detachEvent = lastDetach; + mark.cellId = lastDetach; } - if (lineage !== undefined) { - mark.lineage = lineage; - } return markList; } @@ -194,18 +158,9 @@ function createReturnChangeset( sourceIndex: number, count: number, destIndex: number, - detachedBy: RevisionTag, - detachId: ChangesetLocalId, - lineage?: SF.LineageEvent[], + detachEvent: SF.CellId, ): SF.Changeset { - return SF.sequenceFieldEditor.return( - 
sourceIndex, - count, - destIndex, - detachedBy, - detachId, - lineage, - ); + return SF.sequenceFieldEditor.return(sourceIndex, count, destIndex, detachEvent); } function createModifyChangeset( @@ -218,15 +173,11 @@ function createModifyChangeset( function createModifyDetachedChangeset( index: number, change: TNodeChange, - detachEvent: ChangeAtomId, - lineage?: SF.LineageEvent[], + detachEvent: SF.CellId, ): SF.Changeset { const changeset = createModifyChangeset(index, change); const modify = changeset[changeset.length - 1] as SF.Modify; - modify.detachEvent = detachEvent; - if (lineage !== undefined) { - modify.lineage = lineage; - } + modify.cellId = detachEvent; return changeset; } diff --git a/packages/drivers/odsp-driver/src/test/deltaStorageService.spec.ts b/packages/drivers/odsp-driver/src/test/deltaStorageService.spec.ts index a78aa9ecf069..369662989b87 100644 --- a/packages/drivers/odsp-driver/src/test/deltaStorageService.spec.ts +++ b/packages/drivers/odsp-driver/src/test/deltaStorageService.spec.ts @@ -6,7 +6,7 @@ import { strict as assert } from "assert"; import { IDeltasFetchResult } from "@fluidframework/driver-definitions"; import { ISequencedDocumentMessage } from "@fluidframework/protocol-definitions"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { IOdspResolvedUrl } from "@fluidframework/odsp-driver-definitions"; import { OdspDeltaStorageService, OdspDeltaStorageWithCache } from "../odspDeltaStorageService"; import { LocalPersistentCache } from "../odspCache"; @@ -37,7 +37,7 @@ describe("DeltaStorageService", () => { const fileEntry = { docId: "docId", resolvedUrl }; it("Should build the correct sharepoint delta url with auth", async () => { - const logger = new TelemetryUTLogger(); + const logger = new MockLogger(); const deltaStorageService = new OdspDeltaStorageService( testDeltaStorageUrl, async (_refresh) => "?access_token=123", @@ -47,6 +47,7 @@ describe("DeltaStorageService", () => { const actualDeltaUrl = deltaStorageService.buildUrl(3, 8); const expectedDeltaUrl = `${deltaStorageBasePath}/drives/testdrive/items/testitem/opStream?ump=1&filter=sequenceNumber%20ge%203%20and%20sequenceNumber%20le%207`; assert.equal(actualDeltaUrl, expectedDeltaUrl, "The constructed delta url is invalid"); + logger.assertMatchNone([{ category: "error" }]); }); describe("Get Returns Response With Op Envelope", () => { @@ -86,8 +87,8 @@ describe("DeltaStorageService", () => { }; let deltaStorageService: OdspDeltaStorageService; + const logger = new MockLogger(); before(() => { - const logger = new TelemetryUTLogger(); deltaStorageService = new OdspDeltaStorageService( testDeltaStorageUrl, async (_refresh) => "", @@ -95,6 +96,9 @@ describe("DeltaStorageService", () => { logger, ); }); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); it("Should deserialize the delta feed response correctly", async () => { const { messages, partialResult } = await mockFetchOk( @@ -156,8 +160,8 @@ describe("DeltaStorageService", () => { }; let deltaStorageService: OdspDeltaStorageService; + const logger = new MockLogger(); before(() => { - const logger = new TelemetryUTLogger(); deltaStorageService = new OdspDeltaStorageService( testDeltaStorageUrl, async (_refresh) => "", @@ -165,6 +169,9 @@ describe("DeltaStorageService", () => { logger, ); }); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); it("Should deserialize the delta feed response correctly", 
async () => { const { messages, partialResult } = await mockFetchOk( @@ -196,7 +203,10 @@ describe("DeltaStorageService", () => { }); describe("DeltaStorageServiceWith Cache Tests", () => { - const logger = new TelemetryUTLogger(); + const logger = new MockLogger(); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); it("FirstCacheMiss should update to first miss op seq number correctly", async () => { const deltasFetchResult: IDeltasFetchResult = { messages: [], partialResult: false }; diff --git a/packages/drivers/odsp-driver/src/test/epochTestsWithRedemption.spec.ts b/packages/drivers/odsp-driver/src/test/epochTestsWithRedemption.spec.ts index 48cf9c45c822..9f0080eb4e4c 100644 --- a/packages/drivers/odsp-driver/src/test/epochTestsWithRedemption.spec.ts +++ b/packages/drivers/odsp-driver/src/test/epochTestsWithRedemption.spec.ts @@ -5,7 +5,7 @@ import { strict as assert } from "assert"; import { Deferred } from "@fluidframework/common-utils"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { DriverErrorType } from "@fluidframework/driver-definitions"; import { IOdspResolvedUrl, IEntry, snapshotKey } from "@fluidframework/odsp-driver-definitions"; import { EpochTrackerWithRedemption } from "../epochTracker"; @@ -34,6 +34,7 @@ describe("Tests for Epoch Tracker With Redemption", () => { const siteUrl = "https://microsoft.sharepoint-df.com/siteUrl"; const driveId = "driveId"; const itemId = "itemId"; + const logger = new MockLogger(); let epochTracker: EpochTrackerWithRedemption; let hashedDocumentId: string; let epochCallback: DeferralWithCallback; @@ -55,12 +56,13 @@ describe("Tests for Epoch Tracker With Redemption", () => { docId: hashedDocumentId, resolvedUrl, }, - new TelemetryUTLogger(), + logger, ); }); afterEach(async () => { await epochTracker.removeEntries().catch(() => {}); + logger.assertMatchNone([{ category: "error" }]); }); describe("Test Suite 1", () => { diff --git a/packages/drivers/odsp-driver/src/test/getFileLink.spec.ts b/packages/drivers/odsp-driver/src/test/getFileLink.spec.ts index 26514cb3c1de..b56f40aa7f76 100644 --- a/packages/drivers/odsp-driver/src/test/getFileLink.spec.ts +++ b/packages/drivers/odsp-driver/src/test/getFileLink.spec.ts @@ -4,14 +4,14 @@ */ import { strict as assert } from "assert"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { getFileLink } from "../getFileLink"; import { mockFetchSingle, mockFetchMultiple, okResponse, notFound } from "./mockFetch"; describe("getFileLink", () => { const siteUrl = "https://microsoft.sharepoint-df.com/siteUrl"; const driveId = "driveId"; - const logger = new TelemetryUTLogger(); + const logger = new MockLogger(); const storageTokenFetcher = async () => "StorageToken"; const fileItemResponse = { webDavUrl: "fetchDavUrl", @@ -19,6 +19,10 @@ describe("getFileLink", () => { sharepointIds: { listItemUniqueId: "fetchFileId" }, }; + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); + it("should return share link with existing access", async () => { const result = await mockFetchMultiple( async () => diff --git a/packages/drivers/odsp-driver/src/test/odspCreateContainer.spec.ts b/packages/drivers/odsp-driver/src/test/odspCreateContainer.spec.ts index bee986be0c4d..64af65b70e81 100644 --- a/packages/drivers/odsp-driver/src/test/odspCreateContainer.spec.ts +++ 
b/packages/drivers/odsp-driver/src/test/odspCreateContainer.spec.ts @@ -6,7 +6,7 @@ import { strict as assert } from "assert"; import { DriverErrorType, IDocumentService } from "@fluidframework/driver-definitions"; import { IRequest } from "@fluidframework/core-interfaces"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { ISummaryTree, SummaryType } from "@fluidframework/protocol-definitions"; import { IOdspResolvedUrl } from "@fluidframework/odsp-driver-definitions"; import { OdspDriverUrlResolver } from "../odspDriverUrlResolver"; @@ -22,6 +22,7 @@ describe("Odsp Create Container Test", () => { const driveId = "driveId"; const filePath = "path"; const fileName = "fileName"; + const logger = new MockLogger(); let resolver: OdspDriverUrlResolver; let request: IRequest; @@ -76,24 +77,22 @@ describe("Odsp Create Container Test", () => { summary: ISummaryTree, resolved: IOdspResolvedUrl, ): Promise => - odspDocumentServiceFactory.createContainer(summary, resolved, new TelemetryUTLogger()); + odspDocumentServiceFactory.createContainer(summary, resolved, logger); beforeEach(() => { resolver = new OdspDriverUrlResolver(); request = createOdspCreateContainerRequest(siteUrl, driveId, filePath, fileName); }); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); it("Check Document Service Successfully", async () => { const resolved = await resolver.resolve(request); const docID = await getHashedDocumentId(driveId, itemId); const summary = createSummary(true, true); const docService = await mockFetchOk( - async () => - odspDocumentServiceFactory.createContainer( - summary, - resolved, - new TelemetryUTLogger(), - ), + async () => odspDocumentServiceFactory.createContainer(summary, resolved, logger), expectedResponse, { "x-fluid-epoch": "epoch1" }, ); diff --git a/packages/drivers/odsp-driver/src/test/opsCaching.spec.ts b/packages/drivers/odsp-driver/src/test/opsCaching.spec.ts index ff63f57ebbc3..1d23c1092e0a 100644 --- a/packages/drivers/odsp-driver/src/test/opsCaching.spec.ts +++ b/packages/drivers/odsp-driver/src/test/opsCaching.spec.ts @@ -3,7 +3,7 @@ * Licensed under the MIT License. 
*/ import { strict as assert } from "assert"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { ISequencedDocumentMessage } from "@fluidframework/protocol-definitions"; import { IStream } from "@fluidframework/driver-definitions"; import { delay } from "@fluidframework/common-utils"; @@ -104,10 +104,10 @@ async function runTestNoTimer( initialWritesExpected: number, ) { const mockCache = new MockCache(); - + const logger = new MockLogger(); const cache = new OpsCache( initialSeq, - new TelemetryUTLogger(), + logger, mockCache, batchSize, -1, // timerGranularity @@ -133,6 +133,7 @@ async function runTestNoTimer( cache.addOps(mockData); cache.flushOps(); assert.equal(mockCache.opsWritten, mockData.length); + logger.assertMatchNone([{ category: "error" }]); } export async function runTestWithTimer( @@ -144,10 +145,10 @@ export async function runTestWithTimer( totalWritesExpected: number, ) { const mockCache = new MockCache(); - + const logger = new MockLogger(); const cache = new OpsCache( initialSeq, - new TelemetryUTLogger(), + logger, mockCache, batchSize, 1, // timerGranularity @@ -163,6 +164,7 @@ export async function runTestWithTimer( } assert.equal(mockCache.writeCount, totalWritesExpected); assert.equal(mockCache.opsWritten, mockData.length); + logger.assertMatchNone([{ category: "error" }]); } export async function runTest( @@ -307,10 +309,10 @@ describe("OpsCache", () => { { sequenceNumber: 110, data: "110" }, { sequenceNumber: 111, data: "111" }, ]; - + const logger = new MockLogger(); const cache = new OpsCache( initialSeq, - new TelemetryUTLogger(), + logger, mockCache, 5 /* batchSize */, -1, // timerGranularity @@ -329,6 +331,7 @@ describe("OpsCache", () => { { sequenceNumber: 105, data: "105" }, { sequenceNumber: 106, data: "106" }, ]); + logger.assertMatchNone([{ category: "error" }]); }); }); @@ -393,10 +396,10 @@ describe("OdspDeltaStorageWithCache", () => { totalOps = Math.min(totalOps, askingOps); let opsToCache: ISequencedDocumentMessage[] = []; - + const logger = new MockLogger(); const storage = new OdspDeltaStorageWithCache( snapshotOps, - new TelemetryUTLogger(), + logger, batchSize, concurrency, // getFromStorage @@ -431,6 +434,7 @@ describe("OdspDeltaStorageWithCache", () => { fromTotal + totalOps, ); } + logger.assertMatchNone([{ category: "error" }]); } it("basic permutations", async () => { diff --git a/packages/drivers/odsp-driver/src/test/snapshotFormatTests.spec.ts b/packages/drivers/odsp-driver/src/test/snapshotFormatTests.spec.ts index 23b16f1b4f45..2a48c5af4888 100644 --- a/packages/drivers/odsp-driver/src/test/snapshotFormatTests.spec.ts +++ b/packages/drivers/odsp-driver/src/test/snapshotFormatTests.spec.ts @@ -6,7 +6,7 @@ import { strict as assert } from "assert"; import { ISequencedDocumentMessage, ISnapshotTree } from "@fluidframework/protocol-definitions"; import { stringToBuffer } from "@fluidframework/common-utils"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { parseCompactSnapshotResponse } from "../compactSnapshotParser"; import { convertToCompactSnapshot } from "../compactSnapshotWriter"; import { ISnapshotContents } from "../odspPublicUtils"; @@ -129,8 +129,9 @@ describe("Snapshot Format Conversion Tests", () => { sequenceNumber: 0, latestSequenceNumber: 2, }; + const logger = new MockLogger(); const compactSnapshot = convertToCompactSnapshot(snapshotContents); 
- const result = parseCompactSnapshotResponse(compactSnapshot, new TelemetryUTLogger()); + const result = parseCompactSnapshotResponse(compactSnapshot, logger); assert.deepStrictEqual(result.snapshotTree, snapshotTree, "Tree structure should match"); assert.deepStrictEqual(result.blobs, blobs, "Blobs content should match"); assert.deepStrictEqual(result.ops, ops, "Ops should match"); @@ -147,6 +148,7 @@ describe("Snapshot Format Conversion Tests", () => { compactSnapshot.buffer, "Compact representation should remain same", ); + logger.assertMatchNone([{ category: "error" }]); }); it("Conversion test with empty ops", async () => { @@ -157,8 +159,9 @@ describe("Snapshot Format Conversion Tests", () => { sequenceNumber: 0, latestSequenceNumber: 2, }; + const logger = new MockLogger(); const compactSnapshot = convertToCompactSnapshot(snapshotContents); - const result = parseCompactSnapshotResponse(compactSnapshot, new TelemetryUTLogger()); + const result = parseCompactSnapshotResponse(compactSnapshot, logger); assert.deepStrictEqual(result.snapshotTree, snapshotTree, "Tree structure should match"); assert.deepStrictEqual(result.blobs, blobs, "Blobs content should match"); assert.deepStrictEqual(result.ops, [], "Ops should match"); @@ -175,5 +178,6 @@ describe("Snapshot Format Conversion Tests", () => { compactSnapshot.buffer, "Compact representation should remain same", ); + logger.assertMatchNone([{ category: "error" }]); }); }); diff --git a/packages/drivers/odsp-driver/src/test/zipItDataRepresentationTests.spec.ts b/packages/drivers/odsp-driver/src/test/zipItDataRepresentationTests.spec.ts index 6c7a0a01509d..c950fec733a4 100644 --- a/packages/drivers/odsp-driver/src/test/zipItDataRepresentationTests.spec.ts +++ b/packages/drivers/odsp-driver/src/test/zipItDataRepresentationTests.spec.ts @@ -5,7 +5,7 @@ import { strict as assert } from "assert"; import { Uint8ArrayToString } from "@fluidframework/common-utils"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { ReadBuffer } from "../ReadBufferUtils"; import { TreeBuilderSerializer } from "../WriteBufferUtils"; import { @@ -59,15 +59,18 @@ function createLongBuffer(length: number) { describe("Tree Representation tests", () => { let builder: TreeBuilderSerializer; - + const logger = new MockLogger(); beforeEach(() => { builder = new TreeBuilderSerializer(); }); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); function validate(length = -1) { const buffer = builder.serialize(); assert.strictEqual(buffer.length, length, "buffer size not equal"); - const builder2 = TreeBuilder.load(new ReadBuffer(buffer), new TelemetryUTLogger()).builder; + const builder2 = TreeBuilder.load(new ReadBuffer(buffer), logger).builder; compareNodes(builder, builder2); } diff --git a/packages/drivers/routerlicious-driver/src/test/restWrapper.spec.ts b/packages/drivers/routerlicious-driver/src/test/restWrapper.spec.ts index cc2cb9314ee2..d72f947a3f45 100644 --- a/packages/drivers/routerlicious-driver/src/test/restWrapper.spec.ts +++ b/packages/drivers/routerlicious-driver/src/test/restWrapper.spec.ts @@ -4,7 +4,7 @@ */ import assert from "assert"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { DriverErrorType } from "@fluidframework/driver-definitions"; import { RateLimiter } from "@fluidframework/driver-utils"; import nock from "nock"; @@ -44,7 +44,7 @@ 
describe("RouterliciousDriverRestWrapper", () => { } let restWrapper: RouterliciousOrdererRestWrapper; - + const logger = new MockLogger(); beforeEach(async () => { // reset auth mocking tokenQueue = [token1, token2, token3]; @@ -60,7 +60,6 @@ describe("RouterliciousDriverRestWrapper", () => { return newToken; }; - const logger = new TelemetryUTLogger(); restWrapper = await RouterliciousOrdererRestWrapper.load( toInstrumentedR11sOrdererTokenFetcher( "dummytenantid", @@ -76,6 +75,9 @@ describe("RouterliciousDriverRestWrapper", () => { after(() => { nock.restore(); }); + afterEach(() => { + logger.assertMatchNone([{ category: "error" }]); + }); describe("get()", () => { it("sends a request with auth headers", async () => { diff --git a/packages/framework/agent-scheduler/src/scheduler.ts b/packages/framework/agent-scheduler/src/scheduler.ts index 8530c810a073..8d12cf0a71ad 100644 --- a/packages/framework/agent-scheduler/src/scheduler.ts +++ b/packages/framework/agent-scheduler/src/scheduler.ts @@ -121,6 +121,8 @@ export class AgentScheduler private readonly consensusRegisterCollection: ConsensusRegisterCollection, ) { super(); + // We are expecting this class to have many listeners, so we suppress noisy "MaxListenersExceededWarning" logging. + super.setMaxListeners(0); this._handle = new FluidObjectHandle(this, "", this.runtime.objectsRoutingContext); } diff --git a/packages/loader/driver-utils/src/test/parallelRequests.spec.ts b/packages/loader/driver-utils/src/test/parallelRequests.spec.ts index c27fa5d7241d..49b540569dc0 100644 --- a/packages/loader/driver-utils/src/test/parallelRequests.spec.ts +++ b/packages/loader/driver-utils/src/test/parallelRequests.spec.ts @@ -4,7 +4,7 @@ */ import { strict as assert } from "assert"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { unreachableCase } from "@fluidframework/common-utils"; import { ParallelRequests } from "../parallelRequests"; @@ -28,11 +28,13 @@ describe("Parallel Requests", () => { let requests = 0; let dispatches = 0; + const logger = new MockLogger(); + const manager = new ParallelRequests( from, knownTo ? 
to : undefined, payloadSize, - new TelemetryUTLogger(), + logger, async (request: number, _from: number, _to: number) => { let length = _to - _from; requests++; @@ -84,6 +86,7 @@ describe("Parallel Requests", () => { assert(nextElement === to); assert(!knownTo || dispatches === requests); assert.equal(requests, expectedRequests, "expected requests"); + logger.assertMatchNone([{ category: "error" }]); } async function testCancel( @@ -96,12 +99,13 @@ describe("Parallel Requests", () => { let nextElement = from; let requests = 0; let dispatches = 0; + const logger = new MockLogger(); const manager = new ParallelRequests( from, to, payloadSize, - new TelemetryUTLogger(), + logger, async (request: number, _from: number, _to: number) => { const length = _to - _from; requests++; @@ -136,6 +140,7 @@ describe("Parallel Requests", () => { assert(dispatches <= requests); assert(requests === expectedRequests); + logger.assertMatchNone([{ category: "error" }]); } it("no concurrency, single request, over", async () => { @@ -197,11 +202,13 @@ describe("Parallel Requests", () => { }); it("exception in request", async () => { + const logger = new MockLogger(); + const manager = new ParallelRequests( 1, 100, 10, - new TelemetryUTLogger(), + logger, async (request: number, _from: number, _to: number) => { throw new Error("request"); }, @@ -218,14 +225,17 @@ describe("Parallel Requests", () => { assert(error.message === "request"); } assert(!success); + logger.assertMatchNone([{ category: "error" }]); }); it("exception in response", async () => { + const logger = new MockLogger(); + const manager = new ParallelRequests( 1, 100, 10, - new TelemetryUTLogger(), + logger, async (request: number, _from: number, _to: number) => { return { cancel: false, partial: false, payload: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }; }, @@ -242,5 +252,6 @@ describe("Parallel Requests", () => { assert(error.message === "response"); } assert(!success); + logger.assertMatchNone([{ category: "error" }]); }); }); diff --git a/packages/runtime/container-runtime/src/blobManager.ts b/packages/runtime/container-runtime/src/blobManager.ts index 3309e7804e90..1680b88e8903 100644 --- a/packages/runtime/container-runtime/src/blobManager.ts +++ b/packages/runtime/container-runtime/src/blobManager.ts @@ -30,6 +30,7 @@ import { } from "@fluidframework/container-runtime-definitions"; import { AttachState, ICriticalContainerError } from "@fluidframework/container-definitions"; import { + LoggingError, createChildMonitoringContext, MonitoringContext, PerformanceEvent, @@ -123,7 +124,7 @@ export type IBlobManagerRuntime = Pick< // Note that while offline we "submit" an op before uploading the blob, but we always // expect blobs to be uploaded before we actually see the op round-trip -enum PendingBlobStatus { +export enum PendingBlobStatus { OnlinePendingUpload, OnlinePendingOp, OfflinePendingUpload, @@ -137,11 +138,13 @@ interface PendingBlob { status: PendingBlobStatus; storageId?: string; handleP: Deferred; - uploadP?: Promise; + uploadP?: Promise; uploadTime?: number; minTTLInSeconds?: number; attached?: boolean; acked?: boolean; + abortSignal?: AbortSignal; + opsent?: boolean; } export interface IPendingBlobs { @@ -279,6 +282,8 @@ export class BlobManager extends TypedEventEmitter { this.sendBlobAttachOp = (localId: string, blobId?: string) => { const pendingEntry = this.pendingBlobs.get(localId); + assert(pendingEntry !== undefined, "Must have pending blob entry for upcoming op"); + pendingEntry.opsent = true; if (pendingEntry?.uploadTime && 
pendingEntry?.minTTLInSeconds) { const secondsSinceUpload = (Date.now() - pendingEntry.uploadTime) / 1000; const expired = pendingEntry.minTTLInSeconds - secondsSinceUpload < 0; @@ -326,6 +331,13 @@ export class BlobManager extends TypedEventEmitter { ); } + private createAbortError(pending?: PendingBlob) { + return new LoggingError("uploadBlob aborted", { + acked: pending?.acked, + status: pending?.status, + uploadTime: pending?.uploadTime, + }); + } /** * Upload blobs added while offline. This must be completed before connecting and resubmitting ops. */ @@ -443,7 +455,10 @@ export class BlobManager extends TypedEventEmitter { return this.getBlobHandle(response.id); } - public async createBlob(blob: ArrayBufferLike): Promise> { + public async createBlob( + blob: ArrayBufferLike, + signal?: AbortSignal, + ): Promise> { if (this.runtime.attachState === AttachState.Detached) { return this.createBlobDetached(blob); } @@ -457,6 +472,10 @@ export class BlobManager extends TypedEventEmitter { 0x385 /* For clarity and paranoid defense against adding future attachment states */, ); + if (signal?.aborted) { + throw this.createAbortError(); + } + // Create a local ID for the blob. After uploading it to storage and before returning it, a local ID to // storage ID mapping is created. const localId = uuid(); @@ -467,13 +486,27 @@ export class BlobManager extends TypedEventEmitter { uploadP: this.uploadBlob(localId, blob), attached: false, acked: false, + abortSignal: signal, + opsent: false, }; this.pendingBlobs.set(localId, pendingEntry); - return pendingEntry.handleP.promise; + const abortListener = () => { + if (!pendingEntry.acked) { + pendingEntry.handleP.reject(this.createAbortError(pendingEntry)); + } + }; + signal?.addEventListener("abort", abortListener, { once: true }); + + return pendingEntry.handleP.promise.finally(() => { + signal?.removeEventListener("abort", abortListener); + }); } - private async uploadBlob(localId: string, blob: ArrayBufferLike): Promise { + private async uploadBlob( + localId: string, + blob: ArrayBufferLike, + ): Promise { return PerformanceEvent.timedExecAsync( this.mc.logger, { eventName: "createBlob" }, @@ -497,17 +530,24 @@ export class BlobManager extends TypedEventEmitter { if (this.pendingBlobs.has(id)) { const entry = this.pendingBlobs.get(id); if (entry?.attached && entry?.acked) { - this.pendingBlobs.delete(id); - if (!this.hasPendingBlobs) { - this.emit("noPendingBlobs"); - } + this.deletePendingBlob(id); } } } + private deletePendingBlob(id: string) { + if (this.pendingBlobs.delete(id) && !this.hasPendingBlobs) { + this.emit("noPendingBlobs"); + } + } + private onUploadResolve(localId: string, response: ICreateBlobResponseWithTTL) { const entry = this.pendingBlobs.get(localId); assert(entry !== undefined, 0x6c8 /* pending blob entry not found for uploaded blob */); + if (entry.abortSignal?.aborted === true && !entry.opsent) { + this.deletePendingBlob(localId); + return; + } assert( entry.status === PendingBlobStatus.OnlinePendingUpload || entry.status === PendingBlobStatus.OfflinePendingUpload, @@ -556,9 +596,13 @@ export class BlobManager extends TypedEventEmitter { return response; } - private async onUploadReject(localId: string, error) { + private async onUploadReject(localId: string, error: any) { const entry = this.pendingBlobs.get(localId); assert(!!entry, 0x387 /* Must have pending blob entry for blob which failed to upload */); + if (entry.abortSignal?.aborted === true && !entry.opsent) { + this.deletePendingBlob(localId); + return; + } if 
(!this.runtime.connected) { if (entry.status === PendingBlobStatus.OnlinePendingUpload) { this.transitionToOffline(localId); @@ -633,6 +677,14 @@ export class BlobManager extends TypedEventEmitter { public processBlobAttachOp(message: ISequencedDocumentMessage, local: boolean) { const localId = (message.metadata as IBlobMetadata | undefined)?.localId; const blobId = (message.metadata as IBlobMetadata | undefined)?.blobId; + + if (localId) { + const pendingEntry = this.pendingBlobs.get(localId); + if (pendingEntry?.abortSignal?.aborted) { + this.deletePendingBlob(localId); + return; + } + } assert(blobId !== undefined, 0x12a /* "Missing blob id on metadata" */); // Set up a mapping from local ID to storage ID. This is crucial since without this the blob cannot be diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index 4c7f5ea692f5..f5f95ed6621e 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -3109,9 +3109,12 @@ export class ContainerRuntime this.submit({ type: ContainerMessageType.Alias, contents }, localOpMetadata); } - public async uploadBlob(blob: ArrayBufferLike): Promise> { + public async uploadBlob( + blob: ArrayBufferLike, + signal?: AbortSignal, + ): Promise> { this.verifyNotClosed(); - return this.blobManager.createBlob(blob); + return this.blobManager.createBlob(blob, signal); } private maybeSubmitIdAllocationOp(type: ContainerMessageType) { diff --git a/packages/runtime/container-runtime/src/dataStoreContext.ts b/packages/runtime/container-runtime/src/dataStoreContext.ts index 28a2bad68001..00d606bb3c31 100644 --- a/packages/runtime/container-runtime/src/dataStoreContext.ts +++ b/packages/runtime/container-runtime/src/dataStoreContext.ts @@ -943,8 +943,11 @@ export abstract class FluidDataStoreContext ); } - public async uploadBlob(blob: ArrayBufferLike): Promise> { - return this.containerRuntime.uploadBlob(blob); + public async uploadBlob( + blob: ArrayBufferLike, + signal?: AbortSignal, + ): Promise> { + return this.containerRuntime.uploadBlob(blob, signal); } } diff --git a/packages/runtime/container-runtime/src/deltaManagerSummarizerProxy.ts b/packages/runtime/container-runtime/src/deltaManagerSummarizerProxy.ts index a7241bedb0c4..95bb52559993 100644 --- a/packages/runtime/container-runtime/src/deltaManagerSummarizerProxy.ts +++ b/packages/runtime/container-runtime/src/deltaManagerSummarizerProxy.ts @@ -41,6 +41,8 @@ export class DeltaManagerSummarizerProxy constructor(deltaManager: IDeltaManager) { super(deltaManager); + // We are expecting this class to have many listeners, so we suppress noisy "MaxListenersExceededWarning" logging. 
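+        // Node's EventEmitter treats a limit of 0 as "unlimited", so this removes the cap rather than raising it.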
+ super.setMaxListeners(0); this.isSummarizerClient = this.deltaManager.clientDetails.type === summarizerClientType; } } diff --git a/packages/runtime/container-runtime/src/test/blobManager.spec.ts b/packages/runtime/container-runtime/src/test/blobManager.spec.ts index 3b0a5c7b4b56..b98dacac9bc2 100644 --- a/packages/runtime/container-runtime/src/test/blobManager.spec.ts +++ b/packages/runtime/container-runtime/src/test/blobManager.spec.ts @@ -23,7 +23,12 @@ import { MonitoringContext, createChildLogger, } from "@fluidframework/telemetry-utils"; -import { BlobManager, IBlobManagerLoadInfo, IBlobManagerRuntime } from "../blobManager"; +import { + BlobManager, + IBlobManagerLoadInfo, + IBlobManagerRuntime, + PendingBlobStatus, +} from "../blobManager"; import { sweepAttachmentBlobsKey } from "../gc"; const MIN_TTL = 24 * 60 * 60; // same as ODSP @@ -123,8 +128,11 @@ class MockRuntime this.ops.push({ metadata: { localId, blobId } }); } - public async createBlob(blob: ArrayBufferLike): Promise> { - const P = this.blobManager.createBlob(blob); + public async createBlob( + blob: ArrayBufferLike, + signal?: AbortSignal, + ): Promise> { + const P = this.blobManager.createBlob(blob, signal); this.handlePs.push(P); return P; } @@ -155,10 +163,14 @@ class MockRuntime this.ops = []; } - public async processBlobs() { + public async processBlobs(resolve = true) { const blobPs = this.blobPs; this.blobPs = []; - this.processBlobsP.resolve(); + if (resolve) { + this.processBlobsP.resolve(); + } else { + this.processBlobsP.reject(new Error("fake error")); + } this.processBlobsP = new Deferred(); await Promise.all(blobPs); } @@ -257,7 +269,7 @@ const validateSummary = (runtime: MockRuntime) => { describe("BlobManager", () => { const handlePs: Promise>[] = []; let runtime: MockRuntime; - let createBlob: (blob: ArrayBufferLike) => Promise; + let createBlob: (blob: ArrayBufferLike, signal?: AbortSignal) => Promise; let waitForBlob: (blob: ArrayBufferLike) => Promise; let mc: MonitoringContext; let injectedSettings: Record = {}; @@ -284,8 +296,8 @@ describe("BlobManager", () => { }; // create blob and await the handle after the test - createBlob = async (blob: ArrayBufferLike) => { - const handleP = runtime.createBlob(blob); + createBlob = async (blob: ArrayBufferLike, signal?: AbortSignal) => { + const handleP = runtime.createBlob(blob, signal); handlePs.push(handleP); await waitForBlob(blob); }; @@ -613,6 +625,204 @@ describe("BlobManager", () => { assert.strictEqual(summaryData?.redirectTable.size, 3); }); + describe("Abort Signal", () => { + it("abort before upload", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + ac.abort("abort test"); + try { + const blob = IsoBuffer.from("blob", "utf8"); + await runtime.createBlob(blob, ac.signal); + assert.fail("Should not succeed"); + } catch (error: any) { + assert.strictEqual(error.status, undefined); + assert.strictEqual(error.uploadTime, undefined); + assert.strictEqual(error.acked, undefined); + } + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + + it("abort while upload", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + const blob = IsoBuffer.from("blob", "utf8"); + handleP = runtime.createBlob(blob, ac.signal); + ac.abort("abort test"); + assert.strictEqual(runtime.unprocessedBlobs.size, 1); + await 
runtime.processBlobs(); + await handleP; + assert.fail("Should not succeed"); + } catch (error: any) { + assert.strictEqual(error.status, PendingBlobStatus.OnlinePendingUpload); + assert.strictEqual(error.uploadTime, undefined); + assert.strictEqual(error.acked, false); + } + assert(handleP); + await assert.rejects(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + + it("abort while failed upload", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + const blob = IsoBuffer.from("blob", "utf8"); + handleP = runtime.createBlob(blob, ac.signal); + ac.abort("abort test"); + assert.strictEqual(runtime.unprocessedBlobs.size, 1); + await runtime.processBlobs(false); + await handleP; + assert.fail("Should not succeed"); + } catch (error: any) { + assert.strictEqual(error.status, PendingBlobStatus.OnlinePendingUpload); + assert.strictEqual(error.uploadTime, undefined); + assert.strictEqual(error.acked, false); + } + assert(handleP); + await assert.rejects(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + + it("abort while disconnected", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + const blob = IsoBuffer.from("blob", "utf8"); + handleP = runtime.createBlob(blob, ac.signal); + runtime.disconnect(); + ac.abort(); + await runtime.processBlobs(); + await handleP; + assert.fail("Should not succeed"); + } catch (error: any) { + assert.strictEqual(error.status, PendingBlobStatus.OnlinePendingUpload); + assert.strictEqual(error.uploadTime, undefined); + assert.strictEqual(error.acked, false); + } + assert(handleP); + await assert.rejects(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + + it("abort after blob succeeds", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + const blob = IsoBuffer.from("blob", "utf8"); + handleP = runtime.createBlob(blob, ac.signal); + await runtime.processAll(); + ac.abort(); + } catch (error: any) { + assert.fail("abort after processing should not throw"); + } + assert(handleP); + await assert.doesNotReject(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 1); + assert.strictEqual(summaryData.redirectTable.size, 1); + }); + + it("abort while waiting for op", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + const blob = IsoBuffer.from("blob", "utf8"); + handleP = runtime.createBlob(blob, ac.signal); + const p1 = runtime.processBlobs(); + const p2 = runtime.processHandles(); + // finish upload + await Promise.race([p1, p2]); + ac.abort(); + runtime.processOps(); + // finish op + await Promise.all([p1, p2]); + } catch (error: any) { + assert.strictEqual(error.status, PendingBlobStatus.OnlinePendingOp); + assert.ok(error.uploadTime); + assert.strictEqual(error.acked, false); + } + assert(handleP); + await assert.rejects(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); +
assert.strictEqual(summaryData.redirectTable, undefined); + }); + + // test results will change once + // https://dev.azure.com/fluidframework/internal/_workitems/edit/4550 + // is fixed: handles won't be resolved on disconnection + it("resubmit on aborted pending upload", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + handleP = runtime.createBlob(IsoBuffer.from("blob", "utf8"), ac.signal); + // we can't reject the handle after disconnection, but + // we can still clean up the pending entries + runtime.disconnect(); + await runtime.processBlobs(); + await handleP; + ac.abort(); + await runtime.connect(); + runtime.processOps(); + } catch (error: any) { + assert.fail("Should succeed"); + } + assert(handleP); + await assert.doesNotReject(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + + it("resubmit on aborted pending op", async () => { + await runtime.attach(); + await runtime.connect(); + const ac = new AbortController(); + let handleP; + try { + handleP = runtime.createBlob(IsoBuffer.from("blob", "utf8"), ac.signal); + await runtime.processBlobs(); + // disconnect causes the blob to transition to offline if we have already + // uploaded the blob, thereby resolving its handle. Once we change that, the + // handle will reject + runtime.disconnect(); + await handleP; + ac.abort(); + await runtime.connect(); + runtime.processOps(); + } catch (error: any) { + assert.fail("Should succeed"); + } + assert(handleP); + await assert.doesNotReject(handleP); + const summaryData = validateSummary(runtime); + assert.strictEqual(summaryData.ids.length, 0); + assert.strictEqual(summaryData.redirectTable, undefined); + }); + }); + describe("Garbage Collection", () => { let redirectTable: Map; diff --git a/packages/runtime/datastore-definitions/src/dataStoreRuntime.ts b/packages/runtime/datastore-definitions/src/dataStoreRuntime.ts index c9a5019ae917..4f06161f97e0 100644 --- a/packages/runtime/datastore-definitions/src/dataStoreRuntime.ts +++ b/packages/runtime/datastore-definitions/src/dataStoreRuntime.ts @@ -103,7 +103,7 @@ export interface IFluidDataStoreRuntime * API to upload a blob of data. * @param blob - blob to be uploaded. */ - uploadBlob(blob: ArrayBufferLike): Promise>; + uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise>; /** * Submits the signal to be sent to other clients.
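The hunks above thread the new optional `signal` parameter from `IFluidDataStoreRuntime.uploadBlob` through the container runtime down to `BlobManager.createBlob`, which rejects the returned handle promise with the `LoggingError` ("uploadBlob aborted") built by `createAbortError`, carrying `acked`, `status`, and `uploadTime` as telemetry properties. A minimal caller-side sketch of the new API (the `uploadBlobWithTimeout` helper is hypothetical; it assumes a connected `IFluidDataStoreRuntime`):

```ts
// Hypothetical usage sketch; not part of this diff.
import { stringToBuffer } from "@fluidframework/common-utils";
import type { IFluidDataStoreRuntime } from "@fluidframework/datastore-definitions";

async function uploadBlobWithTimeout(
    runtime: IFluidDataStoreRuntime,
    payload: string,
    timeoutMs = 5000,
) {
    const ac = new AbortController();
    // Abandon the upload if no handle has been produced within timeoutMs.
    const timer = setTimeout(() => ac.abort("upload timed out"), timeoutMs);
    try {
        return await runtime.uploadBlob(stringToBuffer(payload, "utf-8"), ac.signal);
    } catch (error: any) {
        // An aborted upload rejects with the LoggingError from createAbortError,
        // exposing acked/status/uploadTime as properties on the error.
        console.log(error.message, error.acked, error.status, error.uploadTime);
        throw error;
    } finally {
        clearTimeout(timer);
    }
}
```

As the "abort after blob succeeds" test above shows, aborting after the BlobAttach op has round-tripped is a no-op: the handle still resolves.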
diff --git a/packages/runtime/datastore/src/dataStoreRuntime.ts b/packages/runtime/datastore/src/dataStoreRuntime.ts index fd6ffb9a8b03..acf5e279eeb5 100644 --- a/packages/runtime/datastore/src/dataStoreRuntime.ts +++ b/packages/runtime/datastore/src/dataStoreRuntime.ts @@ -559,10 +559,13 @@ export class FluidDataStoreRuntime return this.audience; } - public async uploadBlob(blob: ArrayBufferLike): Promise> { + public async uploadBlob( + blob: ArrayBufferLike, + signal?: AbortSignal, + ): Promise> { this.verifyNotClosed(); - return this.dataStoreContext.uploadBlob(blob); + return this.dataStoreContext.uploadBlob(blob, signal); } public process(message: ISequencedDocumentMessage, local: boolean, localOpMetadata: unknown) { diff --git a/packages/runtime/datastore/src/test/channelStorageService.spec.ts b/packages/runtime/datastore/src/test/channelStorageService.spec.ts index 4c6cca315b7c..ad97e2ab410f 100644 --- a/packages/runtime/datastore/src/test/channelStorageService.spec.ts +++ b/packages/runtime/datastore/src/test/channelStorageService.spec.ts @@ -5,7 +5,7 @@ import { strict as assert } from "assert"; import { stringToBuffer } from "@fluidframework/common-utils"; -import { TelemetryUTLogger } from "@fluidframework/telemetry-utils"; +import { MockLogger } from "@fluidframework/telemetry-utils"; import { ISnapshotTree } from "@fluidframework/protocol-definitions"; import { IDocumentStorageService } from "@fluidframework/driver-definitions"; import { ChannelStorageService } from "../channelStorageService"; @@ -21,10 +21,12 @@ describe("ChannelStorageService", () => { throw new Error("not implemented"); }, }; - const ss = new ChannelStorageService(tree, storage, new TelemetryUTLogger()); + const logger = new MockLogger(); + const ss = new ChannelStorageService(tree, storage, logger); assert.strictEqual(await ss.contains("/"), false); assert.deepStrictEqual(await ss.list(""), []); + logger.assertMatchNone([{ category: "error" }]); }); it("Top Level Blob", async () => { @@ -39,11 +41,13 @@ describe("ChannelStorageService", () => { return stringToBuffer(id, "utf8"); }, }; - const ss = new ChannelStorageService(tree, storage, new TelemetryUTLogger()); + const logger = new MockLogger(); + const ss = new ChannelStorageService(tree, storage, logger); assert.strictEqual(await ss.contains("foo"), true); assert.deepStrictEqual(await ss.list(""), ["foo"]); assert.deepStrictEqual(await ss.readBlob("foo"), stringToBuffer("bar", "utf8")); + logger.assertMatchNone([{ category: "error" }]); }); it("Nested Blob", async () => { @@ -63,10 +67,12 @@ describe("ChannelStorageService", () => { return stringToBuffer(id, "utf8"); }, }; - const ss = new ChannelStorageService(tree, storage, new TelemetryUTLogger()); + const logger = new MockLogger(); + const ss = new ChannelStorageService(tree, storage, logger); assert.strictEqual(await ss.contains("nested/foo"), true); assert.deepStrictEqual(await ss.list("nested/"), ["foo"]); assert.deepStrictEqual(await ss.readBlob("nested/foo"), stringToBuffer("bar", "utf8")); + logger.assertMatchNone([{ category: "error" }]); }); }); diff --git a/packages/runtime/runtime-definitions/src/dataStoreContext.ts b/packages/runtime/runtime-definitions/src/dataStoreContext.ts index 50014303d1c3..826e3ce85933 100644 --- a/packages/runtime/runtime-definitions/src/dataStoreContext.ts +++ b/packages/runtime/runtime-definitions/src/dataStoreContext.ts @@ -212,7 +212,7 @@ export interface IContainerRuntimeBase */ getAbsoluteUrl(relativeUrl: string): Promise; - uploadBlob(blob: 
ArrayBufferLike): Promise>; + uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise>; /** * Returns the current quorum. @@ -470,7 +470,7 @@ export interface IFluidDataStoreContext createParam: CreateChildSummarizerNodeParam, ): CreateChildSummarizerNodeFn; - uploadBlob(blob: ArrayBufferLike): Promise>; + uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise>; /** * @deprecated - The functionality to get base GC details has been moved to summarizer node. diff --git a/packages/test/test-end-to-end-tests/src/test/blobsisAttached.spec.ts b/packages/test/test-end-to-end-tests/src/test/blobsisAttached.spec.ts index 4c79163567a3..34e1b2b2b83f 100644 --- a/packages/test/test-end-to-end-tests/src/test/blobsisAttached.spec.ts +++ b/packages/test/test-end-to-end-tests/src/test/blobsisAttached.spec.ts @@ -65,6 +65,50 @@ describeNoCompat("blob handle isAttached", (getTestObjectProvider) => { provider.updateDocumentId(container.resolvedUrl); }); + it("blob is aborted before uploading", async function () { + const testString = "this is a test string"; + const dataStore1 = await requestFluidObject(container, "default"); + const ac = new AbortController(); + ac.abort("abort test"); + + try { + await dataStore1.runtime.uploadBlob(stringToBuffer(testString, "utf-8"), ac.signal); + assert.fail("Should not succeed"); + } catch (error: any) { + assert.strictEqual(error.status, undefined); + assert.strictEqual(error.uploadTime, undefined); + assert.strictEqual(error.acked, undefined); + } + const pendingBlobs = (runtimeOf(dataStore1).getPendingLocalState() as PendingLocalState) + .pendingAttachmentBlobs; + assert.strictEqual(Object.keys(pendingBlobs).length, 0); + }); + + it("blob is aborted after upload succeeds", async function () { + const testString = "this is a test string"; + const testKey = "a blob"; + const dataStore1 = await requestFluidObject(container, "default"); + const map = await dataStore1.getSharedObject(mapId); + const ac = new AbortController(); + // TODO: https://dev.azure.com/fluidframework/internal/_workitems/edit/4685 + await forceWriteMode(map, dataStore1); + let blob; + try { + blob = await dataStore1.runtime.uploadBlob( + stringToBuffer(testString, "utf-8"), + ac.signal, + ); + ac.abort(); + } catch (error: any) { + assert.fail("Should succeed"); + } + const pendingBlobs = (runtimeOf(dataStore1).getPendingLocalState() as PendingLocalState) .pendingAttachmentBlobs; + const acked = Object.values(pendingBlobs)[0].acked; + assert.strictEqual(blob.isAttached, false); + assert.strictEqual(acked, true); + }); + it("blob is attached after usage in map", async function () { const testString = "this is a test string"; const testKey = "a blob"; diff --git a/server/gitrest/pnpm-lock.yaml b/server/gitrest/pnpm-lock.yaml index c94a66c6e03a..80195247dbb8 100644 --- a/server/gitrest/pnpm-lock.yaml +++ b/server/gitrest/pnpm-lock.yaml @@ -10209,7 +10209,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /ora/5.4.1: @@ -13195,8 +13195,8 @@ packages: triple-beam: 1.3.0 winston-transport: 4.5.0 - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/server/routerlicious/kubernetes/routerlicious/templates/fluid-configmap.yaml
b/server/routerlicious/kubernetes/routerlicious/templates/fluid-configmap.yaml index 96550fdc2192..4ead824f71a9 100644 --- a/server/routerlicious/kubernetes/routerlicious/templates/fluid-configmap.yaml +++ b/server/routerlicious/kubernetes/routerlicious/templates/fluid-configmap.yaml @@ -74,7 +74,7 @@ data: "kafkaCheckpointOnReprocessingOp": {{ .Values.checkpoints.kafkaCheckpointOnReprocessingOp }} }, "apiCounters": { - "fetchTenantKeyMetricMs": 60000, + "fetchTenantKeyMetricMs": 60000 }, "alfred": { "kafkaClientId": "{{ template "alfred.fullname" . }}", @@ -117,7 +117,8 @@ data: "enforceServerGeneratedDocumentId": {{ .Values.alfred.enforceServerGeneratedDocumentId }}, "socketIo" : { "perMessageDeflate": {{ .Values.alfred.socketIo.perMessageDeflate}} - } + }, + "getDeltasRequestMaxOpsRange": {{ .Values.alfred.getDeltasRequestMaxOpsRange }} }, "client": { "type": "browser", diff --git a/server/routerlicious/kubernetes/routerlicious/values.yaml b/server/routerlicious/kubernetes/routerlicious/values.yaml index 8e1e1e934401..55e68239eb3c 100644 --- a/server/routerlicious/kubernetes/routerlicious/values.yaml +++ b/server/routerlicious/kubernetes/routerlicious/values.yaml @@ -48,6 +48,7 @@ alfred: enforceServerGeneratedDocumentId: false socketIo: perMessageDeflate: true + getDeltasRequestMaxOpsRange: 2000 storage: enableWholeSummaryUpload: false diff --git a/server/routerlicious/packages/lambdas/src/scribe/checkpointManager.ts b/server/routerlicious/packages/lambdas/src/scribe/checkpointManager.ts index 17f3f42d231d..83b556a7ef09 100644 --- a/server/routerlicious/packages/lambdas/src/scribe/checkpointManager.ts +++ b/server/routerlicious/packages/lambdas/src/scribe/checkpointManager.ts @@ -59,6 +59,7 @@ export class CheckpointManager implements ICheckpointManager { this.documentId, expectedSequenceNumber - 1, expectedSequenceNumber + 1, + "scribe", ); // If we don't get the expected delta, retry after a delay @@ -82,6 +83,7 @@ export class CheckpointManager implements ICheckpointManager { this.documentId, expectedSequenceNumber - 1, expectedSequenceNumber + 1, + "scribe", ); if ( diff --git a/server/routerlicious/packages/lambdas/src/scribe/lambdaFactory.ts b/server/routerlicious/packages/lambdas/src/scribe/lambdaFactory.ts index 045fa1464202..c443581a1ecf 100644 --- a/server/routerlicious/packages/lambdas/src/scribe/lambdaFactory.ts +++ b/server/routerlicious/packages/lambdas/src/scribe/lambdaFactory.ts @@ -318,6 +318,8 @@ export class ScribeLambdaFactory tenantId, documentId, lastCheckpoint.protocolState.sequenceNumber, + undefined, + "scribe", ); } return opMessages; diff --git a/server/routerlicious/packages/lambdas/src/scribe/pendingMessageReader.ts b/server/routerlicious/packages/lambdas/src/scribe/pendingMessageReader.ts index 161cf56148de..8f44f06af839 100644 --- a/server/routerlicious/packages/lambdas/src/scribe/pendingMessageReader.ts +++ b/server/routerlicious/packages/lambdas/src/scribe/pendingMessageReader.ts @@ -26,6 +26,7 @@ export class PendingMessageReader implements IPendingMessageReader { this.documentId, from - 1, to + 1, + "scribe", ); return deltasP; } diff --git a/server/routerlicious/packages/lambdas/src/scribe/summaryWriter.ts b/server/routerlicious/packages/lambdas/src/scribe/summaryWriter.ts index db7d851f25f6..27068da22aa0 100644 --- a/server/routerlicious/packages/lambdas/src/scribe/summaryWriter.ts +++ b/server/routerlicious/packages/lambdas/src/scribe/summaryWriter.ts @@ -713,7 +713,14 @@ export class SummaryWriter implements ISummaryWriter { let logTail: 
ISequencedDocumentMessage[] = []; if (this.getDeltasViaAlfred) { - logTail = await this.deltaService.getDeltas("", this.tenantId, this.documentId, gt, lt); + logTail = await this.deltaService.getDeltas( + "", + this.tenantId, + this.documentId, + gt, + lt, + "scribe", + ); } else { const query = { "documentId": this.documentId, diff --git a/server/routerlicious/packages/routerlicious-base/src/alfred/routes/api/deltas.ts b/server/routerlicious/packages/routerlicious-base/src/alfred/routes/api/deltas.ts index def21554ddd6..66f15a181d3c 100644 --- a/server/routerlicious/packages/routerlicious-base/src/alfred/routes/api/deltas.ts +++ b/server/routerlicious/packages/routerlicious-base/src/alfred/routes/api/deltas.ts @@ -36,6 +36,8 @@ export function create( ): Router { const deltasCollectionName = config.get("mongo:collectionNames:deltas"); const rawDeltasCollectionName = config.get("mongo:collectionNames:rawdeltas"); + const getDeltasRequestMaxOpsRange = + (config.get("alfred:getDeltasRequestMaxOpsRange") as number) ?? 2000; const router: Router = Router(); const tenantThrottleOptions: Partial = { @@ -144,9 +146,19 @@ export function create( ), verifyStorageToken(tenantManager, config, defaultTokenValidationOptions), (request, response, next) => { - const from = stringToSequenceNumber(request.query.from); - const to = stringToSequenceNumber(request.query.to); + let from = stringToSequenceNumber(request.query.from); + let to = stringToSequenceNumber(request.query.to); + if (from === undefined && to === undefined) { + from = 0; + to = from + getDeltasRequestMaxOpsRange + 1; + } else if (to === undefined) { + to = from + getDeltasRequestMaxOpsRange + 1; + } else if (from === undefined) { + from = Math.max(0, to - getDeltasRequestMaxOpsRange - 1); + } + const tenantId = getParam(request.params, "tenantId") || appTenants[0].id; + const caller = request.query.caller?.toString(); // Query for the deltas and return a filtered version of just the operations field const deltasP = deltaService.getDeltas( @@ -155,6 +167,7 @@ export function create( getParam(request.params, "id"), from, to, + caller, ); handleResponse(deltasP, response, undefined, 500); diff --git a/server/routerlicious/packages/routerlicious/config/config.json b/server/routerlicious/packages/routerlicious/config/config.json index 2a5777a73c4a..7837852c3171 100644 --- a/server/routerlicious/packages/routerlicious/config/config.json +++ b/server/routerlicious/packages/routerlicious/config/config.json @@ -114,7 +114,8 @@ }, "jwtTokenCache": { "enable": true - } + }, + "getDeltasRequestMaxOpsRange": 2000 }, "client": { "type": "browser", diff --git a/server/routerlicious/packages/services-core/src/delta.ts b/server/routerlicious/packages/services-core/src/delta.ts index 515b7f22c7f8..abca4bce7caa 100644 --- a/server/routerlicious/packages/services-core/src/delta.ts +++ b/server/routerlicious/packages/services-core/src/delta.ts @@ -12,6 +12,7 @@ export interface IDeltaService { documentId: string, from?: number, to?: number, + caller?: string, ): Promise; getDeltasFromStorage( diff --git a/server/routerlicious/packages/services-telemetry/src/lumber.ts b/server/routerlicious/packages/services-telemetry/src/lumber.ts index 89c847de68c0..04983a9fadb0 100644 --- a/server/routerlicious/packages/services-telemetry/src/lumber.ts +++ b/server/routerlicious/packages/services-telemetry/src/lumber.ts @@ -82,7 +82,7 @@ export class Lumber { public setProperties(properties: Map | Record): this { if (properties instanceof Map) { if (this._properties.size 
=== 0) { - this._properties = properties; + this._properties = new Map(properties); } else { properties.forEach((value: any, key: string) => { this.setProperty(key, value); diff --git a/server/routerlicious/packages/services/src/deltaManager.ts b/server/routerlicious/packages/services/src/deltaManager.ts index 060dbf68ab22..de1739f83fb1 100644 --- a/server/routerlicious/packages/services/src/deltaManager.ts +++ b/server/routerlicious/packages/services/src/deltaManager.ts @@ -22,12 +22,13 @@ export class DeltaManager implements IDeltaService { documentId: string, from: number, to: number, + caller?: string, ): Promise { const baseUrl = `${this.internalAlfredUrl}`; const restWrapper = await this.getBasicRestWrapper(tenantId, documentId, baseUrl); const resultP = restWrapper.get( `/deltas/${tenantId}/${documentId}`, - { from, to }, + { from, to, caller }, ); return resultP; } diff --git a/server/tinylicious/pnpm-lock.yaml b/server/tinylicious/pnpm-lock.yaml index 844ce87ed6ec..c7e871b72ee6 100644 --- a/server/tinylicious/pnpm-lock.yaml +++ b/server/tinylicious/pnpm-lock.yaml @@ -4519,7 +4519,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /p-limit/2.3.0: @@ -5906,8 +5906,8 @@ packages: triple-beam: 1.3.0 winston-transport: 4.5.0 - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/tools/api-markdown-documenter/pnpm-lock.yaml b/tools/api-markdown-documenter/pnpm-lock.yaml index 45883a9b17a7..bb0c37af78ae 100644 --- a/tools/api-markdown-documenter/pnpm-lock.yaml +++ b/tools/api-markdown-documenter/pnpm-lock.yaml @@ -5437,7 +5437,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /os-name/3.1.0: @@ -6900,8 +6900,8 @@ packages: execa: 1.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/tools/benchmark/pnpm-lock.yaml b/tools/benchmark/pnpm-lock.yaml index a698ab880943..bcd3a985b021 100644 --- a/tools/benchmark/pnpm-lock.yaml +++ b/tools/benchmark/pnpm-lock.yaml @@ -2982,7 +2982,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /p-limit/2.3.0: @@ -3755,8 +3755,8 @@ packages: isexe: 2.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/tools/changelog-generator-wrapper/pnpm-lock.yaml b/tools/changelog-generator-wrapper/pnpm-lock.yaml index a6cb76bf1fa8..cba9ee1b3b97 100644 --- a/tools/changelog-generator-wrapper/pnpm-lock.yaml +++ b/tools/changelog-generator-wrapper/pnpm-lock.yaml @@ -4507,7 +4507,7 @@ packages: levn: 0.3.0 prelude-ls: 1.1.2 type-check: 0.3.2 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /optionator/0.9.1: @@ -4519,7 +4519,7 @@ 
packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /os-tmpdir/1.0.2: @@ -5802,8 +5802,8 @@ packages: isexe: 2.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/tools/getkeys/pnpm-lock.yaml b/tools/getkeys/pnpm-lock.yaml index 3935fc368353..ca58f385a4de 100644 --- a/tools/getkeys/pnpm-lock.yaml +++ b/tools/getkeys/pnpm-lock.yaml @@ -2167,7 +2167,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /p-limit/2.3.0: @@ -2820,8 +2820,8 @@ packages: isexe: 2.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true diff --git a/tools/markdown-magic/pnpm-lock.yaml b/tools/markdown-magic/pnpm-lock.yaml index 0cf8cf93cc7d..02fd4b3d6b1a 100644 --- a/tools/markdown-magic/pnpm-lock.yaml +++ b/tools/markdown-magic/pnpm-lock.yaml @@ -2366,7 +2366,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: false /p-limit/2.3.0: @@ -3061,8 +3061,8 @@ packages: isexe: 2.0.0 dev: false - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: false diff --git a/tools/test-tools/pnpm-lock.yaml b/tools/test-tools/pnpm-lock.yaml index a4001195e627..7342da43eaf8 100644 --- a/tools/test-tools/pnpm-lock.yaml +++ b/tools/test-tools/pnpm-lock.yaml @@ -3978,7 +3978,7 @@ packages: levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.3 + word-wrap: 1.2.4 dev: true /os-name/3.1.0: @@ -5194,8 +5194,8 @@ packages: execa: 1.0.0 dev: true - /word-wrap/1.2.3: - resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + /word-wrap/1.2.4: + resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} dev: true
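The range clamping added to Alfred's /deltas route earlier in this diff bounds every request to at most `getDeltasRequestMaxOpsRange` ops even when the caller omits `from` or `to`. A standalone sketch of that defaulting rule (the `clampDeltasRange` helper name is hypothetical; 2000 mirrors the config default):

```ts
// Sketch of the /deltas range-defaulting rule; not part of the diff itself.
function clampDeltasRange(
    from: number | undefined,
    to: number | undefined,
    maxRange = 2000,
): { from: number; to: number } {
    if (from === undefined) {
        if (to === undefined) {
            // Neither bound supplied: serve the first window of ops.
            return { from: 0, to: maxRange + 1 };
        }
        // Open lower bound: walk back at most maxRange ops, floored at 0.
        return { from: Math.max(0, to - maxRange - 1), to };
    }
    if (to === undefined) {
        // Open upper bound: cap the window at maxRange ops past `from`.
        return { from, to: from + maxRange + 1 };
    }
    return { from, to };
}

// clampDeltasRange(undefined, undefined) -> { from: 0, to: 2001 }
// clampDeltasRange(100, undefined)       -> { from: 100, to: 2101 }
// clampDeltasRange(undefined, 5000)      -> { from: 2999, to: 5000 }
```

Note that the route only defaults missing bounds; a caller that supplies both `from` and `to` is not clamped by this logic.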