Skip to content

Commit

Permalink
tag asserts (#16222)
Browse files Browse the repository at this point in the history
## Description

Tag asserts for release.

`flub release -g client && npm run format`
  • Loading branch information
CraigMacomber authored Jul 1, 2023
1 parent a44d339 commit 3bb86f7
Show file tree
Hide file tree
Showing 15 changed files with 84 additions and 54 deletions.
4 changes: 2 additions & 2 deletions experimental/dds/tree2/src/core/tree/visitDelta.ts
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ function secondPass(delta: Delta.MarkList, visitor: DeltaVisitor, config: PassCo
extractFromOpaque(mark.moveId),
mark.count,
);
assert(entry !== undefined, "Expected a move out for this move in");
assert(entry !== undefined, 0x6d7 /* Expected a move out for this move in */);
visitor.onMoveIn(index, entry.length, entry.value);
let endIndex = index + entry.length;

Expand All @@ -278,7 +278,7 @@ function secondPass(delta: Delta.MarkList, visitor: DeltaVisitor, config: PassCo

assert(
entry !== undefined && entry.start === nextId,
"Expected a move out for the remaining portion of this move in",
0x6d8 /* Expected a move out for the remaining portion of this move in */,
);

lastEntryId = entry.start + entry.length - 1;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ const nodeProxyHandler: AdaptingProxyHandler<NodeProxyTarget, EditableTree> = {
const fieldKey: FieldKey = brand(key);
if (fieldKey === localNodeKeySymbol) {
// TODO: this is not very type safe. Can we do better?
assert(typeof key === "number", "Invalid local node key");
assert(typeof key === "number", 0x6d9 /* Invalid local node key */);
const localNodeKey = key as unknown as LocalNodeKey;
const stableNodeKey = target.context.nodeKeys.stabilizeNodeKey(localNodeKey);
target.getField(fieldKey).content = stableNodeKey;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ abstract class AbstractPathVisitor implements PathVisitor {
listeners: new Set(),
children: new Map(),
};
assert(contextRoots !== undefined, "expected contextRoots to be defined");
assert(contextRoots !== undefined, 0x6da /* expected contextRoots to be defined */);
contextRoots.set(tree.field, newRoot);
this.bindTree(contextType, tree, listener, newRoot);
} else {
Expand Down Expand Up @@ -387,7 +387,7 @@ abstract class AbstractPathVisitor implements PathVisitor {
foundTree.listeners.delete(listener);
} else {
tree.children.forEach((childTree, fieldKey) => {
assert(foundTree !== undefined, "expected foundTree to be defined");
assert(foundTree !== undefined, 0x6db /* expected foundTree to be defined */);
const childCallTree = foundTree.children.get(fieldKey);
if (childCallTree !== undefined) {
this.unregisterListener(contextType, childTree, listener, childCallTree);
Expand Down Expand Up @@ -666,7 +666,7 @@ class AbstractDataBinder<
const newVisitor = this.visitorFactory(anchor);
this.unregisterHandles.add(
anchor[on]("subtreeChanging", (upPath: UpPath) => {
assert(newVisitor !== undefined, "visitor expected to be defined");
assert(newVisitor !== undefined, 0x6dc /* visitor expected to be defined */);
if (!this.visitorLocations.has(newVisitor)) {
this.visitorLocations.set(newVisitor, upPath);
}
Expand Down Expand Up @@ -728,8 +728,8 @@ class BufferingDataBinder<E extends Events<E>>
const compareFn = (a: BufferingPathVisitor, b: BufferingPathVisitor) => {
const pathA = this.visitorLocations.get(a);
const pathB = this.visitorLocations.get(b);
assert(pathA !== undefined, "pathA expected to be defined");
assert(pathB !== undefined, "pathB expected to be defined");
assert(pathA !== undefined, 0x6dd /* pathA expected to be defined */);
assert(pathB !== undefined, 0x6de /* pathB expected to be defined */);
return sortFn(pathA, pathB);
};
const sortedVisitors: BufferingPathVisitor[] = nativeSort(unsortedVisitors, compareFn);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,13 +90,13 @@ export function getStableNodeKey(
const field = node[getField](nodeKeyFieldKeySymbol);
assert(
field.fieldSchema.kind.identifier === FieldKinds.nodeKey.identifier,
"Invalid node key field kind",
0x6df /* Invalid node key field kind */,
);
const nodeKeyNode = field.getNode(0);
const id = nodeKeyNode[valueSymbol];
assert(
typeof id === "string" && isStableId(id),
"Malformed value encountered in node key field",
0x6e0 /* Malformed value encountered in node key field */,
);
return brand(id);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ export class NodeKeyIndex<TField extends GlobalFieldKey>
this.nodes.clear();
for (let i = 0; i < context.root.length; i++) {
for (const [id, node] of this.findKeys(context.root.getNode(i))) {
assert(!this.nodes.has(id), "Encountered duplicate node key");
assert(!this.nodes.has(id), 0x6e1 /* Encountered duplicate node key */);
this.nodes.set(id, node);
}
}
Expand Down Expand Up @@ -107,12 +107,12 @@ export class NodeKeyIndex<TField extends GlobalFieldKey>
assert(
node[typeSymbol].extraGlobalFields ||
node[typeSymbol].globalFields.has(this.fieldKey),
"Found node key that is not in schema",
0x6e2 /* Found node key that is not in schema */,
);
} else {
assert(
!node[typeSymbol].globalFields.has(this.fieldKey),
"Node key absent but required by schema",
0x6e3 /* Node key absent but required by schema */,
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,23 +43,23 @@ export function createNodeKeyManager(idCompressor?: IIdCompressor | undefined):
generateLocalNodeKey: () => {
assert(
idCompressor !== undefined,
"Runtime IdCompressor must be available to generate local node keys",
0x6e4 /* Runtime IdCompressor must be available to generate local node keys */,
);
return brand(idCompressor.generateCompressedId());
},

localizeNodeKey: (key: StableNodeKey) => {
assert(
idCompressor !== undefined,
"Runtime IdCompressor must be available to convert node keys",
0x6e5 /* Runtime IdCompressor must be available to convert node keys */,
);
return brand(idCompressor.recompress(key));
},

stabilizeNodeKey: (key: LocalNodeKey) => {
assert(
idCompressor !== undefined,
"Runtime IdCompressor must be available to convert node keys",
0x6e6 /* Runtime IdCompressor must be available to convert node keys */,
);
return brand(
// TODO: The assert below is required for type safety but is maybe slow
Expand Down Expand Up @@ -98,8 +98,8 @@ class MockNodeKeyManager implements NodeKeyManager {
}

private createMockStableId(offset: number): StableId {
assert(offset >= 0, "UUID offset may not be negative");
assert(offset < 281_474_976_710_656, "UUID offset must be at most 16^12");
assert(offset >= 0, 0x6e7 /* UUID offset may not be negative */);
assert(offset < 281_474_976_710_656, 0x6e8 /* UUID offset must be at most 16^12 */);
return assertIsStableId(
`a110ca7e-add1-4000-8000-${Math.round(offset).toString(16).padStart(12, "0")}`,
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -669,13 +669,13 @@ function getReplacementMark<T>(
const lastEffectId = effect.start + effect.length - 1;
assert(
effect.start <= id && lastEffectId >= lastTargetId,
"Expected effect to cover entire mark",
0x6e9 /* Expected effect to cover entire mark */,
);

let mark = effect.value.mark;
assert(
getMarkLength(mark) === effect.length,
"Expected replacement mark to be same length as number of cells replaced",
0x6ea /* Expected replacement mark to be same length as number of cells replaced */,
);

// The existing effect may cover more cells than the area we are querying.
Expand Down Expand Up @@ -737,7 +737,7 @@ function setReplacementMark<T>(
if (effect !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6eb /* Expected effect to cover entire mark */,
);
newEffect = { ...effect.value, mark };
} else {
Expand Down Expand Up @@ -891,7 +891,7 @@ function setModifyAfter<T>(
if (effect !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6ec /* Expected effect to cover entire mark */,
);
const nodeChange =
effect.value.modifyAfter !== undefined
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,10 @@ function invertMark<TNodeChange>(
return [invertModifyOrSkip(mark.count, mark.changes, inputIndex, invertChild)];
}
if (mark.changes !== undefined) {
assert(mark.count === 1, "Mark with changes can only target a single cell");
assert(
mark.count === 1,
0x6ed /* Mark with changes can only target a single cell */,
);
crossFieldManager.set(
CrossFieldTarget.Destination,
mark.revision ?? revision,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,7 @@ export function getModifyAfter<T>(
if (effect?.value.modifyAfter !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6ee /* Expected effect to cover entire mark */,
);
if (consumeEffect) {
const newEffect = { ...effect.value };
Expand Down Expand Up @@ -355,7 +355,7 @@ function getPairedMarkStatus<T>(
if (effect?.value.pairedMarkStatus !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6ef /* Expected effect to cover entire mark */,
);
if (consumeEffect) {
const newEffect = { ...effect.value };
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ function rebaseMark<TNodeChange>(
if (markEmptiesCells(baseMark)) {
const moveId = getMarkMoveId(baseMark);
if (moveId !== undefined) {
assert(isMoveMark(baseMark), "Only move marks have move IDs");
assert(isMoveMark(baseMark), 0x6f0 /* Only move marks have move IDs */);
if (markFollowsMoves(rebasedMark)) {
sendMarkToDest(rebasedMark, moveEffects, baseRevision, moveId, baseMark.count);
return { count: 0 };
Expand Down Expand Up @@ -460,7 +460,7 @@ function sendMarkToDest<T>(
if (effect !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6f1 /* Expected effect to cover entire mark */,
);
newEffect = { ...effect.value, movedMark: mark };
} else {
Expand All @@ -486,7 +486,7 @@ function setPairedMarkStatus(
if (effect !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6f2 /* Expected effect to cover entire mark */,
);
newEffect = { ...effect.value, pairedMarkStatus: status };
} else {
Expand Down Expand Up @@ -655,7 +655,7 @@ function getMovedMark<T>(
if (effect?.value.movedMark !== undefined) {
assert(
effect.start <= id && effect.start + effect.length >= (id as number) + count,
"Expected effect to cover entire mark",
0x6f3 /* Expected effect to cover entire mark */,
);
const newEffect = { ...effect.value };
delete newEffect.movedMark;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ export class DocumentStorageServiceCompressionAdapter extends DocumentStorageSer
return blob;
}
}
assert(algorithm < 0x10, "Algorithm should be less than 0x10");
assert(algorithm < 0x10, 0x6f5 /* Algorithm should be less than 0x10 */);
const blobView = new Uint8Array(blob);
const blobLength = blobView.length;
const newBlob = new Uint8Array(blobLength + 1);
Expand Down Expand Up @@ -241,7 +241,7 @@ export class DocumentStorageServiceCompressionAdapter extends DocumentStorageSer
config: ICompressionStorageConfig,
context?: ISummaryContext,
): SummaryObject {
assert(typeof input === "object", "input must be a non-null object");
assert(typeof input === "object", 0x6f6 /* input must be a non-null object */);
const maybeReplaced = isEncode ? encoder(input, config) : decoder(input);

if (maybeReplaced !== input) {
Expand Down Expand Up @@ -277,7 +277,7 @@ export class DocumentStorageServiceCompressionAdapter extends DocumentStorageSer
* @returns - The summary tree containing the metadata blob.
*/
private static findMetadataHolderSummary(summary: ISummaryTree): ISummaryTree | undefined {
assert(typeof summary === "object", "summary must be a non-null object");
assert(typeof summary === "object", 0x6f7 /* summary must be a non-null object */);
for (const key of Object.keys(summary.tree)) {
const value = summary.tree[key];

Expand All @@ -302,7 +302,7 @@ export class DocumentStorageServiceCompressionAdapter extends DocumentStorageSer
*/
private static getMetadataHolderTree(summary: ISummaryTree) {
const metadataHolder = this.findMetadataHolderSummary(summary);
assert(metadataHolder !== undefined, "metadataHolder must be a non-null object");
assert(metadataHolder !== undefined, 0x6f8 /* metadataHolder must be a non-null object */);
const metadataHolderTree = metadataHolder.tree;
return metadataHolderTree;
}
Expand All @@ -328,7 +328,7 @@ export class DocumentStorageServiceCompressionAdapter extends DocumentStorageSer
* @returns - True if the compression markup blob is found, otherwise false.
*/
private static hasCompressionMarkup(snapshot: ISnapshotTree): boolean {
assert(typeof snapshot === "object", "snapshot must be a non-null object");
assert(typeof snapshot === "object", 0x6f9 /* snapshot must be a non-null object */);
for (const key of Object.keys(snapshot.blobs)) {
if (key === metadataBlobName) {
const value = snapshot.blobs[blobHeadersBlobName];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ export function applyStorageCompression(
documentServiceFactory,
);
} else {
assert(isCompressionConfig(config), "Invalid compression config");
assert(isCompressionConfig(config), 0x6f4 /* Invalid compression config */);
return applyStorageCompressionInternal(
DocumentServiceFactoryCompressionAdapter,
documentServiceFactory,
Expand Down
4 changes: 2 additions & 2 deletions packages/runtime/container-runtime/src/opLifecycle/outbox.ts
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,7 @@ export class Outbox {

const rawBatch = batchManager.popBatch();
if (rawBatch.hasReentrantOps === true && this.params.config.enableBatchRebasing) {
assert(!this.rebasing, "A rebased batch should never have reentrant ops");
assert(!this.rebasing, 0x6fa /* A rebased batch should never have reentrant ops */);
// If a batch contains reentrant ops (ops created as a result from processing another op)
// it needs to be rebased so that we can ensure consistent reference sequence numbers
// and eventual consistency at the DDS level.
Expand All @@ -266,7 +266,7 @@ export class Outbox {
* @param rawBatch - the batch to be rebased
*/
private rebase(rawBatch: IBatch, batchManager: BatchManager) {
assert(!this.rebasing, "Reentrancy");
assert(!this.rebasing, 0x6fb /* Reentrancy */);

this.rebasing = true;
for (const message of rawBatch.content) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,9 +243,9 @@ export class SummarizerNode implements IRootSummarizerNode {
private wasSummarizeMissed(parentSkipRecursion: boolean): boolean {
assert(
this.wipSummaryLogger !== undefined,
"wipSummaryLogger should have been set in startSummary or ctor",
0x6fc /* wipSummaryLogger should have been set in startSummary or ctor */,
);
assert(this.wipReferenceSequenceNumber !== undefined, "Not tracking a summary");
assert(this.wipReferenceSequenceNumber !== undefined, 0x6fd /* Not tracking a summary */);

// If the parent node skipped recursion, it did not call summarize on this node. So, summarize was not missed
// but was intentionally not called.
Expand Down Expand Up @@ -335,7 +335,7 @@ export class SummarizerNode implements IRootSummarizerNode {

// If localPathsToUse is undefined, it means summarize didn't run for this node and in that case the validate
// step should have failed.
assert(localPathsToUse !== undefined, "summarize didn't run for node");
assert(localPathsToUse !== undefined, 0x6fe /* summarize didn't run for node */);
const summary = new SummaryNode({
...localPathsToUse,
referenceSequenceNumber: this.wipReferenceSequenceNumber,
Expand Down
Loading

0 comments on commit 3bb86f7

Please sign in to comment.