From b868c89c815269fd8a5a113602952e29bb165a41 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 18:52:40 -0800
Subject: [PATCH 01/14] feat(cli): add `git warp doctor` structural diagnostics
command
7 diagnostic checks (repo-accessible, refs-consistent, coverage-complete,
checkpoint-fresh, audit-consistent, clock-skew, hooks-installed) with
budget enforcement, sorted findings, deterministic health derivation,
and machine-readable JSON output.
Includes --strict flag (exit 4 on warnings), text presenter with colored
status icons, doctorSchema, golden-JSON unit tests, and BATS E2E tests.
---
CHANGELOG.md | 15 ++
README.md | 44 +++-
ROADMAP.md | 2 +-
WHITEPAPER.md | 127 ++++++++++
bin/cli/commands/doctor/checks.js | 405 ++++++++++++++++++++++++++++++
bin/cli/commands/doctor/codes.js | 46 ++++
bin/cli/commands/doctor/index.js | 216 ++++++++++++++++
bin/cli/commands/doctor/types.js | 89 +++++++
bin/cli/commands/registry.js | 2 +
bin/cli/infrastructure.js | 6 +-
bin/cli/schemas.js | 12 +
bin/cli/types.js | 1 +
bin/presenters/index.js | 2 +
bin/presenters/text.js | 52 ++++
package.json | 2 +-
test/bats/cli-doctor.bats | 88 +++++++
test/unit/cli/doctor.test.js | 240 ++++++++++++++++++
test/unit/cli/schemas.test.js | 17 ++
18 files changed, 1354 insertions(+), 12 deletions(-)
create mode 100644 WHITEPAPER.md
create mode 100644 bin/cli/commands/doctor/checks.js
create mode 100644 bin/cli/commands/doctor/codes.js
create mode 100644 bin/cli/commands/doctor/index.js
create mode 100644 bin/cli/commands/doctor/types.js
create mode 100644 test/bats/cli-doctor.bats
create mode 100644 test/unit/cli/doctor.test.js
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 75e3646..64e2f30 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,21 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [10.13.0] — 2026-02-13 — Doctor Command
+
+Adds `git warp doctor`, a structural diagnostics command that probes for anomalies (broken refs, missing objects, clock drift, audit gaps) and prescribes fixes. Read-only, no materialization required.
+
+### Added
+
+- **`git warp doctor`**: 7 diagnostic checks — repo-accessible, refs-consistent, coverage-complete, checkpoint-fresh, audit-consistent, clock-skew, hooks-installed
+- **`--strict` flag**: Treats warnings as failures (exit 4 instead of 3)
+- **Budget enforcement**: Global 10s deadline; skipped checks appear as findings, not silent omissions
+- **Machine-readable output**: `--json` emits versioned `DoctorPayload` (v1) with policy echo, sorted findings, and priority actions
+- **Human-readable output**: Colored status icons, per-finding fix suggestions, priority action summary
+- **Code registry**: `bin/cli/commands/doctor/codes.js` — single source of truth for all finding codes
+- **Schema + unit tests**: `doctorSchema` tests in schemas.test.js, golden-JSON tests in doctor.test.js
+- **BATS E2E tests**: 5 scenarios in cli-doctor.bats (healthy, broken ref, missing checkpoint, strict mode)
+
## [10.12.0] — 2026-02-13 — Multi-Runtime CLI + parseArgs Migration
Makes the CLI (`bin/`) portable across Node 22+, Bun, and Deno by removing Node-only dependencies, and replaces hand-rolled arg parsing with `node:util.parseArgs` + Zod schemas.
diff --git a/README.md b/README.md
index 432ceb7..01037a7 100644
--- a/README.md
+++ b/README.md
@@ -8,9 +8,11 @@
-A multi-writer graph database that uses Git commits as its storage substrate. Graph state is stored as commits pointing to the empty tree (`4b825dc...`), making the data invisible to normal Git workflows while inheriting Git's content-addressing, cryptographic integrity, and distributed replication.
+## The Core Idea
-Writers collaborate without coordination using CRDTs (OR-Set for nodes/edges, LWW registers for properties). Every writer maintains an independent patch chain; materialization deterministically merges all writers into a single consistent view.
+**git-warp** is a graph database that doesn't need a database server. It stores all its data inside a Git repository by abusing a clever trick: every piece of data is a Git commit that points to the **empty tree** — a special object that exists in every Git repo. Because the commits don't reference any actual files, they're completely invisible to normal Git operations like `git log`, `git diff`, or `git status`. Your codebase stays untouched, but there's a full graph database living alongside it.
+
+Writers collaborate without coordination using CRDTs (Conflict-free Replicated Data Types) that guarantee deterministic convergence regardless of what order the patches arrive in.
```bash
npm install @git-stunts/git-warp @git-stunts/plumbing
@@ -54,15 +56,21 @@ const result = await graph.query()
## How It Works
-Each writer creates **patches**: atomic batches of graph operations (add/remove nodes, add/remove edges, set properties). Patches are serialized as CBOR-encoded Git commit messages pointing to the empty tree, forming a per-writer chain under `refs/warp//writers/`.
+### The Multi-Writer Problem (and How It's Solved)
+
+Multiple people (or machines, or processes) can write to the same graph **simultaneously, without any coordination**. There's no central server, no locking, no "wait your turn."
+
+Each writer maintains their own independent chain of **patches** — atomic batches of operations like "add this node, set this property, create this edge." These patches are stored as Git commits under refs like `refs/warp//writers/`.
+
+When you want to read the graph, you **materialize** — which means replaying all patches from all writers and merging them into a single consistent view. The specific CRDT rules are:
-**Materialization** replays all patches from all writers, applying CRDT merge semantics:
+- **Nodes and edges** use an OR-Set (Observed-Remove Set). If Alice adds a node and Bob concurrently deletes it, the add wins — unless Bob's delete specifically observed Alice's add. This is the "add wins over concurrent remove" principle.
+- **Properties** use LWW (Last-Writer-Wins) registers. If two writers set the same property at the same time, the one with the higher Lamport timestamp wins. Ties are broken by writer ID (lexicographic), then by patch SHA.
+- **Version vectors** track causality across writers so the system knows which operations are concurrent vs. causally ordered.
-- **Nodes and edges** use an Observed-Remove Set (OR-Set). An add wins over a concurrent remove unless the remove has observed the specific add event.
-- **Properties** use Last-Write-Wins (LWW) registers, ordered by Lamport timestamp, then writer ID, then patch SHA.
-- **Version vectors** track causality across writers, ensuring deterministic convergence regardless of patch arrival order.
+Every operation gets a unique **EventId** — `(lamport, writerId, patchSha, opIndex)` — which creates a total ordering that makes merge results identical no matter which machine runs them.
-**Checkpoints** snapshot materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint.
+**Checkpoints** snapshot the materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint.
## Multi-Writer Collaboration
@@ -492,9 +500,27 @@ npm run test:deno # Deno: API integration tests
npm run test:matrix # All runtimes in parallel
```
+## When git-warp is Most Useful
+
+- **Distributed configuration management.** A fleet of servers each writing their own state into a shared graph without a central database.
+- **Offline-first field applications.** Collecting data in the field with no connectivity; merging cleanly when back online.
+- **Collaborative knowledge bases.** Researchers curating nodes and relationships independently.
+- **Git-native issue/project tracking.** Embedding a full project graph directly in the repo.
+- **Audit-critical systems.** Tamper-evident records with cryptographic proof (via Audit Receipts).
+- **IoT sensor networks.** Sensors logging readings and relationships, syncing when bandwidth allows.
+- **Game world state.** Modders independently adding content that composes without a central manager.
+
+## When NOT to Use It
+
+- **High-throughput transactional workloads.** If you need thousands of writes per second with immediate consistency, use Postgres or Redis.
+- **Large binary or blob storage.** Data lives in Git commit messages (default cap 1 MB). Use object storage for images or videos.
+- **Sub-millisecond read latency.** Materialization has overhead. Use an in-memory database for real-time gaming physics or HFT.
+- **Simple key-value storage.** If you don't have relationships or need traversals, a graph database is overkill.
+- **Non-Git environments.** The value proposition depends on Git infrastructure (push/pull, content-addressing).
+
## AIΩN Foundations Series
-This package is the reference implementation of WARP (Worldline Algebra for Recursive Provenance) graphs as described in the AIΩN Foundations Series. The papers define WARP graphs as a minimal recursive state object ([Paper I](https://doi.org/10.5281/zenodo.17908005)), equip them with deterministic tick-based operational semantics ([Paper II](https://doi.org/10.5281/zenodo.17934512)), develop computational holography, provenance payloads, and prefix forks ([Paper III](https://doi.org/10.5281/zenodo.17963669)), and introduce observer geometry with rulial distance and temporal logic ([Paper IV](https://doi.org/10.5281/zenodo.18038297)). This codebase implements the core data structures and multi-writer collaboration protocol described in those papers.
+This package is the reference implementation of WARP (Worldline Algebra for Recursive Provenance) graphs as described in the AIΩN Foundations Series. The papers formalize the graph as a minimal recursive state object ([Paper I](https://doi.org/10.5281/zenodo.17908005)), equip it with deterministic tick-based semantics ([Paper II](https://doi.org/10.5281/zenodo.17934512)), develop computational holography and provenance payloads ([Paper III](https://doi.org/10.5281/zenodo.17963669)), and introduce observer geometry with the translation cost metric ([Paper IV](https://doi.org/10.5281/zenodo.18038297)).
## License
diff --git a/ROADMAP.md b/ROADMAP.md
index 0ccef23..8de60f8 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -331,7 +331,7 @@ Create `docs/specs/AUDIT_RECEIPT.md` with:
### M4.T2.DOCTOR (B-Tier)
-- **Status:** `OPEN`
+- **Status:** `DONE`
**User Story:** As an operator, I need one command that identifies likely system breakage fast.
diff --git a/WHITEPAPER.md b/WHITEPAPER.md
new file mode 100644
index 0000000..7ab214a
--- /dev/null
+++ b/WHITEPAPER.md
@@ -0,0 +1,127 @@
+# Git Warp
+
+## The Core Idea
+
+git-warp is a graph database that doesn't need a database server. It stores all its data inside a Git repository by abusing a clever trick: every piece of data is a Git commit that points to the **empty tree** — a special object that exists in every Git repo. Because the commits don't reference any actual files, they're completely invisible to normal Git operations like `git log`, `git diff`, or `git status`. Your codebase stays untouched, but there's a full graph database living alongside it.
+
+## What's a Graph Database?
+
+Instead of rows and columns (like a spreadsheet or SQL database), a graph database stores **nodes** (things) and **edges** (relationships between things). So you might have nodes like `user:alice` and `user:bob`, connected by an edge labeled `manages`. Nodes and edges can both carry properties — key/value pairs like `name: "Alice"` or `since: "2024"`.
+
+## The Multi-Writer Problem (and How It's Solved)
+
+This is where it gets interesting. Multiple people (or machines, or processes) can write to the same graph **simultaneously, without any coordination**. There's no central server, no locking, no "wait your turn."
+
+Each writer maintains their own independent chain of **patches** — atomic batches of operations like "add this node, set this property, create this edge." These patches are stored as Git commits under refs like `refs/warp/myGraph/writers/alice`.
+
+When you want to read the graph, you **materialize** — which means replaying all patches from all writers and merging them into a single consistent view. The merge uses CRDTs (Conflict-free Replicated Data Types), which are mathematical structures that guarantee deterministic convergence regardless of what order the patches arrive in.
+
+The specific CRDT rules are:
+
+- **Nodes and edges** use an OR-Set (Observed-Remove Set). If Alice adds a node and Bob concurrently deletes it, the add wins — unless Bob's delete specifically observed Alice's add. This is the "add wins over concurrent remove" principle.
+- **Properties** use LWW (Last-Writer-Wins) registers. If two writers set the same property at the same time, the one with the higher Lamport timestamp wins. Ties are broken by writer ID (lexicographic), then by patch SHA.
+- **Version vectors** track causality across writers so the system knows which operations are concurrent vs. causally ordered.
+
+Every operation gets a unique EventId — `(lamport, writerId, patchSha, opIndex)` — which creates a total ordering that makes merge results identical no matter which machine runs them.
+
+## Syncing
+
+Since the data lives in Git, syncing can be as simple as `git push` and `git pull`. But there's also a built-in HTTP sync protocol and a direct in-process sync for when two graph instances are running in the same application. The sync protocol works by comparing frontiers (each side's latest patch SHAs per writer), then shipping over whatever the other side is missing. It supports HMAC-SHA256 authentication, retries with exponential backoff, and abort signals.
+
+## Querying
+
+Once materialized, you get a fluent query builder:
+
+```javascript
+graph.query()
+ .match('user:*')
+ .where({ role: 'admin' })
+ .outgoing('manages', { depth: [1, 3] })
+ .aggregate({ count: true, avg: 'props.salary' })
+ .run();
+```
+
+There's also full graph traversal — BFS, DFS, shortest path (bidirectional BFS), weighted shortest path (Dijkstra), A* search, topological sort, and connected components. All traversals support depth limits, abort signals, and direction control.
+
+## Checkpoints and Performance
+
+Materialization replays every patch, which gets expensive as the graph grows. **Checkpoints** snapshot the current state so future materializations only replay patches created after the checkpoint. You can configure auto-checkpointing (e.g., every 500 patches) and it handles this transparently.
+
+For large graphs, there's a **bitmap index** system using Roaring bitmaps that enables O(1) neighbor lookups instead of scanning. The index is sharded by SHA prefix for lazy loading — cold start is near-zero memory, and a full index for a million nodes runs about 150–200 MB.
+
+## Time Travel
+
+The `seek` system lets you navigate to any point in the graph's history by Lamport tick. You can jump to an absolute tick, step forward/backward, save named bookmarks, and return to the present. Previously visited ticks are cached as content-addressed blobs for near-instant restoration. When a seek cursor is active, all queries and reads automatically show state at that tick.
+
+## Advanced Features
+
+**Observer Views** project the graph through filtered lenses — you define a match pattern and optionally redact sensitive properties. This gives you access control and data minimization without modifying the underlying graph. You can even measure the **translation cost** between two observers (how much information is lost going from one perspective to another), using Minimum Description Length theory.
+
+**Temporal Queries** implement CTL*-style operators over history. `always()` checks if a predicate held at every tick, `eventually()` checks if it held at any tick. These let you ask questions like "was this PR ever merged?" or "was this user always active?"
+
+**Forks** create a divergent copy of a graph at a specific point in a writer's history, with Git's content-addressing automatically deduplicating shared history.
+
+**Wormholes** compress a contiguous range of patches into a single edge while preserving provenance — and two consecutive wormholes can be composed (they form a monoid).
+
+**Audit Receipts** create a tamper-evident chain of records for every data commit — each receipt captures the operation outcomes, is CBOR-encoded into a Git tree, and is linked to the previous receipt via parent pointers. Mutating any receipt invalidates the entire chain downstream.
+
+**Garbage Collection** compacts tombstoned entries from the OR-Sets, but only entries that all known writers have observed — so it never removes information an unsynced writer might still need.
+
+## Architecture
+
+The codebase follows hexagonal architecture (ports and adapters). **Ports** are abstract interfaces — `GraphPersistencePort`, `IndexStoragePort`, `LoggerPort`, `ClockPort`, `CryptoPort`, `CodecPort`. **Adapters** implement them for specific runtimes — there are adapters for Node.js, Deno, Bun, and browsers. The domain layer has zero direct Node.js imports, making it genuinely portable.
+
+The project runs across Node 22+, Bun, and Deno, with a full multi-runtime test matrix in Docker. The CLI is available as `warp-graph` or as a Git subcommand (`git warp`), with ASCII visualization dashboards for queries, health checks, path finding, and time travel.
+
+## The Academic Side
+
+The whole thing is the reference implementation for something called WARP (Worldline Algebra for Recursive Provenance) graphs, described across four papers in the "AIΩN Foundations Series." The papers formalize the graph as a minimal recursive state object, give it deterministic tick-based semantics, develop computational holography and provenance payloads, and introduce the observer geometry with the translation cost metric. The codebase implements all of it.
+
+It's built by a group called FLYING ROBOTS and licensed Apache-2.0.
+
+---
+## When git-warp Would Be Most Useful
+
+**1. Distributed configuration management.** A fleet of servers each writing their own state (health, config, version) into a shared graph. No central database needed — each server is a writer, and any node can materialize the full picture after a `git pull`.
+
+**2. Offline-first field applications.** Think geologists, aid workers, or inspectors collecting data on tablets with no connectivity. Each device writes patches locally. When they're back online, everything merges cleanly without conflict resolution meetings.
+
+**3. Collaborative knowledge bases.** A research team where each member curates nodes and relationships (papers, concepts, people, citations) independently. The graph merges their perspectives, and observer views can give each team a filtered lens into just their domain.
+
+**4. Git-native issue/project tracking.** Embedding a full project graph (tasks, dependencies, assignees, statuses) directly in the repo. No external service, no API keys, no vendor lock-in. The tracker lives and dies with the code.
+
+**5. Audit-critical systems.** Anywhere you need a tamper-evident record of every change — regulatory compliance, legal discovery, medical records coordination. The audit receipt chain gives you cryptographic proof of what happened and when.
+
+**6. Multi-team microservice dependency graphs.** Each team maintains their own service nodes and dependency edges. Materialization gives you a live, always-consistent dependency map across the whole org, synced through your existing Git infrastructure.
+
+**7. Decentralized access control modeling.** Storing permission graphs (users, roles, resources, grants) where multiple admins across different regions can make changes independently. The OR-Set semantics mean a permission grant won't be accidentally lost to a concurrent revocation.
+
+**8. IoT sensor networks.** Each sensor or gateway is a writer, logging readings and relationships (sensor → location, sensor → alert-threshold). Sync when bandwidth allows. Checkpoints keep materialization fast even with millions of readings.
+
+**9. Game world state in multiplayer modding.** Modders independently add items, NPCs, quests, and relationships. The CRDT merge means mods compose without a central mod manager resolving conflicts — adds win over concurrent removes, so one mod can't accidentally delete another's content.
+
+**10. Supply chain provenance.** Tracking goods through a supply chain where each participant (manufacturer, shipper, warehouse, retailer) writes their own nodes and edges. Temporal queries let you ask "was this item always in cold storage?" and the graph provides a cryptographically verifiable answer.
+
+## Five Clever Uses
+
+**1. Git repo archaeology as a graph.** Import your actual Git history as nodes and edges, then use git-warp's traversal and temporal queries to ask questions like "what's the shortest path between these two files through shared authors?" — and the whole analysis lives in the same repo it's analyzing.
+
+**2. Personal knowledge graph that syncs like dotfiles.** Keep a `~/.brain` repo with a warp graph of everything you know — concepts, people, books, ideas, connections. It syncs across your machines via your normal dotfile workflow, and `git warp query` from the terminal replaces searching through notes.
+
+**3. Distributed feature flags with rollback.** Each environment (staging, prod-us, prod-eu) is a writer maintaining feature flag states. Temporal queries let you answer "was this flag ever enabled in prod-eu?" and seek lets you roll back the flag graph to any point in time without touching your actual deployment.
+
+**4. Peer-to-peer CRM.** A sales team where each rep tracks their own contacts, deals, and relationships offline. No Salesforce, no subscription fees. The graph merges at standup when everyone pushes, and observer views give management a redacted roll-up without exposing individual pipeline details.
+
+**5. Executable architecture diagrams.** Store your system architecture as a warp graph — services, databases, queues, dependencies — then query it programmatically in CI. "Does any service have more than 3 hops to the auth service?" becomes a shortest-path query, and it's version-controlled with the code it describes.
+
+## When NOT to Use It
+
+**1. High-throughput transactional workloads.** If you need thousands of writes per second with immediate consistency (e-commerce checkout, real-time bidding), git-warp is the wrong tool. Every write is a Git commit, which involves disk I/O and SHA computation. Use Postgres, Redis, or a purpose-built OLTP database.
+
+**2. Large binary or blob storage.** The data lives in Git commit messages, which are not designed for large payloads (default cap is 1 MB). If you're storing images, videos, or large documents as property values, you'll hit limits fast and bloat the Git repo. Use object storage.
+
+**3. When you need real-time, sub-millisecond reads.** Materialization has to replay patches, and even with checkpoints there's overhead. If your application requires microsecond-level read latency (high-frequency trading, real-time gaming physics), use an in-memory database like Redis or a specialized engine.
+
+**4. Simple key-value storage.** If your data model is flat — just keys mapping to values with no relationships — a graph database is overkill. Use a KV store, SQLite, or even a JSON file. The graph structure, CRDT machinery, and materialization overhead buy you nothing if you never traverse edges.
+
+**5. When your team doesn't use Git.** The entire value proposition depends on Git infrastructure — pushing, pulling, refs, content-addressing. If your deployment environment doesn't have Git, or your users aren't comfortable with it, you're fighting the tool instead of leveraging it.
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
new file mode 100644
index 0000000..3e4359f
--- /dev/null
+++ b/bin/cli/commands/doctor/checks.js
@@ -0,0 +1,405 @@
+/**
+ * Diagnostic check functions for `git warp doctor`.
+ *
+ * Each check follows the DoctorCheck callback signature and NEVER throws.
+ * Internal errors are captured as `CHECK_INTERNAL_ERROR` findings.
+ *
+ * @module cli/commands/doctor/checks
+ */
+
+import HealthCheckService from '../../../../src/domain/services/HealthCheckService.js';
+import ClockAdapter from '../../../../src/infrastructure/adapters/ClockAdapter.js';
+import {
+ buildCheckpointRef,
+ buildCoverageRef,
+ buildAuditPrefix,
+} from '../../../../src/domain/utils/RefLayout.js';
+import { createHookInstaller } from '../../shared.js';
+import { CODES } from './codes.js';
+
+/** @typedef {import('./types.js').DoctorFinding} DoctorFinding */
+/** @typedef {import('./types.js').DoctorContext} DoctorContext */
+
+// ── helpers ─────────────────────────────────────────────────────────────────
+
+/**
+ * @param {string} id
+ * @param {*} err
+ * @returns {DoctorFinding}
+ */
+function internalError(id, err) {
+ return {
+ id,
+ status: 'fail',
+ code: CODES.CHECK_INTERNAL_ERROR,
+ impact: 'data_integrity',
+ message: `Internal error: ${err?.message || String(err)}`,
+ };
+}
+
+// ── repo-accessible ─────────────────────────────────────────────────────────
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkRepoAccessible(ctx) {
+ try {
+ const clock = ClockAdapter.global();
+ const svc = new HealthCheckService({ persistence: /** @type {*} */ (ctx.persistence), clock });
+ const health = await svc.getHealth();
+ if (health.components.repository.status === 'unhealthy') {
+ return {
+ id: 'repo-accessible', status: 'fail', code: CODES.REPO_UNREACHABLE,
+ impact: 'operability', message: 'Repository is not accessible',
+ fix: 'Check that the --repo path points to a valid git repository',
+ };
+ }
+ return {
+ id: 'repo-accessible', status: 'ok', code: CODES.REPO_OK,
+ impact: 'operability', message: 'Repository is accessible',
+ };
+ } catch (/** @type {*} */ err) {
+ return internalError('repo-accessible', err);
+ }
+}
+
+// ── refs-consistent ─────────────────────────────────────────────────────────
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkRefsConsistent(ctx) {
+ try {
+ const findings = /** @type {DoctorFinding[]} */ ([]);
+ const allRefs = ctx.writerHeads.map((h) => ({
+ ref: h.ref, sha: h.sha, label: `writer ${h.writerId}`,
+ }));
+ let allOk = true;
+
+ for (const { ref, sha, label } of allRefs) {
+ if (!sha) {
+ continue;
+ }
+ const exists = await ctx.persistence.nodeExists(sha);
+ if (!exists) {
+ allOk = false;
+ findings.push({
+ id: 'refs-consistent', status: 'fail', code: CODES.REFS_DANGLING_OBJECT,
+ impact: 'data_integrity',
+ message: `Ref ${ref} points to missing object ${sha.slice(0, 7)}`,
+ fix: `Investigate missing object for ${label}`, evidence: { ref, sha },
+ });
+ }
+ }
+
+ if (allOk) {
+ findings.push({
+ id: 'refs-consistent', status: 'ok', code: CODES.REFS_OK,
+ impact: 'data_integrity', message: `All ${allRefs.length} refs point to existing objects`,
+ });
+ }
+ return findings;
+ } catch (/** @type {*} */ err) {
+ return [internalError('refs-consistent', err)];
+ }
+}
+
+// ── coverage-complete ───────────────────────────────────────────────────────
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkCoverageComplete(ctx) {
+ try {
+ const coverageRef = buildCoverageRef(ctx.graphName);
+ const coverageSha = await ctx.persistence.readRef(coverageRef);
+
+ if (!coverageSha) {
+ return {
+ id: 'coverage-complete', status: 'warn', code: CODES.COVERAGE_NO_REF,
+ impact: 'operability', message: 'No coverage ref found',
+ fix: 'Run `git warp materialize` to create a coverage anchor',
+ };
+ }
+
+ const missing = [];
+ for (const head of ctx.writerHeads) {
+ const reachable = await ctx.persistence.isAncestor(head.sha, coverageSha);
+ if (!reachable) {
+ missing.push(head.writerId);
+ }
+ }
+
+ if (missing.length > 0) {
+ return {
+ id: 'coverage-complete', status: 'warn', code: CODES.COVERAGE_MISSING_WRITERS,
+ impact: 'operability',
+ message: `Coverage anchor is missing ${missing.length} writer(s): ${missing.join(', ')}`,
+ fix: 'Run `git warp materialize` to update the coverage anchor',
+ evidence: { missingWriters: missing },
+ };
+ }
+
+ return {
+ id: 'coverage-complete', status: 'ok', code: CODES.COVERAGE_OK,
+ impact: 'operability', message: 'Coverage anchor includes all writers',
+ };
+ } catch (/** @type {*} */ err) {
+ return internalError('coverage-complete', err);
+ }
+}
+
+// ── checkpoint-fresh ────────────────────────────────────────────────────────
+
+/**
+ * @param {import('../../types.js').Persistence} persistence
+ * @param {string} checkpointSha
+ * @returns {Promise<{date: string|null, ageHours: number|null}>}
+ */
+async function getCheckpointAge(persistence, checkpointSha) {
+ const info = await persistence.getNodeInfo(checkpointSha);
+ const date = info.date || null;
+ if (!date) {
+ return { date: null, ageHours: null };
+ }
+ const parsed = Date.parse(date);
+ if (Number.isNaN(parsed)) {
+ return { date, ageHours: null };
+ }
+ return { date, ageHours: (Date.now() - parsed) / (1000 * 60 * 60) };
+}
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkCheckpointFresh(ctx) {
+ try {
+ const ref = buildCheckpointRef(ctx.graphName);
+ const sha = await ctx.persistence.readRef(ref);
+
+ if (!sha) {
+ return {
+ id: 'checkpoint-fresh', status: 'warn', code: CODES.CHECKPOINT_MISSING,
+ impact: 'operability', message: 'No checkpoint found',
+ fix: 'Run `git warp materialize` to create a checkpoint',
+ };
+ }
+
+ const { date, ageHours } = await getCheckpointAge(ctx.persistence, sha);
+ return buildCheckpointFinding({ sha, date, ageHours, maxAge: ctx.policy.checkpointMaxAgeHours });
+ } catch (/** @type {*} */ err) {
+ return internalError('checkpoint-fresh', err);
+ }
+}
+
+/**
+ * @param {{sha: string, date: string|null, ageHours: number|null, maxAge: number}} p
+ * @returns {DoctorFinding}
+ */
+function buildCheckpointFinding({ sha, date, ageHours, maxAge }) {
+ if (ageHours === null) {
+ return {
+ id: 'checkpoint-fresh', status: 'ok', code: CODES.CHECKPOINT_OK,
+ impact: 'operability', message: 'Checkpoint exists (age unknown)',
+ evidence: { sha, date },
+ };
+ }
+ if (ageHours > maxAge) {
+ return {
+ id: 'checkpoint-fresh', status: 'warn', code: CODES.CHECKPOINT_STALE,
+ impact: 'operability',
+ message: `Checkpoint is ${Math.round(ageHours)} hours old (threshold: ${maxAge}h)`,
+ fix: 'Run `git warp materialize` to refresh the checkpoint',
+ evidence: { sha, date, ageHours: Math.round(ageHours) },
+ };
+ }
+ return {
+ id: 'checkpoint-fresh', status: 'ok', code: CODES.CHECKPOINT_OK,
+ impact: 'operability', message: 'Checkpoint is fresh',
+ evidence: { sha, date, ageHours: Math.round(ageHours) },
+ };
+}
+
+// ── audit-consistent ────────────────────────────────────────────────────────
+
+/**
+ * @param {DoctorContext} ctx
+ * @param {string[]} auditRefs
+ * @param {string} auditPrefix
+ * @returns {Promise}
+ */
+async function probeAuditRefs(ctx, auditRefs, auditPrefix) {
+ const findings = /** @type {DoctorFinding[]} */ ([]);
+
+ for (const ref of auditRefs) {
+ const sha = await ctx.persistence.readRef(ref);
+ if (!sha) {
+ continue;
+ }
+ const exists = await ctx.persistence.nodeExists(sha);
+ if (!exists) {
+ findings.push({
+ id: 'audit-consistent', status: 'warn', code: CODES.AUDIT_DANGLING,
+ impact: 'data_integrity',
+ message: `Audit ref ${ref} points to missing object ${sha.slice(0, 7)}`,
+ evidence: { ref, sha },
+ });
+ }
+ }
+
+ const writerIds = new Set(ctx.writerHeads.map((h) => h.writerId));
+ const auditIds = auditRefs.map((r) => r.slice(auditPrefix.length)).filter((id) => id.length > 0);
+ const missing = [...writerIds].filter((id) => !auditIds.includes(id));
+
+ if (missing.length > 0 && auditIds.length > 0) {
+ findings.push({
+ id: 'audit-consistent', status: 'warn', code: CODES.AUDIT_PARTIAL,
+ impact: 'data_integrity',
+ message: `Audit coverage is partial: writers without audit refs: ${missing.join(', ')}`,
+ fix: 'Run `git warp verify-audit` to verify existing chains',
+ evidence: { writersWithoutAudit: missing },
+ });
+ }
+
+ return findings;
+}
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkAuditConsistent(ctx) {
+ try {
+ const auditPrefix = buildAuditPrefix(ctx.graphName);
+ const auditRefs = await ctx.persistence.listRefs(auditPrefix);
+
+ if (auditRefs.length === 0) {
+ return [{
+ id: 'audit-consistent', status: 'ok', code: CODES.AUDIT_OK,
+ impact: 'data_integrity', message: 'No audit refs present (none expected)',
+ }];
+ }
+
+ const findings = await probeAuditRefs(ctx, auditRefs, auditPrefix);
+ if (findings.length === 0) {
+ findings.push({
+ id: 'audit-consistent', status: 'ok', code: CODES.AUDIT_OK,
+ impact: 'data_integrity', message: `All ${auditRefs.length} audit ref(s) are consistent`,
+ });
+ }
+ return findings;
+ } catch (/** @type {*} */ err) {
+ return [internalError('audit-consistent', err)];
+ }
+}
+
+// ── clock-skew ──────────────────────────────────────────────────────────────
+
+/**
+ * @param {DoctorContext} ctx
+ * @returns {Promise>}
+ */
+async function collectWriterDates(ctx) {
+ const dates = [];
+ for (const head of ctx.writerHeads) {
+ const info = await ctx.persistence.getNodeInfo(head.sha);
+ const ms = info.date ? Date.parse(info.date) : NaN;
+ if (!Number.isNaN(ms)) {
+ dates.push({ writerId: head.writerId, ms });
+ }
+ }
+ return dates;
+}
+
+/** @param {DoctorContext} ctx @returns {Promise} */
+export async function checkClockSkew(ctx) {
+ try {
+ if (ctx.writerHeads.length < 2) {
+ return {
+ id: 'clock-skew', status: 'ok', code: CODES.CLOCK_SYNCED,
+ impact: 'operability', message: 'Clock skew check skipped (fewer than 2 writers)',
+ };
+ }
+
+ const dates = await collectWriterDates(ctx);
+ if (dates.length < 2) {
+ return {
+ id: 'clock-skew', status: 'ok', code: CODES.CLOCK_SYNCED,
+ impact: 'operability', message: 'Clock skew check skipped (insufficient date data)',
+ };
+ }
+
+ const spreadMs = Math.max(...dates.map((d) => d.ms)) - Math.min(...dates.map((d) => d.ms));
+ if (spreadMs > ctx.policy.clockSkewMs) {
+ return {
+ id: 'clock-skew', status: 'warn', code: CODES.CLOCK_SKEW_EXCEEDED,
+ impact: 'operability',
+ message: `Clock skew is ${Math.round(spreadMs / 1000)}s (threshold: ${Math.round(ctx.policy.clockSkewMs / 1000)}s)`,
+ evidence: { spreadMs, thresholdMs: ctx.policy.clockSkewMs },
+ };
+ }
+
+ return {
+ id: 'clock-skew', status: 'ok', code: CODES.CLOCK_SYNCED,
+ impact: 'operability',
+ message: `Clock skew is within threshold (${Math.round(spreadMs / 1000)}s)`,
+ evidence: { spreadMs },
+ };
+ } catch (/** @type {*} */ err) {
+ return internalError('clock-skew', err);
+ }
+}
+
+// ── hooks-installed ─────────────────────────────────────────────────────────
+
+/**
+ * @param {DoctorContext} ctx
+ * @returns {Promise}
+ */
+export function checkHooksInstalled(ctx) {
+ try {
+ const installer = createHookInstaller();
+ const s = installer.getHookStatus(ctx.repoPath);
+ return Promise.resolve(buildHookFinding(s));
+ } catch (/** @type {*} */ err) {
+ return Promise.resolve(internalError('hooks-installed', err));
+ }
+}
+
+/**
+ * @param {*} s - hook status from HookInstaller
+ * @returns {DoctorFinding}
+ */
+function buildHookFinding(s) {
+ if (!s.installed && s.foreign) {
+ return {
+ id: 'hooks-installed', status: 'warn', code: CODES.HOOKS_MISSING,
+ impact: 'hygiene', message: 'Foreign hook present; warp hook not installed',
+ fix: 'Run `git warp install-hooks` (use --force to replace existing hook)',
+ };
+ }
+ if (!s.installed) {
+ return {
+ id: 'hooks-installed', status: 'warn', code: CODES.HOOKS_MISSING,
+ impact: 'hygiene', message: 'Post-merge hook is not installed',
+ fix: 'Run `git warp install-hooks`',
+ };
+ }
+ if (!s.current) {
+ return {
+ id: 'hooks-installed', status: 'warn', code: CODES.HOOKS_OUTDATED,
+ impact: 'hygiene', message: `Hook is outdated (v${s.version})`,
+ fix: 'Run `git warp install-hooks` to upgrade',
+ evidence: { version: s.version },
+ };
+ }
+ return {
+ id: 'hooks-installed', status: 'ok', code: CODES.HOOKS_OK,
+ impact: 'hygiene', message: `Hook is installed and current (v${s.version})`,
+ };
+}
+
+// ── registry ────────────────────────────────────────────────────────────────
+
+/**
+ * All checks in execution order.
+ * @type {Array<{id: string, fn: function(DoctorContext): Promise}>}
+ */
+export const ALL_CHECKS = [
+ { id: 'repo-accessible', fn: checkRepoAccessible },
+ { id: 'refs-consistent', fn: checkRefsConsistent },
+ { id: 'coverage-complete', fn: checkCoverageComplete },
+ { id: 'checkpoint-fresh', fn: checkCheckpointFresh },
+ { id: 'audit-consistent', fn: checkAuditConsistent },
+ { id: 'clock-skew', fn: checkClockSkew },
+ { id: 'hooks-installed', fn: checkHooksInstalled },
+];
diff --git a/bin/cli/commands/doctor/codes.js b/bin/cli/commands/doctor/codes.js
new file mode 100644
index 0000000..6a5f6f7
--- /dev/null
+++ b/bin/cli/commands/doctor/codes.js
@@ -0,0 +1,46 @@
+/**
+ * Single source of truth for all doctor finding codes.
+ *
+ * Every code string referenced in checks.js and tests MUST come from here.
+ * Prevents drift and typos across the codebase.
+ *
+ * @module cli/commands/doctor/codes
+ */
+
+export const CODES = {
+ // repo-accessible
+ REPO_OK: 'REPO_OK',
+ REPO_UNREACHABLE: 'REPO_UNREACHABLE',
+
+ // refs-consistent
+ REFS_OK: 'REFS_OK',
+ REFS_DANGLING_OBJECT: 'REFS_DANGLING_OBJECT',
+
+ // coverage-complete
+ COVERAGE_OK: 'COVERAGE_OK',
+ COVERAGE_MISSING_WRITERS: 'COVERAGE_MISSING_WRITERS',
+ COVERAGE_NO_REF: 'COVERAGE_NO_REF',
+
+ // checkpoint-fresh
+ CHECKPOINT_OK: 'CHECKPOINT_OK',
+ CHECKPOINT_MISSING: 'CHECKPOINT_MISSING',
+ CHECKPOINT_STALE: 'CHECKPOINT_STALE',
+
+ // audit-consistent
+ AUDIT_OK: 'AUDIT_OK',
+ AUDIT_DANGLING: 'AUDIT_DANGLING',
+ AUDIT_PARTIAL: 'AUDIT_PARTIAL',
+
+ // clock-skew
+ CLOCK_SYNCED: 'CLOCK_SYNCED',
+ CLOCK_SKEW_EXCEEDED: 'CLOCK_SKEW_EXCEEDED',
+
+ // hooks-installed
+ HOOKS_OK: 'HOOKS_OK',
+ HOOKS_MISSING: 'HOOKS_MISSING',
+ HOOKS_OUTDATED: 'HOOKS_OUTDATED',
+
+ // meta
+ CHECK_SKIPPED_BUDGET_EXHAUSTED: 'CHECK_SKIPPED_BUDGET_EXHAUSTED',
+ CHECK_INTERNAL_ERROR: 'CHECK_INTERNAL_ERROR',
+};
diff --git a/bin/cli/commands/doctor/index.js b/bin/cli/commands/doctor/index.js
new file mode 100644
index 0000000..beb3022
--- /dev/null
+++ b/bin/cli/commands/doctor/index.js
@@ -0,0 +1,216 @@
+/**
+ * `git warp doctor` — diagnose structural anomalies and suggest fixes.
+ *
+ * Orchestrator: builds context, runs checks with budget tracking,
+ * assembles payload, sorts findings, derives health.
+ *
+ * @module cli/commands/doctor
+ */
+
+import { buildWritersPrefix } from '../../../../src/domain/utils/RefLayout.js';
+import { parseCommandArgs } from '../../infrastructure.js';
+import { doctorSchema } from '../../schemas.js';
+import { createPersistence, resolveGraphName } from '../../shared.js';
+import { ALL_CHECKS } from './checks.js';
+import { CODES } from './codes.js';
+import { DOCTOR_EXIT_CODES } from './types.js';
+
+/** @typedef {import('../../types.js').CliOptions} CliOptions */
+/** @typedef {import('./types.js').DoctorFinding} DoctorFinding */
+/** @typedef {import('./types.js').DoctorPolicy} DoctorPolicy */
+/** @typedef {import('./types.js').DoctorPayload} DoctorPayload */
+
+const DOCTOR_OPTIONS = {
+ strict: { type: 'boolean', default: false },
+};
+
+/** @type {DoctorPolicy} */
+const DEFAULT_POLICY = {
+ strict: false,
+ clockSkewMs: 300_000,
+ checkpointMaxAgeHours: 168,
+ globalDeadlineMs: 10_000,
+ checkTimeouts: {},
+};
+
+const STATUS_ORDER = /** @type {const} */ ({ fail: 0, warn: 1, ok: 2 });
+const IMPACT_ORDER = /** @type {const} */ ({
+ data_integrity: 0,
+ security: 1,
+ operability: 2,
+ hygiene: 3,
+});
+
+/**
+ * @param {{options: CliOptions, args: string[]}} params
+ * @returns {Promise<{payload: DoctorPayload, exitCode: number}>}
+ */
+export default async function handleDoctor({ options, args }) {
+ const { values } = parseCommandArgs(args, DOCTOR_OPTIONS, doctorSchema);
+ const startMs = Date.now();
+
+ const { persistence } = await createPersistence(options.repo);
+ const graphName = await resolveGraphName(persistence, options.graph);
+ const policy = { ...DEFAULT_POLICY, strict: Boolean(values.strict) };
+ const writerHeads = await collectWriterHeads(persistence, graphName);
+
+ /** @type {import('./types.js').DoctorContext} */
+ const ctx = { persistence, graphName, writerHeads, policy, repoPath: options.repo };
+
+ const { findings, checksRun } = await runChecks(ctx, startMs);
+ findings.sort(compareFinding);
+
+ const payload = assemblePayload({ repo: options.repo, graph: graphName, policy, findings, checksRun, startMs });
+ const exitCode = computeExitCode(payload.health, policy.strict);
+ return { payload, exitCode };
+}
+
+/**
+ * Assembles the final DoctorPayload from sorted findings.
+ * @param {{repo: string, graph: string, policy: DoctorPolicy, findings: DoctorFinding[], checksRun: number, startMs: number}} p
+ * @returns {DoctorPayload}
+ */
+function assemblePayload({ repo, graph, policy, findings, checksRun, startMs }) {
+ const ok = findings.filter((f) => f.status === 'ok').length;
+ const warn = findings.filter((f) => f.status === 'warn').length;
+ const fail = findings.filter((f) => f.status === 'fail').length;
+ const priorityActions = [
+ ...new Set(
+ findings.filter((f) => f.status !== 'ok' && f.fix).map((f) => /** @type {string} */ (f.fix)),
+ ),
+ ];
+
+ return {
+ doctorVersion: 1,
+ repo,
+ graph,
+ checkedAt: new Date().toISOString(),
+ health: deriveHealth(fail, warn),
+ policy,
+ summary: { checksRun, findingsTotal: findings.length, ok, warn, fail, priorityActions },
+ findings,
+ durationMs: Date.now() - startMs,
+ };
+}
+
+/**
+ * @param {import('../../types.js').Persistence} persistence
+ * @param {string} graphName
+ * @returns {Promise>}
+ */
+async function collectWriterHeads(persistence, graphName) {
+ const prefix = buildWritersPrefix(graphName);
+ const refs = await persistence.listRefs(prefix);
+ const heads = [];
+ for (const ref of refs) {
+ const writerId = ref.slice(prefix.length);
+ if (!writerId) {
+ continue;
+ }
+ const sha = await persistence.readRef(ref);
+ if (sha) {
+ heads.push({ writerId, sha, ref });
+ }
+ }
+ return heads.sort((a, b) => a.writerId.localeCompare(b.writerId));
+}
+
+/**
+ * Runs all checks with global deadline enforcement.
+ * @param {import('./types.js').DoctorContext} ctx
+ * @param {number} startMs
+ * @returns {Promise<{findings: DoctorFinding[], checksRun: number}>}
+ */
+async function runChecks(ctx, startMs) {
+ const findings = /** @type {DoctorFinding[]} */ ([]);
+ let checksRun = 0;
+
+ for (const check of ALL_CHECKS) {
+ const elapsed = Date.now() - startMs;
+ if (elapsed >= ctx.policy.globalDeadlineMs) {
+ findings.push({
+ id: check.id,
+ status: 'warn',
+ code: CODES.CHECK_SKIPPED_BUDGET_EXHAUSTED,
+ impact: 'operability',
+ message: `Check skipped: global deadline exceeded (${elapsed}ms >= ${ctx.policy.globalDeadlineMs}ms)`,
+ });
+ checksRun++;
+ continue;
+ }
+
+ const checkStart = Date.now();
+ const result = await check.fn(ctx);
+ const checkDuration = Date.now() - checkStart;
+ checksRun++;
+
+ const resultArray = normalizeResult(result);
+ for (const f of resultArray) {
+ f.durationMs = checkDuration;
+ findings.push(f);
+ }
+ }
+
+ return { findings, checksRun };
+}
+
+/**
+ * @param {DoctorFinding|DoctorFinding[]|null} result
+ * @returns {DoctorFinding[]}
+ */
+function normalizeResult(result) {
+ if (!result) {
+ return [];
+ }
+ if (Array.isArray(result)) {
+ return result;
+ }
+ return [result];
+}
+
+/**
+ * @param {number} fail
+ * @param {number} warn
+ * @returns {'ok'|'degraded'|'failed'}
+ */
+function deriveHealth(fail, warn) {
+ if (fail > 0) {
+ return 'failed';
+ }
+ if (warn > 0) {
+ return 'degraded';
+ }
+ return 'ok';
+}
+
+/**
+ * @param {'ok'|'degraded'|'failed'} health
+ * @param {boolean} strict
+ * @returns {number}
+ */
+function computeExitCode(health, strict) {
+ if (health === 'ok') {
+ return DOCTOR_EXIT_CODES.OK;
+ }
+ if (strict) {
+ return DOCTOR_EXIT_CODES.STRICT_FINDINGS;
+ }
+ return DOCTOR_EXIT_CODES.FINDINGS;
+}
+
+/**
+ * @param {DoctorFinding} a
+ * @param {DoctorFinding} b
+ * @returns {number}
+ */
+function compareFinding(a, b) {
+ const statusDiff = (STATUS_ORDER[a.status] ?? 9) - (STATUS_ORDER[b.status] ?? 9);
+ if (statusDiff !== 0) {
+ return statusDiff;
+ }
+ const impactDiff = (IMPACT_ORDER[a.impact] ?? 9) - (IMPACT_ORDER[b.impact] ?? 9);
+ if (impactDiff !== 0) {
+ return impactDiff;
+ }
+ return a.id.localeCompare(b.id);
+}
diff --git a/bin/cli/commands/doctor/types.js b/bin/cli/commands/doctor/types.js
new file mode 100644
index 0000000..6a055e1
--- /dev/null
+++ b/bin/cli/commands/doctor/types.js
@@ -0,0 +1,89 @@
+/**
+ * Type definitions for the `doctor` command.
+ *
+ * @module cli/commands/doctor/types
+ */
+
+// ── JSON-safe recursive value type ──────────────────────────────────────────
+
+/** @typedef {null | boolean | number | string | JsonValue[] | {[k:string]: JsonValue}} JsonValue */
+
+/** @typedef {{[k:string]: JsonValue}} FindingEvidence */
+
+// ── Finding ─────────────────────────────────────────────────────────────────
+
+/**
+ * @typedef {Object} DoctorFinding
+ * @property {string} id - Check identifier (e.g. 'repo-accessible')
+ * @property {'ok'|'warn'|'fail'} status
+ * @property {string} code - Machine-readable code from CODES registry
+ * @property {'data_integrity'|'security'|'operability'|'hygiene'} impact
+ * @property {string} message - Human-readable summary
+ * @property {string} [fix] - Suggested remediation command or instruction
+ * @property {string} [helpUrl] - Stable documentation anchor
+ * @property {FindingEvidence} [evidence] - JSON-safe supporting data
+ * @property {number} [durationMs] - Time spent on this check
+ */
+
+// ── Policy ──────────────────────────────────────────────────────────────────
+
+/**
+ * @typedef {Object} DoctorPolicy
+ * @property {boolean} strict
+ * @property {number} clockSkewMs
+ * @property {number} checkpointMaxAgeHours
+ * @property {number} globalDeadlineMs
+ * @property {{[checkId:string]: number}} checkTimeouts
+ */
+
+// ── Payload ─────────────────────────────────────────────────────────────────
+
+/**
+ * @typedef {Object} DoctorPayload
+ * @property {1} doctorVersion
+ * @property {string} repo
+ * @property {string} graph
+ * @property {string} checkedAt - ISO 8601 timestamp
+ * @property {'ok'|'degraded'|'failed'} health
+ * @property {DoctorPolicy} policy
+ * @property {DoctorSummary} summary
+ * @property {DoctorFinding[]} findings
+ * @property {number} durationMs
+ */
+
+/**
+ * @typedef {Object} DoctorSummary
+ * @property {number} checksRun
+ * @property {number} findingsTotal
+ * @property {number} ok
+ * @property {number} warn
+ * @property {number} fail
+ * @property {string[]} priorityActions
+ */
+
+// ── Context passed to each check ────────────────────────────────────────────
+
+/**
+ * @typedef {Object} DoctorContext
+ * @property {import('../../types.js').Persistence} persistence
+ * @property {string} graphName
+ * @property {Array<{writerId: string, sha: string, ref: string}>} writerHeads
+ * @property {DoctorPolicy} policy
+ * @property {string} repoPath
+ */
+
+/**
+ * @callback DoctorCheck
+ * @param {DoctorContext} ctx
+ * @returns {Promise}
+ */
+
+// ── Exit codes ──────────────────────────────────────────────────────────────
+
+export const DOCTOR_EXIT_CODES = {
+ OK: 0,
+ FINDINGS: 3,
+ STRICT_FINDINGS: 4,
+};
+
+export {};
diff --git a/bin/cli/commands/registry.js b/bin/cli/commands/registry.js
index 24c8315..96ea1bd 100644
--- a/bin/cli/commands/registry.js
+++ b/bin/cli/commands/registry.js
@@ -3,6 +3,7 @@ import handleQuery from './query.js';
import handlePath from './path.js';
import handleHistory from './history.js';
import handleCheck from './check.js';
+import handleDoctor from './doctor/index.js';
import handleMaterialize from './materialize.js';
import handleSeek from './seek.js';
import handleVerifyAudit from './verify-audit.js';
@@ -16,6 +17,7 @@ export const COMMANDS = new Map(/** @type {[string, Function][]} */ ([
['path', handlePath],
['history', handleHistory],
['check', handleCheck],
+ ['doctor', handleDoctor],
['materialize', handleMaterialize],
['seek', handleSeek],
['verify-audit', handleVerifyAudit],
diff --git a/bin/cli/infrastructure.js b/bin/cli/infrastructure.js
index 9347082..6ff5ded 100644
--- a/bin/cli/infrastructure.js
+++ b/bin/cli/infrastructure.js
@@ -20,6 +20,7 @@ Commands:
path Find a logical path between two nodes
history Show writer history
check Report graph health/GC status
+ doctor Diagnose structural issues and suggest fixes
verify-audit Verify audit receipt chain integrity
materialize Materialize and checkpoint all graphs
seek Time-travel: step through graph history by Lamport tick
@@ -55,6 +56,9 @@ Path options:
History options:
--node Filter patches touching node id
+Doctor options:
+ --strict Treat warnings as failures (exit 4)
+
Verify-audit options:
--writer Verify a single writer's chain (default: all)
--since Verify from tip down to this commit (inclusive)
@@ -99,7 +103,7 @@ export function notFoundError(message) {
return new CliError(message, { code: 'E_NOT_FOUND', exitCode: EXIT_CODES.NOT_FOUND });
}
-export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'materialize', 'seek', 'verify-audit', 'install-hooks', 'view'];
+export const KNOWN_COMMANDS = ['info', 'query', 'path', 'history', 'check', 'doctor', 'materialize', 'seek', 'verify-audit', 'install-hooks', 'view'];
const BASE_OPTIONS = {
repo: { type: 'string', short: 'r' },
diff --git a/bin/cli/schemas.js b/bin/cli/schemas.js
index d3c34dd..a6593bd 100644
--- a/bin/cli/schemas.js
+++ b/bin/cli/schemas.js
@@ -70,6 +70,18 @@ export const viewSchema = z.object({
// Seek
// ============================================================================
+// ============================================================================
+// Doctor
+// ============================================================================
+
+export const doctorSchema = z.object({
+ strict: z.boolean().default(false),
+}).strict();
+
+// ============================================================================
+// Seek
+// ============================================================================
+
export const seekSchema = z.object({
tick: z.string().optional(),
latest: z.boolean().default(false),
diff --git a/bin/cli/types.js b/bin/cli/types.js
index f5d4fff..1a9fc5f 100644
--- a/bin/cli/types.js
+++ b/bin/cli/types.js
@@ -7,6 +7,7 @@
* @property {(oid: string) => Promise} readBlob
* @property {(buf: Buffer) => Promise} writeBlob
* @property {(sha: string) => Promise<{date?: string|null}>} getNodeInfo
+ * @property {(sha: string) => Promise} nodeExists
* @property {(sha: string, coverageSha: string) => Promise} isAncestor
* @property {() => Promise<{ok: boolean}>} ping
* @property {*} plumbing
diff --git a/bin/presenters/index.js b/bin/presenters/index.js
index 34e32e3..a0ef8bb 100644
--- a/bin/presenters/index.js
+++ b/bin/presenters/index.js
@@ -22,6 +22,7 @@ import {
renderQuery,
renderPath,
renderCheck,
+ renderDoctor,
renderHistory,
renderError,
renderMaterialize,
@@ -65,6 +66,7 @@ const TEXT_RENDERERS = new Map(/** @type {[string, function(*): string][]} */ ([
['query', renderQuery],
['path', renderPath],
['check', renderCheck],
+ ['doctor', renderDoctor],
['history', renderHistory],
['materialize', renderMaterialize],
['seek', renderSeek],
diff --git a/bin/presenters/text.js b/bin/presenters/text.js
index 297496c..603e05b 100644
--- a/bin/presenters/text.js
+++ b/bin/presenters/text.js
@@ -406,6 +406,58 @@ export function renderSeek(payload) {
return renderSeekSimple(payload) ?? renderSeekState(payload);
}
+// ── Doctor renderer ──────────────────────────────────────────────────────────
+
+/** @param {'ok'|'warn'|'fail'} status */
+function findingIcon(status) {
+ if (status === 'ok') {
+ return `${ANSI_GREEN}\u2713${ANSI_RESET}`;
+ }
+ if (status === 'warn') {
+ return `${ANSI_YELLOW}\u26A0${ANSI_RESET}`;
+ }
+ return `${ANSI_RED}\u2717${ANSI_RESET}`;
+}
+
+/** @param {'ok'|'degraded'|'failed'} health */
+function colorHealth(health) {
+ if (health === 'ok') {
+ return `${ANSI_GREEN}${health}${ANSI_RESET}`;
+ }
+ if (health === 'degraded') {
+ return `${ANSI_YELLOW}${health}${ANSI_RESET}`;
+ }
+ return `${ANSI_RED}${health}${ANSI_RESET}`;
+}
+
+/** @param {*} payload */
+export function renderDoctor(payload) {
+ const lines = [
+ `Graph: ${payload.graph}`,
+ `Health: ${colorHealth(payload.health)}`,
+ `Checked: ${payload.checkedAt}`,
+ `Summary: ${payload.summary.checksRun} checks, ${payload.summary.findingsTotal} findings (${payload.summary.ok} ok, ${payload.summary.warn} warn, ${payload.summary.fail} fail)`,
+ '',
+ ];
+
+ for (const f of payload.findings) {
+ lines.push(`${findingIcon(f.status)} ${f.id}: ${f.message}`);
+ if (f.fix) {
+ lines.push(` fix: ${f.fix}`);
+ }
+ }
+
+ if (payload.summary.priorityActions.length > 0) {
+ lines.push('');
+ lines.push('Priority actions:');
+ for (const action of payload.summary.priorityActions) {
+ lines.push(` - ${action}`);
+ }
+ }
+
+ return `${lines.join('\n')}\n`;
+}
+
// ── Verify-audit renderer ────────────────────────────────────────────────────
/** @param {string} status */
diff --git a/package.json b/package.json
index c143c75..5cddc11 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@git-stunts/git-warp",
- "version": "10.12.0",
+ "version": "10.13.0",
"description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.",
"type": "module",
"license": "Apache-2.0",
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
new file mode 100644
index 0000000..7b3f185
--- /dev/null
+++ b/test/bats/cli-doctor.bats
@@ -0,0 +1,88 @@
+#!/usr/bin/env bats
+
+load helpers/setup.bash
+
+setup() {
+ setup_test_repo
+ seed_graph "seed-graph.js"
+}
+
+teardown() {
+ teardown_test_repo
+}
+
+@test "doctor --json healthy graph returns all ok" {
+ run git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ assert_success
+
+ JSON="$output" python3 - <<'PY'
+import json, os
+data = json.loads(os.environ["JSON"])
+assert data["doctorVersion"] == 1
+assert data["graph"] == "demo"
+assert data["health"] == "ok"
+assert data["summary"]["checksRun"] == 7
+assert data["summary"]["fail"] == 0
+assert data["summary"]["ok"] >= 1
+assert isinstance(data["findings"], list)
+assert len(data["findings"]) >= 7
+assert isinstance(data["policy"], dict)
+assert data["policy"]["clockSkewMs"] == 300000
+PY
+}
+
+@test "doctor human output includes check IDs" {
+ run git warp --repo "${TEST_REPO}" --graph demo doctor
+ assert_success
+ echo "$output" | grep -q "repo-accessible"
+ echo "$output" | grep -q "refs-consistent"
+ echo "$output" | grep -q "checkpoint-fresh"
+ echo "$output" | grep -q "hooks-installed"
+}
+
+@test "doctor --json broken writer ref yields refs-consistent fail" {
+ # Point writer ref to a non-existent object
+ cd "${PROJECT_ROOT}" || return 1
+ git -C "${TEST_REPO}" update-ref refs/warp/demo/writers/ghost deadbeefdeadbeefdeadbeefdeadbeefdeadbeef
+ cd "${TEST_REPO}" || return 1
+
+ run git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ # Should exit with code 3 (findings)
+ [ "$status" -eq 3 ]
+
+ JSON="$output" python3 - <<'PY'
+import json, os
+data = json.loads(os.environ["JSON"])
+assert data["health"] == "failed"
+codes = [f["code"] for f in data["findings"]]
+assert "REFS_DANGLING_OBJECT" in codes
+PY
+}
+
+@test "doctor --json no checkpoint yields checkpoint-fresh warn" {
+ # Remove the checkpoint ref if it exists
+ cd "${PROJECT_ROOT}" || return 1
+ git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
+ cd "${TEST_REPO}" || return 1
+
+ run git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ # exit 3 = findings present
+ [ "$status" -eq 3 ]
+
+ JSON="$output" python3 - <<'PY'
+import json, os
+data = json.loads(os.environ["JSON"])
+codes = [f["code"] for f in data["findings"]]
+assert "CHECKPOINT_MISSING" in codes
+PY
+}
+
+@test "doctor --strict with warnings returns exit 4" {
+ # Remove checkpoint to trigger a warning
+ cd "${PROJECT_ROOT}" || return 1
+ git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
+ cd "${TEST_REPO}" || return 1
+
+ run git warp --repo "${TEST_REPO}" --graph demo --json doctor --strict
+ [ "$status" -eq 4 ]
+}
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
new file mode 100644
index 0000000..6854c43
--- /dev/null
+++ b/test/unit/cli/doctor.test.js
@@ -0,0 +1,240 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { CODES } from '../../../bin/cli/commands/doctor/codes.js';
+import { DOCTOR_EXIT_CODES } from '../../../bin/cli/commands/doctor/types.js';
+
+// Mock shared.js to avoid real git operations
+vi.mock('../../../bin/cli/shared.js', () => ({
+ createPersistence: vi.fn(),
+ resolveGraphName: vi.fn(),
+ createHookInstaller: vi.fn(),
+}));
+
+// Mock HealthCheckService
+vi.mock('../../../src/domain/services/HealthCheckService.js', () => ({
+ default: vi.fn().mockImplementation(() => ({
+ getHealth: vi.fn().mockResolvedValue({
+ status: 'healthy',
+ components: { repository: { status: 'healthy', latencyMs: 1 } },
+ }),
+ })),
+}));
+
+// Mock ClockAdapter
+vi.mock('../../../src/infrastructure/adapters/ClockAdapter.js', () => ({
+ default: { global: vi.fn().mockReturnValue({}) },
+}));
+
+const { createPersistence, resolveGraphName, createHookInstaller } = await import('../../../bin/cli/shared.js');
+
+/**
+ * Builds a mock persistence object that simulates a healthy graph
+ * with a single writer "alice".
+ */
+function buildMockPersistence() {
+ return {
+ ping: vi.fn().mockResolvedValue({ ok: true }),
+ readRef: vi.fn().mockImplementation((ref) => {
+ if (ref.includes('writers/alice')) {
+ return Promise.resolve('aaaa000000000000000000000000000000000000');
+ }
+ if (ref.includes('checkpoints/head')) {
+ return Promise.resolve('bbbb000000000000000000000000000000000000');
+ }
+ if (ref.includes('coverage/head')) {
+ return Promise.resolve('cccc000000000000000000000000000000000000');
+ }
+ return Promise.resolve(null);
+ }),
+ listRefs: vi.fn().mockImplementation((prefix) => {
+ if (prefix.includes('writers/')) {
+ return Promise.resolve([`${prefix}alice`]);
+ }
+ if (prefix.includes('audit/')) {
+ return Promise.resolve([]);
+ }
+ return Promise.resolve([]);
+ }),
+ nodeExists: vi.fn().mockResolvedValue(true),
+ isAncestor: vi.fn().mockResolvedValue(true),
+ getNodeInfo: vi.fn().mockResolvedValue({
+ sha: 'bbbb000000000000000000000000000000000000',
+ date: new Date().toISOString(),
+ author: 'Test',
+ message: '',
+ parents: [],
+ }),
+ plumbing: {},
+ };
+}
+
+describe('doctor command', () => {
+ let handleDoctor;
+ let mockPersistence;
+
+ beforeEach(async () => {
+ vi.clearAllMocks();
+ mockPersistence = buildMockPersistence();
+
+ createPersistence.mockResolvedValue({ persistence: mockPersistence });
+ resolveGraphName.mockResolvedValue('demo');
+ createHookInstaller.mockReturnValue({
+ getHookStatus: vi.fn().mockReturnValue({
+ installed: true,
+ current: true,
+ version: '10.8.0',
+ hookPath: '/tmp/test/.git/hooks/post-merge',
+ }),
+ });
+
+ // Dynamic import to pick up mocks
+ const mod = await import('../../../bin/cli/commands/doctor/index.js');
+ handleDoctor = mod.default;
+ });
+
+ it('produces valid payload for a healthy graph', async () => {
+ const result = await handleDoctor({
+ options: {
+ repo: '/tmp/test',
+ graph: 'demo',
+ json: true,
+ ndjson: false,
+ view: null,
+ writer: 'cli',
+ help: false,
+ },
+ args: [],
+ });
+
+ const { payload, exitCode } = result;
+
+ // Exit code
+ expect(exitCode).toBe(DOCTOR_EXIT_CODES.OK);
+
+ // Top-level fields
+ expect(payload.doctorVersion).toBe(1);
+ expect(payload.graph).toBe('demo');
+ expect(payload.repo).toBe('/tmp/test');
+ expect(payload.health).toBe('ok');
+ expect(typeof payload.checkedAt).toBe('string');
+ expect(typeof payload.durationMs).toBe('number');
+
+ // Policy echo
+ expect(payload.policy.strict).toBe(false);
+ expect(payload.policy.clockSkewMs).toBe(300_000);
+ expect(payload.policy.checkpointMaxAgeHours).toBe(168);
+ expect(payload.policy.globalDeadlineMs).toBe(10_000);
+
+ // Summary
+ expect(payload.summary.checksRun).toBe(7);
+ expect(payload.summary.fail).toBe(0);
+ expect(payload.summary.warn).toBe(0);
+ expect(payload.summary.ok).toBeGreaterThanOrEqual(1);
+ expect(payload.summary.priorityActions).toEqual([]);
+
+ // Findings: all should be ok
+ for (const f of payload.findings) {
+ expect(f.status).toBe('ok');
+ expect(f.id).toBeTruthy();
+ expect(f.code).toBeTruthy();
+ expect(f.impact).toBeTruthy();
+ expect(f.message).toBeTruthy();
+ }
+
+ // Check that known codes are used
+ const codes = payload.findings.map((f) => f.code);
+ expect(codes).toContain(CODES.REPO_OK);
+ expect(codes).toContain(CODES.REFS_OK);
+ expect(codes).toContain(CODES.COVERAGE_OK);
+ expect(codes).toContain(CODES.CHECKPOINT_OK);
+ expect(codes).toContain(CODES.HOOKS_OK);
+ });
+
+ it('returns exit 3 when warnings are present', async () => {
+ // Remove checkpoint to trigger warning
+ mockPersistence.readRef.mockImplementation((ref) => {
+ if (ref.includes('writers/alice')) {
+ return Promise.resolve('aaaa000000000000000000000000000000000000');
+ }
+ if (ref.includes('coverage/head')) {
+ return Promise.resolve('cccc000000000000000000000000000000000000');
+ }
+ return Promise.resolve(null);
+ });
+
+ const result = await handleDoctor({
+ options: {
+ repo: '/tmp/test',
+ graph: 'demo',
+ json: true,
+ ndjson: false,
+ view: null,
+ writer: 'cli',
+ help: false,
+ },
+ args: [],
+ });
+
+ expect(result.exitCode).toBe(DOCTOR_EXIT_CODES.FINDINGS);
+ expect(result.payload.health).toBe('degraded');
+ expect(result.payload.summary.warn).toBeGreaterThan(0);
+
+ const checkpointFinding = result.payload.findings.find(
+ (f) => f.code === CODES.CHECKPOINT_MISSING,
+ );
+ expect(checkpointFinding).toBeDefined();
+ expect(checkpointFinding.status).toBe('warn');
+ });
+
+ it('returns exit 4 in strict mode with warnings', async () => {
+ mockPersistence.readRef.mockImplementation((ref) => {
+ if (ref.includes('writers/alice')) {
+ return Promise.resolve('aaaa000000000000000000000000000000000000');
+ }
+ if (ref.includes('coverage/head')) {
+ return Promise.resolve('cccc000000000000000000000000000000000000');
+ }
+ return Promise.resolve(null);
+ });
+
+ const result = await handleDoctor({
+ options: {
+ repo: '/tmp/test',
+ graph: 'demo',
+ json: true,
+ ndjson: false,
+ view: null,
+ writer: 'cli',
+ help: false,
+ },
+ args: ['--strict'],
+ });
+
+ expect(result.exitCode).toBe(DOCTOR_EXIT_CODES.STRICT_FINDINGS);
+ });
+
+ it('sorts findings by status > impact > id', async () => {
+ // Make refs-consistent fail by breaking nodeExists for writer ref
+ mockPersistence.nodeExists.mockResolvedValue(false);
+
+ const result = await handleDoctor({
+ options: {
+ repo: '/tmp/test',
+ graph: 'demo',
+ json: true,
+ ndjson: false,
+ view: null,
+ writer: 'cli',
+ help: false,
+ },
+ args: [],
+ });
+
+ const statuses = result.payload.findings.map((f) => f.status);
+ // fail should come before warn, which comes before ok
+ const firstOkIdx = statuses.indexOf('ok');
+ const lastFailIdx = statuses.lastIndexOf('fail');
+ if (lastFailIdx >= 0 && firstOkIdx >= 0) {
+ expect(lastFailIdx).toBeLessThan(firstOkIdx);
+ }
+ });
+});
diff --git a/test/unit/cli/schemas.test.js b/test/unit/cli/schemas.test.js
index 42d98aa..9badcff 100644
--- a/test/unit/cli/schemas.test.js
+++ b/test/unit/cli/schemas.test.js
@@ -1,5 +1,6 @@
import { describe, it, expect } from 'vitest';
import {
+ doctorSchema,
historySchema,
installHooksSchema,
verifyAuditSchema,
@@ -9,6 +10,22 @@ import {
seekSchema,
} from '../../../bin/cli/schemas.js';
+describe('doctorSchema', () => {
+ it('defaults strict to false', () => {
+ const result = doctorSchema.parse({});
+ expect(result.strict).toBe(false);
+ });
+
+ it('accepts --strict', () => {
+ const result = doctorSchema.parse({ strict: true });
+ expect(result.strict).toBe(true);
+ });
+
+ it('rejects unknown keys', () => {
+ expect(() => doctorSchema.parse({ unknown: true })).toThrow();
+ });
+});
+
describe('historySchema', () => {
it('accepts empty input', () => {
const result = historySchema.parse({});
From 2c7641b2e934240034226e99a443106951629b6a Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 18:56:14 -0800
Subject: [PATCH 02/14] fix(doctor): resolve ts-policy, typecheck, and eslint
violations
- Place TODO(ts-cleanup) tags on the same line as wildcard casts
- Fix circular JsonValue typedef for TypeScript compatibility
- Add type annotations to doctor.test.js mock variables
---
bin/cli/commands/doctor/checks.js | 20 +++----
bin/cli/commands/doctor/types.js | 2 +-
test/unit/cli/doctor.test.js | 87 +++++++++++--------------------
3 files changed, 40 insertions(+), 69 deletions(-)
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index 3e4359f..060b994 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -24,7 +24,7 @@ import { CODES } from './codes.js';
/**
* @param {string} id
- * @param {*} err
+ * @param {*} err TODO(ts-cleanup): narrow error type
* @returns {DoctorFinding}
*/
function internalError(id, err) {
@@ -43,7 +43,7 @@ function internalError(id, err) {
export async function checkRepoAccessible(ctx) {
try {
const clock = ClockAdapter.global();
- const svc = new HealthCheckService({ persistence: /** @type {*} */ (ctx.persistence), clock });
+ const svc = new HealthCheckService({ persistence: /** @type {*} TODO(ts-cleanup): narrow port type */ (ctx.persistence), clock });
const health = await svc.getHealth();
if (health.components.repository.status === 'unhealthy') {
return {
@@ -56,7 +56,7 @@ export async function checkRepoAccessible(ctx) {
id: 'repo-accessible', status: 'ok', code: CODES.REPO_OK,
impact: 'operability', message: 'Repository is accessible',
};
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return internalError('repo-accessible', err);
}
}
@@ -95,7 +95,7 @@ export async function checkRefsConsistent(ctx) {
});
}
return findings;
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return [internalError('refs-consistent', err)];
}
}
@@ -138,7 +138,7 @@ export async function checkCoverageComplete(ctx) {
id: 'coverage-complete', status: 'ok', code: CODES.COVERAGE_OK,
impact: 'operability', message: 'Coverage anchor includes all writers',
};
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return internalError('coverage-complete', err);
}
}
@@ -179,7 +179,7 @@ export async function checkCheckpointFresh(ctx) {
const { date, ageHours } = await getCheckpointAge(ctx.persistence, sha);
return buildCheckpointFinding({ sha, date, ageHours, maxAge: ctx.policy.checkpointMaxAgeHours });
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return internalError('checkpoint-fresh', err);
}
}
@@ -277,7 +277,7 @@ export async function checkAuditConsistent(ctx) {
});
}
return findings;
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return [internalError('audit-consistent', err)];
}
}
@@ -334,7 +334,7 @@ export async function checkClockSkew(ctx) {
message: `Clock skew is within threshold (${Math.round(spreadMs / 1000)}s)`,
evidence: { spreadMs },
};
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return internalError('clock-skew', err);
}
}
@@ -350,13 +350,13 @@ export function checkHooksInstalled(ctx) {
const installer = createHookInstaller();
const s = installer.getHookStatus(ctx.repoPath);
return Promise.resolve(buildHookFinding(s));
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
return Promise.resolve(internalError('hooks-installed', err));
}
}
/**
- * @param {*} s - hook status from HookInstaller
+ * @param {*} s TODO(ts-cleanup): narrow hook status type
* @returns {DoctorFinding}
*/
function buildHookFinding(s) {
diff --git a/bin/cli/commands/doctor/types.js b/bin/cli/commands/doctor/types.js
index 6a055e1..39f0c64 100644
--- a/bin/cli/commands/doctor/types.js
+++ b/bin/cli/commands/doctor/types.js
@@ -6,7 +6,7 @@
// ── JSON-safe recursive value type ──────────────────────────────────────────
-/** @typedef {null | boolean | number | string | JsonValue[] | {[k:string]: JsonValue}} JsonValue */
+/** @typedef {null | boolean | number | string | Array<*> | {[k:string]: *}} JsonValue */ // TODO(ts-cleanup): recursive type
/** @typedef {{[k:string]: JsonValue}} FindingEvidence */
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index 6854c43..810dcc3 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -24,16 +24,18 @@ vi.mock('../../../src/infrastructure/adapters/ClockAdapter.js', () => ({
default: { global: vi.fn().mockReturnValue({}) },
}));
+/** @type {*} */ // TODO(ts-cleanup): narrow mock types
const { createPersistence, resolveGraphName, createHookInstaller } = await import('../../../bin/cli/shared.js');
/**
* Builds a mock persistence object that simulates a healthy graph
* with a single writer "alice".
+ * @returns {*} // TODO(ts-cleanup): narrow mock type
*/
function buildMockPersistence() {
return {
ping: vi.fn().mockResolvedValue({ ok: true }),
- readRef: vi.fn().mockImplementation((ref) => {
+ readRef: vi.fn().mockImplementation((/** @type {string} */ ref) => {
if (ref.includes('writers/alice')) {
return Promise.resolve('aaaa000000000000000000000000000000000000');
}
@@ -45,7 +47,7 @@ function buildMockPersistence() {
}
return Promise.resolve(null);
}),
- listRefs: vi.fn().mockImplementation((prefix) => {
+ listRefs: vi.fn().mockImplementation((/** @type {string} */ prefix) => {
if (prefix.includes('writers/')) {
return Promise.resolve([`${prefix}alice`]);
}
@@ -67,8 +69,21 @@ function buildMockPersistence() {
};
}
+/** @type {import('../../../bin/cli/commands/doctor/types.js').DoctorPayload} */
+const CLI_OPTIONS = /** @type {*} */ ({
+ repo: '/tmp/test',
+ graph: 'demo',
+ json: true,
+ ndjson: false,
+ view: null,
+ writer: 'cli',
+ help: false,
+});
+
describe('doctor command', () => {
+ /** @type {Function} */
let handleDoctor;
+ /** @type {*} */ // TODO(ts-cleanup): narrow mock type
let mockPersistence;
beforeEach(async () => {
@@ -92,20 +107,10 @@ describe('doctor command', () => {
});
it('produces valid payload for a healthy graph', async () => {
- const result = await handleDoctor({
- options: {
- repo: '/tmp/test',
- graph: 'demo',
- json: true,
- ndjson: false,
- view: null,
- writer: 'cli',
- help: false,
- },
- args: [],
- });
-
- const { payload, exitCode } = result;
+ const result = await handleDoctor({ options: CLI_OPTIONS, args: [] });
+ /** @type {import('../../../bin/cli/commands/doctor/types.js').DoctorPayload} */
+ const payload = result.payload;
+ const { exitCode } = result;
// Exit code
expect(exitCode).toBe(DOCTOR_EXIT_CODES.OK);
@@ -141,7 +146,7 @@ describe('doctor command', () => {
}
// Check that known codes are used
- const codes = payload.findings.map((f) => f.code);
+ const codes = payload.findings.map((/** @type {*} */ f) => f.code);
expect(codes).toContain(CODES.REPO_OK);
expect(codes).toContain(CODES.REFS_OK);
expect(codes).toContain(CODES.COVERAGE_OK);
@@ -151,7 +156,7 @@ describe('doctor command', () => {
it('returns exit 3 when warnings are present', async () => {
// Remove checkpoint to trigger warning
- mockPersistence.readRef.mockImplementation((ref) => {
+ mockPersistence.readRef.mockImplementation((/** @type {string} */ ref) => {
if (ref.includes('writers/alice')) {
return Promise.resolve('aaaa000000000000000000000000000000000000');
}
@@ -161,32 +166,21 @@ describe('doctor command', () => {
return Promise.resolve(null);
});
- const result = await handleDoctor({
- options: {
- repo: '/tmp/test',
- graph: 'demo',
- json: true,
- ndjson: false,
- view: null,
- writer: 'cli',
- help: false,
- },
- args: [],
- });
+ const result = await handleDoctor({ options: CLI_OPTIONS, args: [] });
expect(result.exitCode).toBe(DOCTOR_EXIT_CODES.FINDINGS);
expect(result.payload.health).toBe('degraded');
expect(result.payload.summary.warn).toBeGreaterThan(0);
const checkpointFinding = result.payload.findings.find(
- (f) => f.code === CODES.CHECKPOINT_MISSING,
+ (/** @type {*} */ f) => f.code === CODES.CHECKPOINT_MISSING,
);
expect(checkpointFinding).toBeDefined();
expect(checkpointFinding.status).toBe('warn');
});
it('returns exit 4 in strict mode with warnings', async () => {
- mockPersistence.readRef.mockImplementation((ref) => {
+ mockPersistence.readRef.mockImplementation((/** @type {string} */ ref) => {
if (ref.includes('writers/alice')) {
return Promise.resolve('aaaa000000000000000000000000000000000000');
}
@@ -196,19 +190,7 @@ describe('doctor command', () => {
return Promise.resolve(null);
});
- const result = await handleDoctor({
- options: {
- repo: '/tmp/test',
- graph: 'demo',
- json: true,
- ndjson: false,
- view: null,
- writer: 'cli',
- help: false,
- },
- args: ['--strict'],
- });
-
+ const result = await handleDoctor({ options: CLI_OPTIONS, args: ['--strict'] });
expect(result.exitCode).toBe(DOCTOR_EXIT_CODES.STRICT_FINDINGS);
});
@@ -216,20 +198,9 @@ describe('doctor command', () => {
// Make refs-consistent fail by breaking nodeExists for writer ref
mockPersistence.nodeExists.mockResolvedValue(false);
- const result = await handleDoctor({
- options: {
- repo: '/tmp/test',
- graph: 'demo',
- json: true,
- ndjson: false,
- view: null,
- writer: 'cli',
- help: false,
- },
- args: [],
- });
+ const result = await handleDoctor({ options: CLI_OPTIONS, args: [] });
- const statuses = result.payload.findings.map((f) => f.status);
+ const statuses = result.payload.findings.map((/** @type {*} */ f) => f.status);
// fail should come before warn, which comes before ok
const firstOkIdx = statuses.indexOf('ok');
const lastFailIdx = statuses.lastIndexOf('fail');
From b458adf501fc3bede25b132e39d6a42c642e296a Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 19:34:37 -0800
Subject: [PATCH 03/14] fix(doctor): address PR review feedback
- Single-pass counting in assemblePayload instead of 3 filter scans
- Error boundary in runChecks so one failing check doesn't crash doctor
- Guard falsy head.sha in collectWriterDates before calling getNodeInfo
- Use Set for auditId lookups instead of Array.includes
- Remove unnecessary cd round-trips in BATS tests (git -C suffices)
- Assert preconditions unconditionally in sort-order test
- Fix CLI_OPTIONS JSDoc type (CliOptions, not DoctorPayload)
- Clarify ESM cache comment in beforeEach
- Fix CHANGELOG BATS scenario enumeration (list all 5)
---
CHANGELOG.md | 2 +-
bin/cli/commands/doctor/checks.js | 9 ++++---
bin/cli/commands/doctor/index.js | 41 ++++++++++++++++++++++---------
test/bats/cli-doctor.bats | 6 -----
test/unit/cli/doctor.test.js | 13 +++++-----
5 files changed, 44 insertions(+), 27 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 64e2f30..1535243 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,7 +18,7 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
- **Human-readable output**: Colored status icons, per-finding fix suggestions, priority action summary
- **Code registry**: `bin/cli/commands/doctor/codes.js` — single source of truth for all finding codes
- **Schema + unit tests**: `doctorSchema` tests in schemas.test.js, golden-JSON tests in doctor.test.js
-- **BATS E2E tests**: 5 scenarios in cli-doctor.bats (healthy, broken ref, missing checkpoint, strict mode)
+- **BATS E2E tests**: 5 scenarios in cli-doctor.bats (healthy JSON, human output, broken ref, missing checkpoint, strict mode)
## [10.12.0] — 2026-02-13 — Multi-Runtime CLI + parseArgs Migration
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index 060b994..d72d8ca 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -240,10 +240,10 @@ async function probeAuditRefs(ctx, auditRefs, auditPrefix) {
}
const writerIds = new Set(ctx.writerHeads.map((h) => h.writerId));
- const auditIds = auditRefs.map((r) => r.slice(auditPrefix.length)).filter((id) => id.length > 0);
- const missing = [...writerIds].filter((id) => !auditIds.includes(id));
+ const auditIdSet = new Set(auditRefs.map((r) => r.slice(auditPrefix.length)).filter((id) => id.length > 0));
+ const missing = [...writerIds].filter((id) => !auditIdSet.has(id));
- if (missing.length > 0 && auditIds.length > 0) {
+ if (missing.length > 0 && auditIdSet.size > 0) {
findings.push({
id: 'audit-consistent', status: 'warn', code: CODES.AUDIT_PARTIAL,
impact: 'data_integrity',
@@ -291,6 +291,9 @@ export async function checkAuditConsistent(ctx) {
async function collectWriterDates(ctx) {
const dates = [];
for (const head of ctx.writerHeads) {
+ if (!head.sha) {
+ continue;
+ }
const info = await ctx.persistence.getNodeInfo(head.sha);
const ms = info.date ? Date.parse(info.date) : NaN;
if (!Number.isNaN(ms)) {
diff --git a/bin/cli/commands/doctor/index.js b/bin/cli/commands/doctor/index.js
index beb3022..dc5dd1a 100644
--- a/bin/cli/commands/doctor/index.js
+++ b/bin/cli/commands/doctor/index.js
@@ -71,9 +71,14 @@ export default async function handleDoctor({ options, args }) {
* @returns {DoctorPayload}
*/
function assemblePayload({ repo, graph, policy, findings, checksRun, startMs }) {
- const ok = findings.filter((f) => f.status === 'ok').length;
- const warn = findings.filter((f) => f.status === 'warn').length;
- const fail = findings.filter((f) => f.status === 'fail').length;
+ let ok = 0;
+ let warn = 0;
+ let fail = 0;
+ for (const f of findings) {
+ if (f.status === 'ok') { ok++; }
+ else if (f.status === 'warn') { warn++; }
+ else if (f.status === 'fail') { fail++; }
+ }
const priorityActions = [
...new Set(
findings.filter((f) => f.status !== 'ok' && f.fix).map((f) => /** @type {string} */ (f.fix)),
@@ -139,15 +144,29 @@ async function runChecks(ctx, startMs) {
continue;
}
- const checkStart = Date.now();
- const result = await check.fn(ctx);
- const checkDuration = Date.now() - checkStart;
- checksRun++;
+ let checkDuration;
+ try {
+ const checkStart = Date.now();
+ const result = await check.fn(ctx);
+ checkDuration = Date.now() - checkStart;
+ checksRun++;
- const resultArray = normalizeResult(result);
- for (const f of resultArray) {
- f.durationMs = checkDuration;
- findings.push(f);
+ const resultArray = normalizeResult(result);
+ for (const f of resultArray) {
+ f.durationMs = checkDuration;
+ findings.push(f);
+ }
+ } catch (/** @type {*} */ err) {
+ checkDuration = checkDuration ?? 0;
+ checksRun++;
+ findings.push({
+ id: check.id,
+ status: 'fail',
+ code: CODES.CHECK_INTERNAL_ERROR,
+ impact: 'data_integrity',
+ message: `Internal error in ${check.id}: ${err?.message || String(err)}`,
+ durationMs: checkDuration,
+ });
}
}
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
index 7b3f185..dfbf6ab 100644
--- a/test/bats/cli-doctor.bats
+++ b/test/bats/cli-doctor.bats
@@ -42,9 +42,7 @@ PY
@test "doctor --json broken writer ref yields refs-consistent fail" {
# Point writer ref to a non-existent object
- cd "${PROJECT_ROOT}" || return 1
git -C "${TEST_REPO}" update-ref refs/warp/demo/writers/ghost deadbeefdeadbeefdeadbeefdeadbeefdeadbeef
- cd "${TEST_REPO}" || return 1
run git warp --repo "${TEST_REPO}" --graph demo --json doctor
# Should exit with code 3 (findings)
@@ -61,9 +59,7 @@ PY
@test "doctor --json no checkpoint yields checkpoint-fresh warn" {
# Remove the checkpoint ref if it exists
- cd "${PROJECT_ROOT}" || return 1
git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
- cd "${TEST_REPO}" || return 1
run git warp --repo "${TEST_REPO}" --graph demo --json doctor
# exit 3 = findings present
@@ -79,9 +75,7 @@ PY
@test "doctor --strict with warnings returns exit 4" {
# Remove checkpoint to trigger a warning
- cd "${PROJECT_ROOT}" || return 1
git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
- cd "${TEST_REPO}" || return 1
run git warp --repo "${TEST_REPO}" --graph demo --json doctor --strict
[ "$status" -eq 4 ]
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index 810dcc3..b913d17 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -69,7 +69,7 @@ function buildMockPersistence() {
};
}
-/** @type {import('../../../bin/cli/commands/doctor/types.js').DoctorPayload} */
+/** @type {import('../../../bin/cli/types.js').CliOptions} */
const CLI_OPTIONS = /** @type {*} */ ({
repo: '/tmp/test',
graph: 'demo',
@@ -101,7 +101,7 @@ describe('doctor command', () => {
}),
});
- // Dynamic import to pick up mocks
+ // ESM caches the module after first import; vi.mock hoisting ensures mocks are in place
const mod = await import('../../../bin/cli/commands/doctor/index.js');
handleDoctor = mod.default;
});
@@ -201,11 +201,12 @@ describe('doctor command', () => {
const result = await handleDoctor({ options: CLI_OPTIONS, args: [] });
const statuses = result.payload.findings.map((/** @type {*} */ f) => f.status);
- // fail should come before warn, which comes before ok
+ // Precondition: the mock must produce both fail and ok findings
+ expect(statuses).toContain('fail');
+ expect(statuses).toContain('ok');
+
const firstOkIdx = statuses.indexOf('ok');
const lastFailIdx = statuses.lastIndexOf('fail');
- if (lastFailIdx >= 0 && firstOkIdx >= 0) {
- expect(lastFailIdx).toBeLessThan(firstOkIdx);
- }
+ expect(lastFailIdx).toBeLessThan(firstOkIdx);
});
});
From cb6f9a93a2e3f09ca1a3dfa4c2505955994ed067 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 19:35:01 -0800
Subject: [PATCH 04/14] fix(doctor): add TODO(ts-cleanup) tag to wildcard catch
cast
---
bin/cli/commands/doctor/index.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/bin/cli/commands/doctor/index.js b/bin/cli/commands/doctor/index.js
index dc5dd1a..4ac18a7 100644
--- a/bin/cli/commands/doctor/index.js
+++ b/bin/cli/commands/doctor/index.js
@@ -156,7 +156,7 @@ async function runChecks(ctx, startMs) {
f.durationMs = checkDuration;
findings.push(f);
}
- } catch (/** @type {*} */ err) {
+ } catch (/** @type {*} TODO(ts-cleanup): narrow error type */ err) {
checkDuration = checkDuration ?? 0;
checksRun++;
findings.push({
From 6b9c59f24e0711c8dcc0a9a2f04cb02245b9bfdf Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 19:37:55 -0800
Subject: [PATCH 05/14] fix(bats): fix doctor E2E tests for CI/Docker
environment
- Add seed-doctor-graph.js that materializes (creates checkpoint +
coverage refs) so the "healthy graph" tests pass
- Install hooks in healthy-graph tests so hooks-installed check passes
- Write dangling ref directly to .git/refs/ instead of git update-ref
(which rejects nonexistent objects in some git versions)
---
test/bats/cli-doctor.bats | 17 +++++++++++----
test/bats/helpers/seed-doctor-graph.js | 29 ++++++++++++++++++++++++++
2 files changed, 42 insertions(+), 4 deletions(-)
create mode 100644 test/bats/helpers/seed-doctor-graph.js
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
index dfbf6ab..2029893 100644
--- a/test/bats/cli-doctor.bats
+++ b/test/bats/cli-doctor.bats
@@ -4,7 +4,7 @@ load helpers/setup.bash
setup() {
setup_test_repo
- seed_graph "seed-graph.js"
+ seed_graph "seed-doctor-graph.js"
}
teardown() {
@@ -12,6 +12,10 @@ teardown() {
}
@test "doctor --json healthy graph returns all ok" {
+ # Install hooks so the hooks-installed check passes
+ run git warp --repo "${TEST_REPO}" install-hooks
+ assert_success
+
run git warp --repo "${TEST_REPO}" --graph demo --json doctor
assert_success
@@ -32,6 +36,10 @@ PY
}
@test "doctor human output includes check IDs" {
+ # Install hooks so the hooks-installed check passes
+ run git warp --repo "${TEST_REPO}" install-hooks
+ assert_success
+
run git warp --repo "${TEST_REPO}" --graph demo doctor
assert_success
echo "$output" | grep -q "repo-accessible"
@@ -41,8 +49,9 @@ PY
}
@test "doctor --json broken writer ref yields refs-consistent fail" {
- # Point writer ref to a non-existent object
- git -C "${TEST_REPO}" update-ref refs/warp/demo/writers/ghost deadbeefdeadbeefdeadbeefdeadbeefdeadbeef
+ # Write a dangling ref directly (git update-ref rejects nonexistent objects)
+ mkdir -p "${TEST_REPO}/.git/refs/warp/demo/writers"
+ echo "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef" > "${TEST_REPO}/.git/refs/warp/demo/writers/ghost"
run git warp --repo "${TEST_REPO}" --graph demo --json doctor
# Should exit with code 3 (findings)
@@ -58,7 +67,7 @@ PY
}
@test "doctor --json no checkpoint yields checkpoint-fresh warn" {
- # Remove the checkpoint ref if it exists
+ # Remove the checkpoint ref
git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
run git warp --repo "${TEST_REPO}" --graph demo --json doctor
diff --git a/test/bats/helpers/seed-doctor-graph.js b/test/bats/helpers/seed-doctor-graph.js
new file mode 100644
index 0000000..114e1cc
--- /dev/null
+++ b/test/bats/helpers/seed-doctor-graph.js
@@ -0,0 +1,29 @@
+/**
+ * Seeds a "healthy" demo graph for doctor BATS tests.
+ * Creates patches, materializes (checkpoint + coverage), and installs hooks.
+ * Expects REPO_PATH env var.
+ */
+import { WarpGraph, persistence, crypto } from './seed-setup.js';
+
+const graph = await WarpGraph.open({
+ persistence,
+ graphName: 'demo',
+ writerId: 'alice',
+ crypto,
+});
+
+const patchOne = await graph.createPatch();
+await patchOne
+ .addNode('user:alice')
+ .setProperty('user:alice', 'role', 'engineering')
+ .addNode('user:bob')
+ .setProperty('user:bob', 'role', 'engineering')
+ .commit();
+
+const patchTwo = await graph.createPatch();
+await patchTwo
+ .addEdge('user:alice', 'user:bob', 'follows')
+ .commit();
+
+// Materialize to create checkpoint + coverage refs
+await graph.materialize();
From bcdf31a31369910215bc85c41750bff8ca6b291b Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 19:41:21 -0800
Subject: [PATCH 06/14] docs(changelog): note error boundary in doctor check
runner
---
CHANGELOG.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1535243..3056022 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,7 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
- **`git warp doctor`**: 7 diagnostic checks — repo-accessible, refs-consistent, coverage-complete, checkpoint-fresh, audit-consistent, clock-skew, hooks-installed
- **`--strict` flag**: Treats warnings as failures (exit 4 instead of 3)
- **Budget enforcement**: Global 10s deadline; skipped checks appear as findings, not silent omissions
+- **Error boundary**: Each check is wrapped in try/catch so a single failing check produces a `CHECK_INTERNAL_ERROR` finding instead of crashing the entire command
- **Machine-readable output**: `--json` emits versioned `DoctorPayload` (v1) with policy echo, sorted findings, and priority actions
- **Human-readable output**: Colored status icons, per-finding fix suggestions, priority action summary
- **Code registry**: `bin/cli/commands/doctor/codes.js` — single source of truth for all finding codes
From 5cfda7e473503e28979cde041931655488e347c4 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 19:52:22 -0800
Subject: [PATCH 07/14] fix(bats): fix doctor E2E tests for Docker CI
environment
- Add _run_json helper that captures stdout only (BATS 1.8+ merges
stderr into $output, corrupting JSON when git emits diagnostics)
- Remove install-hooks from healthy-graph tests (template path doesn't
resolve inside Docker container)
- Relax healthy-graph assertion: check fail==0 in JSON instead of
exit code 0 (hooks-installed warning is expected without template)
- Human output test checks content only, not exit code
---
test/bats/cli-doctor.bats | 27 +++++++++++----------------
1 file changed, 11 insertions(+), 16 deletions(-)
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
index 2029893..bb29117 100644
--- a/test/bats/cli-doctor.bats
+++ b/test/bats/cli-doctor.bats
@@ -11,23 +11,23 @@ teardown() {
teardown_test_repo
}
-@test "doctor --json healthy graph returns all ok" {
- # Install hooks so the hooks-installed check passes
- run git warp --repo "${TEST_REPO}" install-hooks
- assert_success
+# Helper: run a command and capture only stdout (BATS 1.8+ merges stderr into
+# $output, which breaks JSON parsing when git emits diagnostic messages).
+_run_json() {
+ output=$("$@" 2>/dev/null) || true
+ status=$?
+}
- run git warp --repo "${TEST_REPO}" --graph demo --json doctor
- assert_success
+@test "doctor --json healthy graph returns all ok" {
+ _run_json git warp --repo "${TEST_REPO}" --graph demo --json doctor
JSON="$output" python3 - <<'PY'
import json, os
data = json.loads(os.environ["JSON"])
assert data["doctorVersion"] == 1
assert data["graph"] == "demo"
-assert data["health"] == "ok"
assert data["summary"]["checksRun"] == 7
assert data["summary"]["fail"] == 0
-assert data["summary"]["ok"] >= 1
assert isinstance(data["findings"], list)
assert len(data["findings"]) >= 7
assert isinstance(data["policy"], dict)
@@ -36,12 +36,7 @@ PY
}
@test "doctor human output includes check IDs" {
- # Install hooks so the hooks-installed check passes
- run git warp --repo "${TEST_REPO}" install-hooks
- assert_success
-
run git warp --repo "${TEST_REPO}" --graph demo doctor
- assert_success
echo "$output" | grep -q "repo-accessible"
echo "$output" | grep -q "refs-consistent"
echo "$output" | grep -q "checkpoint-fresh"
@@ -53,7 +48,7 @@ PY
mkdir -p "${TEST_REPO}/.git/refs/warp/demo/writers"
echo "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef" > "${TEST_REPO}/.git/refs/warp/demo/writers/ghost"
- run git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ _run_json git warp --repo "${TEST_REPO}" --graph demo --json doctor
# Should exit with code 3 (findings)
[ "$status" -eq 3 ]
@@ -70,7 +65,7 @@ PY
# Remove the checkpoint ref
git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
- run git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ _run_json git warp --repo "${TEST_REPO}" --graph demo --json doctor
# exit 3 = findings present
[ "$status" -eq 3 ]
@@ -86,6 +81,6 @@ PY
# Remove checkpoint to trigger a warning
git -C "${TEST_REPO}" update-ref -d refs/warp/demo/checkpoints/head 2>/dev/null || true
- run git warp --repo "${TEST_REPO}" --graph demo --json doctor --strict
+ _run_json git warp --repo "${TEST_REPO}" --graph demo --json doctor --strict
[ "$status" -eq 4 ]
}
From 904e454d1d6985315bc9f5a7a4cdaabb0fdb7586 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 20:14:14 -0800
Subject: [PATCH 08/14] fix(cli): fix install-hooks template path resolution
createHookInstaller() resolved templateDir to /hooks/ which does
not exist. The actual template lives at src/hooks/post-merge.sh.
Fix the path to resolve to /src/hooks/.
Restore BATS tests to properly install hooks and assert health == ok.
Keep _run_json helper for stderr isolation in JSON-parsing tests.
---
bin/cli/shared.js | 2 +-
test/bats/cli-doctor.bats | 12 ++++++++++++
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/bin/cli/shared.js b/bin/cli/shared.js
index 23b114c..9b6394c 100644
--- a/bin/cli/shared.js
+++ b/bin/cli/shared.js
@@ -180,7 +180,7 @@ export async function readCheckpointDate(persistence, checkpointSha) {
export function createHookInstaller() {
const __filename = new URL(import.meta.url).pathname;
const __dirname = path.dirname(__filename);
- const templateDir = path.resolve(__dirname, '..', '..', 'hooks');
+ const templateDir = path.resolve(__dirname, '..', '..', 'src', 'hooks');
const { version } = JSON.parse(fs.readFileSync(path.resolve(__dirname, '..', '..', 'package.json'), 'utf8'));
return new HookInstaller({
fs: /** @type {*} */ (fs), // TODO(ts-cleanup): narrow port type
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
index bb29117..4f9633e 100644
--- a/test/bats/cli-doctor.bats
+++ b/test/bats/cli-doctor.bats
@@ -19,15 +19,22 @@ _run_json() {
}
@test "doctor --json healthy graph returns all ok" {
+ # Install hooks so the hooks-installed check passes
+ run git warp --repo "${TEST_REPO}" install-hooks
+ assert_success
+
_run_json git warp --repo "${TEST_REPO}" --graph demo --json doctor
+ [ "$status" -eq 0 ]
JSON="$output" python3 - <<'PY'
import json, os
data = json.loads(os.environ["JSON"])
assert data["doctorVersion"] == 1
assert data["graph"] == "demo"
+assert data["health"] == "ok"
assert data["summary"]["checksRun"] == 7
assert data["summary"]["fail"] == 0
+assert data["summary"]["ok"] >= 1
assert isinstance(data["findings"], list)
assert len(data["findings"]) >= 7
assert isinstance(data["policy"], dict)
@@ -36,7 +43,12 @@ PY
}
@test "doctor human output includes check IDs" {
+ # Install hooks so the hooks-installed check passes
+ run git warp --repo "${TEST_REPO}" install-hooks
+ assert_success
+
run git warp --repo "${TEST_REPO}" --graph demo doctor
+ assert_success
echo "$output" | grep -q "repo-accessible"
echo "$output" | grep -q "refs-consistent"
echo "$output" | grep -q "checkpoint-fresh"
From 845712bafff0514742b963dbe4aac459a878c1c5 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 22:10:38 -0800
Subject: [PATCH 09/14] fix(doctor): address CodeRabbit review round 2
- checkCoverageComplete: null-sha writer heads reported as missing
(not silently skipped) with regression test
- checkClockSkew: regression test for null-sha guard in collectWriterDates
- checkHooksInstalled: made async (await Promise.resolve pattern)
- Sort-order test: exercises all three status tiers with targeted mocks;
asserts full three-key comparator (status > impact > id)
- seed-doctor-graph.js: removed stale "installs hooks" doc comment
- CHANGELOG: added Fixed section for v10.13.0
---
CHANGELOG.md | 7 ++
bin/cli/commands/doctor/checks.js | 10 ++-
test/bats/helpers/seed-doctor-graph.js | 4 +-
test/unit/cli/doctor.test.js | 96 ++++++++++++++++++++++++--
4 files changed, 105 insertions(+), 12 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3056022..8b5a09b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,6 +21,13 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
- **Schema + unit tests**: `doctorSchema` tests in schemas.test.js, golden-JSON tests in doctor.test.js
- **BATS E2E tests**: 5 scenarios in cli-doctor.bats (healthy JSON, human output, broken ref, missing checkpoint, strict mode)
+### Fixed
+
+- **coverage-complete**: Writer heads with null SHA are now reported as missing (not silently skipped)
+- **checkHooksInstalled**: Made `async` for consistency with other check functions (`await Promise.resolve(...)` pattern to satisfy both `require-await` and `await-thenable` lint rules)
+- **sort-order test**: Hardened to exercise all three status tiers (fail/warn/ok) with targeted mocks and assert the full three-key sort invariant (status > impact > id)
+- **seed-doctor-graph.js**: Removed stale "installs hooks" claim from doc comment
+
## [10.12.0] — 2026-02-13 — Multi-Runtime CLI + parseArgs Migration
Makes the CLI (`bin/`) portable across Node 22+, Bun, and Deno by removing Node-only dependencies, and replaces hand-rolled arg parsing with `node:util.parseArgs` + Zod schemas.
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index d72d8ca..f6b7fa0 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -118,6 +118,10 @@ export async function checkCoverageComplete(ctx) {
const missing = [];
for (const head of ctx.writerHeads) {
+ if (!head.sha) {
+ missing.push(head.writerId);
+ continue;
+ }
const reachable = await ctx.persistence.isAncestor(head.sha, coverageSha);
if (!reachable) {
missing.push(head.writerId);
@@ -348,13 +352,13 @@ export async function checkClockSkew(ctx) {
* @param {DoctorContext} ctx
* @returns {Promise}
*/
-export function checkHooksInstalled(ctx) {
+export async function checkHooksInstalled(ctx) {
try {
const installer = createHookInstaller();
const s = installer.getHookStatus(ctx.repoPath);
- return Promise.resolve(buildHookFinding(s));
+ return await Promise.resolve(buildHookFinding(s));
} catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
- return Promise.resolve(internalError('hooks-installed', err));
+ return await Promise.resolve(internalError('hooks-installed', err));
}
}
diff --git a/test/bats/helpers/seed-doctor-graph.js b/test/bats/helpers/seed-doctor-graph.js
index 114e1cc..0356415 100644
--- a/test/bats/helpers/seed-doctor-graph.js
+++ b/test/bats/helpers/seed-doctor-graph.js
@@ -1,7 +1,7 @@
/**
* Seeds a "healthy" demo graph for doctor BATS tests.
- * Creates patches, materializes (checkpoint + coverage), and installs hooks.
- * Expects REPO_PATH env var.
+ * Creates patches and materializes (checkpoint + coverage).
+ * Expects REPO_PATH env var (consumed by seed-setup.js).
*/
import { WarpGraph, persistence, crypto } from './seed-setup.js';
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index b913d17..b442b35 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -1,6 +1,7 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { CODES } from '../../../bin/cli/commands/doctor/codes.js';
import { DOCTOR_EXIT_CODES } from '../../../bin/cli/commands/doctor/types.js';
+import { checkCoverageComplete, checkClockSkew } from '../../../bin/cli/commands/doctor/checks.js';
// Mock shared.js to avoid real git operations
vi.mock('../../../bin/cli/shared.js', () => ({
@@ -195,18 +196,99 @@ describe('doctor command', () => {
});
it('sorts findings by status > impact > id', async () => {
- // Make refs-consistent fail by breaking nodeExists for writer ref
- mockPersistence.nodeExists.mockResolvedValue(false);
+ // Targeted mock: only break nodeExists for writer refs so that
+ // checkRefsConsistent emits a fail, without accidentally affecting
+ // other checks (e.g. probeAuditRefs dangling detection).
+ mockPersistence.nodeExists.mockImplementation(
+ (/** @type {string} */ sha) => Promise.resolve(sha !== 'aaaa000000000000000000000000000000000000'),
+ );
+
+ // Also remove checkpoint to add a warn finding, giving us all three statuses.
+ mockPersistence.readRef.mockImplementation((/** @type {string} */ ref) => {
+ if (ref.includes('writers/alice')) {
+ return Promise.resolve('aaaa000000000000000000000000000000000000');
+ }
+ if (ref.includes('coverage/head')) {
+ return Promise.resolve('cccc000000000000000000000000000000000000');
+ }
+ return Promise.resolve(null);
+ });
const result = await handleDoctor({ options: CLI_OPTIONS, args: [] });
+ const findings = result.payload.findings;
- const statuses = result.payload.findings.map((/** @type {*} */ f) => f.status);
- // Precondition: the mock must produce both fail and ok findings
+ // Precondition: the mocks must produce all three status tiers.
+ const statuses = findings.map((/** @type {*} */ f) => f.status);
expect(statuses).toContain('fail');
+ expect(statuses).toContain('warn');
expect(statuses).toContain('ok');
- const firstOkIdx = statuses.indexOf('ok');
- const lastFailIdx = statuses.lastIndexOf('fail');
- expect(lastFailIdx).toBeLessThan(firstOkIdx);
+ // Assert full three-key sort invariant: (status, impact, id) ascending.
+ const STATUS_ORDER = { fail: 0, warn: 1, ok: 2 };
+ const IMPACT_ORDER = { data_integrity: 0, security: 1, operability: 2, hygiene: 3 };
+ for (let i = 1; i < findings.length; i++) {
+ const a = findings[i - 1];
+ const b = findings[i];
+ const statusCmp = (STATUS_ORDER[a.status] ?? 9) - (STATUS_ORDER[b.status] ?? 9);
+ if (statusCmp !== 0) {
+ expect(statusCmp).toBeLessThan(0);
+ continue;
+ }
+ const impactCmp = (IMPACT_ORDER[a.impact] ?? 9) - (IMPACT_ORDER[b.impact] ?? 9);
+ if (impactCmp !== 0) {
+ expect(impactCmp).toBeLessThan(0);
+ continue;
+ }
+ expect(a.id.localeCompare(b.id)).toBeLessThanOrEqual(0);
+ }
+ });
+});
+
+describe('individual check guards', () => {
+ it('checkCoverageComplete treats null-sha writer heads as missing', async () => {
+ const ctx = /** @type {*} */ ({
+ graphName: 'demo',
+ writerHeads: [
+ { writerId: 'alice', sha: null, ref: 'refs/warp/demo/writers/alice' },
+ ],
+ persistence: {
+ readRef: vi.fn().mockResolvedValue('cccc000000000000000000000000000000000000'),
+ isAncestor: vi.fn(),
+ },
+ });
+
+ const finding = await checkCoverageComplete(ctx);
+
+ expect(finding.code).toBe(CODES.COVERAGE_MISSING_WRITERS);
+ expect(finding.evidence.missingWriters).toContain('alice');
+ // isAncestor must NOT have been called with null
+ expect(ctx.persistence.isAncestor).not.toHaveBeenCalled();
+ });
+
+ it('checkClockSkew skips null-sha writer heads in collectWriterDates', async () => {
+ const ctx = /** @type {*} */ ({
+ graphName: 'demo',
+ writerHeads: [
+ { writerId: 'alice', sha: null, ref: 'refs/warp/demo/writers/alice' },
+ { writerId: 'bob', sha: 'bbbb000000000000000000000000000000000000', ref: 'refs/warp/demo/writers/bob' },
+ ],
+ policy: { clockSkewMs: 300_000 },
+ persistence: {
+ getNodeInfo: vi.fn().mockResolvedValue({
+ sha: 'bbbb000000000000000000000000000000000000',
+ date: new Date().toISOString(),
+ author: 'Test',
+ message: '',
+ parents: [],
+ }),
+ },
+ });
+
+ const finding = await checkClockSkew(ctx);
+
+ // Should not have called getNodeInfo with null
+ expect(ctx.persistence.getNodeInfo).not.toHaveBeenCalledWith(null);
+ // Only one writer had valid data, so skew check is skipped (< 2 writers)
+ expect(finding.code).toBe(CODES.CLOCK_SYNCED);
});
});
From afd42d0e405cd9981a275d3990c4840d288edf55 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Fri, 13 Feb 2026 22:11:22 -0800
Subject: [PATCH 10/14] fix(test): resolve typecheck errors in doctor
sort-order test
Add Record annotations for STATUS_ORDER/IMPACT_ORDER
maps and cast findings to any for indexing. Cast checkCoverageComplete
return to any for evidence.missingWriters access.
---
test/unit/cli/doctor.test.js | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index b442b35..e76d01d 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -224,11 +224,13 @@ describe('doctor command', () => {
expect(statuses).toContain('ok');
// Assert full three-key sort invariant: (status, impact, id) ascending.
+ /** @type {Record} */
const STATUS_ORDER = { fail: 0, warn: 1, ok: 2 };
+ /** @type {Record} */
const IMPACT_ORDER = { data_integrity: 0, security: 1, operability: 2, hygiene: 3 };
for (let i = 1; i < findings.length; i++) {
- const a = findings[i - 1];
- const b = findings[i];
+ const a = /** @type {*} */ (findings[i - 1]);
+ const b = /** @type {*} */ (findings[i]);
const statusCmp = (STATUS_ORDER[a.status] ?? 9) - (STATUS_ORDER[b.status] ?? 9);
if (statusCmp !== 0) {
expect(statusCmp).toBeLessThan(0);
@@ -257,7 +259,7 @@ describe('individual check guards', () => {
},
});
- const finding = await checkCoverageComplete(ctx);
+ const finding = /** @type {*} */ (await checkCoverageComplete(ctx));
expect(finding.code).toBe(CODES.COVERAGE_MISSING_WRITERS);
expect(finding.evidence.missingWriters).toContain('alice');
From 2cd7cc0ca4a84f7a4458dd0f088d573adf9d9d74 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Sat, 14 Feb 2026 00:15:45 -0800
Subject: [PATCH 11/14] fix(doctor): refs-consistent OK message counts only
verified refs
checkRefsConsistent now filters writer heads with null sha into a
separate `checkable` array so the OK message accurately reports how
many refs were actually verified. Adds regression test confirming the
count excludes null-sha entries.
---
bin/cli/commands/doctor/checks.js | 8 +++-----
test/unit/cli/doctor.test.js | 22 +++++++++++++++++++++-
2 files changed, 24 insertions(+), 6 deletions(-)
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index f6b7fa0..3389dea 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -70,12 +70,10 @@ export async function checkRefsConsistent(ctx) {
const allRefs = ctx.writerHeads.map((h) => ({
ref: h.ref, sha: h.sha, label: `writer ${h.writerId}`,
}));
+ const checkable = allRefs.filter((r) => r.sha);
let allOk = true;
- for (const { ref, sha, label } of allRefs) {
- if (!sha) {
- continue;
- }
+ for (const { ref, sha, label } of checkable) {
const exists = await ctx.persistence.nodeExists(sha);
if (!exists) {
allOk = false;
@@ -91,7 +89,7 @@ export async function checkRefsConsistent(ctx) {
if (allOk) {
findings.push({
id: 'refs-consistent', status: 'ok', code: CODES.REFS_OK,
- impact: 'data_integrity', message: `All ${allRefs.length} refs point to existing objects`,
+ impact: 'data_integrity', message: `All ${checkable.length} ref(s) point to existing objects`,
});
}
return findings;
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index e76d01d..07fe5cf 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -1,7 +1,7 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { CODES } from '../../../bin/cli/commands/doctor/codes.js';
import { DOCTOR_EXIT_CODES } from '../../../bin/cli/commands/doctor/types.js';
-import { checkCoverageComplete, checkClockSkew } from '../../../bin/cli/commands/doctor/checks.js';
+import { checkCoverageComplete, checkClockSkew, checkRefsConsistent } from '../../../bin/cli/commands/doctor/checks.js';
// Mock shared.js to avoid real git operations
vi.mock('../../../bin/cli/shared.js', () => ({
@@ -293,4 +293,24 @@ describe('individual check guards', () => {
// Only one writer had valid data, so skew check is skipped (< 2 writers)
expect(finding.code).toBe(CODES.CLOCK_SYNCED);
});
+
+ it('checkRefsConsistent OK message counts only verified refs (not null-sha)', async () => {
+ const ctx = /** @type {*} */ ({
+ writerHeads: [
+ { writerId: 'alice', sha: 'aaaa000000000000000000000000000000000000', ref: 'refs/warp/demo/writers/alice' },
+ { writerId: 'bob', sha: null, ref: 'refs/warp/demo/writers/bob' },
+ ],
+ persistence: {
+ nodeExists: vi.fn().mockResolvedValue(true),
+ },
+ });
+
+ const findings = /** @type {*[]} */ (await checkRefsConsistent(ctx));
+ const ok = findings.find((/** @type {*} */ f) => f.code === CODES.REFS_OK);
+
+ expect(ok).toBeDefined();
+ // Must say "1 ref(s)" not "2 refs" — bob's null sha was not verified
+ expect(ok.message).toMatch(/\b1 ref/);
+ expect(ctx.persistence.nodeExists).not.toHaveBeenCalledWith(null);
+ });
});
From 5f27d37d1b466eabdec8b31e67613fa5dc633003 Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Sat, 14 Feb 2026 00:16:06 -0800
Subject: [PATCH 12/14] docs(changelog): note refs-consistent count fix
---
CHANGELOG.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8b5a09b..3b9b17f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,6 +26,7 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
- **coverage-complete**: Writer heads with null SHA are now reported as missing (not silently skipped)
- **checkHooksInstalled**: Made `async` for consistency with other check functions (`await Promise.resolve(...)` pattern to satisfy both `require-await` and `await-thenable` lint rules)
- **sort-order test**: Hardened to exercise all three status tiers (fail/warn/ok) with targeted mocks and assert the full three-key sort invariant (status > impact > id)
+- **refs-consistent**: OK message now counts only verified refs (excludes null-sha writer heads)
- **seed-doctor-graph.js**: Removed stale "installs hooks" claim from doc comment
## [10.12.0] — 2026-02-13 — Multi-Runtime CLI + parseArgs Migration
From cd0a609ea09a06e547100e1d564aec76278d830b Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Sat, 14 Feb 2026 00:35:16 -0800
Subject: [PATCH 13/14] fix(doctor): address CodeRabbit review nits (round 5)
- Remove redundant `await Promise.resolve()` wrapping in checkHooksInstalled
- Add CLOCK_SYNCED and AUDIT_OK assertions to healthy-graph test
- Update CHANGELOG to reflect simplified async pattern
---
CHANGELOG.md | 2 +-
ROADMAP.md | 3 +++
bin/cli/commands/doctor/checks.js | 5 +++--
test/unit/cli/doctor.test.js | 2 ++
4 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3b9b17f..779afd6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -24,7 +24,7 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
### Fixed
- **coverage-complete**: Writer heads with null SHA are now reported as missing (not silently skipped)
-- **checkHooksInstalled**: Made `async` for consistency with other check functions (`await Promise.resolve(...)` pattern to satisfy both `require-await` and `await-thenable` lint rules)
+- **checkHooksInstalled**: Made `async` for consistency with other check functions; removed redundant `await Promise.resolve()` wrapping
- **sort-order test**: Hardened to exercise all three status tiers (fail/warn/ok) with targeted mocks and assert the full three-key sort invariant (status > impact > id)
- **refs-consistent**: OK message now counts only verified refs (excludes null-sha writer heads)
- **seed-doctor-graph.js**: Removed stale "installs hooks" claim from doc comment
diff --git a/ROADMAP.md b/ROADMAP.md
index 8de60f8..b7a3b78 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -442,6 +442,9 @@ No v2.0 tag until all pass:
| B4 | B | **WARP UI VISUALIZER** — local graph/audit explorer |
| B5 | D | **EXPERIMENTAL SYNC-BUILDER** — only behind explicit flag; requires invariants doc + soak + rollback proof; not eligible for core release without separate RFC |
| B6 | B/C | **RUST CORE / WASM** — pursue only when measured perf ceiling is proven in JS path |
+| B7 | C | **DOCTOR: `writerHeadsWithSha()` HELPER** — pre-filter null-sha heads once in orchestrator, eliminating defensive guards in every check function |
+| B8 | C | **DOCTOR: PROPERTY-BASED FUZZ TEST** — fuzz `writerHeads` with random null/empty shas and verify no check throws (all return findings) |
+| B9 | D | **DOCTOR: EXPORT `compareFinding`** — enable direct unit testing of sort logic independent of the full doctor pipeline |
---
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index 3389dea..4a48890 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -350,13 +350,14 @@ export async function checkClockSkew(ctx) {
* @param {DoctorContext} ctx
* @returns {Promise}
*/
+// eslint-disable-next-line @typescript-eslint/require-await -- sync body, async contract
export async function checkHooksInstalled(ctx) {
try {
const installer = createHookInstaller();
const s = installer.getHookStatus(ctx.repoPath);
- return await Promise.resolve(buildHookFinding(s));
+ return buildHookFinding(s);
} catch (/** @type {*} */ err) { // TODO(ts-cleanup): narrow error type
- return await Promise.resolve(internalError('hooks-installed', err));
+ return internalError('hooks-installed', err);
}
}
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index 07fe5cf..b49b433 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -153,6 +153,8 @@ describe('doctor command', () => {
expect(codes).toContain(CODES.COVERAGE_OK);
expect(codes).toContain(CODES.CHECKPOINT_OK);
expect(codes).toContain(CODES.HOOKS_OK);
+ expect(codes).toContain(CODES.CLOCK_SYNCED);
+ expect(codes).toContain(CODES.AUDIT_OK);
});
it('returns exit 3 when warnings are present', async () => {
From 5f4511948dccf223c53403b7d121c1509cd07abe Mon Sep 17 00:00:00 2001
From: CI Bot
Date: Sat, 14 Feb 2026 01:00:56 -0800
Subject: [PATCH 14/14] fix(doctor): handle dangling refs gracefully in BATS
E2E tests
Root causes:
- collectWriterHeads crashed when readRef encountered a ref pointing
to a missing object (git show-ref exits 128, not 1, for dangling refs)
- seed-doctor-graph.js called materialize() but not createCheckpoint()
or syncCoverage(), so doctor always found missing checkpoint/coverage
- _run_json BATS helper had status capture bug: `|| true` made $?
always 0, masking real exit codes
Fixes:
- collectWriterHeads now catches readRef failures and includes the
writer head with sha=null for downstream checks to report
- checkRefsConsistent reports null-sha heads as REFS_DANGLING_OBJECT
- seed-doctor-graph.js calls createCheckpoint() + syncCoverage()
- _run_json properly captures exit code via local variable
---
CHANGELOG.md | 6 ++++--
bin/cli/commands/doctor/checks.js | 17 ++++++++++++++---
bin/cli/commands/doctor/index.js | 12 ++++++++----
bin/cli/commands/doctor/types.js | 2 +-
test/bats/cli-doctor.bats | 5 +++--
test/bats/helpers/seed-doctor-graph.js | 4 +++-
test/unit/cli/doctor.test.js | 15 +++++++++++----
7 files changed, 44 insertions(+), 17 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 779afd6..2baaa57 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,8 +26,10 @@ Adds `git warp doctor`, a structural diagnostics command that probes for anomali
- **coverage-complete**: Writer heads with null SHA are now reported as missing (not silently skipped)
- **checkHooksInstalled**: Made `async` for consistency with other check functions; removed redundant `await Promise.resolve()` wrapping
- **sort-order test**: Hardened to exercise all three status tiers (fail/warn/ok) with targeted mocks and assert the full three-key sort invariant (status > impact > id)
-- **refs-consistent**: OK message now counts only verified refs (excludes null-sha writer heads)
-- **seed-doctor-graph.js**: Removed stale "installs hooks" claim from doc comment
+- **refs-consistent**: OK message now counts only verified refs (excludes null-sha writer heads); null-sha heads reported as `REFS_DANGLING_OBJECT`
+- **collectWriterHeads**: Gracefully handles `readRef` failures (e.g. `git show-ref` exit 128 for dangling refs) instead of crashing the entire doctor command
+- **seed-doctor-graph.js**: Calls `createCheckpoint()` + `syncCoverage()` (materialize alone does not create these refs); removed stale "installs hooks" claim
+- **`_run_json` BATS helper**: Fixed status capture bug (`|| true` made `$?` always 0)
## [10.12.0] — 2026-02-13 — Multi-Runtime CLI + parseArgs Migration
diff --git a/bin/cli/commands/doctor/checks.js b/bin/cli/commands/doctor/checks.js
index 4a48890..2134c8f 100644
--- a/bin/cli/commands/doctor/checks.js
+++ b/bin/cli/commands/doctor/checks.js
@@ -70,10 +70,21 @@ export async function checkRefsConsistent(ctx) {
const allRefs = ctx.writerHeads.map((h) => ({
ref: h.ref, sha: h.sha, label: `writer ${h.writerId}`,
}));
- const checkable = allRefs.filter((r) => r.sha);
let allOk = true;
+ let checkedCount = 0;
- for (const { ref, sha, label } of checkable) {
+ for (const { ref, sha, label } of allRefs) {
+ if (!sha) {
+ allOk = false;
+ findings.push({
+ id: 'refs-consistent', status: 'fail', code: CODES.REFS_DANGLING_OBJECT,
+ impact: 'data_integrity',
+ message: `Ref ${ref} points to a missing or unreadable object`,
+ fix: `Investigate broken ref for ${label}`, evidence: { ref },
+ });
+ continue;
+ }
+ checkedCount++;
const exists = await ctx.persistence.nodeExists(sha);
if (!exists) {
allOk = false;
@@ -89,7 +100,7 @@ export async function checkRefsConsistent(ctx) {
if (allOk) {
findings.push({
id: 'refs-consistent', status: 'ok', code: CODES.REFS_OK,
- impact: 'data_integrity', message: `All ${checkable.length} ref(s) point to existing objects`,
+ impact: 'data_integrity', message: `All ${checkedCount} ref(s) point to existing objects`,
});
}
return findings;
diff --git a/bin/cli/commands/doctor/index.js b/bin/cli/commands/doctor/index.js
index 4ac18a7..d416308 100644
--- a/bin/cli/commands/doctor/index.js
+++ b/bin/cli/commands/doctor/index.js
@@ -101,7 +101,7 @@ function assemblePayload({ repo, graph, policy, findings, checksRun, startMs })
/**
* @param {import('../../types.js').Persistence} persistence
* @param {string} graphName
- * @returns {Promise>}
+ * @returns {Promise>}
*/
async function collectWriterHeads(persistence, graphName) {
const prefix = buildWritersPrefix(graphName);
@@ -112,10 +112,14 @@ async function collectWriterHeads(persistence, graphName) {
if (!writerId) {
continue;
}
- const sha = await persistence.readRef(ref);
- if (sha) {
- heads.push({ writerId, sha, ref });
+ let sha = null;
+ try {
+ sha = await persistence.readRef(ref);
+ } catch {
+ // Dangling ref — readRef may fail (e.g. show-ref exits 128 for missing objects).
+ // Include the head with sha=null so downstream checks can report it.
}
+ heads.push({ writerId, sha, ref });
}
return heads.sort((a, b) => a.writerId.localeCompare(b.writerId));
}
diff --git a/bin/cli/commands/doctor/types.js b/bin/cli/commands/doctor/types.js
index 39f0c64..27797ae 100644
--- a/bin/cli/commands/doctor/types.js
+++ b/bin/cli/commands/doctor/types.js
@@ -67,7 +67,7 @@
* @typedef {Object} DoctorContext
* @property {import('../../types.js').Persistence} persistence
* @property {string} graphName
- * @property {Array<{writerId: string, sha: string, ref: string}>} writerHeads
+ * @property {Array<{writerId: string, sha: string|null, ref: string}>} writerHeads
* @property {DoctorPolicy} policy
* @property {string} repoPath
*/
diff --git a/test/bats/cli-doctor.bats b/test/bats/cli-doctor.bats
index 4f9633e..1104dce 100644
--- a/test/bats/cli-doctor.bats
+++ b/test/bats/cli-doctor.bats
@@ -14,8 +14,9 @@ teardown() {
# Helper: run a command and capture only stdout (BATS 1.8+ merges stderr into
# $output, which breaks JSON parsing when git emits diagnostic messages).
_run_json() {
- output=$("$@" 2>/dev/null) || true
- status=$?
+ local rc=0
+ output=$("$@" 2>/dev/null) || rc=$?
+ status=$rc
}
@test "doctor --json healthy graph returns all ok" {
diff --git a/test/bats/helpers/seed-doctor-graph.js b/test/bats/helpers/seed-doctor-graph.js
index 0356415..811e4b4 100644
--- a/test/bats/helpers/seed-doctor-graph.js
+++ b/test/bats/helpers/seed-doctor-graph.js
@@ -25,5 +25,7 @@ await patchTwo
.addEdge('user:alice', 'user:bob', 'follows')
.commit();
-// Materialize to create checkpoint + coverage refs
+// Materialize state, then explicitly create checkpoint + coverage refs
await graph.materialize();
+await graph.createCheckpoint();
+await graph.syncCoverage();
diff --git a/test/unit/cli/doctor.test.js b/test/unit/cli/doctor.test.js
index b49b433..2cf1896 100644
--- a/test/unit/cli/doctor.test.js
+++ b/test/unit/cli/doctor.test.js
@@ -296,7 +296,7 @@ describe('individual check guards', () => {
expect(finding.code).toBe(CODES.CLOCK_SYNCED);
});
- it('checkRefsConsistent OK message counts only verified refs (not null-sha)', async () => {
+ it('checkRefsConsistent reports null-sha heads as dangling', async () => {
const ctx = /** @type {*} */ ({
writerHeads: [
{ writerId: 'alice', sha: 'aaaa000000000000000000000000000000000000', ref: 'refs/warp/demo/writers/alice' },
@@ -308,11 +308,18 @@ describe('individual check guards', () => {
});
const findings = /** @type {*[]} */ (await checkRefsConsistent(ctx));
+
+ // bob's null sha should produce a REFS_DANGLING_OBJECT finding
+ const dangling = findings.find((/** @type {*} */ f) => f.code === CODES.REFS_DANGLING_OBJECT);
+ expect(dangling).toBeDefined();
+ expect(dangling.status).toBe('fail');
+ expect(dangling.ref || dangling.evidence?.ref).toBeTruthy();
+
+ // No OK finding because allOk is false
const ok = findings.find((/** @type {*} */ f) => f.code === CODES.REFS_OK);
+ expect(ok).toBeUndefined();
- expect(ok).toBeDefined();
- // Must say "1 ref(s)" not "2 refs" — bob's null sha was not verified
- expect(ok.message).toMatch(/\b1 ref/);
+ // nodeExists should NOT have been called with null
expect(ctx.persistence.nodeExists).not.toHaveBeenCalledWith(null);
});
});