diff --git a/.vscode/launch.json b/.vscode/launch.json
index 0ad80e0d..2860f61e 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -14,6 +14,7 @@
       ],
       "outputCapture": "std",
       "internalConsoleOptions": "openOnSessionStart",
+      "envFile": "${workspaceFolder}/.env",
       "env": {
         "NODE_ENV": "development",
         "TS_NODE_SKIP_IGNORE": "true"
@@ -33,6 +34,7 @@
       ],
       "outputCapture": "std",
       "internalConsoleOptions": "openOnSessionStart",
+      "envFile": "${workspaceFolder}/.env",
       "env": {
         "NODE_ENV": "development",
         "TS_NODE_SKIP_IGNORE": "true",
@@ -53,6 +55,7 @@
       ],
       "outputCapture": "std",
       "internalConsoleOptions": "openOnSessionStart",
+      "envFile": "${workspaceFolder}/.env",
       "env": {
         "NODE_ENV": "development",
         "TS_NODE_SKIP_IGNORE": "true",
@@ -60,6 +63,25 @@
       },
       "killBehavior": "polite",
     },
+    {
+      "type": "node",
+      "request": "launch",
+      "name": "Run: debug server",
+      "runtimeArgs": [
+        "-r",
+        "ts-node/register"
+      ],
+      "args": [
+        "${workspaceFolder}/util/debug-server.ts"
+      ],
+      "outputCapture": "std",
+      "internalConsoleOptions": "openOnSessionStart",
+      "env": {
+        "NODE_ENV": "development",
+        "TS_NODE_SKIP_IGNORE": "true",
+      },
+      "killBehavior": "polite",
+    },
     {
       "type": "node",
       "request": "launch",
diff --git a/migrations/1676395230930_inscriptions.ts b/migrations/1676395230930_inscriptions.ts
index 3205872b..17951b52 100644
--- a/migrations/1676395230930_inscriptions.ts
+++ b/migrations/1676395230930_inscriptions.ts
@@ -31,6 +31,7 @@ export function up(pgm: MigrationBuilder): void {
     },
     address: {
       type: 'text',
+      notNull: true,
     },
     mime_type: {
       type: 'text',
@@ -76,11 +77,6 @@ export function up(pgm: MigrationBuilder): void {
     },
   });
   pgm.createConstraint('inscriptions', 'inscriptions_number_unique', 'UNIQUE(number)');
-  pgm.createConstraint(
-    'inscriptions',
-    'inscriptions_ordinal_number_fk',
-    'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE'
-  );
   pgm.createIndex('inscriptions', ['mime_type']);
   pgm.createIndex('inscriptions', ['recursive']);
   pgm.createIndex('inscriptions', [
@@ -89,4 +85,5 @@ export function up(pgm: MigrationBuilder): void {
   ]);
   pgm.createIndex('inscriptions', ['address']);
   pgm.createIndex('inscriptions', [{ name: 'updated_at', sort: 'DESC' }]);
+  pgm.createIndex('inscriptions', ['ordinal_number']);
 }
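
Note on the two changes above: dropping `inscriptions_ordinal_number_fk` removes the `ON DELETE CASCADE` behavior that previously tied `inscriptions` rows to `satoshis`, so referential cleanup presumably moves to the application layer, while the plain index on `ordinal_number` keeps joins on that column fast (the same FK-to-index swap repeats in the three migrations below). A hedged sketch of the explicit cleanup a rollback now implies — the helper and its deletion order are hypothetical, not part of this patch:

```ts
import { PgSqlClient } from '@hirosystems/api-toolkit';

// Hypothetical cleanup mirroring the dropped ON DELETE CASCADE constraints:
// with the FKs gone, dependent rows must be deleted explicitly, in
// dependency order, when a satoshi's data is rolled back.
async function rollbackSatoshi(sql: PgSqlClient, ordinalNumber: string): Promise<void> {
  await sql`DELETE FROM inscription_transfers WHERE ordinal_number = ${ordinalNumber}`;
  await sql`DELETE FROM current_locations WHERE ordinal_number = ${ordinalNumber}`;
  await sql`DELETE FROM locations WHERE ordinal_number = ${ordinalNumber}`;
  await sql`DELETE FROM inscriptions WHERE ordinal_number = ${ordinalNumber}`;
  await sql`DELETE FROM satoshis WHERE ordinal_number = ${ordinalNumber}`;
}
```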
diff --git a/migrations/1677284495299_locations.ts b/migrations/1677284495299_locations.ts
index 30894492..3cdcc48d 100644
--- a/migrations/1677284495299_locations.ts
+++ b/migrations/1677284495299_locations.ts
@@ -28,6 +28,7 @@ export function up(pgm: MigrationBuilder): void {
     },
     address: {
       type: 'text',
+      notNull: true,
     },
     output: {
       type: 'text',
@@ -57,11 +58,6 @@ export function up(pgm: MigrationBuilder): void {
   pgm.createConstraint('locations', 'locations_pkey', {
     primaryKey: ['ordinal_number', 'block_height', 'tx_index'],
   });
-  pgm.createConstraint(
-    'locations',
-    'locations_ordinal_number_fk',
-    'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE'
-  );
   pgm.createIndex('locations', ['output', 'offset']);
   pgm.createIndex('locations', ['timestamp']);
   pgm.createIndex('locations', [
diff --git a/migrations/1677284495500_current-locations.ts b/migrations/1677284495500_current-locations.ts
index 51f4b8a3..8da71549 100644
--- a/migrations/1677284495500_current-locations.ts
+++ b/migrations/1677284495500_current-locations.ts
@@ -19,18 +19,10 @@ export function up(pgm: MigrationBuilder): void {
     },
     address: {
       type: 'text',
+      notNull: true,
     },
   });
-  pgm.createConstraint(
-    'current_locations',
-    'current_locations_locations_fk',
-    'FOREIGN KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE'
-  );
-  pgm.createConstraint(
-    'locations',
-    'locations_satoshis_fk',
-    'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE'
-  );
   pgm.createIndex('current_locations', ['ordinal_number'], { unique: true });
   pgm.createIndex('current_locations', ['address']);
+  pgm.createIndex('current_locations', ['block_height', 'tx_index']);
 }
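
The new composite index presumably serves block-oriented scans, such as finding the current locations written at a given block position during a rollback, now that the FK constraints no longer tie these rows to `locations` and `satoshis`. A sketch of that access pattern (hypothetical query, assuming the postgres.js `sql` tag used throughout the codebase):

```ts
import { PgSqlClient } from '@hirosystems/api-toolkit';

// Hypothetical: current_locations rows written at a given block position,
// answerable via the new (block_height, tx_index) index instead of a scan.
async function locationsAtBlockPosition(sql: PgSqlClient, blockHeight: number, txIndex: number) {
  return sql<{ ordinal_number: string; address: string }[]>`
    SELECT ordinal_number, address
    FROM current_locations
    WHERE block_height = ${blockHeight} AND tx_index = ${txIndex}
  `;
}
```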
diff --git a/migrations/1677284495501_inscription-transfers.ts b/migrations/1677284495501_inscription-transfers.ts
index 90b72717..648ef662 100644
--- a/migrations/1677284495501_inscription-transfers.ts
+++ b/migrations/1677284495501_inscription-transfers.ts
@@ -37,16 +37,6 @@ export function up(pgm: MigrationBuilder): void {
   pgm.createConstraint('inscription_transfers', 'inscription_transfers_pkey', {
     primaryKey: ['block_height', 'block_transfer_index'],
   });
-  pgm.createConstraint(
-    'inscription_transfers',
-    'inscription_transfers_locations_fk',
-    'FOREIGN KEY(ordinal_number, block_height, tx_index) REFERENCES locations(ordinal_number, block_height, tx_index) ON DELETE CASCADE'
-  );
-  pgm.createConstraint(
-    'inscription_transfers',
-    'inscription_transfers_satoshis_fk',
-    'FOREIGN KEY(ordinal_number) REFERENCES satoshis(ordinal_number) ON DELETE CASCADE'
-  );
   pgm.createIndex('inscription_transfers', ['genesis_id']);
   pgm.createIndex('inscription_transfers', ['number']);
 }
diff --git a/package-lock.json b/package-lock.json
index d715c605..a6e43e84 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -15,7 +15,7 @@
         "@fastify/swagger": "^8.3.1",
         "@fastify/type-provider-typebox": "^3.2.0",
         "@hirosystems/api-toolkit": "^1.4.0",
-        "@hirosystems/chainhook-client": "^1.8.0",
+        "@hirosystems/chainhook-client": "^1.10.0",
         "@semantic-release/changelog": "^6.0.3",
         "@semantic-release/commit-analyzer": "^10.0.4",
         "@semantic-release/git": "^10.0.1",
@@ -1271,9 +1271,9 @@
       }
     },
     "node_modules/@hirosystems/chainhook-client": {
-      "version": "1.8.0",
-      "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz",
-      "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==",
+      "version": "1.10.0",
+      "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.10.0.tgz",
+      "integrity": "sha512-Akp3+sZxys/n2iC5NjfnjEHtMfZmP89wSjZfvxU31pvXjz5PtOvL2LFZtkU3+y2EKjEI9msKemMEvQqSAdKO3g==",
       "dependencies": {
         "@fastify/type-provider-typebox": "^3.2.0",
         "fastify": "^4.15.0",
@@ -19739,9 +19739,9 @@
       }
     },
     "@hirosystems/chainhook-client": {
-      "version": "1.8.0",
-      "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.8.0.tgz",
-      "integrity": "sha512-BpYwrbxWuH0KGRyKq1T8nIiZUGaapOxz6yFZ653m6CJi7DS7kqOm2+v5X/DR0hbeZUmqriGMUJnROJ1tW08aEg==",
+      "version": "1.10.0",
+      "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-1.10.0.tgz",
+      "integrity": "sha512-Akp3+sZxys/n2iC5NjfnjEHtMfZmP89wSjZfvxU31pvXjz5PtOvL2LFZtkU3+y2EKjEI9msKemMEvQqSAdKO3g==",
       "requires": {
         "@fastify/type-provider-typebox": "^3.2.0",
         "fastify": "^4.15.0",
diff --git a/package.json b/package.json
index 889db34b..08fd8b84 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,7 @@
     "build": "rimraf ./dist && tsc --project tsconfig.build.json",
     "start": "node dist/src/index.js",
     "start-ts": "ts-node ./src/index.ts",
-    "start:debug-server": "node dist/util/debug-server.js",
+    "start:debug-server": "ts-node ./util/debug-server.ts",
     "test": "jest --runInBand",
     "test:brc-20": "npm run test -- ./tests/brc-20/",
     "test:api": "npm run test -- ./tests/api/",
@@ -57,7 +57,7 @@
     "@fastify/swagger": "^8.3.1",
     "@fastify/type-provider-typebox": "^3.2.0",
     "@hirosystems/api-toolkit": "^1.4.0",
-    "@hirosystems/chainhook-client": "^1.8.0",
+    "@hirosystems/chainhook-client": "^1.10.0",
     "@semantic-release/changelog": "^6.0.3",
     "@semantic-release/commit-analyzer": "^10.0.4",
     "@semantic-release/git": "^10.0.1",
diff --git a/src/env.ts b/src/env.ts
index fc8f0389..bc7aca06 100644
--- a/src/env.ts
+++ b/src/env.ts
@@ -33,9 +33,9 @@ const schema = Type.Object({
   ORDHOOK_NODE_RPC_PORT: Type.Number({ default: 20456, minimum: 0, maximum: 65535 }),
   /**
    * Authorization token that the ordhook node must send with every event to make sure it's
-   * coming from the valid instance
+   * coming from a valid instance. Leave it undefined to skip header validation.
    */
-  ORDHOOK_NODE_AUTH_TOKEN: Type.String(),
+  ORDHOOK_NODE_AUTH_TOKEN: Type.Optional(Type.String()),
   /**
    * Register ordhook predicates automatically when the API is first launched. Set this to `false`
    * if you're configuring your predicates manually for any reason.
@@ -53,6 +53,8 @@ const schema = Type.Object({
     { default: 'default', replay: 'replay' },
     { default: 'default' }
   ),
+  /** Whether the API should shut down automatically once a `replay` ingestion finishes */
+  ORDHOOK_REPLAY_INGESTION_MODE_AUTO_SHUTDOWN: Type.Boolean({ default: true }),
 
   PGHOST: Type.String(),
   PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }),
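
With the auth token now optional and the auto-shutdown flag defaulting to `true`, a local `.env` — which the updated launch.json configurations load through `envFile` — might look like the following; all values are illustrative:

```
# Illustrative local .env (values hypothetical)
PGHOST=localhost
PGPORT=5432
# Optional now: omit to skip header validation on incoming ordhook events.
ORDHOOK_NODE_AUTH_TOKEN=dev-token
# Replay a block range but keep the API alive once the replay finishes.
ORDHOOK_INGESTION_MODE=replay
ORDHOOK_REPLAY_INGESTION_MODE_AUTO_SHUTDOWN=false
```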
diff --git a/src/ordhook/server.ts b/src/ordhook/server.ts
index 19d1decc..cc383229 100644
--- a/src/ordhook/server.ts
+++ b/src/ordhook/server.ts
@@ -45,10 +45,11 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise<Chainho
   const serverOpts: ServerOptions = {
     hostname: ENV.API_HOST,
     port: ENV.EVENT_PORT,
-    auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN,
+    auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN ?? '',
     external_base_url: `http://${ENV.EXTERNAL_HOSTNAME}`,
     wait_for_chainhook_node: ENV.ORDHOOK_AUTO_PREDICATE_REGISTRATION,
-    validate_chainhook_payloads: true,
+    validate_chainhook_payloads: false,
+    validate_token_authorization: ENV.ORDHOOK_NODE_AUTH_TOKEN != undefined,
     body_limit: ENV.EVENT_SERVER_BODY_LIMIT,
     node_type: 'ordhook',
   };
@@ -58,7 +59,11 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise<Chainho
   const server = new ChainhookEventObserver(serverOpts, ordhookOpts);
   await server.start(predicates, async (uuid: string, payload: Payload) => {
     const streamed = payload.chainhook.is_streaming_blocks;
-    if (ENV.ORDHOOK_INGESTION_MODE === 'replay' && streamed) {
+    if (
+      ENV.ORDHOOK_INGESTION_MODE === 'replay' &&
+      ENV.ORDHOOK_REPLAY_INGESTION_MODE_AUTO_SHUTDOWN &&
+      streamed
+    ) {
       logger.info(`OrdhookServer finished replaying blocks, shutting down`);
       return shutdown();
     }
@@ -67,5 +72,7 @@ export async function startOrdhookServer(args: { db: PgStore }): Promise<Chainho
     );
     await args.db.updateInscriptions(payload as BitcoinPayload);
   });
+  const chainTip = await args.db.getChainTipBlockHeight();
+  logger.info(`OrdhookServer chain tip is at ${chainTip}`);
   return server;
 }
diff --git a/src/pg/block-cache.ts b/src/pg/block-cache.ts
index 128dcba7..d194d5b9 100644
--- a/src/pg/block-cache.ts
+++ b/src/pg/block-cache.ts
@@ -61,7 +61,7 @@ export class BlockCache {
       content_length: reveal.content_length,
       block_height: this.blockHeight,
       tx_index: reveal.tx_index,
-      address: reveal.inscriber_address,
+      address: reveal.inscriber_address ?? '',
       number: reveal.inscription_number.jubilee,
       classic_number: reveal.inscription_number.classic,
       content: removeNullBytes(reveal.content_bytes),
@@ -73,7 +73,8 @@ export class BlockCache {
       parent: reveal.parent,
       timestamp: this.timestamp,
     });
-    this.revealedNumbers.push(reveal.inscription_number.jubilee);
+    if (reveal.inscription_number.jubilee > 0)
+      this.revealedNumbers.push(reveal.inscription_number.jubilee);
     this.increaseMimeTypeCount(mime_type);
     this.increaseSatRarityCount(satoshi.rarity);
     this.increaseInscriptionTypeCount(reveal.inscription_number.classic < 0 ? 'cursed' : 'blessed');
@@ -85,7 +86,7 @@ export class BlockCache {
       tx_id,
       tx_index: reveal.tx_index,
       ordinal_number,
-      address: reveal.inscriber_address,
+      address: reveal.inscriber_address ?? '',
       output: `${satpoint.tx_id}:${satpoint.vout}`,
       offset: satpoint.offset ?? null,
       prev_output: null,
@@ -98,7 +99,7 @@ export class BlockCache {
       ordinal_number,
       block_height: this.blockHeight,
       tx_index: reveal.tx_index,
-      address: reveal.inscriber_address,
+      address: reveal.inscriber_address ?? '',
     });
     if (recursive_refs.length > 0) this.recursiveRefs.set(reveal.inscription_id, recursive_refs);
   }
@@ -107,7 +108,7 @@ export class BlockCache {
     const satpoint = parseSatPoint(transfer.satpoint_post_transfer);
     const prevSatpoint = parseSatPoint(transfer.satpoint_pre_transfer);
     const ordinal_number = transfer.ordinal_number.toString();
-    const address = transfer.destination.value ?? null;
+    const address = transfer.destination.value ?? '';
     this.locations.push({
       block_hash: this.blockHash,
       block_height: this.blockHeight,
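
Every address write above now coalesces a missing inscriber address to the empty string, matching the `notNull: true` columns added in the migrations. A minimal sketch of the convention (the helper is hypothetical; the patch inlines `?? ''` at each call site):

```ts
// Hypothetical helper capturing the convention: absent or unknown addresses
// are stored as '' rather than NULL so the columns can be NOT NULL.
function normalizeAddress(address: string | null | undefined): string {
  return address ?? '';
}
```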
diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts
index 494a0998..708e4f58 100644
--- a/src/pg/pg-store.ts
+++ b/src/pg/pg-store.ts
@@ -35,7 +35,7 @@ import { BlockCache } from './block-cache';
 
 export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations');
 const ORDINALS_GENESIS_BLOCK = 767430;
-export const INSERT_BATCH_SIZE = 4000;
+export const INSERT_BATCH_SIZE = 3500;
 
 type InscriptionIdentifier = { genesis_id: string } | { number: number };
 
@@ -82,7 +82,8 @@ export class PgStore extends BasePgStore {
    */
   async updateInscriptions(payload: BitcoinPayload): Promise<void> {
     await this.sqlWriteTransaction(async sql => {
-      const streamed = payload.chainhook.is_streaming_blocks;
+      const streamed =
+        ENV.ORDHOOK_INGESTION_MODE === 'default' && payload.chainhook.is_streaming_blocks;
       for (const event of payload.rollback) {
         logger.info(`PgStore rollback block ${event.block_identifier.index}`);
         const time = stopwatch();
@@ -184,36 +185,34 @@ export class PgStore extends BasePgStore {
         ...l,
         timestamp: sql`TO_TIMESTAMP(${l.timestamp})`,
       }));
+      // Insert locations, figure out moved inscriptions, insert inscription transfers.
       for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE))
         await sql`
-          INSERT INTO locations ${sql(batch)}
-          ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING
-        `;
-      // Insert block transfers.
-      let block_transfer_index = 0;
-      const transferEntries = [];
-      for (const transfer of cache.locations) {
-        const transferred = await sql<{ genesis_id: string; number: string }[]>`
-          SELECT genesis_id, number FROM inscriptions
-          WHERE ordinal_number = ${transfer.ordinal_number} AND (
-            block_height < ${transfer.block_height}
-            OR (block_height = ${transfer.block_height} AND tx_index < ${transfer.tx_index})
+          WITH location_inserts AS (
+            INSERT INTO locations ${sql(batch)}
+            ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING
+            RETURNING ordinal_number, block_height, block_hash, tx_index
+          ),
+          prev_transfer_index AS (
+            SELECT MAX(block_transfer_index) AS max
+            FROM inscription_transfers
+            WHERE block_height = (SELECT block_height FROM location_inserts LIMIT 1)
+          ),
+          moved_inscriptions AS (
+            SELECT
+              i.genesis_id, i.number, i.ordinal_number, li.block_height, li.block_hash, li.tx_index,
+              (
+                ROW_NUMBER() OVER (ORDER BY li.block_height ASC, li.tx_index ASC)
+                  + (SELECT COALESCE(max, -1) FROM prev_transfer_index)
+              ) AS block_transfer_index
+            FROM inscriptions AS i
+            INNER JOIN location_inserts AS li ON li.ordinal_number = i.ordinal_number
+            WHERE
+              i.block_height < li.block_height
+              OR (i.block_height = li.block_height AND i.tx_index < li.tx_index)
           )
-        `;
-        for (const inscription of transferred)
-          transferEntries.push({
-            genesis_id: inscription.genesis_id,
-            number: inscription.number,
-            ordinal_number: transfer.ordinal_number,
-            block_height: transfer.block_height,
-            block_hash: transfer.block_hash,
-            tx_index: transfer.tx_index,
-            block_transfer_index: block_transfer_index++,
-          });
-      }
-      for await (const batch of batchIterate(transferEntries, INSERT_BATCH_SIZE))
-        await sql`
-          INSERT INTO inscription_transfers ${sql(batch)}
+          INSERT INTO inscription_transfers
+          (genesis_id, number, ordinal_number, block_height, block_hash, tx_index, block_transfer_index)
+          (SELECT * FROM moved_inscriptions)
           ON CONFLICT (block_height, block_transfer_index) DO NOTHING
         `;
     }
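
A worked example of the `block_transfer_index` arithmetic in the CTE above: `ROW_NUMBER()` is 1-based over the newly inserted locations, and `COALESCE(max, -1)` continues numbering from whatever earlier batches already wrote for the block.

```ts
// Sketch: if inscription_transfers already holds indexes 0..4 for this block,
// prev_transfer_index.max = 4 and the next rows get 5, 6, 7. With no prior
// rows, COALESCE(max, -1) = -1 and numbering starts at 0.
const prevMax = 4; // -1 when the block has no transfers yet
const rowNumbers = [1, 2, 3]; // ROW_NUMBER() over the new rows
const indexes = rowNumbers.map(n => n + prevMax); // [5, 6, 7]
```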
@@ -228,18 +227,20 @@ export class PgStore extends BasePgStore {
     if (cache.currentLocations.size) {
       // Deduct counts from previous owners
       const moved_sats = [...cache.currentLocations.keys()];
-      const prevOwners = await sql<{ address: string; count: number }[]>`
-        SELECT address, COUNT(*) AS count
-        FROM current_locations
-        WHERE ordinal_number IN ${sql(moved_sats)}
-        GROUP BY address
-      `;
-      for (const owner of prevOwners)
-        await sql`
-          UPDATE counts_by_address
-          SET count = count - ${owner.count}
-          WHERE address = ${owner.address}
+      for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) {
+        const prevOwners = await sql<{ address: string; count: number }[]>`
+          SELECT address, COUNT(*) AS count
+          FROM current_locations
+          WHERE ordinal_number IN ${sql(batch)}
+          GROUP BY address
         `;
+        for (const owner of prevOwners)
+          await sql`
+            UPDATE counts_by_address
+            SET count = count - ${owner.count}
+            WHERE address = ${owner.address}
+          `;
+      }
       // Insert locations
       const entries = [...cache.currentLocations.values()];
       for await (const batch of batchIterate(entries, INSERT_BATCH_SIZE))
@@ -255,24 +256,25 @@ export class PgStore extends BasePgStore {
               EXCLUDED.tx_index > current_locations.tx_index)
         `;
       // Update owner counts
-      await sql`
-        WITH new_owners AS (
-          SELECT address, COUNT(*) AS count
-          FROM current_locations
-          WHERE ordinal_number IN ${sql(moved_sats)}
-          GROUP BY address
-        )
-        INSERT INTO counts_by_address (address, count)
-        (SELECT address, count FROM new_owners)
-        ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count
-      `;
-      if (streamed)
-        for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE))
+      for await (const batch of batchIterate(moved_sats, INSERT_BATCH_SIZE)) {
+        await sql`
+          WITH new_owners AS (
+            SELECT address, COUNT(*) AS count
+            FROM current_locations
+            WHERE ordinal_number IN ${sql(batch)}
+            GROUP BY address
+          )
+          INSERT INTO counts_by_address (address, count)
+          (SELECT address, count FROM new_owners)
+          ON CONFLICT (address) DO UPDATE SET count = counts_by_address.count + EXCLUDED.count
+        `;
+        if (streamed)
           await sql`
             UPDATE inscriptions
             SET updated_at = NOW()
             WHERE ordinal_number IN ${sql(batch)}
           `;
+      }
     }
     await this.counts.applyCounts(sql, cache);
   }
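
Both the `INSERT_BATCH_SIZE` drop (4000 to 3500) earlier in this file and the newly batched `IN ${sql(batch)}` lists above are presumably about PostgreSQL's limit of 65535 bind parameters per statement: a multi-row insert consumes rows × columns parameters, and for the widest insert here (inscriptions, at roughly 18 columns — an assumption) 4000 rows would overflow the limit while 3500 stays under it. The arithmetic, under that column-count assumption:

```ts
// Hedged arithmetic behind the new batch size (18 columns is an assumption).
const PG_MAX_BIND_PARAMS = 65535;
const COLUMNS = 18;
console.log(4000 * COLUMNS); // 72000 – would exceed the limit
console.log(3500 * COLUMNS); // 63000 – fits
console.log(Math.floor(PG_MAX_BIND_PARAMS / COLUMNS)); // 3640 – theoretical ceiling
```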
diff --git a/src/pg/types.ts b/src/pg/types.ts
index 46e66e1e..82eed73c 100644
--- a/src/pg/types.ts
+++ b/src/pg/types.ts
@@ -15,7 +15,7 @@ export type DbInscriptionInsert = {
   classic_number: number;
   block_height: number;
   tx_index: number;
-  address: string | null;
+  address: string;
   mime_type: string;
   content_type: string;
   content_length: number;
@@ -34,7 +34,7 @@ export type DbLocationInsert = {
   block_hash: string;
   tx_index: number;
   tx_id: string;
-  address: string | null;
+  address: string;
   output: string;
   offset: PgNumeric | null;
   prev_output: string | null;
@@ -48,22 +48,7 @@ export type DbCurrentLocationInsert = {
   ordinal_number: PgNumeric;
   block_height: number;
   tx_index: number;
-  address: string | null;
-};
-
-type AbstractLocationData = {
-  block_height: number;
-  block_hash: string;
-  tx_id: string;
-  tx_index: number;
-  address: string | null;
-  output: string;
-  offset: PgNumeric | null;
-  prev_output: string | null;
-  prev_offset: PgNumeric | null;
-  value: PgNumeric | null;
-  transfer_type: DbLocationTransferType;
-  block_transfer_index: number | null;
+  address: string;
 };
 
 /**
@@ -116,7 +101,7 @@ export type DbLocation = {
   block_hash: string;
   tx_id: string;
   tx_index: number;
-  address: string | null;
+  address: string;
   output: string;
   offset: string | null;
   prev_output: string | null;
@@ -131,7 +116,7 @@ export type DbInscriptionLocationChange = {
   from_block_height: string;
   from_block_hash: string;
   from_tx_id: string;
-  from_address: string | null;
+  from_address: string;
   from_output: string;
   from_offset: string | null;
   from_value: string | null;
@@ -139,7 +124,7 @@ export type DbInscriptionLocationChange = {
   to_block_height: string;
   to_block_hash: string;
   to_tx_id: string;
-  to_address: string | null;
+  to_address: string;
   to_output: string;
   to_offset: string | null;
   to_value: string | null;
diff --git a/tests/api/inscriptions.test.ts b/tests/api/inscriptions.test.ts
index dc541cc3..d52d4ed3 100644
--- a/tests/api/inscriptions.test.ts
+++ b/tests/api/inscriptions.test.ts
@@ -3031,7 +3031,7 @@ describe('/inscriptions', () => {
           new TestChainhookPayloadBuilder()
             .rollback()
             .block({
-              height: 775618,
+              height: 778575,
               hash: '000000000000000000032ef6c45a69c0496456b3cae84ee9f2899f636d03c5ac',
               timestamp: 1675312161,
             })
diff --git a/tests/ordhook/server.test.ts b/tests/ordhook/server.test.ts
index 4f8a4d23..17cf8a70 100644
--- a/tests/ordhook/server.test.ts
+++ b/tests/ordhook/server.test.ts
@@ -126,7 +126,7 @@ describe('EventServer', () => {
       const payload2 = new TestChainhookPayloadBuilder()
         .rollback()
         .block({
-          height: 107,
+          height: 767430,
           hash: '0x163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
           timestamp: 1676913207,
         })
diff --git a/util/debug-server.ts b/util/debug-server.ts
index 3537408a..2ea49d17 100644
--- a/util/debug-server.ts
+++ b/util/debug-server.ts
@@ -23,17 +23,18 @@ import * as path from 'path';
 const serverOpts: ServerOptions = {
   hostname: ENV.API_HOST,
   port: ENV.EVENT_PORT,
-  auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN,
+  auth_token: ENV.ORDHOOK_NODE_AUTH_TOKEN ?? '',
   external_base_url: `http://${ENV.EXTERNAL_HOSTNAME}`,
   wait_for_chainhook_node: false,
   validate_chainhook_payloads: false,
+  validate_token_authorization: false,
   body_limit: ENV.EVENT_SERVER_BODY_LIMIT,
   node_type: 'ordhook',
 };
 const ordhookOpts: ChainhookNodeOptions = {
   base_url: ORDHOOK_BASE_PATH,
 };
-const dirPath = path.join(__dirname, '../../tmp/debug-server/');
+const dirPath = path.join(__dirname, '../tmp/debug-server/');
 fs.mkdirSync(dirPath, { recursive: true });
 logger.info(`DebugServer saving outputs to ${dirPath}`);
 
@@ -41,7 +42,7 @@ const server = new ChainhookEventObserver(serverOpts, ordhookOpts);
 server
   .start([], async (uuid: string, payload: Payload) => {
     logger.info(`DebugServer received payload from predicate ${uuid}`);
-    const filePath = path.join(dirPath, `${new Date().getTime()}.txt`);
+    const filePath = path.join(dirPath, `${payload.apply[0].block_identifier.index}.txt`);
     fs.writeFileSync(filePath, JSON.stringify(payload, null, 2));
     return Promise.resolve();
   })
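
Keying debug dumps by the applied block height makes replays easy to correlate with specific blocks, but `payload.apply[0]` assumes every payload carries at least one applied block, so a rollback-only payload would throw. A defensive variant (a sketch, not part of this patch):

```ts
// Hypothetical fallback for payloads that carry only rollbacks:
const index =
  payload.apply[0]?.block_identifier.index ??
  payload.rollback[0]?.block_identifier.index ??
  Date.now();
const filePath = path.join(dirPath, `${index}.txt`);
```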