From 719c33eec6bbcdf23926722518887de4d2cca8e3 Mon Sep 17 00:00:00 2001 From: Gregorio Juliana Date: Thu, 24 Oct 2024 18:31:48 +0200 Subject: [PATCH] feat: Get logs by tags (#9353) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes: https://github.com/AztecProtocol/aztec-packages/issues/9268 Indexes L2 encrypted note logs by their first field in the archiver, so they can later be queried via the node's RPC interface. This is intended to be used by PXE to retrieve tagged notes and avoid brute force trial decrypting. --------- Co-authored-by: Nicolás Venturo --- .../archiver/src/archiver/archiver.ts | 14 +++ .../archiver/src/archiver/archiver_store.ts | 9 ++ .../src/archiver/archiver_store_test_suite.ts | 118 ++++++++++++++++++ .../kv_archiver_store/kv_archiver_store.ts | 15 +++ .../archiver/kv_archiver_store/log_store.ts | 109 ++++++++++++---- .../memory_archiver_store.ts | 45 +++++++ .../src/aztec-node/http_rpc_server.ts | 2 + .../aztec-node/src/aztec-node/server.ts | 11 ++ .../circuit-types/src/logs/l2_logs_source.ts | 11 ++ 9 files changed, 313 insertions(+), 21 deletions(-) diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 18220995691..8e6523c3283 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -1,4 +1,5 @@ import { + type EncryptedL2NoteLog, type FromLogType, type GetUnencryptedLogsResponse, type InboxLeaf, @@ -627,6 +628,16 @@ export class Archiver implements ArchiveSource { return this.store.getLogs(from, limit, logType); } + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. 
+ */ + getLogsByTags(tags: Fr[]): Promise { + return this.store.getLogsByTags(tags); + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. @@ -924,6 +935,9 @@ class ArchiverStoreHelper ): Promise>[]> { return this.store.getLogs(from, limit, logType); } + getLogsByTags(tags: Fr[]): Promise { + return this.store.getLogsByTags(tags); + } getUnencryptedLogs(filter: LogFilter): Promise { return this.store.getUnencryptedLogs(filter); } diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index d2353d93fa4..8a1cc1559c9 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -1,4 +1,5 @@ import { + type EncryptedL2NoteLog, type FromLogType, type GetUnencryptedLogsResponse, type InboxLeaf, @@ -136,6 +137,14 @@ export interface ArchiverDataStore { logType: TLogType, ): Promise>[]>; + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + getLogsByTags(tags: Fr[]): Promise; + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. 
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 34443ee0cec..4bea2246fb7 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -14,6 +14,7 @@ import { makeExecutablePrivateFunctionWithMembershipProof, makeUnconstrainedFunctionWithMembershipProof, } from '@aztec/circuits.js/testing'; +import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { times } from '@aztec/foundation/collection'; import { randomBytes, randomInt } from '@aztec/foundation/crypto'; @@ -354,6 +355,123 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch }); }); + describe('getLogsByTags', () => { + const txsPerBlock = 4; + const numPrivateFunctionCalls = 3; + const numNoteEncryptedLogs = 2; + const numBlocks = 10; + let blocks: L1Published[]; + let tags: { [i: number]: { [j: number]: Buffer[] } } = {}; + + beforeEach(async () => { + blocks = times(numBlocks, (index: number) => ({ + data: L2Block.random(index + 1, txsPerBlock, numPrivateFunctionCalls, 2, numNoteEncryptedLogs, 2), + l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) }, + })); + // Last block has the note encrypted log tags of the first tx copied from the previous block + blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[0].functionLogs.forEach((fnLogs, fnIndex) => { + fnLogs.logs.forEach((log, logIndex) => { + const previousLogData = + blocks[numBlocks - 2].data.body.noteEncryptedLogs.txLogs[0].functionLogs[fnIndex].logs[logIndex].data; + previousLogData.copy(log.data, 0, 0, 32); + }); + }); + // Last block has invalid tags in the second tx + const tooBig = toBufferBE(Fr.MODULUS, 32); + blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[1].functionLogs.forEach(fnLogs => { + fnLogs.logs.forEach(log => { + tooBig.copy(log.data, 0, 0, 32); + }); + }); + 
+ await store.addBlocks(blocks); + await store.addLogs(blocks.map(b => b.data)); + + tags = {}; + blocks.forEach((b, blockIndex) => { + if (!tags[blockIndex]) { + tags[blockIndex] = {}; + } + b.data.body.noteEncryptedLogs.txLogs.forEach((txLogs, txIndex) => { + if (!tags[blockIndex][txIndex]) { + tags[blockIndex][txIndex] = []; + } + tags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32))); + }); + }); + }); + + it('is possible to batch request all logs of a tx via tags', async () => { + // get random tx from any block that's not the last one + const targetBlockIndex = randomInt(numBlocks - 2); + const targetTxIndex = randomInt(txsPerBlock); + + const logsByTags = await store.getLogsByTags( + tags[targetBlockIndex][targetTxIndex].map(buffer => new Fr(buffer)), + ); + + const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs; + expect(logsByTags.length).toEqual(expectedResponseSize); + + logsByTags.forEach((logsByTag, logIndex) => { + expect(logsByTag).toHaveLength(1); + const [log] = logsByTag; + expect(log).toEqual( + blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex], + ); + }); + }); + + it('is possible to batch request all logs of different blocks via tags', async () => { + // get first tx of first block and second tx of second block + const logsByTags = await store.getLogsByTags([...tags[0][0], ...tags[1][1]].map(buffer => new Fr(buffer))); + + const expectedResponseSize = 2 * numPrivateFunctionCalls * numNoteEncryptedLogs; + expect(logsByTags.length).toEqual(expectedResponseSize); + + logsByTags.forEach(logsByTag => expect(logsByTag).toHaveLength(1)); + }); + + it('is possible to batch request logs that have the same tag but different content', async () => { + // get first tx of last block + const logsByTags = await store.getLogsByTags(tags[numBlocks - 1][0].map(buffer => new Fr(buffer))); + + const expectedResponseSize = numPrivateFunctionCalls * 
numNoteEncryptedLogs; + expect(logsByTags.length).toEqual(expectedResponseSize); + + logsByTags.forEach(logsByTag => { + expect(logsByTag).toHaveLength(2); + const [tag0, tag1] = logsByTag.map(log => new Fr(log.data.subarray(0, 32))); + expect(tag0).toEqual(tag1); + }); + }); + + it('is possible to request logs for non-existing tags and determine their position', async () => { + // get random tx from any block that's not the last one + const targetBlockIndex = randomInt(numBlocks - 2); + const targetTxIndex = randomInt(txsPerBlock); + + const logsByTags = await store.getLogsByTags([ + Fr.random(), + ...tags[targetBlockIndex][targetTxIndex].slice(1).map(buffer => new Fr(buffer)), + ]); + + const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs; + expect(logsByTags.length).toEqual(expectedResponseSize); + + const [emptyLogsByTag, ...populatedLogsByTags] = logsByTags; + expect(emptyLogsByTag).toHaveLength(0); + + populatedLogsByTags.forEach((logsByTag, logIndex) => { + expect(logsByTag).toHaveLength(1); + const [log] = logsByTag; + expect(log).toEqual( + blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex + 1], + ); + }); + }); + }); + describe('getUnencryptedLogs', () => { const txsPerBlock = 4; const numPublicFunctionCalls = 3; diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index 75b7afff8be..b0328eae595 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -1,4 +1,5 @@ import { + type EncryptedL2NoteLog, type FromLogType, type GetUnencryptedLogsResponse, type InboxLeaf, @@ -239,6 +240,20 @@ export class KVArchiverDataStore implements ArchiverDataStore { } } + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). 
+ * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + getLogsByTags(tags: Fr[]): Promise { + try { + return this.#logStore.getLogsByTags(tags); + } catch (err) { + return Promise.reject(err); + } + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index 58fbe849a02..5c093fb4b1e 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -1,5 +1,6 @@ import { EncryptedL2BlockL2Logs, + EncryptedL2NoteLog, EncryptedNoteL2BlockL2Logs, ExtendedUnencryptedL2Log, type FromLogType, @@ -12,9 +13,10 @@ import { UnencryptedL2BlockL2Logs, type UnencryptedL2Log, } from '@aztec/circuit-types'; +import { Fr } from '@aztec/circuits.js'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants'; import { createDebugLogger } from '@aztec/foundation/log'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { type AztecKVStore, type AztecMap, type AztecMultiMap } from '@aztec/kv-store'; import { type BlockStore } from './block_store.js'; @@ -22,16 +24,22 @@ import { type BlockStore } from './block_store.js'; * A store for logs */ export class LogStore { - #noteEncryptedLogs: AztecMap; - #encryptedLogs: AztecMap; - #unencryptedLogs: AztecMap; + #noteEncryptedLogsByBlock: AztecMap; + #noteEncryptedLogsByHash: AztecMap; + #noteEncryptedLogHashesByTag: AztecMultiMap; + #noteEncryptedLogTagsByBlock: AztecMultiMap; + #encryptedLogsByBlock: AztecMap; + #unencryptedLogsByBlock: AztecMap; #logsMaxPageSize: number; #log = createDebugLogger('aztec:archiver:log_store'); constructor(private db: AztecKVStore, private blockStore: 
BlockStore, logsMaxPageSize: number = 1000) { - this.#noteEncryptedLogs = db.openMap('archiver_note_encrypted_logs'); - this.#encryptedLogs = db.openMap('archiver_encrypted_logs'); - this.#unencryptedLogs = db.openMap('archiver_unencrypted_logs'); + this.#noteEncryptedLogsByBlock = db.openMap('archiver_note_encrypted_logs_by_block'); + this.#noteEncryptedLogsByHash = db.openMap('archiver_note_encrypted_logs_by_hash'); + this.#noteEncryptedLogHashesByTag = db.openMultiMap('archiver_tagged_note_encrypted_log_hashes_by_tag'); + this.#noteEncryptedLogTagsByBlock = db.openMultiMap('archiver_note_encrypted_log_tags_by_block'); + this.#encryptedLogsByBlock = db.openMap('archiver_encrypted_logs_by_block'); + this.#unencryptedLogsByBlock = db.openMap('archiver_unencrypted_logs_by_block'); this.#logsMaxPageSize = logsMaxPageSize; } @@ -44,21 +52,58 @@ export class LogStore { addLogs(blocks: L2Block[]): Promise { return this.db.transaction(() => { blocks.forEach(block => { - void this.#noteEncryptedLogs.set(block.number, block.body.noteEncryptedLogs.toBuffer()); - void this.#encryptedLogs.set(block.number, block.body.encryptedLogs.toBuffer()); - void this.#unencryptedLogs.set(block.number, block.body.unencryptedLogs.toBuffer()); + void this.#noteEncryptedLogsByBlock.set(block.number, block.body.noteEncryptedLogs.toBuffer()); + block.body.noteEncryptedLogs.txLogs.forEach(txLogs => { + const noteLogs = txLogs.unrollLogs(); + noteLogs.forEach(noteLog => { + if (noteLog.data.length < 32) { + this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`); + return; + } + try { + const tag = new Fr(noteLog.data.subarray(0, 32)); + const hexHash = noteLog.hash().toString('hex'); + // Ideally we'd store all of the logs for a matching tag in an AztecMultiMap, but this type doesn't handle storing buffers well.
The 'ordered-binary' encoding returns an error trying to decode buffers + // ('the number <> cannot be converted to a BigInt because it is not an integer'). We therefore store + // instead the hashes of the logs. + void this.#noteEncryptedLogHashesByTag.set(tag.toString(), hexHash); + void this.#noteEncryptedLogsByHash.set(hexHash, noteLog.toBuffer()); + void this.#noteEncryptedLogTagsByBlock.set(block.number, tag.toString()); + } catch (err) { + this.#log.warn(`Failed to add tagged note log to store: ${err}`); + } + }); + }); + void this.#encryptedLogsByBlock.set(block.number, block.body.encryptedLogs.toBuffer()); + void this.#unencryptedLogsByBlock.set(block.number, block.body.unencryptedLogs.toBuffer()); }); return true; }); } - deleteLogs(blocks: L2Block[]): Promise { + async deleteLogs(blocks: L2Block[]): Promise { + const noteTagsToDelete = await this.db.transaction(() => { + return blocks.flatMap(block => Array.from(this.#noteEncryptedLogTagsByBlock.getValues(block.number))); + }); + const noteLogHashesToDelete = await this.db.transaction(() => { + return noteTagsToDelete.flatMap(tag => Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag))); + }); return this.db.transaction(() => { blocks.forEach(block => { - void this.#noteEncryptedLogs.delete(block.number); - void this.#encryptedLogs.delete(block.number); - void this.#unencryptedLogs.delete(block.number); + void this.#noteEncryptedLogsByBlock.delete(block.number); + void this.#encryptedLogsByBlock.delete(block.number); + void this.#unencryptedLogsByBlock.delete(block.number); + void this.#noteEncryptedLogTagsByBlock.delete(block.number); + }); + + noteTagsToDelete.forEach(tag => { + void this.#noteEncryptedLogHashesByTag.delete(tag.toString()); + }); + + noteLogHashesToDelete.forEach(hash => { + void this.#noteEncryptedLogsByHash.delete(hash); }); return true; @@ -80,12 +125,12 @@ export class LogStore { const logMap = (() => { switch (logType) { case LogType.ENCRYPTED: - return this.#encryptedLogs; 
+ return this.#encryptedLogsByBlock; case LogType.NOTEENCRYPTED: - return this.#noteEncryptedLogs; + return this.#noteEncryptedLogsByBlock; case LogType.UNENCRYPTED: default: - return this.#unencryptedLogs; + return this.#unencryptedLogsByBlock; } })(); const logTypeMap = (() => { @@ -105,6 +150,28 @@ export class LogStore { } } + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + getLogsByTags(tags: Fr[]): Promise { + return this.db.transaction(() => { + return tags.map(tag => { + const logHashes = Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag.toString())); + return ( + logHashes + .map(hash => this.#noteEncryptedLogsByHash.get(hash)) + // addLogs should ensure that we never have undefined logs, but we filter them out regardless to protect + // ourselves from database corruption + .filter(noteLogBuffer => noteLogBuffer != undefined) + .map(noteLogBuffer => EncryptedL2NoteLog.fromBuffer(noteLogBuffer!)) + ); + }); + }); + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. @@ -154,7 +221,7 @@ export class LogStore { const logs: ExtendedUnencryptedL2Log[] = []; let maxLogsHit = false; - loopOverBlocks: for (const [blockNumber, logBuffer] of this.#unencryptedLogs.entries({ start, end })) { + loopOverBlocks: for (const [blockNumber, logBuffer] of this.#unencryptedLogsByBlock.entries({ start, end })) { const unencryptedLogsInBlock = UnencryptedL2BlockL2Logs.fromBuffer(logBuffer); for (let txIndex = filter.afterLog?.txIndex ?? 
0; txIndex < unencryptedLogsInBlock.txLogs.length; txIndex++) { const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs(); @@ -199,12 +266,12 @@ export class LogStore { const logMap = (() => { switch (logType) { case LogType.ENCRYPTED: - return this.#encryptedLogs; + return this.#encryptedLogsByBlock; case LogType.NOTEENCRYPTED: - return this.#noteEncryptedLogs; + return this.#noteEncryptedLogsByBlock; case LogType.UNENCRYPTED: default: - return this.#unencryptedLogs; + return this.#unencryptedLogsByBlock; } })(); const logTypeMap = (() => { diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 6c1f234e435..4a57dbffb6d 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -1,5 +1,6 @@ import { type EncryptedL2BlockL2Logs, + type EncryptedL2NoteLog, type EncryptedNoteL2BlockL2Logs, ExtendedUnencryptedL2Log, type FromLogType, @@ -27,6 +28,7 @@ import { } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { createDebugLogger } from '@aztec/foundation/log'; import { type ArchiverDataStore, type ArchiverL1SynchPoint } from '../archiver_store.js'; import { type DataRetrieval } from '../structs/data_retrieval.js'; @@ -49,6 +51,10 @@ export class MemoryArchiverStore implements ArchiverDataStore { private noteEncryptedLogsPerBlock: Map = new Map(); + private taggedNoteEncryptedLogs: Map = new Map(); + + private noteEncryptedLogTagsPerBlock: Map = new Map(); + private encryptedLogsPerBlock: Map = new Map(); private unencryptedLogsPerBlock: Map = new Map(); @@ -74,6 +80,8 @@ export class MemoryArchiverStore implements ArchiverDataStore { private lastProvenL2BlockNumber: number = 0; private 
lastProvenL2EpochNumber: number = 0; + #log = createDebugLogger('aztec:archiver:data-store'); + constructor( /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */ public readonly maxLogs: number, @@ -206,6 +214,24 @@ export class MemoryArchiverStore implements ArchiverDataStore { addLogs(blocks: L2Block[]): Promise { blocks.forEach(block => { this.noteEncryptedLogsPerBlock.set(block.number, block.body.noteEncryptedLogs); + block.body.noteEncryptedLogs.txLogs.forEach(txLogs => { + const noteLogs = txLogs.unrollLogs(); + noteLogs.forEach(noteLog => { + if (noteLog.data.length < 32) { + this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`); + return; + } + try { + const tag = new Fr(noteLog.data.subarray(0, 32)); + const currentNoteLogs = this.taggedNoteEncryptedLogs.get(tag.toString()) || []; + this.taggedNoteEncryptedLogs.set(tag.toString(), [...currentNoteLogs, noteLog]); + const currentTagsInBlock = this.noteEncryptedLogTagsPerBlock.get(block.number) || []; + this.noteEncryptedLogTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]); + } catch (err) { + this.#log.warn(`Failed to add tagged note log to store: ${err}`); + } + }); + }); this.encryptedLogsPerBlock.set(block.number, block.body.encryptedLogs); this.unencryptedLogsPerBlock.set(block.number, block.body.unencryptedLogs); }); @@ -213,10 +239,18 @@ export class MemoryArchiverStore implements ArchiverDataStore { } deleteLogs(blocks: L2Block[]): Promise { + const noteTagsToDelete = blocks.flatMap(block => this.noteEncryptedLogTagsPerBlock.get(block.number)); + noteTagsToDelete + .filter(tag => tag != undefined) + .forEach(tag => { + this.taggedNoteEncryptedLogs.delete(tag!.toString()); + }); + blocks.forEach(block => { this.encryptedLogsPerBlock.delete(block.number); this.noteEncryptedLogsPerBlock.delete(block.number); this.unencryptedLogsPerBlock.delete(block.number); + this.noteEncryptedLogTagsPerBlock.delete(block.number); }); return 
Promise.resolve(true); @@ -380,6 +414,17 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(l); } + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + getLogsByTags(tags: Fr[]): Promise { + const noteLogs = tags.map(tag => this.taggedNoteEncryptedLogs.get(tag.toString()) || []); + return Promise.resolve(noteLogs); + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. diff --git a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts index 3f21d944e66..00553b4d004 100644 --- a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts +++ b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts @@ -1,5 +1,6 @@ import { type AztecNode, + EncryptedL2NoteLog, EncryptedNoteL2BlockL2Logs, EpochProofQuote, ExtendedUnencryptedL2Log, @@ -49,6 +50,7 @@ export function createAztecNodeRpcServer(node: AztecNode) { }, { EncryptedNoteL2BlockL2Logs, + EncryptedL2NoteLog, NoteSelector, NullifierMembershipWitness, PublicSimulationOutput, diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 89942fb2330..b56eb13940e 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -3,6 +3,7 @@ import { BBCircuitVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; import { type AztecNode, type ClientProtocolCircuitVerifier, + type EncryptedL2NoteLog, type EpochProofQuote, type FromLogType, type GetUnencryptedLogsResponse, @@ -308,6 +309,16 @@ export class AztecNodeService implements AztecNode { return logSource.getLogs(from, limit, logType) as Promise>[]>; } + /** 
+ * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + public getLogsByTags(tags: Fr[]): Promise { + return this.encryptedLogsSource.getLogsByTags(tags); + } + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs. diff --git a/yarn-project/circuit-types/src/logs/l2_logs_source.ts b/yarn-project/circuit-types/src/logs/l2_logs_source.ts index 1d5745484d6..9c00af874bf 100644 --- a/yarn-project/circuit-types/src/logs/l2_logs_source.ts +++ b/yarn-project/circuit-types/src/logs/l2_logs_source.ts @@ -1,3 +1,6 @@ +import { type Fr } from '@aztec/circuits.js'; + +import { type EncryptedL2NoteLog } from './encrypted_l2_note_log.js'; import { type GetUnencryptedLogsResponse } from './get_unencrypted_logs_response.js'; import { type L2BlockL2Logs } from './l2_block_l2_logs.js'; import { type LogFilter } from './log_filter.js'; @@ -20,6 +23,14 @@ export interface L2LogsSource { logType: TLogType, ): Promise>[]>; + /** + * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag). + * @param tags - The tags to filter the logs by. + * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match + * that tag. + */ + getLogsByTags(tags: Fr[]): Promise; + /** * Gets unencrypted logs based on the provided filter. * @param filter - The filter to apply to the logs.