diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 942c544b26f..4a417906496 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -10,7 +10,7 @@ import { type WorldStateSynchronizer, mockTxForRollup, } from '@aztec/circuit-types'; -import { type ContractDataSource, EthAddress, Fr, MaxBlockNumber } from '@aztec/circuits.js'; +import { type ContractDataSource, EthAddress, Fr, GasFees, MaxBlockNumber } from '@aztec/circuits.js'; import { type P2P } from '@aztec/p2p'; import { type GlobalVariableBuilder } from '@aztec/sequencer-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -37,8 +37,9 @@ describe('aztec node', () => { p2p = mock(); globalVariablesBuilder = mock(); - merkleTreeOps = mock(); + globalVariablesBuilder.getCurrentBaseFees.mockResolvedValue(new GasFees(0, 0)); + merkleTreeOps = mock(); merkleTreeOps.findLeafIndices.mockImplementation((_treeId: MerkleTreeId, _value: any[]) => { return Promise.resolve([undefined]); }); @@ -99,14 +100,14 @@ describe('aztec node', () => { const doubleSpendWithExistingTx = txs[1]; lastBlockNumber += 1; - expect(await node.isValidTx(doubleSpendTx)).toBe(true); + expect(await node.isValidTx(doubleSpendTx)).toEqual({ result: 'valid' }); // We push a duplicate nullifier that was created in the same transaction doubleSpendTx.data.forRollup!.end.nullifiers.push(doubleSpendTx.data.forRollup!.end.nullifiers[0]); - expect(await node.isValidTx(doubleSpendTx)).toBe(false); + expect(await node.isValidTx(doubleSpendTx)).toEqual({ result: 'invalid', reason: ['Duplicate nullifier in tx'] }); - expect(await node.isValidTx(doubleSpendWithExistingTx)).toBe(true); + expect(await node.isValidTx(doubleSpendWithExistingTx)).toEqual({ result: 'valid' }); // We make a nullifier from `doubleSpendWithExistingTx` a part of the nullifier tree, so it gets rejected as double spend const doubleSpendNullifier = doubleSpendWithExistingTx.data.forRollup!.end.nullifiers[0].toBuffer(); @@ -116,7 +117,10 @@ describe('aztec node', () => { ); }); - expect(await node.isValidTx(doubleSpendWithExistingTx)).toBe(false); + expect(await node.isValidTx(doubleSpendWithExistingTx)).toEqual({ + result: 'invalid', + reason: ['Existing nullifier'], + }); lastBlockNumber = 0; }); @@ -124,12 +128,12 @@ describe('aztec node', () => { const tx = mockTxForRollup(0x10000); tx.data.constants.txContext.chainId = chainId; - expect(await node.isValidTx(tx)).toBe(true); + expect(await node.isValidTx(tx)).toEqual({ result: 'valid' }); // We make the chain id on the tx not equal to the configured chain id - tx.data.constants.txContext.chainId = new Fr(1n + chainId.value); + tx.data.constants.txContext.chainId = new Fr(1n + chainId.toBigInt()); - expect(await node.isValidTx(tx)).toBe(false); + expect(await node.isValidTx(tx)).toEqual({ result: 'invalid', reason: ['Incorrect chain id'] }); }); it('tests that the node correctly validates max block numbers', async () => { @@ -159,11 +163,14 @@ describe('aztec node', () => { lastBlockNumber = 3; // Default tx with no max block number should be valid - expect(await node.isValidTx(noMaxBlockNumberMetadata)).toBe(true); + expect(await node.isValidTx(noMaxBlockNumberMetadata)).toEqual({ result: 'valid' }); // Tx with max block number < current block number should be invalid - expect(await node.isValidTx(invalidMaxBlockNumberMetadata)).toBe(false); + expect(await 
node.isValidTx(invalidMaxBlockNumberMetadata)).toEqual({ + result: 'invalid', + reason: ['Invalid block number'], + }); // Tx with max block number >= current block number should be valid - expect(await node.isValidTx(validMaxBlockNumberMetadata)).toBe(true); + expect(await node.isValidTx(validMaxBlockNumberMetadata)).toEqual({ result: 'valid' }); }); }); }); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 8724f8e465b..4b882875a94 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -16,7 +16,6 @@ import { NullifierMembershipWitness, type NullifierWithBlockSource, P2PClientType, - type ProcessedTx, type ProverConfig, PublicDataWitness, PublicSimulationOutput, @@ -29,7 +28,7 @@ import { TxReceipt, type TxScopedL2Log, TxStatus, - type TxValidator, + type TxValidationResult, type WorldStateSynchronizer, tryStop, } from '@aztec/circuit-types'; @@ -64,17 +63,16 @@ import { DateProvider, Timer } from '@aztec/foundation/timer'; import { type AztecKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; -import { - AggregateTxValidator, - DataTxValidator, - DoubleSpendTxValidator, - MetadataTxValidator, - type P2P, - TxProofValidator, - createP2PClient, -} from '@aztec/p2p'; +import { type P2P, createP2PClient } from '@aztec/p2p'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { GlobalVariableBuilder, type L1Publisher, SequencerClient, createSlasherClient } from '@aztec/sequencer-client'; +import { + GlobalVariableBuilder, + type L1Publisher, + SequencerClient, + createSlasherClient, + createValidatorForAcceptingTxs, + getDefaultAllowedSetupFunctions, +} from '@aztec/sequencer-client'; import { PublicProcessorFactory } from '@aztec/simulator'; import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -418,15 +416,21 @@ export class AztecNodeService implements AztecNode, Traceable { */ public async sendTx(tx: Tx) { const timer = new Timer(); - this.log.info(`Received tx ${tx.getTxHash()}`); + const txHash = tx.getTxHash().toString(); - if (!(await this.isValidTx(tx))) { + const valid = await this.isValidTx(tx); + if (valid.result !== 'valid') { + const reason = valid.reason.join(', '); this.metrics.receivedTx(timer.ms(), false); + this.log.warn(`Invalid tx ${txHash}: ${reason}`, { txHash }); + // TODO(#10967): Throw when receiving an invalid tx instead of just returning + // throw new Error(`Invalid tx: ${reason}`); return; } await this.p2pClient!.sendTx(tx); this.metrics.receivedTx(timer.ms(), true); + this.log.info(`Received tx ${tx.getTxHash()}`, { txHash }); } public async getTxReceipt(txHash: TxHash): Promise { @@ -878,34 +882,19 @@ export class AztecNodeService implements AztecNode, Traceable { } } - public async isValidTx(tx: Tx, isSimulation: boolean = false): Promise { + public async isValidTx(tx: Tx, isSimulation: boolean = false): Promise { const blockNumber = (await this.blockSource.getBlockNumber()) + 1; const db = this.worldStateSynchronizer.getCommitted(); - // These validators are taken from the sequencer, and should match. - // The reason why `phases` and `gas` tx validator is in the sequencer and not here is because - // those tx validators are customizable by the sequencer. 
- const txValidators: TxValidator[] = [ - new DataTxValidator(), - new MetadataTxValidator(new Fr(this.l1ChainId), new Fr(blockNumber)), - new DoubleSpendTxValidator({ - getNullifierIndices: nullifiers => db.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers), - }), - ]; - - if (!isSimulation) { - txValidators.push(new TxProofValidator(this.proofVerifier)); - } - - const txValidator = new AggregateTxValidator(...txValidators); - - const [_, invalidTxs] = await txValidator.validateTxs([tx]); - if (invalidTxs.length > 0) { - this.log.warn(`Rejecting tx ${tx.getTxHash()} because of validation errors`); - - return false; - } + const verifier = isSimulation ? undefined : this.proofVerifier; + const validator = createValidatorForAcceptingTxs(db, this.contractDataSource, verifier, { + blockNumber, + l1ChainId: this.l1ChainId, + enforceFees: !!this.config.enforceFees, + setupAllowList: this.config.allowedInSetup ?? getDefaultAllowedSetupFunctions(), + gasFees: await this.getCurrentBaseFees(), + }); - return true; + return await validator.validateTx(tx); } public async setConfig(config: Partial): Promise { diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index 3bc333609d6..a771e4a6dcc 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -40,6 +40,7 @@ import { MerkleTreeId } from '../merkle_tree_id.js'; import { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { PublicDataWitness } from '../public_data_witness.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; +import { type TxValidationResult } from '../tx/index.js'; import { PublicSimulationOutput } from '../tx/public_simulation_output.js'; import { Tx } from '../tx/tx.js'; import { TxHash } from '../tx/tx_hash.js'; @@ -293,9 +294,14 @@ describe('AztecNodeApiSchema', () => { expect(response).toBeInstanceOf(PublicSimulationOutput); }); - it('isValidTx', async () => { + it('isValidTx(valid)', async () => { + const response = await context.client.isValidTx(Tx.random(), true); + expect(response).toEqual({ result: 'valid' }); + }); + + it('isValidTx(invalid)', async () => { const response = await context.client.isValidTx(Tx.random()); - expect(response).toBe(true); + expect(response).toEqual({ result: 'invalid', reason: ['Invalid'] }); }); it('setConfig', async () => { @@ -559,9 +565,9 @@ class MockAztecNode implements AztecNode { expect(tx).toBeInstanceOf(Tx); return Promise.resolve(PublicSimulationOutput.random()); } - isValidTx(tx: Tx, _isSimulation?: boolean | undefined): Promise { + isValidTx(tx: Tx, isSimulation?: boolean | undefined): Promise { expect(tx).toBeInstanceOf(Tx); - return Promise.resolve(true); + return Promise.resolve(isSimulation ? 
{ result: 'valid' } : { result: 'invalid', reason: ['Invalid'] }); } setConfig(config: Partial): Promise { expect(config.coinbase).toBeInstanceOf(EthAddress); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 17f7fe16cdb..9a1de505eb6 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -38,7 +38,14 @@ import { MerkleTreeId } from '../merkle_tree_id.js'; import { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { PublicDataWitness } from '../public_data_witness.js'; import { SiblingPath } from '../sibling_path/index.js'; -import { PublicSimulationOutput, Tx, TxHash, TxReceipt } from '../tx/index.js'; +import { + PublicSimulationOutput, + Tx, + TxHash, + TxReceipt, + type TxValidationResult, + TxValidationResultSchema, +} from '../tx/index.js'; import { TxEffect } from '../tx_effect.js'; import { type SequencerConfig, SequencerConfigSchema } from './configs.js'; import { type L2BlockNumber, L2BlockNumberSchema } from './l2_block_number.js'; @@ -395,7 +402,7 @@ export interface AztecNode * @param tx - The transaction to validate for correctness. * @param isSimulation - True if the transaction is a simulated one without generated proofs. (Optional) */ - isValidTx(tx: Tx, isSimulation?: boolean): Promise; + isValidTx(tx: Tx, isSimulation?: boolean): Promise; /** * Updates the configuration of this node. @@ -567,7 +574,7 @@ export const AztecNodeApiSchema: ApiSchemaFor = { simulatePublicCalls: z.function().args(Tx.schema, optional(z.boolean())).returns(PublicSimulationOutput.schema), - isValidTx: z.function().args(Tx.schema, optional(z.boolean())).returns(z.boolean()), + isValidTx: z.function().args(Tx.schema, optional(z.boolean())).returns(TxValidationResultSchema), setConfig: z.function().args(SequencerConfigSchema.merge(ProverConfigSchema).partial()).returns(z.void()), diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts index 8d56779c6f2..32a6514d2ee 100644 --- a/yarn-project/circuit-types/src/interfaces/configs.ts +++ b/yarn-project/circuit-types/src/interfaces/configs.ts @@ -20,6 +20,10 @@ export interface SequencerConfig { maxTxsPerBlock?: number; /** The minimum number of txs to include in a block. */ minTxsPerBlock?: number; + /** The maximum L2 block gas. */ + maxL2BlockGas?: number; + /** The maximum DA block gas. */ + maxDABlockGas?: number; /** Recipient of block reward. */ coinbase?: EthAddress; /** Address to receive fees. 
*/ @@ -53,6 +57,8 @@ export const SequencerConfigSchema = z.object({ transactionPollingIntervalMS: z.number().optional(), maxTxsPerBlock: z.number().optional(), minTxsPerBlock: z.number().optional(), + maxL2BlockGas: z.number().optional(), + maxDABlockGas: z.number().optional(), coinbase: schemas.EthAddress.optional(), feeRecipient: schemas.AztecAddress.optional(), acvmWorkingDirectory: z.string().optional(), diff --git a/yarn-project/circuit-types/src/tx/tx.ts b/yarn-project/circuit-types/src/tx/tx.ts index 4f95a81af7b..00b8a8593e5 100644 --- a/yarn-project/circuit-types/src/tx/tx.ts +++ b/yarn-project/circuit-types/src/tx/tx.ts @@ -1,6 +1,8 @@ import { ClientIvcProof, + Fr, PrivateKernelTailCircuitPublicInputs, + PrivateLog, type PrivateToPublicAccumulatedData, type ScopedLogHash, } from '@aztec/circuits.js'; @@ -230,6 +232,20 @@ export class Tx extends Gossipable { ); } + /** + * Estimates the tx size based on its private effects. Note that the actual size of the tx + * after processing will probably be larger, as public execution would generate more data. + */ + getEstimatedPrivateTxEffectsSize() { + return ( + this.unencryptedLogs.getSerializedLength() + + this.contractClassLogs.getSerializedLength() + + this.data.getNonEmptyNoteHashes().length * Fr.SIZE_IN_BYTES + + this.data.getNonEmptyNullifiers().length * Fr.SIZE_IN_BYTES + + this.data.getNonEmptyPrivateLogs().length * PrivateLog.SIZE_IN_BYTES + ); + } + /** * Convenience function to get a hash out of a tx or a tx-like. * @param tx - Tx-like object. diff --git a/yarn-project/circuit-types/src/tx/validator/empty_validator.ts b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts index 2ea10e7a55a..ccb15a05072 100644 --- a/yarn-project/circuit-types/src/tx/validator/empty_validator.ts +++ b/yarn-project/circuit-types/src/tx/validator/empty_validator.ts @@ -1,11 +1,7 @@ -import { type AnyTx, type TxValidator } from './tx_validator.js'; +import { type AnyTx, type TxValidationResult, type TxValidator } from './tx_validator.js'; export class EmptyTxValidator implements TxValidator { - public validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs: T[]]> { - return Promise.resolve([txs, [], []]); - } - - public validateTx(_tx: T): Promise { - return Promise.resolve(true); + public validateTx(_tx: T): Promise { + return Promise.resolve({ result: 'valid' }); } } diff --git a/yarn-project/circuit-types/src/tx/validator/tx_validator.ts b/yarn-project/circuit-types/src/tx/validator/tx_validator.ts index 040d764cf3d..3928343efca 100644 --- a/yarn-project/circuit-types/src/tx/validator/tx_validator.ts +++ b/yarn-project/circuit-types/src/tx/validator/tx_validator.ts @@ -1,9 +1,23 @@ +import { type ZodFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + import { type ProcessedTx } from '../processed_tx.js'; import { type Tx } from '../tx.js'; export type AnyTx = Tx | ProcessedTx; +export type TxValidationResult = + | { result: 'valid' } + | { result: 'invalid'; reason: string[] } + | { result: 'skipped'; reason: string[] }; + export interface TxValidator { - validateTx(tx: T): Promise; - validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs?: T[]]>; + validateTx(tx: T): Promise; } + +export const TxValidationResultSchema = z.discriminatedUnion('result', [ + z.object({ result: z.literal('valid'), reason: z.array(z.string()).optional() }), + z.object({ result: z.literal('invalid'), reason: z.array(z.string()) }), + z.object({ result: z.literal('skipped'), reason: 
z.array(z.string()) }), +]) satisfies ZodFor; diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index d0ae676ad39..507827c4494 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -152,6 +152,11 @@ export class TxEffect { ]); } + /** Returns the size of this tx effect in bytes as serialized onto DA. */ + getDASize() { + return this.toBlobFields().length * Fr.SIZE_IN_BYTES; + } + /** * Deserializes the TxEffect object from a Buffer. * @param buffer - Buffer or BufferReader object to deserialize. diff --git a/yarn-project/circuits.js/src/structs/gas.ts b/yarn-project/circuits.js/src/structs/gas.ts index 7952b2cbbbd..956c2b5052d 100644 --- a/yarn-project/circuits.js/src/structs/gas.ts +++ b/yarn-project/circuits.js/src/structs/gas.ts @@ -78,6 +78,11 @@ export class Gas { return new Gas(Math.ceil(this.daGas * scalar), Math.ceil(this.l2Gas * scalar)); } + /** Returns true if any of this instance's dimensions is greater than the corresponding on the other. */ + gtAny(other: Gas) { + return this.daGas > other.daGas || this.l2Gas > other.l2Gas; + } + computeFee(gasFees: GasFees) { return GasDimensions.reduce( (acc, dimension) => acc.add(gasFees.get(dimension).mul(new Fr(this.get(dimension)))), diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 6a2ed021376..4421e007e25 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -207,7 +207,7 @@ describe('e2e_block_building', () => { // to pick up and validate the txs, so we may need to bump it to work on CI. Note that we need // at least 3s here so the archiver has time to loop once and sync, and the sequencer has at // least 1s to loop. - sequencer.sequencer.timeTable[SequencerState.WAITING_FOR_TXS] = 4; + sequencer.sequencer.timeTable[SequencerState.INITIALIZING_PROPOSAL] = 4; sequencer.sequencer.timeTable[SequencerState.CREATING_BLOCK] = 4; sequencer.sequencer.processTxTime = 1; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 23cf19b22bf..a0713351efe 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -54,6 +54,7 @@ import fs from 'fs/promises'; import getPort from 'get-port'; import { tmpdir } from 'os'; import * as path from 'path'; +import { inspect } from 'util'; import { type Account, type Chain, @@ -695,7 +696,7 @@ export async function setupCanonicalFeeJuice(pxe: PXE) { .wait(); getLogger().info(`Fee Juice successfully setup. Portal address: ${feeJuicePortalAddress}`); } catch (error) { - getLogger().info(`Fee Juice might have already been setup.`); + getLogger().warn(`Fee Juice might have already been setup. 
Got error: ${inspect(error)}.`); } } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 3fa142074e0..d45d8d36f63 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -141,6 +141,8 @@ export type EnvVar = | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK' + | 'SEQ_MAX_DA_BLOCK_GAS' + | 'SEQ_MAX_L2_BLOCK_GAS' | 'SEQ_PUBLISH_RETRY_INTERVAL_MS' | 'SEQ_PUBLISHER_PRIVATE_KEY' | 'SEQ_REQUIRED_CONFIRMATIONS' diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 0db273a1bd6..4ab855f4847 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -142,6 +142,12 @@ export type P2P = P2PApi & { */ getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined; + /** Returns an iterator over pending txs on the mempool. */ + iteratePendingTxs(): Iterable; + + /** Returns the number of pending txs in the mempool. */ + getPendingTxCount(): number; + /** * Starts the p2p client. * @returns A promise signalling the completion of the block sync. @@ -460,6 +466,20 @@ export class P2PClient return Promise.resolve(this.getTxs('pending')); } + public getPendingTxCount(): number { + return this.txPool.getPendingTxHashes().length; + } + + public *iteratePendingTxs() { + const pendingTxHashes = this.txPool.getPendingTxHashes(); + for (const txHash of pendingTxHashes) { + const tx = this.txPool.getTxByHash(txHash); + if (tx) { + yield tx; + } + } + } + /** * Returns all transactions in the transaction pool. * @returns An array of Txs. diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts index bd315664445..194779508c8 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.test.ts @@ -1,4 +1,4 @@ -import { type AnyTx, Tx, type TxHash, type TxValidator, mockTx } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxHash, type TxValidationResult, type TxValidator, mockTx } from '@aztec/circuit-types'; import { AggregateTxValidator } from './aggregate_tx_validator.js'; @@ -6,57 +6,49 @@ describe('AggregateTxValidator', () => { it('allows txs that pass all validation', async () => { const txs = [mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]; const agg = new AggregateTxValidator( - new TxDenyList([txs[0].getTxHash(), txs[1].getTxHash()], []), + new TxDenyList([txs[0].getTxHash(), txs[1].getTxHash(), txs[4].getTxHash()], []), new TxDenyList([txs[2].getTxHash(), txs[4].getTxHash()], []), ); - const validTxs = [txs[3]]; - const invalidTxs = [txs[0], txs[1], txs[2], txs[4]]; - const skippedTxs: AnyTx[] = []; - await expect(agg.validateTxs(txs)).resolves.toEqual([validTxs, invalidTxs, skippedTxs]); + await expect(agg.validateTx(txs[0])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[1])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[2])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[3])).resolves.toEqual({ result: 'valid' }); + await expect(agg.validateTx(txs[4])).resolves.toEqual({ result: 'invalid', reason: ['Denied', 'Denied'] }); }); it('aggregate skipped txs ', async () => { const txs 
= [mockTx(0), mockTx(1), mockTx(2), mockTx(3), mockTx(4)]; const agg = new AggregateTxValidator( new TxDenyList([txs[0].getTxHash()], []), - new TxDenyList([], [txs[1].getTxHash(), txs[2].getTxHash()]), + new TxDenyList([txs[4].getTxHash()], [txs[1].getTxHash(), txs[2].getTxHash()]), new TxDenyList([], [txs[4].getTxHash()]), ); - const validTxs = [txs[3]]; - const invalidTxs = [txs[0]]; - const skippedTxs = [txs[1], txs[2], txs[4]]; - await expect(agg.validateTxs(txs)).resolves.toEqual([validTxs, invalidTxs, skippedTxs]); + await expect(agg.validateTx(txs[0])).resolves.toEqual({ result: 'invalid', reason: ['Denied'] }); + await expect(agg.validateTx(txs[1])).resolves.toEqual({ result: 'skipped', reason: ['Skipped'] }); + await expect(agg.validateTx(txs[2])).resolves.toEqual({ result: 'skipped', reason: ['Skipped'] }); + await expect(agg.validateTx(txs[3])).resolves.toEqual({ result: 'valid' }); + await expect(agg.validateTx(txs[4])).resolves.toEqual({ result: 'invalid', reason: ['Denied', 'Skipped'] }); }); class TxDenyList implements TxValidator { denyList: Set; skippedList: Set; + constructor(deniedTxHashes: TxHash[], skippedTxHashes: TxHash[]) { this.denyList = new Set(deniedTxHashes.map(hash => hash.toString())); this.skippedList = new Set(skippedTxHashes.map(hash => hash.toString())); } - validateTxs(txs: AnyTx[]): Promise<[AnyTx[], AnyTx[], AnyTx[] | undefined]> { - const validTxs: AnyTx[] = []; - const invalidTxs: AnyTx[] = []; - const skippedTxs: AnyTx[] = []; - txs.forEach(tx => { - const txHash = Tx.getHash(tx).toString(); - if (this.skippedList.has(txHash)) { - skippedTxs.push(tx); - } else if (this.denyList.has(txHash)) { - invalidTxs.push(tx); - } else { - validTxs.push(tx); - } - }); - return Promise.resolve([validTxs, invalidTxs, skippedTxs.length ? skippedTxs : undefined]); - } - - validateTx(tx: AnyTx): Promise { - return Promise.resolve(this.denyList.has(Tx.getHash(tx).toString())); + validateTx(tx: AnyTx): Promise { + if (this.skippedList.has(Tx.getHash(tx).toString())) { + return Promise.resolve({ result: 'skipped', reason: ['Skipped'] }); + } + if (this.denyList.has(Tx.getHash(tx).toString())) { + return Promise.resolve({ result: 'invalid', reason: ['Denied'] }); + } + return Promise.resolve({ result: 'valid' }); } } }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts index 21bf24ddb8d..f7279c9b387 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/aggregate_tx_validator.ts @@ -1,4 +1,4 @@ -import { type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; +import { type ProcessedTx, type Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; export class AggregateTxValidator implements TxValidator { #validators: TxValidator[]; @@ -10,27 +10,23 @@ export class AggregateTxValidator implements TxValid this.#validators = validators; } - async validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[], skippedTxs: T[]]> { - const invalidTxs: T[] = []; - const skippedTxs: T[] = []; - let txPool = txs; + async validateTx(tx: T): Promise { + const aggregate: { result: string; reason?: string[] } = { result: 'valid', reason: [] }; for (const validator of this.#validators) { - const [valid, invalid, skipped] = await validator.validateTxs(txPool); - invalidTxs.push(...invalid); - skippedTxs.push(...(skipped ?? 
[])); - txPool = valid; - } - - return [txPool, invalidTxs, skippedTxs]; - } - - async validateTx(tx: T): Promise { - for (const validator of this.#validators) { - const valid = await validator.validateTx(tx); - if (!valid) { - return false; + const result = await validator.validateTx(tx); + if (result.result === 'invalid') { + aggregate.result = 'invalid'; + aggregate.reason!.push(...result.reason); + } else if (result.result === 'skipped') { + if (aggregate.result === 'valid') { + aggregate.result = 'skipped'; + } + aggregate.reason!.push(...result.reason); } } - return true; + if (aggregate.result === 'valid') { + delete aggregate.reason; + } + return aggregate as TxValidationResult; } } diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts index 6b7f42859f6..894d0e970c7 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.test.ts @@ -1,4 +1,4 @@ -import { mockTx } from '@aztec/circuit-types'; +import { type Tx, mockTx } from '@aztec/circuit-types'; import { AztecAddress, Fr, FunctionSelector } from '@aztec/circuits.js'; import { DataTxValidator } from './data_validator.js'; @@ -21,9 +21,19 @@ describe('TxDataValidator', () => { validator = new DataTxValidator(); }); + const expectValid = async (txs: Tx[]) => { + for (const tx of txs) { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + } + }; + + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + it('allows transactions with the correct data', async () => { - const txs = mockTxs(3); - await expect(validator.validateTxs(txs)).resolves.toEqual([txs, []]); + const [tx] = mockTxs(1); + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); }); it('rejects txs with mismatch non revertible execution requests', async () => { @@ -33,7 +43,10 @@ describe('TxDataValidator', () => { badTxs[1].data.forPublic!.nonRevertibleAccumulatedData.publicCallRequests[1].contractAddress = AztecAddress.random(); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[1], 'Incorrect execution request for public call'); }); it('rejects txs with mismatch revertible execution requests', async () => { @@ -46,7 +59,12 @@ describe('TxDataValidator', () => { badTxs[3].data.forPublic!.revertibleAccumulatedData.publicCallRequests[0].isStaticCall = !badTxs[3].enqueuedPublicFunctionCalls[0].callContext.isStaticCall; - await expect(validator.validateTxs([...badTxs, ...goodTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[1], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[2], 'Incorrect execution request for public call'); + await expectInvalid(badTxs[3], 'Incorrect execution request for public call'); }); it('rejects txs with mismatch teardown execution requests', async () => { @@ -55,7 +73,10 @@ describe('TxDataValidator', () => { badTxs[0].data.forPublic!.publicTeardownCallRequest.contractAddress = AztecAddress.random(); 
badTxs[1].data.forPublic!.publicTeardownCallRequest.msgSender = AztecAddress.random(); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Incorrect teardown execution request'); + await expectInvalid(badTxs[1], 'Incorrect teardown execution request'); }); it('rejects txs with mismatch number of execution requests', async () => { @@ -66,6 +87,9 @@ describe('TxDataValidator', () => { // Having an extra enqueuedPublicFunctionCall. badTxs[1].enqueuedPublicFunctionCalls.push(execRequest); - await expect(validator.validateTxs([...badTxs, ...goodTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs); + + await expectInvalid(badTxs[0], 'Wrong number of execution requests for public calls'); + await expectInvalid(badTxs[1], 'Wrong number of execution requests for public calls'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts index 143713cc280..ddc5d43ca87 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/data_validator.ts @@ -1,29 +1,14 @@ -import { Tx, type TxValidator } from '@aztec/circuit-types'; +import { Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export class DataTxValidator implements TxValidator { #log = createLogger('p2p:tx_validator:tx_data'); - validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - for (const tx of txs) { - if (!this.#hasCorrectExecutionRequests(tx)) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); - } - - return Promise.resolve([validTxs, invalidTxs]); - } - - validateTx(tx: Tx): Promise { + validateTx(tx: Tx): Promise { return Promise.resolve(this.#hasCorrectExecutionRequests(tx)); } - #hasCorrectExecutionRequests(tx: Tx): boolean { + #hasCorrectExecutionRequests(tx: Tx): TxValidationResult { const callRequests = [ ...tx.data.getRevertiblePublicCallRequests(), ...tx.data.getNonRevertiblePublicCallRequests(), @@ -34,7 +19,7 @@ export class DataTxValidator implements TxValidator { callRequests.length }. Got ${tx.enqueuedPublicFunctionCalls.length}.`, ); - return false; + return { result: 'invalid', reason: ['Wrong number of execution requests for public calls'] }; } const invalidExecutionRequestIndex = tx.enqueuedPublicFunctionCalls.findIndex( @@ -46,7 +31,7 @@ export class DataTxValidator implements TxValidator { tx, )} because of incorrect execution requests for public call at index ${invalidExecutionRequestIndex}.`, ); - return false; + return { result: 'invalid', reason: ['Incorrect execution request for public call'] }; } const teardownCallRequest = tx.data.getTeardownPublicCallRequest(); @@ -55,10 +40,10 @@ export class DataTxValidator implements TxValidator { (teardownCallRequest && !tx.publicTeardownFunctionCall.isForCallRequest(teardownCallRequest)); if (isInvalidTeardownExecutionRequest) { this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} because of incorrect teardown execution requests.`); - return false; + return { result: 'invalid', reason: ['Incorrect teardown execution request'] }; } - return true; + return { result: 'valid' }; } // TODO: Check logs. 
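
Not part of the diff: a minimal sketch of how a custom validator can plug into the single-tx `validateTx` contract this change introduces. The class name and the size-based rule are illustrative only (they assume the `getEstimatedPrivateTxEffectsSize` helper added to `Tx` in this PR); the imports mirror the ones used by the validators above.

```typescript
import { Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types';

// Hypothetical validator: rejects txs whose estimated private effects exceed a byte limit.
export class MaxTxSizeValidator implements TxValidator<Tx> {
  constructor(private readonly maxSizeInBytes: number) {}

  validateTx(tx: Tx): Promise<TxValidationResult> {
    // getEstimatedPrivateTxEffectsSize() is the helper added to Tx in this PR.
    if (tx.getEstimatedPrivateTxEffectsSize() > this.maxSizeInBytes) {
      return Promise.resolve({ result: 'invalid', reason: ['Tx too large'] });
    }
    return Promise.resolve({ result: 'valid' });
  }
}
```

Such a validator would compose with the others via `new AggregateTxValidator(new DataTxValidator(), new MaxTxSizeValidator(1024))`, with failure reasons aggregated as exercised in `aggregate_tx_validator.test.ts` above.
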
diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts index 7b0fbb13974..3a64e1fb601 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.test.ts @@ -1,6 +1,6 @@ import { type AnyTx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; -import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; +import { type MockProxy, mock } from 'jest-mock-extended'; import { DoubleSpendTxValidator, type NullifierSource } from './double_spend_validator.js'; @@ -8,25 +8,26 @@ describe('DoubleSpendTxValidator', () => { let txValidator: DoubleSpendTxValidator; let nullifierSource: MockProxy; + const expectInvalid = async (tx: AnyTx, reason: string) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + beforeEach(() => { - nullifierSource = mock({ - getNullifierIndices: mockFn().mockImplementation(() => { - return Promise.resolve([undefined]); - }), - }); + nullifierSource = mock(); + nullifierSource.nullifiersExist.mockResolvedValue([]); txValidator = new DoubleSpendTxValidator(nullifierSource); }); it('rejects duplicates in non revertible data', async () => { const badTx = mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates in revertible data', async () => { const badTx = mockTxForRollup(); badTx.data.forRollup!.end.nullifiers[1] = badTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates across phases', async () => { @@ -36,19 +37,12 @@ describe('DoubleSpendTxValidator', () => { }); badTx.data.forPublic!.revertibleAccumulatedData.nullifiers[0] = badTx.data.forPublic!.nonRevertibleAccumulatedData.nullifiers[0]; - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); - }); - - it('rejects duplicates across txs', async () => { - const firstTx = mockTxForRollup(1); - const secondTx = mockTxForRollup(2); - secondTx.data.forRollup!.end.nullifiers[0] = firstTx.data.forRollup!.end.nullifiers[0]; - await expect(txValidator.validateTxs([firstTx, secondTx])).resolves.toEqual([[firstTx], [secondTx]]); + await expectInvalid(badTx, 'Duplicate nullifier in tx'); }); it('rejects duplicates against history', async () => { const badTx = mockTx(); - nullifierSource.getNullifierIndices.mockReturnValueOnce(Promise.resolve([1n])); - await expect(txValidator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + nullifierSource.nullifiersExist.mockResolvedValue([true]); + await expectInvalid(badTx, 'Existing nullifier'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts index 9f735e197b0..7ec67bbbc39 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/double_spend_validator.ts @@ -1,69 +1,33 @@ -import { type AnyTx, Tx, type TxValidator } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxValidationResult, type 
TxValidator } from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export interface NullifierSource { - getNullifierIndices: (nullifiers: Buffer[]) => Promise<(bigint | undefined)[]>; + nullifiersExist: (nullifiers: Buffer[]) => Promise; } export class DoubleSpendTxValidator implements TxValidator { #log = createLogger('p2p:tx_validator:tx_double_spend'); #nullifierSource: NullifierSource; - constructor(nullifierSource: NullifierSource, private readonly isValidatingBlock: boolean = true) { + constructor(nullifierSource: NullifierSource) { this.#nullifierSource = nullifierSource; } - async validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> { - const validTxs: T[] = []; - const invalidTxs: T[] = []; - const thisBlockNullifiers = new Set(); - - for (const tx of txs) { - if (!(await this.#uniqueNullifiers(tx, thisBlockNullifiers))) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); - } - - return [validTxs, invalidTxs]; - } - - validateTx(tx: T): Promise { - return this.#uniqueNullifiers(tx, new Set()); - } - - async #uniqueNullifiers(tx: AnyTx, thisBlockNullifiers: Set): Promise { + async validateTx(tx: T): Promise { const nullifiers = tx instanceof Tx ? tx.data.getNonEmptyNullifiers() : tx.txEffect.nullifiers; // Ditch this tx if it has repeated nullifiers const uniqueNullifiers = new Set(nullifiers); if (uniqueNullifiers.size !== nullifiers.length) { this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for emitting duplicate nullifiers`); - return false; - } - - if (this.isValidatingBlock) { - for (const nullifier of nullifiers) { - const nullifierBigInt = nullifier.toBigInt(); - if (thisBlockNullifiers.has(nullifierBigInt)) { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating a nullifier in the same block`); - return false; - } - - thisBlockNullifiers.add(nullifierBigInt); - } + return { result: 'invalid', reason: ['Duplicate nullifier in tx'] }; } - const nullifierIndexes = await this.#nullifierSource.getNullifierIndices(nullifiers.map(n => n.toBuffer())); - - const hasDuplicates = nullifierIndexes.some(index => index !== undefined); - if (hasDuplicates) { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating nullifiers present in state trees`); - return false; + if ((await this.#nullifierSource.nullifiersExist(nullifiers.map(n => n.toBuffer()))).some(Boolean)) { + this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for repeating a nullifier`); + return { result: 'invalid', reason: ['Existing nullifier'] }; } - return true; + return { result: 'valid' }; } } diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts index 211d4ad0e66..0dbf8964e71 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts @@ -1,4 +1,4 @@ -import { type AnyTx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; +import { type AnyTx, type Tx, mockTx, mockTxForRollup } from '@aztec/circuit-types'; import { Fr, MaxBlockNumber } from '@aztec/circuits.js'; import { MetadataTxValidator } from './metadata_validator.js'; @@ -14,6 +14,14 @@ describe('MetadataTxValidator', () => { validator = new MetadataTxValidator(chainId, blockNumber); }); + const expectValid = async (tx: Tx) => { + await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + }; + + const expectInvalid = async (tx: Tx, reason: string) => { + 
await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + it('allows only transactions for the right chain', async () => { const goodTxs = [mockTx(1), mockTxForRollup(2)]; const badTxs = [mockTx(3), mockTxForRollup(4)]; @@ -26,7 +34,10 @@ describe('MetadataTxValidator', () => { tx.data.constants.txContext.chainId = chainId.add(new Fr(1)); }); - await expect(validator.validateTxs([...goodTxs, ...badTxs])).resolves.toEqual([goodTxs, badTxs]); + await expectValid(goodTxs[0]); + await expectValid(goodTxs[1]); + await expectInvalid(badTxs[0], 'Incorrect chain id'); + await expectInvalid(badTxs[1], 'Incorrect chain id'); }); it.each([42, 43])('allows txs with valid max block number', async maxBlockNumber => { @@ -34,7 +45,7 @@ describe('MetadataTxValidator', () => { goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, new Fr(maxBlockNumber)); - await expect(validator.validateTxs([goodTx])).resolves.toEqual([[goodTx], []]); + await expectValid(goodTx); }); it('allows txs with unset max block number', async () => { @@ -42,13 +53,14 @@ describe('MetadataTxValidator', () => { goodTx.data.constants.txContext.chainId = chainId; goodTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(false, Fr.ZERO); - await expect(validator.validateTxs([goodTx])).resolves.toEqual([[goodTx], []]); + await expectValid(goodTx); }); it('rejects txs with lower max block number', async () => { const badTx = mockTxForRollup(1); badTx.data.constants.txContext.chainId = chainId; badTx.data.rollupValidationRequests.maxBlockNumber = new MaxBlockNumber(true, blockNumber.sub(new Fr(1))); - await expect(validator.validateTxs([badTx])).resolves.toEqual([[], [badTx]]); + + await expectInvalid(badTx, 'Invalid block number'); }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts index fe3194a454e..aefde1dfd72 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts @@ -1,4 +1,4 @@ -import { type AnyTx, Tx, type TxValidator } from '@aztec/circuit-types'; +import { type AnyTx, Tx, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { type Fr } from '@aztec/circuits.js'; import { createLogger } from '@aztec/foundation/log'; @@ -7,28 +7,15 @@ export class MetadataTxValidator implements TxValidator { constructor(private chainId: Fr, private blockNumber: Fr) {} - validateTxs(txs: T[]): Promise<[validTxs: T[], invalidTxs: T[]]> { - const validTxs: T[] = []; - const invalidTxs: T[] = []; - for (const tx of txs) { - if (!this.#hasCorrectChainId(tx)) { - invalidTxs.push(tx); - continue; - } - - if (!this.#isValidForBlockNumber(tx)) { - invalidTxs.push(tx); - continue; - } - - validTxs.push(tx); + validateTx(tx: T): Promise { + const errors = []; + if (!this.#hasCorrectChainId(tx)) { + errors.push('Incorrect chain id'); } - - return Promise.resolve([validTxs, invalidTxs]); - } - - validateTx(tx: T): Promise { - return Promise.resolve(this.#hasCorrectChainId(tx) && this.#isValidForBlockNumber(tx)); + if (!this.#isValidForBlockNumber(tx)) { + errors.push('Invalid block number'); + } + return Promise.resolve(errors.length > 0 ? 
{ result: 'invalid', reason: errors } : { result: 'valid' }); } #hasCorrectChainId(tx: T): boolean { diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts index 172234ce3bc..2bf3b1d4508 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/tx_proof_validator.ts @@ -1,4 +1,9 @@ -import { type ClientProtocolCircuitVerifier, Tx, type TxValidator } from '@aztec/circuit-types'; +import { + type ClientProtocolCircuitVerifier, + Tx, + type TxValidationResult, + type TxValidator, +} from '@aztec/circuit-types'; import { createLogger } from '@aztec/foundation/log'; export class TxProofValidator implements TxValidator { @@ -6,23 +11,12 @@ export class TxProofValidator implements TxValidator { constructor(private verifier: ClientProtocolCircuitVerifier) {} - async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - - for (const tx of txs) { - if (await this.verifier.verifyProof(tx)) { - validTxs.push(tx); - } else { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for invalid proof`); - invalidTxs.push(tx); - } + async validateTx(tx: Tx): Promise { + if (!(await this.verifier.verifyProof(tx))) { + this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} for invalid proof`); + return { result: 'invalid', reason: ['Invalid proof'] }; } - - return [validTxs, invalidTxs]; - } - - validateTx(tx: Tx): Promise { - return this.verifier.verifyProof(tx); + this.#log.trace(`Accepted ${Tx.getHash(tx)} with valid proof`); + return { result: 'valid' }; } } diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 6b052af024a..5b47bc91ca3 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -6,17 +6,18 @@ import { type Gossipable, type L2BlockSource, MerkleTreeId, + P2PClientType, PeerErrorSeverity, type PeerInfo, type RawGossipMessage, TopicTypeMap, Tx, TxHash, + type TxValidationResult, type WorldStateSynchronizer, getTopicTypeForClientType, metricsTopicStrToLabels, } from '@aztec/circuit-types'; -import { P2PClientType } from '@aztec/circuit-types'; import { Fr } from '@aztec/circuits.js'; import { type EpochCache } from '@aztec/epoch-cache'; import { createLogger } from '@aztec/foundation/log'; @@ -73,14 +74,14 @@ import { GossipSubEvent } from '../types.js'; interface MessageValidator { validator: { - validateTx(tx: Tx): Promise; + validateTx(tx: Tx): Promise; }; severity: PeerErrorSeverity; } interface ValidationResult { name: string; - isValid: boolean; + isValid: TxValidationResult; severity: PeerErrorSeverity; } @@ -568,7 +569,7 @@ export class LibP2PService extends WithTracer implement return false; } - if (!validProof) { + if (validProof.result === 'invalid') { // If the proof is invalid, but the txHash is correct, then this is an active attack and we severly punish this.peerManager.penalizePeer(peerId, PeerErrorSeverity.LowToleranceError); return false; @@ -704,9 +705,10 @@ export class LibP2PService extends WithTracer implement }, doubleSpendValidator: { validator: new DoubleSpendTxValidator({ - getNullifierIndices: (nullifiers: Buffer[]) => { + nullifiersExist: async (nullifiers: Buffer[]) => { const merkleTree = this.worldStateSynchronizer.getCommitted(); - return 
merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + const indices = await merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + return indices.map(index => index !== undefined); }, }), severity: PeerErrorSeverity.HighToleranceError, @@ -725,8 +727,8 @@ export class LibP2PService extends WithTracer implement messageValidators: Record, ): Promise { const validationPromises = Object.entries(messageValidators).map(async ([name, { validator, severity }]) => { - const isValid = await validator.validateTx(tx); - return { name, isValid, severity }; + const { result } = await validator.validateTx(tx); + return { name, isValid: result === 'valid', severity }; }); // A promise that resolves when all validations have been run @@ -767,16 +769,17 @@ export class LibP2PService extends WithTracer implement } const snapshotValidator = new DoubleSpendTxValidator({ - getNullifierIndices: (nullifiers: Buffer[]) => { + nullifiersExist: async (nullifiers: Buffer[]) => { const merkleTree = this.worldStateSynchronizer.getSnapshot( blockNumber - this.config.severePeerPenaltyBlockLength, ); - return merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + const indices = await merkleTree.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers); + return indices.map(index => index !== undefined); }, }); const validSnapshot = await snapshotValidator.validateTx(tx); - if (!validSnapshot) { + if (validSnapshot.result !== 'valid') { this.peerManager.penalizePeer(peerId, PeerErrorSeverity.LowToleranceError); return false; } diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 9ec68ce94ce..ee2740b5ebb 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -5,7 +5,6 @@ import { type PublicExecutionRequest, type ServerCircuitProver, type Tx, - type TxValidator, } from '@aztec/circuit-types'; import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; import { @@ -195,7 +194,7 @@ export class TestContext { return { block, txs, msgs }; } - public async processPublicFunctions(txs: Tx[], maxTransactions: number, txValidator?: TxValidator) { + public async processPublicFunctions(txs: Tx[], maxTransactions: number) { const defaultExecutorImplementation = ( _stateManager: AvmPersistableStateManager, executionRequest: PublicExecutionRequest, @@ -220,7 +219,6 @@ export class TestContext { return await this.processPublicFunctionsWithMockExecutorImplementation( txs, maxTransactions, - txValidator, defaultExecutorImplementation, ); } @@ -244,7 +242,6 @@ export class TestContext { private async processPublicFunctionsWithMockExecutorImplementation( txs: Tx[], maxTransactions: number, - txValidator?: TxValidator, executorMock?: ( stateManager: AvmPersistableStateManager, executionRequest: PublicExecutionRequest, @@ -271,7 +268,7 @@ export class TestContext { if (executorMock) { simulateInternal.mockImplementation(executorMock); } - return await this.publicProcessor.process(txs, maxTransactions, txValidator); + return await this.publicProcessor.process(txs, { maxTransactions }); } } diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 3db7c5ca4d2..f434421dca3 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -1,5 +1,4 @@ import { - EmptyTxValidator, type EpochProver, type 
EpochProvingJobState, type ForkMerkleTreeOperations, @@ -90,7 +89,6 @@ export class EpochProvingJob implements Traceable { await asyncPool(this.config.parallelBlockLimit, this.blocks, async block => { const globalVariables = block.header.globalVariables; - const txCount = block.body.numberOfTxsIncludingPadded; const txs = this.getTxs(block); const l1ToL2Messages = await this.getL1ToL2Messages(block); const previousHeader = await this.getBlockHeader(block.number - 1); @@ -112,7 +110,7 @@ export class EpochProvingJob implements Traceable { // Process public fns const db = await this.dbProvider.fork(block.number - 1); const publicProcessor = this.publicProcessorFactory.create(db, previousHeader, globalVariables, true); - const processed = await this.processTxs(publicProcessor, txs, txCount); + const processed = await this.processTxs(publicProcessor, txs); await this.prover.addTxs(processed); await db.close(); this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, { @@ -168,12 +166,8 @@ export class EpochProvingJob implements Traceable { return this.l1ToL2MessageSource.getL1ToL2Messages(BigInt(block.number)); } - private async processTxs( - publicProcessor: PublicProcessor, - txs: Tx[], - totalNumberOfTxs: number, - ): Promise { - const [processedTxs, failedTxs] = await publicProcessor.process(txs, totalNumberOfTxs, new EmptyTxValidator()); + private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise { + const [processedTxs, failedTxs] = await publicProcessor.process(txs); if (failedTxs.length) { throw new Error( diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 6947f4101ac..14a41668893 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -14,7 +14,6 @@ import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; import { type SlasherClient } from '../slasher/index.js'; -import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; /** * Encapsulates the full sequencer and publisher. 
@@ -99,7 +98,7 @@ export class SequencerClient { l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, - new TxValidatorFactory(worldStateSynchronizer.getCommitted(), contractDataSource, !!config.enforceFees), + contractDataSource, l1Constants, deps.dateProvider, telemetryClient, diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 10f714b6cf6..09064f25404 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -59,6 +59,16 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The minimum number of txs to include in a block.', ...numberConfigHelper(1), }, + maxL2BlockGas: { + env: 'SEQ_MAX_L2_BLOCK_GAS', + description: 'The maximum L2 block gas.', + ...numberConfigHelper(10e9), + }, + maxDABlockGas: { + env: 'SEQ_MAX_DA_BLOCK_GAS', + description: 'The maximum DA block gas.', + ...numberConfigHelper(10e9), + }, coinbase: { env: 'COINBASE', parseEnv: (val: string) => EthAddress.fromString(val), diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index 35129eed538..d5fc13c50ef 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -1,8 +1,9 @@ export * from './client/index.js'; export * from './config.js'; export * from './publisher/index.js'; -export { Sequencer, SequencerState } from './sequencer/index.js'; +export * from './tx_validator/tx_validator_factory.js'; export * from './slasher/index.js'; +export { Sequencer, SequencerState, getDefaultAllowedSetupFunctions } from './sequencer/index.js'; // Used by the node to simulate public parts of transactions. Should these be moved to a shared library? // ISSUE(#9832) diff --git a/yarn-project/sequencer-client/src/sequencer/index.ts b/yarn-project/sequencer-client/src/sequencer/index.ts index 459a5cab42f..316084b13f1 100644 --- a/yarn-project/sequencer-client/src/sequencer/index.ts +++ b/yarn-project/sequencer-client/src/sequencer/index.ts @@ -1,2 +1,3 @@ export * from './config.js'; export * from './sequencer.js'; +export * from './allowed.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 60aa4208611..c012fbefb2e 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -2,18 +2,17 @@ import { BlockAttestation, type BlockBuilder, BlockProposal, + Body, ConsensusPayload, type EpochProofQuote, type L1ToL2MessageSource, L2Block, type L2BlockSource, - MerkleTreeId, + type MerkleTreeId, type MerkleTreeReadOperations, type MerkleTreeWriteOperations, type Tx, TxHash, - type UnencryptedL2Log, - UnencryptedTxL2Logs, WorldStateRunningState, type WorldStateSynchronizer, mockEpochProofQuote as baseMockEpochProofQuote, @@ -22,21 +21,21 @@ import { } from '@aztec/circuit-types'; import { AztecAddress, + BlockHeader, type ContractDataSource, EthAddress, Fr, GasFees, - type GasSettings, GlobalVariables, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '@aztec/circuits.js'; +import { makeAppendOnlyTreeSnapshot } from '@aztec/circuits.js/testing'; import { DefaultL1ContractsConfig } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; import { times } from '@aztec/foundation/collection'; -import { randomBytes } from '@aztec/foundation/crypto'; import { Signature } from '@aztec/foundation/eth-signature'; +import { type 
Logger, createLogger } from '@aztec/foundation/log'; import { TestDateProvider } from '@aztec/foundation/timer'; -import { type Writeable } from '@aztec/foundation/types'; import { type P2P, P2PClientState } from '@aztec/p2p'; import { type BlockBuilderFactory } from '@aztec/prover-client/block-builder'; import { type PublicProcessor, type PublicProcessorFactory } from '@aztec/simulator'; @@ -49,7 +48,6 @@ import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; import { type SlasherClient } from '../slasher/index.js'; -import { TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { Sequencer } from './sequencer.js'; import { SequencerState } from './utils.js'; @@ -68,7 +66,13 @@ describe('sequencer', () => { let publicProcessorFactory: MockProxy; let lastBlockNumber: number; + let newBlockNumber: number; + let newSlotNumber: number; let hash: string; + let logger: Logger; + + let block: L2Block; + let globalVariables: GlobalVariables; let sequencer: TestSubject; @@ -87,13 +91,13 @@ describe('sequencer', () => { const archive = Fr.random(); const mockedSig = new Signature(Buffer32.fromField(Fr.random()), Buffer32.fromField(Fr.random()), 27); - const committee = [EthAddress.random()]; + const getSignatures = () => [mockedSig]; + const getAttestations = () => { const attestation = new BlockAttestation(new ConsensusPayload(block.header, archive, []), mockedSig); (attestation as any).sender = committee[0]; - return [attestation]; }; @@ -101,20 +105,43 @@ describe('sequencer', () => { return new BlockProposal(new ConsensusPayload(block.header, archive, [TxHash.random()]), mockedSig); }; - let block: L2Block; - let mockedGlobalVariables: GlobalVariables; + const processTxs = async (txs: Tx[]) => { + return await Promise.all(txs.map(tx => makeProcessedTxFromPrivateOnlyTx(tx, Fr.ZERO, undefined, globalVariables))); + }; + + const mockPendingTxs = (txs: Tx[]) => { + p2p.getPendingTxCount.mockReturnValue(txs.length); + p2p.iteratePendingTxs.mockReturnValue(txs); + }; + + const makeBlock = async (txs: Tx[]) => { + const processedTxs = await processTxs(txs); + const body = new Body(processedTxs.map(tx => tx.txEffect)); + const header = BlockHeader.empty({ globalVariables: globalVariables }); + const archive = makeAppendOnlyTreeSnapshot(newBlockNumber + 1); + + block = new L2Block(archive, header, body); + return block; + }; + + const makeTx = (seed?: number) => { + const tx = mockTxForRollup(seed); + tx.data.constants.txContext.chainId = chainId; + return tx; + }; beforeEach(() => { lastBlockNumber = 0; + newBlockNumber = lastBlockNumber + 1; + newSlotNumber = newBlockNumber; hash = Fr.ZERO.toString(); + logger = createLogger('sequencer:test'); - block = L2Block.random(lastBlockNumber + 1); - - mockedGlobalVariables = new GlobalVariables( + globalVariables = new GlobalVariables( chainId, version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, + new Fr(newBlockNumber), + new Fr(newSlotNumber), Fr.ZERO, coinbase, feeRecipient, @@ -124,16 +151,17 @@ describe('sequencer', () => { publisher = mock(); publisher.getSenderAddress.mockImplementation(() => EthAddress.random()); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); - publisher.canProposeAtNextEthBlock.mockResolvedValue([ - block.header.globalVariables.slotNumber.toBigInt(), - 
block.header.globalVariables.blockNumber.toBigInt(), - ]); + publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(newBlockNumber)]); publisher.validateBlockForSubmission.mockResolvedValue(); + publisher.proposeL2Block.mockResolvedValue(true); globalVariableBuilder = mock(); - merkleTreeOps = mock(); + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); + blockBuilder = mock(); + blockBuilder.setBlockCompleted.mockImplementation(() => Promise.resolve(block)); + merkleTreeOps = mock(); merkleTreeOps.findLeafIndices.mockImplementation((_treeId: MerkleTreeId, _value: any[]) => { return Promise.resolve([undefined]); }); @@ -155,14 +183,12 @@ describe('sequencer', () => { }), }); - publicProcessor = mock({ - process: async txs => [ - await Promise.all( - txs.map(tx => makeProcessedTxFromPrivateOnlyTx(tx, Fr.ZERO, undefined, block.header.globalVariables)), - ), - [], - [], - ], + publicProcessor = mock(); + publicProcessor.process.mockImplementation(async txsIter => { + const txs = Array.from(txsIter); + const processed = await processTxs(txs); + logger.verbose(`Processed ${txs.length} txs`, { txHashes: txs.map(tx => tx.getTxHash()) }); + return [processed, [], []]; }); publicProcessorFactory = mock({ @@ -189,10 +215,9 @@ describe('sequencer', () => { create: () => blockBuilder, }); - validatorClient = mock({ - collectAttestations: mockFn().mockResolvedValue(getAttestations()), - createBlockProposal: mockFn().mockResolvedValue(createBlockProposal()), - }); + validatorClient = mock(); + validatorClient.collectAttestations.mockImplementation(() => Promise.resolve(getAttestations())); + validatorClient.createBlockProposal.mockImplementation(() => Promise.resolve(createBlockProposal())); const l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); const l1Constants = { l1GenesisTime, slotDuration, ethereumSlotDuration }; @@ -210,7 +235,7 @@ describe('sequencer', () => { l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, - new TxValidatorFactory(merkleTreeOps, contractSource, false), + contractSource, l1Constants, new TestDateProvider(), new NoopTelemetryClient(), @@ -219,29 +244,25 @@ describe('sequencer', () => { }); it('builds a block out of a single tx', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + const tx = makeTx(); const txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + block = await makeBlock([tx]); + mockPendingTxs([tx]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - // Ok, we have an issue that we never actually call the process L2 block + expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); }); it.each([ - { delayedState: SequencerState.WAITING_FOR_TXS }, + { delayedState: SequencerState.INITIALIZING_PROPOSAL }, // It would be nice to add the other states, but we would need to inject delays within the `work` loop ])('does not build a block if it does not have enough time left in the slot', async ({ delayedState }) => { // trick the sequencer into thinking that we are just too far into slot 1 
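// [Editorial sketch, not part of the diff] The refactored mocks above (getPendingTxCount plus
// iteratePendingTxs) mirror the sequencer's new pull model: the pool reports a cheap count up front,
// and txs are then drawn lazily while the block is built, instead of the old getPendingTxs bulk fetch.
// The helpers below are hypothetical and purely illustrate that consumption pattern.

function* takeUpTo<T>(txs: Iterable<T>, max: number): Generator<T> {
  let taken = 0;
  for (const tx of txs) {
    if (taken >= max) {
      return; // stop drawing from the pool once the block is full
    }
    taken++;
    yield tx;
  }
}

// The count check gates whether a proposal is even started; flushing bypasses the minimum.
function shouldStartProposal(pendingTxCount: number, minTxsPerBlock: number, isFlushing: boolean): boolean {
  return isFlushing || pendingTxCount >= minTxsPerBlock;
}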
@@ -249,14 +270,9 @@ describe('sequencer', () => { Math.floor(Date.now() / 1000) - slotDuration * 1 - (sequencer.getTimeTable()[delayedState] + 1), ); - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; - - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + const tx = makeTx(); + mockPendingTxs([tx]); + block = await makeBlock([tx]); await expect(sequencer.doRealWork()).rejects.toThrow( expect.objectContaining({ @@ -270,15 +286,11 @@ describe('sequencer', () => { }); it('builds a block when it is their turn', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + const tx = makeTx(); const txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValue([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + mockPendingTxs([tx]); + block = await makeBlock([tx]); // Not your turn! publisher.canProposeAtNextEthBlock.mockRejectedValue(new Error()); @@ -302,206 +314,83 @@ describe('sequencer', () => { await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); }); - it('builds a block out of several txs rejecting double spends', async () => { - const doubleSpendTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const validTxHashes = txs.filter((_, i) => i !== doubleSpendTxIndex).map(tx => tx.getTxHash()); - - const doubleSpendTx = txs[doubleSpendTxIndex]; - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make a nullifier from tx1 a part of the nullifier tree, so it gets rejected as double spend - const doubleSpendNullifier = doubleSpendTx.data.forRollup!.end.nullifiers[0].toBuffer(); - merkleTreeOps.findLeafIndices.mockImplementation((treeId: MerkleTreeId, value: any[]) => { - return Promise.resolve( - treeId === MerkleTreeId.NULLIFIER_TREE && value[0].equals(doubleSpendNullifier) ? 
[1n] : [undefined], - ); - }); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); - }); - - it('builds a block out of several txs rejecting incorrect chain ids', async () => { - const invalidChainTxIndex = 1; - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const invalidChainTx = txs[invalidChainTxIndex]; - const validTxHashes = txs.filter((_, i) => i !== invalidChainTxIndex).map(tx => tx.getTxHash()); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make the chain id on the invalid tx not equal to the configured chain id - invalidChainTx.data.constants.txContext.chainId = new Fr(1n + chainId.value); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); - }); - - it('builds a block out of several txs dropping the ones that go over max size', async () => { - const invalidTransactionIndex = 1; - - const txs = [mockTxForRollup(0x10000), mockTxForRollup(0x20000), mockTxForRollup(0x30000)]; - txs.forEach(tx => { - tx.data.constants.txContext.chainId = chainId; - }); - const validTxHashes = txs.filter((_, i) => i !== invalidTransactionIndex).map(tx => tx.getTxHash()); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); - - // We make txs[1] too big to fit - (txs[invalidTransactionIndex] as Writeable).unencryptedLogs = UnencryptedTxL2Logs.random(2, 4); - (txs[invalidTransactionIndex].unencryptedLogs.functionLogs[0].logs[0] as Writeable).data = - randomBytes(1024 * 1022); - - await sequencer.doRealWork(); - - expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), - ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - }); - - it('builds a block out of several txs skipping the ones not providing enough fee per gas', async () => { - const gasFees = new GasFees(10, 20); - mockedGlobalVariables.gasFees = gasFees; - - const txs = Array(5) - .fill(0) - .map((_, i) => mockTxForRollup(0x10000 * i)); - - const skippedTxIndexes = [1, 2]; - const validTxHashes: TxHash[] = []; - txs.forEach((tx, i) => { - tx.data.constants.txContext.chainId = chainId; - const maxFeesPerGas: Writeable = gasFees.clone(); - const feeToAdjust = i % 2 ? 'feePerDaGas' : 'feePerL2Gas'; - if (skippedTxIndexes.includes(i)) { - // maxFeesPerGas is less than gasFees. 
- maxFeesPerGas[feeToAdjust] = maxFeesPerGas[feeToAdjust].sub(new Fr(i + 1)); - } else { - // maxFeesPerGas is greater than or equal to gasFees. - maxFeesPerGas[feeToAdjust] = maxFeesPerGas[feeToAdjust].add(new Fr(i)); - validTxHashes.push(tx.getTxHash()); - } - (tx.data.constants.txContext.gasSettings as Writeable).maxFeesPerGas = maxFeesPerGas; - }); - - p2p.getPendingTxs.mockResolvedValueOnce(txs); - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + it('builds a block out of several txs rejecting invalid txs', async () => { + const txs = [makeTx(0x10000), makeTx(0x20000), makeTx(0x30000)]; + const validTxs = [txs[0], txs[2]]; + const invalidTx = txs[1]; + const validTxHashes = validTxs.map(tx => tx.getTxHash()); + + mockPendingTxs(txs); + block = await makeBlock([txs[0], txs[2]]); + publicProcessor.process.mockResolvedValue([ + await processTxs(validTxs), + [{ tx: invalidTx, error: new Error() }], + [], + ]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); - // The txs are not included. But they are not dropped from the pool either. - expect(p2p.deleteTxs).not.toHaveBeenCalled(); + expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidTx.getTxHash()]); }); it('builds a block once it reaches the minimum number of transactions', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); - + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); - //p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 4)); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is built with 4 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 4)); - const txHashes = txs.slice(0, 4).map(tx => tx.getTxHash()); + const neededTxs = txs.slice(0, 4); + mockPendingTxs(neededTxs); + block = await makeBlock(neededTxs); await sequencer.doRealWork(); + expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + globalVariables, + times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.zero), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, undefined); + expect(publisher.proposeL2Block).toHaveBeenCalledWith( + block, + getSignatures(), + neededTxs.map(tx => tx.getTxHash()), + undefined, + ); }); it('builds a block that contains zero real transactions once flushed', async () => { - const txs 
= times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); @@ -509,12 +398,15 @@ describe('sequencer', () => { sequencer.flush(); // block is built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); + block = await makeBlock([]); + await sequencer.doRealWork(); + expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, - Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), + globalVariables, + times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.zero), ); expect(blockBuilder.addTxs).toHaveBeenCalledWith([]); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); @@ -522,27 +414,17 @@ describe('sequencer', () => { }); it('builds a block that contains less than the minimum number of transactions once flushed', async () => { - const txs = times(8, i => { - const tx = mockTxForRollup(i * 0x10000); - tx.data.constants.txContext.chainId = chainId; - return tx; - }); - const block = L2Block.random(lastBlockNumber + 1); - - blockBuilder.setBlockCompleted.mockResolvedValue(block); - publisher.proposeL2Block.mockResolvedValueOnce(true); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + const txs = times(8, i => makeTx(i * 0x10000)); sequencer.updateConfig({ minTxsPerBlock: 4 }); // block is not built with 0 txs - p2p.getPendingTxs.mockResolvedValueOnce([]); + mockPendingTxs([]); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs - p2p.getPendingTxs.mockResolvedValueOnce(txs.slice(0, 3)); + mockPendingTxs(txs.slice(0, 3)); await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(0); @@ -551,12 +433,14 @@ describe('sequencer', () => { // block is built with 3 txs const postFlushTxs = txs.slice(0, 3); - p2p.getPendingTxs.mockResolvedValueOnce(postFlushTxs); + mockPendingTxs(postFlushTxs); + block = await makeBlock(postFlushTxs); const postFlushTxHashes = postFlushTxs.map(tx => tx.getTxHash()); + await sequencer.doRealWork(); expect(blockBuilder.startNewBlock).toHaveBeenCalledTimes(1); expect(blockBuilder.startNewBlock).toHaveBeenCalledWith( - mockedGlobalVariables, + globalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); @@ -565,31 +449,12 @@ describe('sequencer', () => { }); it('aborts building a block if the chain moves underneath it', async () => { - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; - - p2p.getPendingTxs.mockResolvedValueOnce([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); 
- publisher.proposeL2Block.mockResolvedValueOnce(true); - - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); + const tx = makeTx(); + mockPendingTxs([tx]); + block = await makeBlock([tx]); // This could practically be for any reason, e.g., could also be that we have entered a new slot. - publisher.validateBlockForSubmission - .mockResolvedValueOnce() - .mockResolvedValueOnce() - .mockRejectedValueOnce(new Error()); + publisher.validateBlockForSubmission.mockResolvedValueOnce().mockRejectedValueOnce(new Error('No block for you')); await sequencer.doRealWork(); @@ -597,19 +462,20 @@ describe('sequencer', () => { }); describe('proof quotes', () => { + let tx: Tx; let txHash: TxHash; let currentEpoch = 0n; - const setupForBlockNumber = (blockNumber: number) => { + const setupForBlockNumber = async (blockNumber: number) => { + newBlockNumber = blockNumber; + newSlotNumber = blockNumber; currentEpoch = BigInt(blockNumber) / BigInt(epochDuration); - // Create a new block and header - block = L2Block.random(blockNumber); - mockedGlobalVariables = new GlobalVariables( + globalVariables = new GlobalVariables( chainId, version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, + new Fr(blockNumber), + new Fr(blockNumber), Fr.ZERO, coinbase, feeRecipient, @@ -618,35 +484,31 @@ describe('sequencer', () => { worldState.status.mockResolvedValue({ state: WorldStateRunningState.IDLE, - syncedToL2Block: { number: block.header.globalVariables.blockNumber.toNumber() - 1, hash }, + syncedToL2Block: { number: blockNumber - 1, hash }, }); p2p.getStatus.mockResolvedValue({ - syncedToL2Block: { number: block.header.globalVariables.blockNumber.toNumber() - 1, hash }, state: P2PClientState.IDLE, + syncedToL2Block: { number: blockNumber - 1, hash }, }); - l2BlockSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + l2BlockSource.getBlockNumber.mockResolvedValue(blockNumber - 1); - l1ToL2MessageSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + l1ToL2MessageSource.getBlockNumber.mockResolvedValue(blockNumber - 1); - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); - - publisher.canProposeAtNextEthBlock.mockResolvedValue([ - block.header.globalVariables.slotNumber.toBigInt(), - block.header.globalVariables.blockNumber.toBigInt(), - ]); + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(globalVariables); + publisher.canProposeAtNextEthBlock.mockResolvedValue([BigInt(newSlotNumber), BigInt(blockNumber)]); + publisher.claimEpochProofRight.mockResolvedValueOnce(true); publisher.getEpochForSlotNumber.mockImplementation((slotNumber: bigint) => Promise.resolve(slotNumber / BigInt(epochDuration)), ); - const tx = mockTxForRollup(); - tx.data.constants.txContext.chainId = chainId; + tx = makeTx(); txHash = tx.getTxHash(); - p2p.getPendingTxs.mockResolvedValue([tx]); - blockBuilder.setBlockCompleted.mockResolvedValue(block); + mockPendingTxs([tx]); + block = await makeBlock([tx]); }; const mockEpochProofQuote = (opts: { epoch?: bigint; validUntilSlot?: bigint; fee?: number } = {}) => @@ -660,12 +522,11 @@ describe('sequencer', () => { it('submits a valid proof quote with a block', async () => { 
const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -677,15 +538,14 @@ describe('sequencer', () => { it('submits a valid proof quote even without a block', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // There are no txs! - p2p.getPendingTxs.mockResolvedValue([]); + mockPendingTxs([]); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.claimEpochProofRight.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -698,12 +558,11 @@ describe('sequencer', () => { it('does not claim the epoch previous to the first', async () => { const blockNumber = 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote({ epoch: 0n }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); publisher.getClaimableEpoch.mockImplementation(() => Promise.resolve(undefined)); @@ -714,13 +573,12 @@ describe('sequencer', () => { it('does not submit a quote with an expired slot number', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const expiredSlotNumber = block.header.globalVariables.slotNumber.toBigInt() - 1n; const proofQuote = mockEpochProofQuote({ validUntilSlot: expiredSlotNumber }); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); // The previous epoch can be claimed @@ -732,12 +590,11 @@ describe('sequencer', () => { it('does not submit a valid quote if unable to claim epoch', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); - publisher.proposeL2Block.mockResolvedValueOnce(true); publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); publisher.getClaimableEpoch.mockResolvedValue(undefined); @@ -748,7 +605,7 @@ describe('sequencer', () => { it('does not submit an invalid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); const proofQuote = mockEpochProofQuote(); @@ -767,7 +624,7 @@ describe('sequencer', () => { it('selects the lowest cost valid quote', async () => { const blockNumber = epochDuration + 1; - setupForBlockNumber(blockNumber); + await setupForBlockNumber(blockNumber); // Create 3 valid quotes with different fees. 
// And 3 invalid quotes with lower fees diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 44acbec00d3..a953e51e110 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -4,11 +4,9 @@ import { type L1ToL2MessageSource, type L2Block, type L2BlockSource, - type ProcessedTx, SequencerConfigSchema, Tx, type TxHash, - type TxValidator, type WorldStateSynchronizer, } from '@aztec/circuit-types'; import type { AllowedElement, Signature, WorldStateSynchronizerStatus } from '@aztec/circuit-types/interfaces'; @@ -17,7 +15,9 @@ import { AppendOnlyTreeSnapshot, BlockHeader, ContentCommitment, + type ContractDataSource, GENESIS_ARCHIVE_ROOT, + Gas, type GlobalVariables, StateReference, } from '@aztec/circuits.js'; @@ -39,7 +39,7 @@ import { type GlobalVariableBuilder } from '../global_variable_builder/global_bu import { type L1Publisher, VoteType } from '../publisher/l1-publisher.js'; import { prettyLogViemErrorMsg } from '../publisher/utils.js'; import { type SlasherClient } from '../slasher/slasher_client.js'; -import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; +import { createValidatorsForBlockBuilding } from '../tx_validator/tx_validator_factory.js'; import { getDefaultAllowedSetupFunctions } from './allowed.js'; import { type SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; @@ -47,12 +47,6 @@ import { SequencerState, orderAttestations } from './utils.js'; export { SequencerState }; -export type ShouldProposeArgs = { - pendingTxsCount?: number; - validTxsCount?: number; - processedTxsCount?: number; -}; - export class SequencerTooSlowError extends Error { constructor( public readonly currentState: SequencerState, @@ -90,6 +84,7 @@ export class Sequencer { private state = SequencerState.STOPPED; private allowedInSetup: AllowedElement[] = getDefaultAllowedSetupFunctions(); private maxBlockSizeInBytes: number = 1024 * 1024; + private maxBlockGas: Gas = new Gas(10e9, 10e9); private processTxTime: number = 12; private metrics: SequencerMetrics; private isFlushing: boolean = false; @@ -112,7 +107,7 @@ export class Sequencer { private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, private publicProcessorFactory: PublicProcessorFactory, - private txValidatorFactory: TxValidatorFactory, + private contractDataSource: ContractDataSource, protected l1Constants: SequencerRollupConstants, private dateProvider: DateProvider, telemetry: TelemetryClient, @@ -149,6 +144,12 @@ export class Sequencer { if (config.minTxsPerBlock !== undefined) { this.minTxsPerBLock = config.minTxsPerBlock; } + if (config.maxDABlockGas !== undefined) { + this.maxBlockGas = new Gas(config.maxDABlockGas, this.maxBlockGas.l2Gas); + } + if (config.maxL2BlockGas !== undefined) { + this.maxBlockGas = new Gas(this.maxBlockGas.daGas, config.maxL2BlockGas); + } if (config.coinbase) { this._coinbase = config.coinbase; } @@ -179,7 +180,7 @@ export class Sequencer { // How late into the slot can we be to start working const initialTime = 2; - // How long it takes to validate the txs collected and get ready to start building + // How long it takes to get ready to start building const blockPrepareTime = 1; // How long it takes to for attestations to travel across the p2p layer. 
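// [Editorial sketch, not part of the diff] The new SEQ_MAX_DA_BLOCK_GAS / SEQ_MAX_L2_BLOCK_GAS options
// are folded into a single Gas limit by updateConfig: each knob overrides one dimension and leaves the
// other at its previous value (10e9/10e9 by default). The stand-in GasLimit class below only keeps the
// example self-contained; the real Gas value object comes from @aztec/circuits.js.

class GasLimit {
  constructor(public readonly daGas: number, public readonly l2Gas: number) {}
}

type BlockGasConfig = { maxDABlockGas?: number; maxL2BlockGas?: number };

// Mirrors the updateConfig logic shown above: each option replaces one dimension of the current limit.
function applyBlockGasConfig(current: GasLimit, config: BlockGasConfig): GasLimit {
  let maxBlockGas = current;
  if (config.maxDABlockGas !== undefined) {
    maxBlockGas = new GasLimit(config.maxDABlockGas, maxBlockGas.l2Gas);
  }
  if (config.maxL2BlockGas !== undefined) {
    maxBlockGas = new GasLimit(maxBlockGas.daGas, config.maxL2BlockGas);
  }
  return maxBlockGas;
}

// e.g. capping only DA gas keeps the default L2 cap:
// applyBlockGasConfig(new GasLimit(10e9, 10e9), { maxDABlockGas: 5e9 }) -> GasLimit { daGas: 5e9, l2Gas: 10e9 }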
@@ -218,9 +219,9 @@ export class Sequencer { [SequencerState.SYNCHRONIZING]: this.aztecSlotDuration, // We always want to allow the full slot to check if we are the proposer [SequencerState.PROPOSER_CHECK]: this.aztecSlotDuration, - // First transition towards building a block - [SequencerState.WAITING_FOR_TXS]: initialTime, - // We then validate the txs and prepare to start building the block + // How late we can start initializing a new block proposal + [SequencerState.INITIALIZING_PROPOSAL]: initialTime, + // When we start building a block [SequencerState.CREATING_BLOCK]: initialTime + blockPrepareTime, // We start collecting attestations after building the block [SequencerState.COLLECTING_ATTESTATIONS]: initialTime + blockPrepareTime + processTxsTime + blockValidationTime, @@ -323,25 +324,27 @@ export class Sequencer { void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.GOVERNANCE); void this.publisher.castVote(slot, newGlobalVariables.timestamp.toBigInt(), VoteType.SLASHING); - if (!this.shouldProposeBlock(historicalHeader, {})) { + // Check the pool has enough txs to build a block + const pendingTxCount = this.p2pClient.getPendingTxCount(); + if (pendingTxCount < this.minTxsPerBLock && !this.isFlushing) { + this.log.verbose(`Not enough txs to propose block. Got ${pendingTxCount} min ${this.minTxsPerBLock}.`, { + slot, + blockNumber: newBlockNumber, + }); + await this.claimEpochProofRightIfAvailable(slot); return; } + this.setState(SequencerState.INITIALIZING_PROPOSAL, slot); this.log.verbose(`Preparing proposal for block ${newBlockNumber} at slot ${slot}`, { chainTipArchive: new Fr(chainTipArchive), blockNumber: newBlockNumber, slot, }); - this.setState(SequencerState.WAITING_FOR_TXS, slot); - - // Get txs to build the new block. - const pendingTxs = await this.p2pClient.getPendingTxs(); - - if (!this.shouldProposeBlock(historicalHeader, { pendingTxsCount: pendingTxs.length })) { - await this.claimEpochProofRightIfAvailable(slot); - return; - } + // We don't fetch exactly maxTxsPerBlock txs here because we may not need all of them if we hit a limit before, + // and also we may need to fetch more if we don't have enough valid txs. + const pendingTxs = this.p2pClient.iteratePendingTxs(); // If I created a "partial" header here that should make our job much easier. const proposalHeader = new BlockHeader( @@ -353,35 +356,12 @@ export class Sequencer { Fr.ZERO, ); - // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here. - // TODO: We should validate only the number of txs we need to speed up this process. - const allValidTxs = await this.takeValidTxs( - pendingTxs, - this.txValidatorFactory.validatorForNewTxs(newGlobalVariables, this.allowedInSetup), - ); - - // TODO: We are taking the size of the tx from private-land, but we should be doing this after running - // public functions. Only reason why we do it here now is because the public processor and orchestrator - // are set up such that they require knowing the total number of txs in advance. Still, main reason for - // exceeding max block size in bytes is contract class registration, which happens in private-land. This - // may break if we start emitting lots of log data from public-land. 
- const validTxs = this.takeTxsWithinMaxSize(allValidTxs); - - this.log.verbose( - `Collected ${validTxs.length} txs out of ${allValidTxs.length} valid txs out of ${pendingTxs.length} total pending txs for block ${newBlockNumber}`, - ); - - // Bail if we don't have enough valid txs - if (!this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length })) { - await this.claimEpochProofRightIfAvailable(slot); - return; - } - try { + // TODO(palla/txs) Is the note below still valid? We don't seem to be doing any rollback in there. // @note It is very important that the following function will FAIL and not just return early // if it have made any state changes. If not, we won't rollback the state, and you will // be in for a world of pain. - await this.buildBlockAndAttemptToPublish(validTxs, proposalHeader, historicalHeader); + await this.buildBlockAndAttemptToPublish(pendingTxs, proposalHeader, historicalHeader); } catch (err) { this.log.error(`Error assembling block`, err, { blockNumber: newBlockNumber, slot }); } @@ -469,64 +449,20 @@ export class Sequencer { this.state = proposedState; } - shouldProposeBlock(historicalHeader: BlockHeader | undefined, args: ShouldProposeArgs): boolean { - if (this.isFlushing) { - this.log.verbose(`Flushing all pending txs in new block`); - return true; - } - - // Compute time elapsed since the previous block - const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0; - const currentTime = Math.floor(Date.now() / 1000); - const elapsedSinceLastBlock = currentTime - lastBlockTime; - this.log.debug( - `Last block mined at ${lastBlockTime} current time is ${currentTime} (elapsed ${elapsedSinceLastBlock})`, - ); - - // We need to have at least minTxsPerBLock txs. - if (args.pendingTxsCount !== undefined && args.pendingTxsCount < this.minTxsPerBLock) { - this.log.verbose( - `Not creating block because not enough txs in the pool (got ${args.pendingTxsCount} min ${this.minTxsPerBLock})`, - ); - return false; - } - - // Bail if we don't have enough valid txs - if (args.validTxsCount !== undefined && args.validTxsCount < this.minTxsPerBLock) { - this.log.verbose( - `Not creating block because not enough valid txs loaded from the pool (got ${args.validTxsCount} min ${this.minTxsPerBLock})`, - ); - return false; - } - - // TODO: This check should be processedTxs.length < this.minTxsPerBLock, so we don't publish a block with - // less txs than the minimum. But that'd cause the entire block to be aborted and retried. Instead, we should - // go back to the p2p pool and load more txs until we hit our minTxsPerBLock target. Only if there are no txs - // we should bail. 
- if (args.processedTxsCount === 0 && this.minTxsPerBLock > 0) { - this.log.verbose('No txs processed correctly to build block.'); - return false; - } - - return true; - } - /** * Build a block * * Shared between the sequencer and the validator for re-execution * - * @param validTxs - The valid transactions to construct the block from + * @param pendingTxs - The pending transactions to construct the block from * @param newGlobalVariables - The global variables for the new block * @param historicalHeader - The historical header of the parent - * @param interrupt - The interrupt callback, used to validate the block for submission and check if we should propose the block * @param opts - Whether to just validate the block as a validator, as opposed to building it as a proposal */ private async buildBlock( - validTxs: Tx[], + pendingTxs: Iterable, newGlobalVariables: GlobalVariables, historicalHeader?: BlockHeader, - interrupt?: (processedTxs: ProcessedTx[]) => Promise, opts: { validateOnly?: boolean } = {}, ) { const blockNumber = newGlobalVariables.blockNumber.toBigInt(); @@ -534,19 +470,9 @@ export class Sequencer { this.log.debug(`Requesting L1 to L2 messages from contract for block ${blockNumber}`); const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(blockNumber); + const msgCount = l1ToL2Messages.length; - this.log.verbose( - `Building block ${blockNumber} with ${validTxs.length} txs and ${l1ToL2Messages.length} messages`, - { - msgCount: l1ToL2Messages.length, - txCount: validTxs.length, - slot, - blockNumber, - }, - ); - - const numRealTxs = validTxs.length; - const blockSize = Math.max(2, numRealTxs); + this.log.verbose(`Building block ${blockNumber} for slot ${slot}`, { slot, blockNumber, msgCount }); // Sync to the previous block at least await this.worldState.syncImmediate(newGlobalVariables.blockNumber.toNumber() - 1); @@ -570,18 +496,30 @@ export class Sequencer { // We set the deadline for tx processing to the start of the CREATING_BLOCK phase, plus the expected time for tx processing. // Deadline is only set if enforceTimeTable is enabled. const processingEndTimeWithinSlot = this.timeTable[SequencerState.CREATING_BLOCK] + this.processTxTime; - const processingDeadline = this.enforceTimeTable + const deadline = this.enforceTimeTable ? new Date((this.getSlotStartTimestamp(slot) + processingEndTimeWithinSlot) * 1000) : undefined; - this.log.verbose(`Processing ${validTxs.length} txs`, { + this.log.verbose(`Processing pending txs`, { slot, slotStart: new Date(this.getSlotStartTimestamp(slot) * 1000), now: new Date(this.dateProvider.now()), - deadline: processingDeadline, + deadline, }); - const processingTxValidator = this.txValidatorFactory.validatorForProcessedTxs(publicProcessorFork); + + const validators = createValidatorsForBlockBuilding( + publicProcessorFork, + this.contractDataSource, + newGlobalVariables, + !!this.config.enforceFees, + this.allowedInSetup, + ); + + // REFACTOR: Public processor should just handle processing, one tx at a time. It should be responsibility + // of the sequencer to update world state and iterate over txs. We should refactor this along with unifying the + // publicProcessorFork and orchestratorFork, to avoid doing tree insertions twice when building the block. 
+ const limits = { deadline, maxTransactions: this.maxTxsPerBlock, maxBlockSize: this.maxBlockSizeInBytes }; const [publicProcessorDuration, [processedTxs, failedTxs]] = await elapsed(() => - processor.process(validTxs, blockSize, processingTxValidator, processingDeadline), + processor.process(pendingTxs, limits, validators), ); if (failedTxs.length > 0) { @@ -609,8 +547,6 @@ export class Sequencer { const duration = Number(end - start) / 1_000; this.metrics.recordBlockBuilderTreeInsertions(duration); - await interrupt?.(processedTxs); - // All real transactions have been added, set the block as full and pad if needed const block = await blockBuilder.setBlockCompleted(); @@ -618,7 +554,7 @@ export class Sequencer { block, publicProcessorDuration, numMsgs: l1ToL2Messages.length, - numProcessedTxs: processedTxs.length, + numTxs: processedTxs.length, blockBuildingTimer, }; } finally { @@ -642,7 +578,7 @@ export class Sequencer { * @dev MUST throw instead of exiting early to ensure that world-state * is being rolled back if the block is dropped. * - * @param validTxs - The valid transactions to construct the block from + * @param pendingTxs - Iterable of pending transactions to construct the block from * @param proposalHeader - The partial header constructed for the proposal * @param historicalHeader - The historical header of the parent */ @@ -650,7 +586,7 @@ export class Sequencer { [Attributes.BLOCK_NUMBER]: proposalHeader.globalVariables.blockNumber.toNumber(), })) private async buildBlockAndAttemptToPublish( - validTxs: Tx[], + pendingTxs: Iterable, proposalHeader: BlockHeader, historicalHeader: BlockHeader | undefined, ): Promise { @@ -660,40 +596,19 @@ export class Sequencer { const blockNumber = newGlobalVariables.blockNumber.toNumber(); const slot = newGlobalVariables.slotNumber.toBigInt(); - this.metrics.recordNewBlock(blockNumber, validTxs.length); + // this.metrics.recordNewBlock(blockNumber, validTxs.length); const workTimer = new Timer(); this.setState(SequencerState.CREATING_BLOCK, slot); - /** - * BuildBlock is shared between the sequencer and the validator for re-execution - * We use the interrupt callback to validate the block for submission and check if we should propose the block - * - * If we fail, we throw an error in order to roll back - */ - const interrupt = async (processedTxs: ProcessedTx[]) => { - await this.publisher.validateBlockForSubmission(proposalHeader); - - if ( - !this.shouldProposeBlock(historicalHeader, { - validTxsCount: validTxs.length, - processedTxsCount: processedTxs.length, - }) - ) { - // TODO: Roll back changes to world state - throw new Error('Should not propose the block'); - } - }; - // Start collecting proof quotes for the previous epoch if needed in the background const proofQuotePromise = this.createProofClaimForPreviousEpoch(slot); try { - const buildBlockRes = await this.buildBlock(validTxs, newGlobalVariables, historicalHeader, interrupt); - const { block, publicProcessorDuration, numProcessedTxs, numMsgs, blockBuildingTimer } = buildBlockRes; + const buildBlockRes = await this.buildBlock(pendingTxs, newGlobalVariables, historicalHeader); + const { block, publicProcessorDuration, numTxs, numMsgs, blockBuildingTimer } = buildBlockRes; // TODO(@PhilWindle) We should probably periodically check for things like another // block being published before ours instead of just waiting on our block - await this.publisher.validateBlockForSubmission(block.header); const workDuration = workTimer.ms(); @@ -707,8 +622,8 @@ export class Sequencer { }; 
const blockHash = block.hash(); - const txHashes = validTxs.map(tx => tx.getTxHash()); - this.log.info(`Built block ${block.number} with hash ${blockHash}`, { + const txHashes = block.body.txEffects.map(tx => tx.txHash); + this.log.info(`Built block ${block.number} for slot ${slot} with ${numTxs} txs`, { blockHash, globalVariables: block.header.globalVariables.toInspect(), txHashes, @@ -734,14 +649,12 @@ export class Sequencer { await this.publishL2Block(block, attestations, txHashes, proofQuote); this.metrics.recordPublishedBlock(workDuration); this.log.info( - `Published rollup block ${ - block.number - } with ${numProcessedTxs} transactions and ${numMsgs} messages in ${Math.ceil(workDuration)}ms`, + `Published block ${block.number} with ${numTxs} txs and ${numMsgs} messages in ${Math.ceil(workDuration)}ms`, { blockNumber: block.number, blockHash: blockHash, slot, - txCount: numProcessedTxs, + txCount: txHashes.length, msgCount: numMsgs, duration: Math.ceil(workDuration), submitter: this.publisher.getSenderAddress().toString(), @@ -865,36 +778,6 @@ export class Sequencer { } } - protected async takeValidTxs(txs: T[], validator: TxValidator): Promise { - const [valid, invalid] = await validator.validateTxs(txs); - if (invalid.length > 0) { - this.log.debug(`Dropping invalid txs from the p2p pool ${Tx.getHashes(invalid).join(', ')}`); - await this.p2pClient.deleteTxs(Tx.getHashes(invalid)); - } - - return valid.slice(0, this.maxTxsPerBlock); - } - - protected takeTxsWithinMaxSize(txs: Tx[]): Tx[] { - const maxSize = this.maxBlockSizeInBytes; - let totalSize = 0; - - const toReturn: Tx[] = []; - for (const tx of txs) { - const txSize = tx.getSize() - tx.clientIvcProof.clientIvcProofBuffer.length; - if (totalSize + txSize > maxSize) { - this.log.debug( - `Dropping tx ${tx.getTxHash()} with estimated size ${txSize} due to exceeding ${maxSize} block size limit (currently at ${totalSize})`, - ); - continue; - } - toReturn.push(tx); - totalSize += txSize; - } - - return toReturn; - } - @trackSpan( 'Sequencer.claimEpochProofRightIfAvailable', slotNumber => ({ [Attributes.SLOT_NUMBER]: Number(slotNumber) }), diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 5939b43c353..af90e2f45dd 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -19,9 +19,9 @@ export enum SequencerState { */ PROPOSER_CHECK = 'PROPOSER_CHECK', /** - * Polling the P2P module for txs to include in a block. Will move to CREATING_BLOCK if there are valid txs to include, or back to SYNCHRONIZING otherwise. + * Initializing the block proposal. Will move to CREATING_BLOCK if there are valid txs to include, or back to SYNCHRONIZING otherwise. */ - WAITING_FOR_TXS = 'WAITING_FOR_TXS', + INITIALIZING_PROPOSAL = 'INITIALIZING_PROPOSAL', /** * Creating a new L2 block. Includes processing public function calls and running rollup circuits. Will move to PUBLISHING_CONTRACT_DATA. 
*/ diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts index 68a7c4f8427..07f67fdeb0b 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts @@ -46,22 +46,19 @@ describe('GasTxValidator', () => { const validateTx = async (tx: Tx) => { const validator = new GasTxValidator(publicStateSource, feeJuiceAddress, enforceFees, gasFees); - return await validator.validateTxs([tx]); + return await validator.validateTx(tx); }; const expectValid = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[tx], [], []]); + await expect(validateTx(tx)).resolves.toEqual({ result: 'valid' }); }; - const expectInvalid = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[], [tx], []]); + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); }; - const expectSkipped = async (tx: Tx) => { - const result = await validateTx(tx); - expect(result).toEqual([[], [], [tx]]); + const expectSkipped = async (tx: Tx, reason: string) => { + await expect(validateTx(tx)).resolves.toEqual({ result: 'skipped', reason: [reason] }); }; it('allows fee paying txs if fee payer has enough balance', async () => { @@ -83,11 +80,11 @@ describe('GasTxValidator', () => { it('rejects txs if fee payer has not enough balance', async () => { mockBalance(feeLimit - 1n); - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('rejects txs if fee payer has zero balance', async () => { - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('rejects txs if fee payer claims balance outside setup', async () => { @@ -96,7 +93,7 @@ describe('GasTxValidator', () => { selector: FunctionSelector.fromSignature('_increase_public_balance((Field),Field)'), args: [payer.toField(), new Fr(1n)], }); - await expectInvalid(tx); + await expectInvalid(tx, 'Insufficient fee payer balance'); }); it('allows txs with no fee payer if fees are not enforced', async () => { @@ -107,16 +104,16 @@ describe('GasTxValidator', () => { it('rejects txs with no fee payer if fees are enforced', async () => { enforceFees = true; tx.data.feePayer = AztecAddress.ZERO; - await expectInvalid(tx); + await expectInvalid(tx, 'Missing fee payer'); }); it('skips txs with not enough fee per da gas', async () => { gasFees.feePerDaGas = gasFees.feePerDaGas.add(new Fr(1)); - await expectSkipped(tx); + await expectSkipped(tx, 'Insufficient fee per gas'); }); it('skips txs with not enough fee per l2 gas', async () => { gasFees.feePerL2Gas = gasFees.feePerL2Gas.add(new Fr(1)); - await expectSkipped(tx); + await expectSkipped(tx, 'Insufficient fee per gas'); }); }); diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts index 8b4167543c9..b5ee24df54c 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts @@ -1,4 +1,4 @@ -import { type Tx, TxExecutionPhase, type TxValidator } from '@aztec/circuit-types'; +import { type Tx, TxExecutionPhase, type TxValidationResult, type TxValidator } from '@aztec/circuit-types'; import { type AztecAddress, type Fr, FunctionSelector, type GasFees } from 
'@aztec/circuits.js'; import { createLogger } from '@aztec/foundation/log'; import { computeFeePayerBalanceStorageSlot, getExecutionRequestsByPhase } from '@aztec/simulator'; @@ -27,25 +27,10 @@ export class GasTxValidator implements TxValidator { this.#gasFees = gasFees; } - async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[], skippedTxs: Tx[]]> { - const validTxs: Tx[] = []; - const invalidTxs: Tx[] = []; - const skippedTxs: Tx[] = []; - - for (const tx of txs) { - if (this.#shouldSkip(tx)) { - skippedTxs.push(tx); - } else if (await this.#validateTxFee(tx)) { - validTxs.push(tx); - } else { - invalidTxs.push(tx); - } + validateTx(tx: Tx): Promise { + if (this.#shouldSkip(tx)) { + return Promise.resolve({ result: 'skipped', reason: ['Insufficient fee per gas'] }); } - - return [validTxs, invalidTxs, skippedTxs]; - } - - validateTx(tx: Tx): Promise { return this.#validateTxFee(tx); } @@ -57,20 +42,22 @@ export class GasTxValidator implements TxValidator { const notEnoughMaxFees = maxFeesPerGas.feePerDaGas.lt(this.#gasFees.feePerDaGas) || maxFeesPerGas.feePerL2Gas.lt(this.#gasFees.feePerL2Gas); + if (notEnoughMaxFees) { this.#log.warn(`Skipping transaction ${tx.getTxHash()} due to insufficient fee per gas`); } return notEnoughMaxFees; } - async #validateTxFee(tx: Tx): Promise { + async #validateTxFee(tx: Tx): Promise { const feePayer = tx.data.feePayer; // TODO(@spalladino) Eventually remove the is_zero condition as we should always charge fees to every tx if (feePayer.isZero()) { if (this.#enforceFees) { this.#log.warn(`Rejecting transaction ${tx.getTxHash()} due to missing fee payer`); + return { result: 'invalid', reason: ['Missing fee payer'] }; } else { - return true; + return { result: 'valid' }; } } @@ -98,13 +85,13 @@ export class GasTxValidator implements TxValidator { const balance = claimFunctionCall ? 
initialBalance.add(claimFunctionCall.args[2]) : initialBalance; if (balance.lt(feeLimit)) { - this.#log.info(`Rejecting transaction due to not enough fee payer balance`, { + this.#log.warn(`Rejecting transaction due to not enough fee payer balance`, { feePayer, balance: balance.toBigInt(), feeLimit: feeLimit.toBigInt(), }); - return false; + return { result: 'invalid', reason: ['Insufficient fee payer balance'] }; } - return true; + return { result: 'valid' }; } } diff --git a/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts new file mode 100644 index 00000000000..136ad73056a --- /dev/null +++ b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.test.ts @@ -0,0 +1,57 @@ +import { MerkleTreeId, type MerkleTreeReadOperations } from '@aztec/circuit-types'; +import { times } from '@aztec/foundation/collection'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { NullifierCache } from './nullifier_cache.js'; + +describe('NullifierCache', () => { + let nullifierCache: NullifierCache; + let db: MockProxy; + let nullifiers: Buffer[]; + + beforeEach(() => { + db = mock(); + nullifierCache = new NullifierCache(db); + nullifiers = [Buffer.alloc(1, 1), Buffer.alloc(1, 2), Buffer.alloc(1, 3)]; + }); + + it('checks nullifier existence against cache', async () => { + nullifierCache.addNullifiers([nullifiers[0], nullifiers[1]]); + db.findLeafIndices.mockResolvedValue([]); + await expect(nullifierCache.nullifiersExist(nullifiers)).resolves.toEqual([true, true, false]); + }); + + it('checks nullifier existence against db', async () => { + db.findLeafIndices.mockResolvedValue([1n, 2n, undefined]); + await expect(nullifierCache.nullifiersExist(nullifiers)).resolves.toEqual([true, true, false]); + }); + + it('checks nullifier existence against db only on cache miss', async () => { + nullifierCache.addNullifiers([nullifiers[0]]); + db.findLeafIndices.mockResolvedValue([2n, undefined]); + const result = await nullifierCache.nullifiersExist(nullifiers); + expect(db.findLeafIndices).toHaveBeenCalledWith(MerkleTreeId.NULLIFIER_TREE, [nullifiers[1], nullifiers[2]]); + expect(result).toEqual([true, true, false]); + }); + + it('checks existence with several nullifiers', async () => { + // Split 60 nullifiers evenly across db, cache, or not found + const nullifiers = times(60, i => Buffer.alloc(1, i)); + const where = nullifiers.map((_, i) => + i % 3 === 0 ? ('db' as const) : i % 3 === 1 ? ('cache' as const) : ('none' as const), + ); + + // Add to the cache nullifiers flagged as cache + nullifierCache.addNullifiers(nullifiers.filter((_, i) => where[i] === 'cache')); + // The db should be queried only with nullifiers not in the cache, return true for half of them then + db.findLeafIndices.mockResolvedValue(times(40, i => (i % 2 === 0 ? 
BigInt(i) : undefined))); + + const result = await nullifierCache.nullifiersExist(nullifiers); + expect(db.findLeafIndices).toHaveBeenCalledWith( + MerkleTreeId.NULLIFIER_TREE, + nullifiers.filter((_, i) => where[i] !== 'cache'), + ); + expect(result).toEqual(times(60, i => where[i] !== 'none')); + }); +}); diff --git a/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts new file mode 100644 index 00000000000..3175f05e1d0 --- /dev/null +++ b/yarn-project/sequencer-client/src/tx_validator/nullifier_cache.ts @@ -0,0 +1,29 @@ +import { MerkleTreeId, type MerkleTreeReadOperations } from '@aztec/circuit-types'; +import { type NullifierSource } from '@aztec/p2p'; + +/** + * Implements a nullifier source by checking a DB and an in-memory collection. + * Intended for validating transactions as they are added to a block. + */ +export class NullifierCache implements NullifierSource { + nullifiers: Set; + + constructor(private db: MerkleTreeReadOperations) { + this.nullifiers = new Set(); + } + + public async nullifiersExist(nullifiers: Buffer[]): Promise { + const cacheResults = nullifiers.map(n => this.nullifiers.has(n.toString())); + const toCheckDb = nullifiers.filter((_n, index) => !cacheResults[index]); + const dbHits = await this.db.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, toCheckDb); + + let dbIndex = 0; + return nullifiers.map((_n, index) => cacheResults[index] || dbHits[dbIndex++] !== undefined); + } + + public addNullifiers(nullifiers: Buffer[]) { + for (const nullifier of nullifiers) { + this.nullifiers.add(nullifier.toString()); + } + } +} diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts index 55a1d0ecb79..53894c3e97c 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts @@ -1,4 +1,4 @@ -import { mockTx } from '@aztec/circuit-types'; +import { type Tx, mockTx } from '@aztec/circuit-types'; import { type AztecAddress, type ContractDataSource, Fr, type FunctionSelector } from '@aztec/circuits.js'; import { makeAztecAddress, makeSelector } from '@aztec/circuits.js/testing'; @@ -15,6 +15,14 @@ describe('PhasesTxValidator', () => { let allowedSetupSelector1: FunctionSelector; let allowedSetupSelector2: FunctionSelector; + const expectValid = async (tx: Tx) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'valid' }); + }; + + const expectInvalid = async (tx: Tx, reason: string) => { + await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + }; + beforeEach(() => { allowedContractClass = Fr.random(); allowedContract = makeAztecAddress(); @@ -53,7 +61,7 @@ describe('PhasesTxValidator', () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); + await expectValid(tx); }); it('allows setup functions on the contracts class allow list', async () => { @@ -70,13 +78,13 @@ describe('PhasesTxValidator', () => { } }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); + await expectValid(tx); }); it('rejects txs with setup functions not on the allow list', async () => { const tx = mockTx(1, { 
diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts
index 55a1d0ecb79..53894c3e97c 100644
--- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts
+++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts
@@ -1,4 +1,4 @@
-import { mockTx } from '@aztec/circuit-types';
+import { type Tx, mockTx } from '@aztec/circuit-types';
 import { type AztecAddress, type ContractDataSource, Fr, type FunctionSelector } from '@aztec/circuits.js';
 import { makeAztecAddress, makeSelector } from '@aztec/circuits.js/testing';
 
@@ -15,6 +15,14 @@ describe('PhasesTxValidator', () => {
   let allowedSetupSelector1: FunctionSelector;
   let allowedSetupSelector2: FunctionSelector;
 
+  const expectValid = async (tx: Tx) => {
+    await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'valid' });
+  };
+
+  const expectInvalid = async (tx: Tx, reason: string) => {
+    await expect(txValidator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] });
+  };
+
   beforeEach(() => {
     allowedContractClass = Fr.random();
     allowedContract = makeAztecAddress();
@@ -53,7 +61,7 @@ describe('PhasesTxValidator', () => {
     const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 });
     patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 });
 
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]);
+    await expectValid(tx);
   });
 
   it('allows setup functions on the contracts class allow list', async () => {
@@ -70,13 +78,13 @@ describe('PhasesTxValidator', () => {
       }
     });
 
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]);
+    await expectValid(tx);
   });
 
   it('rejects txs with setup functions not on the allow list', async () => {
     const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 });
 
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]);
+    await expectInvalid(tx, 'Setup function not on allow list');
   });
 
   it('rejects setup functions not on the contracts class list', async () => {
@@ -92,7 +100,8 @@ describe('PhasesTxValidator', () => {
        return Promise.resolve(undefined);
      }
    });
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]);
+
+    await expectInvalid(tx, 'Setup function not on allow list');
   });
 
   it('allows multiple setup functions on the allow list', async () => {
@@ -100,13 +109,13 @@
     patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 });
     patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedSetupSelector2 });
 
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]);
+    await expectValid(tx);
   });
 
   it('rejects if one setup functions is not on the allow list', async () => {
     const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 });
     patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 });
 
-    await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]);
+    await expectInvalid(tx, 'Setup function not on allow list');
   });
 });
diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts
index d21b136a828..2d885f68ce6 100644
--- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts
+++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts
@@ -3,6 +3,7 @@ import {
   type PublicExecutionRequest,
   Tx,
   TxExecutionPhase,
+  type TxValidationResult,
   type TxValidator,
 } from '@aztec/circuit-types';
 import { type ContractDataSource } from '@aztec/circuits.js';
@@ -17,48 +18,36 @@ export class PhasesTxValidator implements TxValidator<Tx> {
     this.contractDataSource = new ContractsDataSourcePublicDB(contracts);
   }
 
-  async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> {
-    const validTxs: Tx[] = [];
-    const invalidTxs: Tx[] = [];
-
-    for (const tx of txs) {
+  async validateTx(tx: Tx): Promise<TxValidationResult> {
+    try {
       // TODO(@spalladino): We add this just to handle public authwit-check calls during setup
       // which are needed for public FPC flows, but fail if the account contract hasnt been deployed yet,
       // which is what we're trying to do as part of the current txs.
       await this.contractDataSource.addNewContracts(tx);
-      if (await this.validateTx(tx)) {
-        validTxs.push(tx);
-      } else {
-        invalidTxs.push(tx);
+      if (!tx.data.forPublic) {
+        this.#log.debug(`Tx ${Tx.getHash(tx)} does not contain enqueued public functions. Skipping phases validation.`);
+        return { result: 'valid' };
       }
 
-      await this.contractDataSource.removeNewContracts(tx);
-    }
-
-    return Promise.resolve([validTxs, invalidTxs]);
-  }
-
-  async validateTx(tx: Tx): Promise<boolean> {
-    if (!tx.data.forPublic) {
-      this.#log.debug(`Tx ${Tx.getHash(tx)} does not contain enqueued public functions. Skipping phases validation.`);
-      return true;
-    }
-
-    const setupFns = getExecutionRequestsByPhase(tx, TxExecutionPhase.SETUP);
-    for (const setupFn of setupFns) {
-      if (!(await this.isOnAllowList(setupFn, this.setupAllowList))) {
-        this.#log.warn(
-          `Rejecting tx ${Tx.getHash(tx)} because it calls setup function not on allow list: ${
-            setupFn.callContext.contractAddress
-          }:${setupFn.callContext.functionSelector}`,
-        );
-
-        return false;
+      const setupFns = getExecutionRequestsByPhase(tx, TxExecutionPhase.SETUP);
+      for (const setupFn of setupFns) {
+        if (!(await this.isOnAllowList(setupFn, this.setupAllowList))) {
+          this.#log.warn(
+            `Rejecting tx ${Tx.getHash(tx)} because it calls setup function not on allow list: ${
+              setupFn.callContext.contractAddress
+            }:${setupFn.callContext.functionSelector}`,
+            { allowList: this.setupAllowList },
+          );
+
+          return { result: 'invalid', reason: ['Setup function not on allow list'] };
+        }
       }
-    }
-
-    return true;
+
+      return { result: 'valid' };
+    } finally {
+      await this.contractDataSource.removeNewContracts(tx);
+    }
   }
 
   async isOnAllowList(publicCall: PublicExecutionRequest, allowList: AllowedElement[]): Promise<boolean> {
diff --git a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts
index 59b6baab1cf..500e446360c 100644
--- a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts
+++ b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts
@@ -1,65 +1,107 @@
 import {
   type AllowedElement,
-  MerkleTreeId,
+  type ClientProtocolCircuitVerifier,
   type MerkleTreeReadOperations,
   type ProcessedTx,
   type Tx,
   type TxValidator,
 } from '@aztec/circuit-types';
-import { type ContractDataSource, type GlobalVariables } from '@aztec/circuits.js';
+import { type AztecAddress, type ContractDataSource, Fr, type GasFees, type GlobalVariables } from '@aztec/circuits.js';
 import {
   AggregateTxValidator,
   DataTxValidator,
   DoubleSpendTxValidator,
   MetadataTxValidator,
-  type NullifierSource,
+  TxProofValidator,
 } from '@aztec/p2p';
 import { ProtocolContractAddress } from '@aztec/protocol-contracts';
 import { readPublicState } from '@aztec/simulator';
 
 import { GasTxValidator, type PublicStateSource } from './gas_validator.js';
+import { NullifierCache } from './nullifier_cache.js';
 import { PhasesTxValidator } from './phases_validator.js';
 
-export class TxValidatorFactory {
-  nullifierSource: NullifierSource;
-  publicStateSource: PublicStateSource;
-  constructor(
-    private committedDb: MerkleTreeReadOperations,
-    private contractDataSource: ContractDataSource,
-    private enforceFees: boolean,
-  ) {
-    this.nullifierSource = {
-      getNullifierIndices: nullifiers =>
-        this.committedDb
-          .findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers)
-          .then(x => x.filter(index => index !== undefined) as bigint[]),
-    };
+export function createValidatorForAcceptingTxs(
+  db: MerkleTreeReadOperations,
+  contractDataSource: ContractDataSource,
+  verifier: ClientProtocolCircuitVerifier | undefined,
+  data: {
+    blockNumber: number;
+    l1ChainId: number;
+    enforceFees: boolean;
+    setupAllowList: AllowedElement[];
+    gasFees: GasFees;
+  },
+): TxValidator<Tx> {
+  const { blockNumber, l1ChainId, enforceFees, setupAllowList, gasFees } = data;
+  const validators: TxValidator<Tx>[] = [
+    new DataTxValidator(),
+    new MetadataTxValidator(new Fr(l1ChainId), new Fr(blockNumber)),
+    new DoubleSpendTxValidator(new NullifierCache(db)),
+    new PhasesTxValidator(contractDataSource, setupAllowList),
+    new GasTxValidator(new DatabasePublicStateSource(db), ProtocolContractAddress.FeeJuice, enforceFees, gasFees),
+  ];
 
-    this.publicStateSource = {
-      storageRead: (contractAddress, slot) => {
-        return readPublicState(this.committedDb, contractAddress, slot);
-      },
-    };
+  if (verifier) {
+    validators.push(new TxProofValidator(verifier));
   }
 
-  validatorForNewTxs(globalVariables: GlobalVariables, setupAllowList: AllowedElement[]): TxValidator<Tx> {
-    return new AggregateTxValidator(
-      new DataTxValidator(),
-      new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber),
-      new DoubleSpendTxValidator(this.nullifierSource),
-      new PhasesTxValidator(this.contractDataSource, setupAllowList),
-      new GasTxValidator(
-        this.publicStateSource,
-        ProtocolContractAddress.FeeJuice,
-        this.enforceFees,
-        globalVariables.gasFees,
-      ),
-    );
-  }
+  return new AggregateTxValidator(...validators);
+}
+
+export function createValidatorsForBlockBuilding(
+  db: MerkleTreeReadOperations,
+  contractDataSource: ContractDataSource,
+  globalVariables: GlobalVariables,
+  enforceFees: boolean,
+  setupAllowList: AllowedElement[],
+): {
+  preprocessValidator: TxValidator<Tx>;
+  postprocessValidator: TxValidator<ProcessedTx>;
+  nullifierCache: NullifierCache;
+} {
+  const nullifierCache = new NullifierCache(db);
+  const publicStateSource = new DatabasePublicStateSource(db);
+
+  return {
+    preprocessValidator: preprocessValidator(
+      nullifierCache,
+      publicStateSource,
+      contractDataSource,
+      enforceFees,
+      globalVariables,
+      setupAllowList,
+    ),
+    postprocessValidator: postprocessValidator(nullifierCache),
+    nullifierCache,
+  };
+}
 
-  validatorForProcessedTxs(fork: MerkleTreeReadOperations): TxValidator<ProcessedTx> {
-    return new DoubleSpendTxValidator({
-      getNullifierIndices: nullifiers => fork.findLeafIndices(MerkleTreeId.NULLIFIER_TREE, nullifiers),
-    });
+class DatabasePublicStateSource implements PublicStateSource {
+  constructor(private db: MerkleTreeReadOperations) {}
+
+  storageRead(contractAddress: AztecAddress, slot: Fr): Promise<Fr> {
+    return readPublicState(this.db, contractAddress, slot);
   }
 }
+
+function preprocessValidator(
+  nullifierCache: NullifierCache,
+  publicStateSource: PublicStateSource,
+  contractDataSource: ContractDataSource,
+  enforceFees: boolean,
+  globalVariables: GlobalVariables,
+  setupAllowList: AllowedElement[],
+): TxValidator<Tx> {
+  // We don't include the TxProofValidator nor the DataTxValidator here because they are already checked by the time we get to block building.
+  return new AggregateTxValidator(
+    new MetadataTxValidator(globalVariables.chainId, globalVariables.blockNumber),
+    new DoubleSpendTxValidator(nullifierCache),
+    new PhasesTxValidator(contractDataSource, setupAllowList),
+    new GasTxValidator(publicStateSource, ProtocolContractAddress.FeeJuice, enforceFees, globalVariables.gasFees),
+  );
+}
+
+function postprocessValidator(nullifierCache: NullifierCache): TxValidator<ProcessedTx> {
  return new DoubleSpendTxValidator(nullifierCache);
+}
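// Reviewer sketch (not part of this diff): wiring the new factory into a node-side "should we accept this tx"
// check. The db, contract data source, verifier, tx and next block number are assumed to come from the node;
// the chain id and empty allow list below are placeholders, not values prescribed by this PR.
import { type ClientProtocolCircuitVerifier, type MerkleTreeReadOperations, type Tx } from '@aztec/circuit-types';
import { type ContractDataSource, GasFees } from '@aztec/circuits.js';
import { createValidatorForAcceptingTxs } from './tx_validator_factory.js';

async function shouldAcceptTx(
  db: MerkleTreeReadOperations,
  contracts: ContractDataSource,
  verifier: ClientProtocolCircuitVerifier,
  tx: Tx,
  nextBlockNumber: number,
) {
  const validator = createValidatorForAcceptingTxs(db, contracts, verifier, {
    blockNumber: nextBlockNumber,
    l1ChainId: 31337, // placeholder chain id
    enforceFees: false,
    setupAllowList: [],
    gasFees: new GasFees(0, 0), // placeholder base fees
  });
  // Resolves to { result: 'valid' } or { result: 'invalid', reason: [...] }.
  return await validator.validateTx(tx);
}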
diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts
index d2043586288..b2696cb860c 100644
--- a/yarn-project/simulator/src/public/public_processor.test.ts
+++ b/yarn-project/simulator/src/public/public_processor.test.ts
@@ -4,6 +4,7 @@ import {
   ProvingRequestType,
   SimulationError,
   type TreeInfo,
+  type Tx,
   type TxValidator,
   mockTx,
 } from '@aztec/circuit-types';
@@ -95,7 +96,7 @@ describe('public_processor', () => {
     it('process private-only txs', async function () {
       const tx = mockPrivateOnlyTx();
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed.length).toBe(1);
       expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -106,7 +107,7 @@ describe('public_processor', () => {
     it('runs a tx with enqueued public calls', async function () {
       const tx = mockTxWithPublicCalls();
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed.length).toBe(1);
       expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -122,7 +123,7 @@ describe('public_processor', () => {
       mockedEnqueuedCallsResult.revertCode = RevertCode.APP_LOGIC_REVERTED;
       mockedEnqueuedCallsResult.revertReason = new SimulationError(`Failed`, []);
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed.length).toBe(1);
       expect(processed[0].hash).toEqual(tx.getTxHash());
@@ -135,7 +136,7 @@ describe('public_processor', () => {
       publicTxSimulator.simulate.mockRejectedValue(new SimulationError(`Failed`, []));
       const tx = mockTxWithPublicCalls();
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed).toEqual([]);
       expect(failed.length).toBe(1);
@@ -149,7 +150,7 @@ describe('public_processor', () => {
       const txs = Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed }));
 
       // We are passing 3 txs but only 2 can fit in the block
-      const [processed, failed] = await processor.process(txs, 2);
+      const [processed, failed] = await processor.process(txs, { maxTransactions: 2 });
 
       expect(processed.length).toBe(2);
       expect(processed[0].hash).toEqual(txs[0].getTxHash());
@@ -159,13 +160,25 @@ describe('public_processor', () => {
       expect(worldStateDB.commit).toHaveBeenCalledTimes(2);
     });
 
-    it('does not send a transaction to the prover if validation fails', async function () {
+    it('does not send a transaction to the prover if pre validation fails', async function () {
+      const tx = mockPrivateOnlyTx();
+
+      const txValidator: MockProxy<TxValidator<Tx>> = mock();
+      txValidator.validateTx.mockResolvedValue({ result: 'invalid', reason: ['Invalid'] });
+
+      const [processed, failed] = await processor.process([tx], {}, { preprocessValidator: txValidator });
+
+      expect(processed).toEqual([]);
+      expect(failed.length).toBe(1);
+    });
+
+    it('does not send a transaction to the prover if post validation fails', async function () {
       const tx = mockPrivateOnlyTx();
 
       const txValidator: MockProxy<TxValidator<ProcessedTx>> = mock();
-      txValidator.validateTxs.mockRejectedValue([[], [tx]]);
+      txValidator.validateTx.mockResolvedValue({ result: 'invalid', reason: ['Invalid'] });
 
-      const [processed, failed] = await processor.process([tx], 1, txValidator);
+      const [processed, failed] = await processor.process([tx], {}, { postprocessValidator: txValidator });
 
       expect(processed).toEqual([]);
       expect(failed.length).toBe(1);
@@ -183,7 +196,7 @@ describe('public_processor', () => {
       // We allocate a deadline of 1s, so only one 2 txs should fit
       const deadline = new Date(Date.now() + 1000);
 
-      const [processed, failed] = await processor.process(txs, 3, undefined, deadline);
+      const [processed, failed] = await processor.process(txs, { deadline });
 
       expect(processed.length).toBe(2);
       expect(processed[0].hash).toEqual(txs[0].getTxHash());
@@ -215,7 +228,7 @@ describe('public_processor', () => {
 
       const txFee = privateGasUsed.computeFee(globalVariables.gasFees);
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed).toHaveLength(1);
       expect(processed[0].data.feePayer).toEqual(feePayer);
@@ -239,7 +252,7 @@ describe('public_processor', () => {
       }
       tx.data.gasUsed = privateGasUsed;
 
-      const [processed, failed] = await processor.process([tx], 1);
+      const [processed, failed] = await processor.process([tx]);
 
       expect(processed).toEqual([]);
       expect(failed).toHaveLength(1);
diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts
index 5a5fc91d0e0..eb3f743d963 100644
--- a/yarn-project/simulator/src/public/public_processor.ts
+++ b/yarn-project/simulator/src/public/public_processor.ts
@@ -136,29 +136,132 @@ export class PublicProcessor implements Traceable {
    * @returns The list of processed txs with their circuit simulation outputs.
    */
   public async process(
-    txs: Tx[],
-    maxTransactions = txs.length,
-    txValidator?: TxValidator<ProcessedTx>,
-    deadline?: Date,
+    txs: Iterable<Tx>,
+    limits: {
+      maxTransactions?: number;
+      maxBlockSize?: number;
+      maxBlockGas?: Gas;
+      deadline?: Date;
+    } = {},
+    validators: {
+      preprocessValidator?: TxValidator<Tx>;
+      postprocessValidator?: TxValidator<ProcessedTx>;
+      nullifierCache?: { addNullifiers: (nullifiers: Buffer[]) => void };
+    } = {},
   ): Promise<[ProcessedTx[], FailedTx[], NestedProcessReturnValues[]]> {
-    // The processor modifies the tx objects in place, so we need to clone them.
-    txs = txs.map(tx => Tx.clone(tx));
+    const { maxTransactions, maxBlockSize, deadline, maxBlockGas } = limits;
+    const { preprocessValidator, postprocessValidator, nullifierCache } = validators;
 
     const result: ProcessedTx[] = [];
     const failed: FailedTx[] = [];
-    let returns: NestedProcessReturnValues[] = [];
-    let totalGas = new Gas(0, 0);
     const timer = new Timer();
 
-    for (const tx of txs) {
-      // only process up to the limit of the block
-      if (result.length >= maxTransactions) {
+    let totalSizeInBytes = 0;
+    let returns: NestedProcessReturnValues[] = [];
+    let totalPublicGas = new Gas(0, 0);
+    let totalBlockGas = new Gas(0, 0);
+
+    for (const origTx of txs) {
+      // Only process up to the max tx limit
+      if (maxTransactions !== undefined && result.length >= maxTransactions) {
+        this.log.debug(`Stopping tx processing due to reaching the max tx limit.`);
         break;
       }
+
+      // Bail if we've hit the deadline
+      if (deadline && this.dateProvider.now() > +deadline) {
+        this.log.warn(`Stopping tx processing due to timeout.`);
+        break;
+      }
+
+      // Skip this tx if it'd exceed max block size
+      const txHash = origTx.getTxHash().toString();
+      const preTxSizeInBytes = origTx.getEstimatedPrivateTxEffectsSize();
+      if (maxBlockSize !== undefined && totalSizeInBytes + preTxSizeInBytes > maxBlockSize) {
+        this.log.warn(`Skipping processing of tx ${txHash} sized ${preTxSizeInBytes} bytes due to block size limit`, {
+          txHash,
+          sizeInBytes: preTxSizeInBytes,
+          totalSizeInBytes,
+          maxBlockSize,
+        });
+        continue;
+      }
+
+      // Skip this tx if its gas limit would exceed the block gas limit
+      const txGasLimit = origTx.data.constants.txContext.gasSettings.gasLimits;
+      if (maxBlockGas !== undefined && totalBlockGas.add(txGasLimit).gtAny(maxBlockGas)) {
+        this.log.warn(`Skipping processing of tx ${txHash} due to block gas limit`, {
+          txHash,
+          txGasLimit,
+          totalBlockGas,
+          maxBlockGas,
+        });
+        continue;
+      }
+
+      // The processor modifies the tx objects in place, so we need to clone them.
+      const tx = Tx.clone(origTx);
+
+      // We validate the tx before processing it, to avoid unnecessary work.
+      if (preprocessValidator) {
+        const result = await preprocessValidator.validateTx(tx);
+        if (result.result === 'invalid') {
+          const reason = result.reason.join(', ');
+          this.log.warn(`Rejecting tx ${tx.getTxHash().toString()} due to pre-process validation fail: ${reason}`);
+          failed.push({ tx, error: new Error(`Tx failed preprocess validation: ${reason}`) });
+          returns.push(new NestedProcessReturnValues([]));
+          continue;
+        } else if (result.result === 'skipped') {
+          const reason = result.reason.join(', ');
+          this.log.warn(`Skipping tx ${tx.getTxHash().toString()} due to pre-process validation: ${reason}`);
+          returns.push(new NestedProcessReturnValues([]));
+          continue;
+        } else {
+          this.log.trace(`Tx ${tx.getTxHash().toString()} is valid before processing.`);
+        }
+      }
+
       try {
-        const [processedTx, returnValues] = await this.processTx(tx, txValidator, deadline);
+        const [processedTx, returnValues] = await this.processTx(tx, deadline);
+
+        // If the actual size of this tx would exceed block size, skip it
+        const txSize = processedTx.txEffect.getDASize();
+        if (maxBlockSize !== undefined && totalSizeInBytes + txSize > maxBlockSize) {
+          this.log.warn(`Skipping processed tx ${txHash} sized ${txSize} due to max block size.`, {
+            txHash,
+            sizeInBytes: txSize,
+            totalSizeInBytes,
+            maxBlockSize,
+          });
+          continue;
+        }
+
+        // Re-validate the transaction
+        if (postprocessValidator) {
+          // Only accept processed transactions that are not double-spends,
+          // public functions emitting nullifiers would pass earlier check but fail here.
+          // Note that we're checking all nullifiers generated in the private execution twice,
+          // we could store the ones already checked and skip them here as an optimization.
+          // TODO(palla/txs): Can we get into this case? AVM validates this. We should be able to remove it.
+          const result = await postprocessValidator.validateTx(processedTx);
+          if (result.result !== 'valid') {
+            const reason = result.reason.join(', ');
+            this.log.error(`Rejecting tx ${processedTx.hash} after processing: ${reason}.`);
+            failed.push({ tx, error: new Error(`Tx failed post-process validation: ${reason}`) });
+            continue;
+          } else {
+            this.log.trace(`Tx ${tx.getTxHash().toString()} is valid post processing.`);
+          }
+        }
+
+        // Otherwise, commit tx state for the next tx to be processed
+        await this.commitTxState(processedTx);
+        nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer()));
         result.push(processedTx);
         returns = returns.concat(returnValues);
-        totalGas = totalGas.add(processedTx.gasUsed.publicGas);
+
+        totalPublicGas = totalPublicGas.add(processedTx.gasUsed.publicGas);
+        totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas);
+        totalSizeInBytes += txSize;
       } catch (err: any) {
        if (err?.name === 'PublicProcessorTimeoutError') {
          this.log.warn(`Stopping tx processing due to timeout.`);
@@ -173,18 +276,22 @@ export class PublicProcessor implements Traceable {
     }
 
     const duration = timer.s();
-    const rate = duration > 0 ? totalGas.l2Gas / duration : 0;
-    this.metrics.recordAllTxs(totalGas, rate);
+    const rate = duration > 0 ? totalPublicGas.l2Gas / duration : 0;
+    this.metrics.recordAllTxs(totalPublicGas, rate);
+
+    this.log.info(`Processed ${result.length} successful txs and ${failed.length} txs in ${duration}ms`, {
+      duration,
+      rate,
+      totalPublicGas,
+      totalBlockGas,
+      totalSizeInBytes,
+    });
 
     return [result, failed, returns];
   }
 
   @trackSpan('PublicProcessor.processTx', tx => ({ [Attributes.TX_HASH]: tx.tryGetTxHash()?.toString() }))
-  private async processTx(
-    tx: Tx,
-    txValidator?: TxValidator<ProcessedTx>,
-    deadline?: Date,
-  ): Promise<[ProcessedTx, NestedProcessReturnValues[]]> {
+  private async processTx(tx: Tx, deadline?: Date): Promise<[ProcessedTx, NestedProcessReturnValues[]]> {
     const [time, [processedTx, returnValues]] = await elapsed(() => this.processTxWithinDeadline(tx, deadline));
 
     this.log.verbose(
@@ -208,20 +315,14 @@
       },
     );
 
+    return [processedTx, returnValues ?? []];
+  }
+
+  private async commitTxState(processedTx: ProcessedTx, txValidator?: TxValidator<ProcessedTx>): Promise<void> {
     // Commit the state updates from this transaction
+    // TODO(palla/txs): It seems like this doesn't do anything...?
     await this.worldStateDB.commit();
 
-    // Re-validate the transaction
-    if (txValidator) {
-      // Only accept processed transactions that are not double-spends,
-      // public functions emitting nullifiers would pass earlier check but fail here.
-      // Note that we're checking all nullifiers generated in the private execution twice,
-      // we could store the ones already checked and skip them here as an optimization.
-      const [_, invalid] = await txValidator.validateTxs([processedTx]);
-      if (invalid.length) {
-        throw new Error(`Transaction ${invalid[0].hash} invalid after processing public functions`);
-      }
-    }
     // Update the state so that the next tx in the loop has the correct .startState
     // NB: before this change, all .startStates were actually incorrect, but the issue was never caught because we either:
     // a) had only 1 tx with public calls per block, so this loop had len 1
@@ -255,8 +356,6 @@
     );
     const treeInsertionEnd = process.hrtime.bigint();
     this.metrics.recordTreeInsertions(Number(treeInsertionEnd - treeInsertionStart) / 1_000);
-
-    return [processedTx, returnValues ?? []];
   }
 
   /** Processes the given tx within deadline. Returns timeout if deadline is hit. */
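// Reviewer sketch (not part of this diff): the new PublicProcessor.process() call shape after this change.
// The processor and the validator/cache instances are assumed to be built elsewhere (for block building they
// would come from createValidatorsForBlockBuilding); the limit values below are placeholders.
import { type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types';
import { type PublicProcessor } from './public_processor.js';

async function buildBlockTxs(
  processor: PublicProcessor,
  txs: Iterable<Tx>,
  preprocessValidator: TxValidator<Tx>,
  postprocessValidator: TxValidator<ProcessedTx>,
  nullifierCache: { addNullifiers: (nullifiers: Buffer[]) => void },
) {
  const [processed, failed] = await processor.process(
    txs,
    // Placeholder limits: stop after 4 txs, ~1 MiB of effects, or a 5s deadline, whichever comes first.
    { maxTransactions: 4, maxBlockSize: 1024 * 1024, deadline: new Date(Date.now() + 5_000) },
    // Rejected txs land in `failed`; nullifiers of accepted txs are fed back into the cache.
    { preprocessValidator, postprocessValidator, nullifierCache },
  );
  return { processed, failed };
}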
diff --git a/yarn-project/txe/src/node/txe_node.ts b/yarn-project/txe/src/node/txe_node.ts
index 8a5bbe90def..8197627ce15 100644
--- a/yarn-project/txe/src/node/txe_node.ts
+++ b/yarn-project/txe/src/node/txe_node.ts
@@ -20,6 +20,7 @@ import {
   TxHash,
   type TxReceipt,
   TxScopedL2Log,
+  type TxValidationResult,
   type UnencryptedL2Log,
 } from '@aztec/circuit-types';
 import {
@@ -557,7 +558,7 @@ export class TXENode implements AztecNode {
    * @param tx - The transaction to validate for correctness.
    * @param isSimulation - True if the transaction is a simulated one without generated proofs. (Optional)
    */
-  isValidTx(_tx: Tx, _isSimulation?: boolean): Promise<boolean> {
+  isValidTx(_tx: Tx, _isSimulation?: boolean): Promise<TxValidationResult> {
     throw new Error('TXE Node method isValidTx not implemented');
   }
 
diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts
index b7874b104ce..09c19eac82b 100644
--- a/yarn-project/validator-client/src/validator.ts
+++ b/yarn-project/validator-client/src/validator.ts
@@ -1,11 +1,4 @@
-import {
-  type BlockAttestation,
-  type BlockProposal,
-  type L2Block,
-  type ProcessedTx,
-  type Tx,
-  type TxHash,
-} from '@aztec/circuit-types';
+import { type BlockAttestation, type BlockProposal, type L2Block, type Tx, type TxHash } from '@aztec/circuit-types';
 import { type BlockHeader, type GlobalVariables } from '@aztec/circuits.js';
 import { type EpochCache } from '@aztec/epoch-cache';
 import { Buffer32 } from '@aztec/foundation/buffer';
@@ -38,12 +31,11 @@ import { ValidatorMetrics } from './metrics.js';
  * We reuse the sequencer's block building functionality for re-execution
  */
 type BlockBuilderCallback = (
-  txs: Tx[],
+  txs: Iterable<Tx>,
   globalVariables: GlobalVariables,
   historicalHeader?: BlockHeader,
-  interrupt?: (processedTxs: ProcessedTx[]) => Promise<void>,
   opts?: { validateOnly?: boolean },
-) => Promise<{ block: L2Block; publicProcessorDuration: number; numProcessedTxs: number; blockBuildingTimer: Timer }>;
+) => Promise<{ block: L2Block; publicProcessorDuration: number; numTxs: number; blockBuildingTimer: Timer }>;
 
 export interface Validator {
   start(): Promise<void>;
@@ -243,9 +235,7 @@ export class ValidatorClient extends WithTracer implements Validator {
 
     // Use the sequencer's block building logic to re-execute the transactions
     const stopTimer = this.metrics.reExecutionTimer();
-    const { block } = await this.blockBuilder(txs, header.globalVariables, undefined, undefined, {
-      validateOnly: true,
-    });
+    const { block } = await this.blockBuilder(txs, header.globalVariables, undefined, { validateOnly: true });
     stopTimer();
 
     this.log.verbose(`Transaction re-execution complete`);
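// Reviewer sketch (not part of this diff): callers now branch on the TxValidationResult union instead of a
// boolean. `node` and `tx` are placeholders for an AztecNode client and a transaction; the import path for
// AztecNode is assumed to match the other circuit-types imports in this diff.
import { type AztecNode, type Tx } from '@aztec/circuit-types';

async function reportValidity(node: AztecNode, tx: Tx) {
  const validity = await node.isValidTx(tx);
  if (validity.result === 'valid') {
    return 'ok';
  }
  // Non-valid results ('invalid', and 'skipped' where applicable) carry human-readable reasons.
  return `rejected: ${validity.reason.join(', ')}`;
}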