diff --git a/.gitignore b/.gitignore index a6cd13dd..2fb38a53 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ genesis.json types/ cache/ artifacts/ +build/ diff --git a/.prettierignore b/.prettierignore index 51df5afb..72730e9c 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,3 +1,4 @@ types/ .github/ cache/ +build/ diff --git a/package-lock.json b/package-lock.json index dfbdb302..88d2d7d9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -739,6 +739,12 @@ "@types/node": "*" } }, + "@types/lodash": { + "version": "4.14.168", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.168.tgz", + "integrity": "sha512-oVfRvqHV/V6D1yifJbVRU3TMp8OT6o6BG+U9MkwuJ3U8/CsDHvalRpsxBqivn71ztOFZBTfJMvETbqHiaNSj7Q==", + "dev": true + }, "@types/lru-cache": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@types/lru-cache/-/lru-cache-5.1.0.tgz", @@ -2956,10 +2962,9 @@ } }, "lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", - "dev": true + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "log-symbols": { "version": "3.0.0", diff --git a/package.json b/package.json index 1e217d1f..2155db2b 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,7 @@ "prettier": "prettier --write \"**/*.{sol,ts,js}\"", "generate": "hardhat compile && typechain --target ethers-v5 './artifacts/contracts/**/!(*.dbg).json'", "compile": "hardhat compile", + "tsc": "tsc", "node": "hardhat node", "deploy": "ts-node ./scripts/deploy.ts", "keyless:check": "ts-node ./scripts/keyless.ts --check", @@ -34,6 +35,7 @@ "@openzeppelin/contracts": "^3.4.1", "@typechain/ethers-v5": "^1.0.0", "@types/chai": "^4.2.15", + "@types/lodash": 
"^4.14.168", "@types/mocha": "^8.2.1", "@types/node": "^14.14.33", "bn.js": "^5.2.0", @@ -51,6 +53,7 @@ }, "dependencies": { "abort-controller": "^3.0.0", - "fastify": "^3.14.2" + "fastify": "^3.14.2", + "lodash": "^4.17.21" } } diff --git a/scripts/deploy.ts b/scripts/deploy.ts index 187b449a..6d7c4e4b 100644 --- a/scripts/deploy.ts +++ b/scripts/deploy.ts @@ -4,22 +4,51 @@ import { DeploymentParameters } from "../ts/interfaces"; import fs from "fs"; import { PRODUCTION_PARAMS } from "../ts/constants"; import { StateTree } from "../ts/stateTree"; +import { Group } from "../ts/factory"; -const argv = require("minimist")(process.argv.slice(2), { - string: ["url", "root", "key", "input", "output"] +const { + url, + root, + key, + input, + output, + numPubkeys, + pubkeyMnemonic +} = require("minimist")(process.argv.slice(2), { + string: [ + "url", + "root", + "key", + "input", + "output", + "numPubkeys", + "pubkeyMnemonic" + ] }); /* Note separate pubkeys with commas > npm run deploy -- --url http://localhost:8545 \ --root 0x309976060df37ed6961ebd53027fe0c45d3cbbbdfc30a5039e86b2a7aa7fed6e - You can also specify a private key - > npm run deploy -- --key 0xYourPrivateKey + You can specify an Eth1 private key + > npm run deploy -- --key 0xYourPrivateKey + + You can specify an initial number of BLS public keys to register and their mnemonic seed (hardhat mnemonic default) + > npm run deploy -- --numPubkeys 32 \ + --pubkeyMnemonic 'focus hood pipe manual below record fly pole corn remember define slice kitchen capable search' You can use a custom parameters.json > npm run deploy -- --input parameters.json --output ../hubbe-commander/genesis.json */ +function validateArgv() { + if (pubkeyMnemonic && !numPubkeys) { + throw new Error( + "numPubkeys must be specified if a pubkeyMnemonic is provided" + ); + } +} + function getDefaultGenesisRoot(parameters: DeploymentParameters) { const stateTree = StateTree.new(parameters.MAX_DEPTH); // An completely empty genesis state @@ 
-28,22 +57,49 @@ function getDefaultGenesisRoot(parameters: DeploymentParameters) { } async function main() { + validateArgv(); + const provider = new ethers.providers.JsonRpcProvider( - argv.url ?? "http://localhost:8545" + url ?? "http://localhost:8545" ); - const signer = argv.key - ? new ethers.Wallet(argv.key).connect(provider) + const signer = key + ? new ethers.Wallet(key).connect(provider) : provider.getSigner(); - const parameters = argv.input - ? JSON.parse(fs.readFileSync(argv.input).toString()) + const parameters = input + ? JSON.parse(fs.readFileSync(input).toString()) : PRODUCTION_PARAMS; - parameters.GENESIS_STATE_ROOT = - argv.root || getDefaultGenesisRoot(parameters); + parameters.GENESIS_STATE_ROOT = root || getDefaultGenesisRoot(parameters); console.log("Deploy with parameters", parameters); - await deployAndWriteGenesis(signer, parameters, argv.output); + const { blsAccountRegistry } = await deployAndWriteGenesis( + signer, + parameters, + output + ); + + if (numPubkeys) { + console.log( + `Registering ${numPubkeys} pubkeys. 
Custom mnemonic: ${!!pubkeyMnemonic}` + ); + const group = Group.new({ + n: Number(numPubkeys), + mnemonic: pubkeyMnemonic + }); + // Convert this to batch register once implemented + for (const user of group.userIterator()) { + await blsAccountRegistry.register(user.pubkey); + } + } } -main(); +main() + .then(() => { + console.log("Deployment complete"); + process.exit(0); + }) + .catch(err => { + console.error(err); + process.exit(1); + }); diff --git a/test/client/integration.test.ts b/test/client/integration.test.ts index 4047634e..c2702339 100644 --- a/test/client/integration.test.ts +++ b/test/client/integration.test.ts @@ -9,11 +9,14 @@ import { } from "../../ts/client/features/transfer"; import { SyncerService } from "../../ts/client/services/syncer"; import { PRODUCTION_PARAMS } from "../../ts/constants"; +import { USDT } from "../../ts/decimal"; import { deployAll } from "../../ts/deploy"; import { deployKeyless } from "../../ts/deployment/deploy"; import { Group, storageManagerFactory } from "../../ts/factory"; import { Genesis } from "../../ts/genesis"; import * as mcl from "../../ts/mcl"; +import { Pubkey } from "../../ts/pubkey"; +import { State } from "../../ts/state"; describe("Client Integration", function() { it("run", async function() { @@ -22,15 +25,45 @@ describe("Client Integration", function() { const provider = signer.provider as providers.Provider; const genesisEth1Block = await provider.getBlockNumber(); await deployKeyless(signer, false); + const storagePacker = await storageManagerFactory(); + const storageSyncer = await storageManagerFactory(); + + // Setup pubkeys, state for packer & syncer + const tokenID = 1; + const initialBalance = USDT.fromHumanValue("100.12"); const group = Group.new({ n: 32 }); - const storagePacker = await storageManagerFactory(group); - const storageSyncer = await storageManagerFactory(group); + for (const user of group.userIterator()) { + const state = State.new( + user.pubkeyID, + tokenID, + 
initialBalance.l2Value, + 0 + ); + // Setup packer L2 storage + await storagePacker.pubkey.update( + user.pubkeyID, + new Pubkey(user.pubkey) + ); + await storagePacker.state.update(user.stateID, state); + + // Setup syncer L2 state + // Replace with L1 deposits once implemented + await storageSyncer.state.update(user.stateID, state); + } + await storagePacker.pubkey.commit(); + await storagePacker.state.commit(); + await storageSyncer.state.commit(); const parameters = PRODUCTION_PARAMS; parameters.USE_BURN_AUCTION = false; parameters.GENESIS_STATE_ROOT = storagePacker.state.root; const contracts = await deployAll(signer, parameters); + for (const user of group.userIterator()) { + // Setup L1 pubkeys + await contracts.blsAccountRegistry.register(user.pubkey); + } + const appID = await contracts.rollup.appID(); group.setupSigners(arrayify(appID)); @@ -58,6 +91,8 @@ describe("Client Integration", function() { const syncService = new SyncerService(apiSyncer); await syncService.initialSync(); + assert.equal(storageSyncer.state.root, storagePacker.state.root); + assert.equal(storageSyncer.pubkey.root, storagePacker.pubkey.root); }).timeout(300000); }); diff --git a/test/integration.test.ts b/test/integration.test.ts index 5e23babf..4dc8c025 100644 --- a/test/integration.test.ts +++ b/test/integration.test.ts @@ -13,7 +13,6 @@ import { } from "../ts/factory"; import { DeploymentParameters } from "../ts/interfaces"; import { MigrationTree, StateTree } from "../ts/stateTree"; -import { BurnAuction } from "../types/ethers-contracts/BurnAuction"; import * as mcl from "../ts/mcl"; import { BodylessCommitment, diff --git a/ts/client/coreAPI.ts b/ts/client/coreAPI.ts index 3b162458..dab09ae3 100644 --- a/ts/client/coreAPI.ts +++ b/ts/client/coreAPI.ts @@ -40,7 +40,7 @@ export class SyncedPoint { } export interface ICoreAPI { - getlatestBatchID(): Promise; + getLatestBatchID(): Promise; getState(stateID: number): Promise; updateState(stateID: number, state: State): Promise; 
getPubkey(pubkeyID: number): Promise; @@ -98,7 +98,7 @@ export class CoreAPI implements ICoreAPI { async getBlockNumber() { return await this.provider.getBlockNumber(); } - async getlatestBatchID() { + async getLatestBatchID() { return Number(await this.rollup.nextBatchID()) - 1; } diff --git a/ts/client/node.ts b/ts/client/node.ts index 4169b067..3ab9f832 100644 --- a/ts/client/node.ts +++ b/ts/client/node.ts @@ -1,5 +1,4 @@ -import { arrayify } from "@ethersproject/bytes"; -import { Group, storageManagerFactory } from "../factory"; +import { storageManagerFactory } from "../factory"; import * as mcl from "../../ts/mcl"; import { BigNumber } from "@ethersproject/bignumber"; import { Bidder } from "./services/bidder"; @@ -55,17 +54,18 @@ export class HubbleNode { config.providerUrl ); const signer = provider.getSigner(); - const { parameters, auxiliary } = genesis; - const group = Group.new({ n: 32, domain: arrayify(auxiliary.domain) }); - const storageManager = await storageManagerFactory(group, { - stateTreeDepth: parameters.MAX_DEPTH + const { MAX_DEPTH } = genesis.parameters; + const storageManager = await storageManagerFactory({ + stateTreeDepth: MAX_DEPTH, + pubkeyTreeDepth: MAX_DEPTH }); const api = CoreAPI.new(storageManager, genesis, provider, signer); - const feeReceiver = group.getUser(0).stateID; - const tokenID = (await storageManager.state.get(feeReceiver)).tokenID; - + // Hardcoded for now, will be configurable in + // https://github.com/thehubbleproject/hubble-contracts/issues/557 + const feeReceiver = 0; + const tokenID = 1; const pool = new TransferPool(tokenID, feeReceiver); const packer = new Packer(api, pool); @@ -74,6 +74,9 @@ export class HubbleNode { api.contracts.burnAuction ); const syncer = new SyncerService(api); + // In the future, we will want to delay starting up the rpc client + // until after the initial sync is completed (HTTP 503). 
+ // https://github.com/thehubbleproject/hubble-contracts/issues/558 const rpc = await RPC.init(config.rpcPort, storageManager, pool); return new this(nodeType, provider, syncer, packer, bidder, rpc); } diff --git a/ts/client/services/syncer.ts b/ts/client/services/syncer.ts index 635c5d31..cfe0813e 100644 --- a/ts/client/services/syncer.ts +++ b/ts/client/services/syncer.ts @@ -1,9 +1,13 @@ import { Event, EventFilter } from "@ethersproject/contracts"; +import { chunk } from "lodash"; import { Rollup } from "../../../types/ethers-contracts/Rollup"; +import { BlsAccountRegistry } from "../../../types/ethers-contracts/BlsAccountRegistry"; import { Usage } from "../../interfaces"; import { BatchHandlingContext } from "../contexts"; import { CoreAPI } from "../coreAPI"; import { nodeEmitter, SyncCompleteEvent } from "../node"; +import { PubkeyStorageEngine } from "../storageEngine"; +import { Pubkey } from "../../pubkey"; export enum SyncMode { INITIAL_SYNCING, @@ -13,14 +17,27 @@ export enum SyncMode { export class SyncerService { private mode: SyncMode; private newBatchFilter: EventFilter; + private singlePubkeyRegisteredFilter: EventFilter; + private batchPubkeyRegisteredFilter: EventFilter; private batchHandlingContext: BatchHandlingContext; private rollup: Rollup; + private accountRegistry: BlsAccountRegistry; + private pubkeyStorage: PubkeyStorageEngine; constructor(private readonly api: CoreAPI) { this.mode = SyncMode.INITIAL_SYNCING; this.rollup = this.api.rollup; + this.accountRegistry = this.api.contracts.blsAccountRegistry; this.newBatchFilter = this.rollup.filters.NewBatch(null, null, null); + this.singlePubkeyRegisteredFilter = this.accountRegistry.filters.SinglePubkeyRegistered( + null + ); + this.batchPubkeyRegisteredFilter = this.accountRegistry.filters.BatchPubkeyRegistered( + null, + null + ); this.batchHandlingContext = new BatchHandlingContext(api); + this.pubkeyStorage = this.api.l2Storage.pubkey; } getMode() { @@ -32,29 +49,70 @@ export class 
SyncerService { nodeEmitter.emit(SyncCompleteEvent); this.mode = SyncMode.REGULAR_SYNCING; this.rollup.on(this.newBatchFilter, this.newBatchListener); + this.accountRegistry.on( + this.singlePubkeyRegisteredFilter, + this.singlePubkeyRegisteredListener + ); + this.accountRegistry.on( + this.batchPubkeyRegisteredFilter, + this.batchPubkeyRegisteredListener + ); + } + + async initialPubkeyRegisteredSync(start: number, end: number) { + await Promise.all([ + this.initialSinglePubkeyRegisteredSync(start, end), + this.initialBatchPubkeyRegisteredSync(start, end) + ]); + } + + async initialSinglePubkeyRegisteredSync(start: number, end: number) { + const events = await this.accountRegistry.queryFilter( + this.singlePubkeyRegisteredFilter, + start, + end + ); + console.info( + `Block ${start} -- ${end}\t${events.length} new single public key registrations` + ); + await chunk(events, 10).reduce(async (prev, eventsChunk) => { + await prev; + await Promise.all( + eventsChunk.map(e => this.handleSinglePubkeyRegistered(e)) + ); + }, Promise.resolve()); + } + + async initialBatchPubkeyRegisteredSync(_start: number, _end: number) { + console.error("initialBatchPubkeyRegisteredSync not implemented"); + } + + async initialNewBatchSync(start: number, end: number) { + const events = await this.rollup.queryFilter( + this.newBatchFilter, + start, + end + ); + console.info(`Block ${start} -- ${end}\t${events.length} new batches`); + for (const event of events) { + await this.handleNewBatch(event); + } } async initialSync() { const chunksize = 100; let start = this.api.syncpoint.blockNumber; let latestBlock = await this.api.getBlockNumber(); - let latestBatchID = await this.api.getlatestBatchID(); + let latestBatchID = await this.api.getLatestBatchID(); while (start <= latestBlock) { const end = start + chunksize - 1; - const events = await this.rollup.queryFilter( - this.newBatchFilter, - start, - end - ); - console.info( - `Block ${start} -- ${end}\t${events.length} new batches` - ); - 
for (const event of events) { - await this.handleNewBatch(event); - } + + await this.initialPubkeyRegisteredSync(start, end); + await this.initialNewBatchSync(start, end); + start = end + 1; latestBlock = await this.api.getBlockNumber(); - latestBatchID = await this.api.getlatestBatchID(); + latestBatchID = await this.api.getLatestBatchID(); console.info( `block #${this.api.syncpoint.blockNumber}/#${latestBlock} batch ${this.api.syncpoint.batchID}/${latestBatchID}` ); @@ -81,13 +139,47 @@ export class SyncerService { console.info(`#${batchID} [${Usage[usage]}]`, batch.toString()); } + getPubkeyFromTxn(txn: { data: string }): Pubkey { + // Get public key from registration call data + const { args } = this.accountRegistry.interface.parseTransaction(txn); + return new Pubkey(args.pubkey); + } + + async handleSinglePubkeyRegistered(event: Event) { + const pubkeyID = event.args?.pubkeyID; + + const txn = await event.getTransaction(); + const pubkey = this.getPubkeyFromTxn(txn); + + await this.pubkeyStorage.update(pubkeyID, pubkey); + await this.pubkeyStorage.commit(); + + console.info(`Pubkey added ID ${pubkeyID} ${pubkey.toString()}`); + } + + async handleBatchPubkeyRegistered(_event: Event) { + console.error("handleBatchPubkeyRegistered not implemented"); + } + newBatchListener = async ( batchID: null, accountRoot: null, batchType: null, event: Event ) => { - return await this.handleNewBatch(event); + await this.handleNewBatch(event); + }; + + singlePubkeyRegisteredListener = async (pubkeyID: null, event: Event) => { + await this.handleSinglePubkeyRegistered(event); + }; + + batchPubkeyRegisteredListener = async ( + startID: null, + endID: null, + event: Event + ) => { + await this.handleBatchPubkeyRegistered(event); }; stop() { @@ -96,6 +188,14 @@ export class SyncerService { this.newBatchFilter, this.newBatchListener ); + this.accountRegistry.removeListener( + this.singlePubkeyRegisteredFilter, + this.singlePubkeyRegisteredListener + ); + 
this.accountRegistry.removeListener( + this.batchPubkeyRegisteredFilter, + this.batchPubkeyRegisteredListener + ); } } } diff --git a/ts/deploy.ts b/ts/deploy.ts index 69cee480..4820b3be 100644 --- a/ts/deploy.ts +++ b/ts/deploy.ts @@ -189,4 +189,6 @@ export async function deployAndWriteGenesis( console.log("Writing genesis file to", genesisPath); genesis.dump(genesisPath); console.log("Successsfully deployed", genesis); + + return contracts; } diff --git a/ts/factory.ts b/ts/factory.ts index 29e9c1be..70f75bad 100644 --- a/ts/factory.ts +++ b/ts/factory.ts @@ -6,10 +6,9 @@ import { StorageManager } from "./client/storageEngine"; import { DEFAULT_MNEMONIC } from "./constants"; -import { ERC20Value, ERC20ValueFactory, float16, USDT } from "./decimal"; +import { float16, USDT } from "./decimal"; import { UserNotExist } from "./exceptions"; import { Domain, solG1 } from "./mcl"; -import { Pubkey } from "./pubkey"; import { State } from "./state"; import { nullProvider, StateProvider } from "./stateTree"; import { @@ -283,37 +282,18 @@ export function txMassMigrationFactory( interface StorageManagerFactoryOptions { stateTreeDepth?: number; pubkeyTreeDepth?: number; - tokenID?: number; - initialBalance?: ERC20Value; } export async function storageManagerFactory( - group: Group, options?: StorageManagerFactoryOptions ) { const stateTreeDepth = options?.stateTreeDepth ?? 32; const pubkeyTreeDepth = options?.pubkeyTreeDepth ?? 32; - const tokenID = options?.tokenID ?? 1; - const initialBalance = - options?.initialBalance ?? 
USDT.fromHumanValue("100.12"); const stateStorage = new StateMemoryEngine(stateTreeDepth); const pubkeyStorage = new PubkeyMemoryEngine(pubkeyTreeDepth); const storageManager: StorageManager = { pubkey: pubkeyStorage, state: stateStorage }; - for (const user of group.userIterator()) { - const state = State.new( - user.pubkeyID, - tokenID, - initialBalance.l2Value, - 0 - ); - await stateStorage.update(user.stateID, state); - - await pubkeyStorage.update(user.pubkeyID, new Pubkey(user.pubkey)); - } - await stateStorage.commit(); - await pubkeyStorage.commit(); return storageManager; } diff --git a/ts/pubkey.ts b/ts/pubkey.ts index be6e637c..de0b8e53 100644 --- a/ts/pubkey.ts +++ b/ts/pubkey.ts @@ -1,6 +1,7 @@ import { solidityKeccak256 } from "ethers/lib/utils"; import { Hashable } from "./interfaces"; import { solG2 } from "./mcl"; +import { prettyHex } from "./utils"; export class Pubkey implements Hashable { constructor(public readonly pubkey: solG2) {} @@ -10,4 +11,10 @@ export class Pubkey implements Hashable { this.pubkey ); } + toString(): string { + const shortHexes = this.pubkey + .map(s => prettyHex(s.toString())) + .join(", "); + return `<Pubkey ${shortHexes}>`; + } } diff --git a/tsconfig.json b/tsconfig.json index 1c316a49..34e57d1c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -13,7 +13,8 @@ "lib": ["es2015", "esnext.asynciterable"], "sourceMap": true, "typeRoots": ["./node_modules/@types", "./types"], - "types": ["node"] + "types": ["node"], + "outDir": "./build" }, "include": ["**/*.ts"], "exclude": ["node_modules", "example", "build"],