From d5674f2a6eee1c00aa8d3fef7d1525728abad550 Mon Sep 17 00:00:00 2001
From: Chris De Leon
Date: Thu, 14 Mar 2024 18:03:44 -0700
Subject: [PATCH] removes starknet-hardhat plugin from contracts folder

---
 contracts/hardhat.config.ts                   |  42 +-
 contracts/package.json                        |   6 +-
 contracts/test/constants.ts                   |   3 +
 .../test/emergency/StarknetValidator.test.ts  | 174 +++---
 contracts/test/l1-l2-messaging.ts             | 103 ++++
 contracts/test/ocr2/aggregator.test.ts        | 514 +++++++++++-------
 contracts/test/setup.ts                       |   6 +-
 contracts/test/utils.ts                       |  58 ++
 ops/scripts/devnet-hardhat.sh                 |  89 +--
 yarn.lock                                     | 106 +++-
 10 files changed, 754 insertions(+), 347 deletions(-)
 create mode 100644 contracts/test/l1-l2-messaging.ts
 create mode 100644 contracts/test/utils.ts

diff --git a/contracts/hardhat.config.ts b/contracts/hardhat.config.ts
index 4ca4cfd44..519cd2f57 100644
--- a/contracts/hardhat.config.ts
+++ b/contracts/hardhat.config.ts
@@ -1,5 +1,4 @@
 import { HardhatUserConfig } from 'hardhat/types'
-import '@shardlabs/starknet-hardhat-plugin'
 import '@nomiclabs/hardhat-ethers'
 import '@nomicfoundation/hardhat-chai-matchers'
 import 'solidity-coverage'
@@ -19,6 +18,17 @@ const COMPILER_SETTINGS = {
  * @type import('hardhat/config').HardhatUserConfig
  */
 const config: HardhatUserConfig = {
+  // NOTE: hardhat comes with a built-in special network called 'hardhat'. This network is automatically created and
+  // used if no networks are defined in our config: https://hardhat.org/hardhat-runner/docs/config#hardhat-network. We
+  // do NOT want to use this network. Our testing scripts already spawn a hardhat node in a container, and we want to
+  // use this for the l1 <> l2 messaging tests rather than the automatically generated network. With that in mind, we
+  // need to modify this config to point to the hardhat container by adding the 'defaultNetwork' and 'networks' properties.
+  defaultNetwork: 'localhost',
+  networks: {
+    localhost: {
+      url: 'http://127.0.0.1:8545',
+    },
+  },
   solidity: {
     compilers: [
       {
@@ -27,36 +37,6 @@ const config: HardhatUserConfig = {
       },
     ],
   },
-  starknet: {
-    // dockerizedVersion: "0.10.0", // alternatively choose one of the two venv options below
-    // uses (my-venv) defined by `python -m venv path/to/my-venv`
-    // venv: "../.venv",
-
-    // uses the currently active Python environment (hopefully with available Starknet commands!)
- venv: 'active', - // network: "alpha", - network: 'devnet', - wallets: { - OpenZeppelin: { - accountName: 'OpenZeppelin', - modulePath: 'starkware.starknet.wallets.open_zeppelin.OpenZeppelinAccount', - accountPath: '~/.starknet_accounts', - }, - }, - requestTimeout: 1000000, - }, - networks: { - devnet: { - url: 'http://127.0.0.1:5050', - args: ['--cairo-compiler-manifest', '../vendor/cairo/Cargo.toml'], - }, - integratedDevnet: { - url: 'http://127.0.0.1:5050', - venv: 'active', - args: ['--lite-mode', '--cairo-compiler-manifest', '../vendor/cairo/Cargo.toml'], - // dockerizedVersion: "0.2.0" - }, - }, mocha: { timeout: 10000000, rootHooks: { diff --git a/contracts/package.json b/contracts/package.json index 131fc7d74..ee8d04656 100644 --- a/contracts/package.json +++ b/contracts/package.json @@ -5,7 +5,7 @@ "main": "index.js", "scripts": { "compile:solidity": "hardhat compile", - "test": "hardhat --network localhost test" + "test": "hardhat test" }, "author": "", "license": "MIT", @@ -13,14 +13,14 @@ "@ethereum-waffle/mock-contract": "^4.0.4", "@nomicfoundation/hardhat-chai-matchers": "^1.0.3", "@nomiclabs/hardhat-ethers": "^2.0.5", - "@shardlabs/starknet-hardhat-plugin": "^0.8.0-alpha.2", "@types/chai": "^4.3.3", "@types/elliptic": "^6.4.14", "@types/mocha": "^9.1.1", "chai": "^4.3.6", "ethers": "^5.6.8", "hardhat": "^2.16.1", - "solidity-coverage": "^0.8.2" + "solidity-coverage": "^0.8.2", + "starknet": "6.4.0" }, "dependencies": { "@chainlink/contracts": "^0.4.2", diff --git a/contracts/test/constants.ts b/contracts/test/constants.ts index ea0bd939d..e7ff055c2 100644 --- a/contracts/test/constants.ts +++ b/contracts/test/constants.ts @@ -1,2 +1,5 @@ /** 15 min */ export const TIMEOUT = 900_000 + +export const STARKNET_DEVNET_URL = 'http://127.0.0.1:5050' +export const ETH_DEVNET_URL = 'http://127.0.0.1:8545' diff --git a/contracts/test/emergency/StarknetValidator.test.ts b/contracts/test/emergency/StarknetValidator.test.ts index 54385109f..5530c2737 100644 --- a/contracts/test/emergency/StarknetValidator.test.ts +++ b/contracts/test/emergency/StarknetValidator.test.ts @@ -1,23 +1,19 @@ -import { ethers, starknet, network } from 'hardhat' -import { BigNumber, Contract, ContractFactory } from 'ethers' -import { hash, number } from 'starknet' -import { - Account, - StarknetContractFactory, - StarknetContract, - HttpNetworkConfig, -} from 'hardhat/types' -import { expect } from 'chai' -import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { abi as aggregatorAbi } from '../../artifacts/@chainlink/contracts/src/v0.8/interfaces/AggregatorV3Interface.sol/AggregatorV3Interface.json' -import { abi as accessControllerAbi } from '../../artifacts/@chainlink/contracts/src/v0.8/interfaces/AccessControllerInterface.sol/AccessControllerInterface.json' import { abi as starknetMessagingAbi } from '../../artifacts/vendor/starkware-libs/cairo-lang/src/starkware/starknet/solidity/IStarknetMessaging.sol/IStarknetMessaging.json' +import { abi as accessControllerAbi } from '../../artifacts/@chainlink/contracts/src/v0.8/interfaces/AccessControllerInterface.sol/AccessControllerInterface.json' +import { abi as aggregatorAbi } from '../../artifacts/@chainlink/contracts/src/v0.8/interfaces/AggregatorV3Interface.sol/AggregatorV3Interface.json' +import { DeclareDeployUDCResponse, RpcProvider, CallData, Account, hash } from 'starknet' import { deployMockContract, MockContract } from '@ethereum-waffle/mock-contract' -import { account, addCompilationToNetwork, expectSuccessOrDeclared } from 
'@chainlink/starknet' +import { fetchStarknetAccount, getStarknetContractArtifacts } from '../utils' +import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' +import { BigNumber, Contract, ContractFactory } from 'ethers' +import * as l1l2messaging from '../l1-l2-messaging' +import { STARKNET_DEVNET_URL } from '../constants' +import { account } from '@chainlink/starknet' +import { ethers } from 'hardhat' +import { expect } from 'chai' describe('StarknetValidator', () => { - /** Fake L2 target */ - const networkUrl: string = (network.config as HttpNetworkConfig).url + const provider = new RpcProvider({ nodeUrl: STARKNET_DEVNET_URL }) const opts = account.makeFunderOptsFromEnv() const funder = new account.Funder(opts) @@ -34,18 +30,12 @@ describe('StarknetValidator', () => { let mockAccessController: MockContract let mockAggregator: MockContract - let l2ContractFactory: StarknetContractFactory - let l2Contract: StarknetContract + let l2Contract: DeclareDeployUDCResponse before(async () => { - await addCompilationToNetwork('solidity/emergency/StarknetValidator.sol:StarknetValidator') - - // Deploy L2 account - defaultAccount = await starknet.OpenZeppelinAccount.createAccount() - - // Fund L2 account + // Setup L2 account + defaultAccount = await fetchStarknetAccount() await funder.fund([{ account: defaultAccount.address, amount: 1e21 }]) - await defaultAccount.deployAccount() // Fetch predefined L1 EOA accounts const accounts = await ethers.getSigners() @@ -54,23 +44,14 @@ describe('StarknetValidator', () => { alice = accounts[2] // Deploy L2 feed contract - l2ContractFactory = await starknet.getContractFactory('sequencer_uptime_feed') - await expectSuccessOrDeclared(defaultAccount.declare(l2ContractFactory, { maxFee: 1e20 })) - - l2Contract = await defaultAccount.deploy(l2ContractFactory, { - initial_status: 0, - owner_address: defaultAccount.starknetContract.address, + l2Contract = await defaultAccount.declareAndDeploy({ + ...getStarknetContractArtifacts('SequencerUptimeFeed'), + constructorCalldata: CallData.compile({ + initial_status: 0, + owner_address: defaultAccount.address, + }), }) - // Deploy the MockStarknetMessaging contract used to simulate L1 - L2 comms - mockStarknetMessagingFactory = await ethers.getContractFactory( - 'MockStarknetMessaging', - deployer, - ) - const messageCancellationDelay = 5 * 60 // seconds - mockStarknetMessaging = await mockStarknetMessagingFactory.deploy(messageCancellationDelay) - await mockStarknetMessaging.deployed() - // Deploy the mock feed mockGasPriceFeed = await deployMockContract(deployer, aggregatorAbi) await mockGasPriceFeed.mock.latestRoundData.returns( @@ -96,6 +77,15 @@ describe('StarknetValidator', () => { }) beforeEach(async () => { + // Deploy the MockStarknetMessaging contract used to simulate L1 - L2 comms + mockStarknetMessagingFactory = await ethers.getContractFactory( + 'MockStarknetMessaging', + deployer, + ) + const messageCancellationDelay = 5 * 60 // seconds + mockStarknetMessaging = await mockStarknetMessagingFactory.deploy(messageCancellationDelay) + await mockStarknetMessaging.deployed() + // Deploy the L1 StarknetValidator starknetValidatorFactory = await ethers.getContractFactory('StarknetValidator', deployer) starknetValidator = await starknetValidatorFactory.deploy( @@ -103,13 +93,19 @@ describe('StarknetValidator', () => { mockAccessController.address, mockGasPriceFeed.address, mockAggregator.address, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ) // Point the L2 feed contract to receive from the 
L1 StarknetValidator contract - await defaultAccount.invoke(l2Contract, 'set_l1_sender', { address: starknetValidator.address }) + await defaultAccount.execute({ + contractAddress: l2Contract.deploy.address, + entrypoint: 'set_l1_sender', + calldata: CallData.compile({ + address: starknetValidator.address, + }), + }) }) describe('#constructor', () => { @@ -120,7 +116,7 @@ describe('StarknetValidator', () => { mockAccessController.address, mockGasPriceFeed.address, mockAggregator.address, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ), @@ -148,7 +144,7 @@ describe('StarknetValidator', () => { mockAccessController.address, mockGasPriceFeed.address, ethers.constants.AddressZero, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ), @@ -162,7 +158,7 @@ describe('StarknetValidator', () => { ethers.constants.AddressZero, mockGasPriceFeed.address, mockAggregator.address, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ), @@ -176,7 +172,7 @@ describe('StarknetValidator', () => { mockAccessController.address, ethers.constants.AddressZero, mockAggregator.address, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ), @@ -244,7 +240,7 @@ describe('StarknetValidator', () => { mockAccessController.address, mockGasPriceFeed.address, mockAggregator.address, - l2Contract.address, + l2Contract.deploy.address, 0, 0, ) @@ -464,28 +460,30 @@ describe('StarknetValidator', () => { }) it('should not revert if `sequencer_uptime_feed.latest_round_data` called by an Account with no explicit access (Accounts are allowed read access)', async () => { - const { response: round } = await l2Contract.call('latest_round_data') - expect(round.answer).to.equal(0n) + const [, answer] = await provider.callContract({ + contractAddress: l2Contract.deploy.address, + entrypoint: 'latest_round_data', + }) + expect(answer).to.hexEqual('0x0') }) it('should deploy the messaging contract', async () => { - const { address, l1_provider } = await starknet.devnet.loadL1MessagingContract(networkUrl) - expect(address).not.to.be.undefined - expect(l1_provider).to.equal(networkUrl) + const { messaging_contract_address } = await l1l2messaging.loadL1MessagingContract({ + address: mockStarknetMessaging.address, + }) + expect(messaging_contract_address).not.to.be.undefined }) it('should load the already deployed contract if the address is provided', async () => { - const { address: loadedFrom } = await starknet.devnet.loadL1MessagingContract( - networkUrl, - mockStarknetMessaging.address, - ) - - expect(mockStarknetMessaging.address).to.hexEqual(loadedFrom) + const { messaging_contract_address } = await l1l2messaging.loadL1MessagingContract({ + address: mockStarknetMessaging.address, + }) + expect(mockStarknetMessaging.address).to.hexEqual(messaging_contract_address) }) it('should send a message to the L2 contract', async () => { // Load the mock messaging contract - await starknet.devnet.loadL1MessagingContract(networkUrl, mockStarknetMessaging.address) + await l1l2messaging.loadL1MessagingContract({ address: mockStarknetMessaging.address }) // Return gas price of 1 await mockGasPriceFeed.mock.latestRoundData.returns( @@ -506,23 +504,25 @@ describe('StarknetValidator', () => { await starknetValidator.connect(eoaValidator).validate(0, 0, 1, 1) // gasPrice (1) * newGasEstimate (1) // Simulate the L1 - L2 comms - const resp = await starknet.devnet.flush() - const msgFromL1 = resp.consumed_messages.from_l1 + const resp = await l1l2messaging.flush() + const msgFromL1 = resp.messages_to_l2 
expect(msgFromL1).to.have.a.lengthOf(1) - expect(resp.consumed_messages.from_l2).to.be.empty + expect(resp.messages_to_l1).to.be.empty - expect(msgFromL1[0].args.from_address).to.hexEqual(starknetValidator.address) - expect(msgFromL1[0].args.to_address).to.hexEqual(l2Contract.address) - expect(msgFromL1[0].address).to.hexEqual(mockStarknetMessaging.address) + expect(msgFromL1.at(0)?.l1_contract_address).to.hexEqual(starknetValidator.address) + expect(msgFromL1.at(0)?.l2_contract_address).to.hexEqual(l2Contract.deploy.address) // Assert L2 effects - const res = await l2Contract.call('latest_round_data') - expect(res.response.answer).to.equal(1n) + const [, answer] = await provider.callContract({ + contractAddress: l2Contract.deploy.address, + entrypoint: 'latest_round_data', + }) + expect(answer).to.hexEqual('0x1') }) it('should always send a **boolean** message to L2 contract', async () => { // Load the mock messaging contract - await starknet.devnet.loadL1MessagingContract(networkUrl, mockStarknetMessaging.address) + await l1l2messaging.loadL1MessagingContract({ address: mockStarknetMessaging.address }) // Return gas price of 1 await mockGasPriceFeed.mock.latestRoundData.returns( @@ -543,23 +543,25 @@ describe('StarknetValidator', () => { await starknetValidator.connect(eoaValidator).validate(0, 0, 1, 127) // incorrect value // Simulate the L1 - L2 comms - const resp = await starknet.devnet.flush() - const msgFromL1 = resp.consumed_messages.from_l1 + const resp = await l1l2messaging.flush() + const msgFromL1 = resp.messages_to_l2 expect(msgFromL1).to.have.a.lengthOf(1) - expect(resp.consumed_messages.from_l2).to.be.empty + expect(resp.messages_to_l1).to.be.empty - expect(msgFromL1[0].args.from_address).to.hexEqual(starknetValidator.address) - expect(msgFromL1[0].args.to_address).to.hexEqual(l2Contract.address) - expect(msgFromL1[0].address).to.hexEqual(mockStarknetMessaging.address) + expect(msgFromL1[0].l1_contract_address).to.hexEqual(starknetValidator.address) + expect(msgFromL1[0].l2_contract_address).to.hexEqual(l2Contract.deploy.address) // Assert L2 effects - const res = await l2Contract.call('latest_round_data') - expect(res.response.answer).to.equal(0n) // status unchanged - incorrect value treated as false + const [, answer] = await provider.callContract({ + contractAddress: l2Contract.deploy.address, + entrypoint: 'latest_round_data', + }) + expect(answer).to.hexEqual('0x0') // status unchanged - incorrect value treated as false }) it('should send multiple messages', async () => { // Load the mock messaging contract - await starknet.devnet.loadL1MessagingContract(networkUrl, mockStarknetMessaging.address) + await l1l2messaging.loadL1MessagingContract({ address: mockStarknetMessaging.address }) // Return gas price of 1 await mockGasPriceFeed.mock.latestRoundData.returns( @@ -584,18 +586,20 @@ describe('StarknetValidator', () => { await c.validate(0, 0, 1, 0) // final status // Simulate the L1 - L2 comms - const resp = await starknet.devnet.flush() - const msgFromL1 = resp.consumed_messages.from_l1 + const resp = await l1l2messaging.flush() + const msgFromL1 = resp.messages_to_l2 expect(msgFromL1).to.have.a.lengthOf(4) - expect(resp.consumed_messages.from_l2).to.be.empty + expect(resp.messages_to_l1).to.be.empty - expect(msgFromL1[0].args.from_address).to.hexEqual(starknetValidator.address) - expect(msgFromL1[0].args.to_address).to.hexEqual(l2Contract.address) - expect(msgFromL1[0].address).to.hexEqual(mockStarknetMessaging.address) + 
expect(msgFromL1[0].l1_contract_address).to.hexEqual(starknetValidator.address) + expect(msgFromL1[0].l2_contract_address).to.hexEqual(l2Contract.deploy.address) // Assert L2 effects - const res = await l2Contract.call('latest_round_data') - expect(res.response.answer).to.equal(0n) // final status 0 + const [, answer] = await provider.callContract({ + contractAddress: l2Contract.deploy.address, + entrypoint: 'latest_round_data', + }) + expect(answer).to.hexEqual('0x0') // final status 0 }) }) diff --git a/contracts/test/l1-l2-messaging.ts b/contracts/test/l1-l2-messaging.ts new file mode 100644 index 000000000..b27b58520 --- /dev/null +++ b/contracts/test/l1-l2-messaging.ts @@ -0,0 +1,103 @@ +import { ETH_DEVNET_URL, STARKNET_DEVNET_URL } from './constants' + +// +// Docs: https://github.com/0xSpaceShard/starknet-devnet-rs/blob/main/contracts/l1-l2-messaging/README.md#ethereum-setup +// + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/models.rs#L23 + */ +export type PostmanLoadL1MessagingContract = Readonly<{ + networkUrl?: string + address?: string +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/models.rs#L132 + */ +export type MessagingLoadAddress = Readonly<{ + messaging_contract_address: string +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/endpoints/postman.rs#L12 + */ +export const loadL1MessagingContract = async ( + params?: PostmanLoadL1MessagingContract, +): Promise => { + const res = await fetch(`${STARKNET_DEVNET_URL}/postman/load_l1_messaging_contract`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + network_url: params?.networkUrl ?? 
ETH_DEVNET_URL, + address: params?.address, + }), + }) + + const result = await res.json() + if (result.error != null) { + throw new Error(result.error) + } + return result +} + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/models.rs#L127 + */ +export type FlushParameters = Readonly<{ + dryRun?: boolean +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-types/src/rpc/messaging.rs#L52 + */ +export type MessageToL1 = Readonly<{ + from_address: string + to_address: string + payload: string[] +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-types/src/rpc/messaging.rs#L14 + */ +export type MessageToL2 = Readonly<{ + l2_contract_address: string + entry_point_selector: string + l1_contract_address: string + payload: string + paid_fee_on_l1: string + nonce: string +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/models.rs#L120 + */ +export type FlushedMessages = Readonly<{ + messages_to_l1: MessageToL1[] + messages_to_l2: MessageToL2[] + generated_l2_transactions: string[] + l1_provider: string +}> + +/* + * https://github.com/0xSpaceShard/starknet-devnet-rs/blob/7e5ff351198f799816c1857c1048bf8ee7f89428/crates/starknet-devnet-server/src/api/http/endpoints/postman.rs#L26 + */ +export const flush = async (params?: FlushParameters): Promise => { + const res = await fetch(`${STARKNET_DEVNET_URL}/postman/flush`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + dry_run: params?.dryRun ?? 
false, + }), + }) + + const result = await res.json() + if (result.error != null) { + throw new Error(result.error) + } + return result +} diff --git a/contracts/test/ocr2/aggregator.test.ts b/contracts/test/ocr2/aggregator.test.ts index 3d53e7ccd..81e9ee82a 100644 --- a/contracts/test/ocr2/aggregator.test.ts +++ b/contracts/test/ocr2/aggregator.test.ts @@ -1,16 +1,29 @@ -import { assert, expect } from 'chai' -import { starknet } from 'hardhat' -import { ec, hash, num } from 'starknet' -import { Account, StarknetContract, StarknetContractFactory } from 'hardhat/types/runtime' -import { TIMEOUT } from '../constants' -import { account, expectInvokeError, expectSuccessOrDeclared } from '@chainlink/starknet' +import { fetchStarknetAccount, getStarknetContractArtifacts } from '../utils' import { bytesToFelts } from '@chainlink/starknet-gauntlet' - -interface Oracle { +import { STARKNET_DEVNET_URL, TIMEOUT } from '../constants' +import { account } from '@chainlink/starknet' +import { assert, expect } from 'chai' +import { + DeclareDeployUDCResponse, + BigNumberish, + ParsedStruct, + LibraryError, + RpcProvider, + Contract, + CallData, + Account, + Uint256, + cairo, + hash, + num, + ec, +} from 'starknet' + +type Oracle = Readonly<{ // hex string signer: string transmitter: Account -} +}> // Observers - max 31 oracles or 31 bytes const OBSERVERS_MAX = 31 @@ -19,89 +32,90 @@ const UINT128_MAX = BigInt(2) ** BigInt(128) - BigInt(1) describe('Aggregator', function () { this.timeout(TIMEOUT) + const provider = new RpcProvider({ nodeUrl: STARKNET_DEVNET_URL }) const opts = account.makeFunderOptsFromEnv() const funder = new account.Funder(opts) - let aggregatorFactory: StarknetContractFactory - + let aggregator: DeclareDeployUDCResponse + let token: DeclareDeployUDCResponse let owner: Account - let token: StarknetContract - let aggregator: StarknetContract - let minAnswer = 2 - let maxAnswer = 1000000000 - - let f = 1 - let n = 3 * f + 1 - let oracles: Oracle[] = [] - let config_digest: number + const maxAnswer = 1000000000 + const minAnswer = 2 + const f = 1 + const n = 3 * f + 1 + const oracles: Oracle[] = [] + let config_digest: string before(async () => { - aggregatorFactory = await starknet.getContractFactory('aggregator') - - // can also be declared as - // account = (await starknet.deployAccount("OpenZeppelin")) as OpenZeppelinAccount - // if imported from hardhat/types/runtime" - owner = await starknet.OpenZeppelinAccount.createAccount() - + // Sets up the owner account + owner = await fetchStarknetAccount() await funder.fund([{ account: owner.address, amount: 1e21 }]) - await owner.deployAccount() - - const tokenFactory = await starknet.getContractFactory('link_token') - await expectSuccessOrDeclared(owner.declare(tokenFactory, { maxFee: 1e20 })) - token = await owner.deploy(tokenFactory, { - minter: owner.starknetContract.address, - owner: owner.starknetContract.address, - }) - - await owner.invoke(token, 'permissioned_mint', { - account: owner.starknetContract.address, - amount: 100_000_000_000, + console.log('Owner account has been funded') + + // Declares and deploys the LINK token contract + token = await owner.declareAndDeploy({ + ...getStarknetContractArtifacts('LinkToken'), + constructorCalldata: CallData.compile({ + minter: owner.address, + owner: owner.address, + }), }) - - await expectSuccessOrDeclared(owner.declare(aggregatorFactory, { maxFee: 1e20 })) - - aggregator = await owner.deploy(aggregatorFactory, { - owner: BigInt(owner.starknetContract.address), - link: 
BigInt(token.address), - min_answer: minAnswer, // TODO: toFelt() to correctly wrap negative ints - max_answer: maxAnswer, // TODO: toFelt() to correctly wrap negative ints - billing_access_controller: 0, // TODO: billing AC - decimals: 8, - description: starknet.shortStringToBigInt('FOO/BAR'), + console.log(`Successfully deployed LinkToken: ${token.deploy.address}`) + + // Funds the owner account with some LINK + await owner.execute({ + contractAddress: token.deploy.address, + entrypoint: 'permissioned_mint', + calldata: CallData.compile({ + account: owner.address, + amount: cairo.uint256(100_000_000_000n), + }), }) + console.log('Successfully funded owner account with LINK') + + // Performs the following in parallel: + // Deploys the aggregator contract + // Populates the oracles array with devnet accounts + ;[aggregator] = await Promise.all([ + // Declares and deploys the aggregator + owner.declareAndDeploy({ + ...getStarknetContractArtifacts('Aggregator'), + constructorCalldata: CallData.compile({ + owner: owner.address, + link: token.deploy.address, + min_answer: minAnswer, // TODO: toFelt() to correctly wrap negative ints + max_answer: maxAnswer, // TODO: toFelt() to correctly wrap negative ints + billing_access_controller: 0, // TODO: billing AC + decimals: 8, + description: 0, + }), + }), - console.log(`Deployed 'aggregator.cairo': ${aggregator.address}`) - - let futures = [] - let generateOracle = async () => { - let transmitter = await starknet.OpenZeppelinAccount.createAccount() - - await funder.fund([{ account: transmitter.address, amount: 1e21 }]) - await transmitter.deployAccount() - - return { - signer: '0x' + Buffer.from(ec.starkCurve.utils.randomPrivateKey()).toString('hex'), - transmitter, - // payee - } - } - for (let i = 0; i < n; i++) { - futures.push(generateOracle()) - } - oracles = await Promise.all(futures) - - let onchain_config: number[] = [] - let offchain_config_version = 2 - let offchain_config = new Uint8Array([1]) - let offchain_config_encoded = bytesToFelts(offchain_config) - console.log('Encoded offchain_config: %O', offchain_config_encoded) - - let config = { + // Populates the oracles array with devnet accounts + ...Array.from({ length: n }).map(async (_, i) => { + // account index 0 is taken by the owner account, so we need to offset by 1 + const transmitter = await fetchStarknetAccount({ accountIndex: i + 1 }) + await funder.fund([{ account: transmitter.address, amount: 1e21 }]) + oracles.push({ + signer: '0x' + Buffer.from(ec.starkCurve.utils.randomPrivateKey()).toString('hex'), + transmitter, + // payee + }) + }), + ]) + console.log(`Successfully deployed Aggregator: ${aggregator.deploy.address}`) + + // Defines the offchain config + const onchain_config = new Array() + const offchain_config = new Uint8Array([1]) + const offchain_config_encoded = bytesToFelts(offchain_config) + const offchain_config_version = 2 + const config = { oracles: oracles.map((oracle) => { return { signer: ec.starkCurve.getStarkKey(oracle.signer), - transmitter: oracle.transmitter.starknetContract.address, + transmitter: oracle.transmitter.address, } }), f, @@ -109,70 +123,119 @@ describe('Aggregator', function () { offchain_config_version, offchain_config: offchain_config_encoded, } - await owner.invoke(aggregator, 'set_config', config) + console.log('Encoded offchain_config: %O', offchain_config_encoded) + + // Sets the OCR config + await owner.execute({ + contractAddress: aggregator.deploy.address, + entrypoint: 'set_config', + calldata: CallData.compile(config), + }) 
console.log('Config: %O', config) - let { response } = await aggregator.call('latest_config_details') - config_digest = response[2] - console.log(`Config digest: 0x${config_digest.toString(16)}`) + // Sets the billing config + await owner.execute({ + contractAddress: aggregator.deploy.address, + entrypoint: 'set_billing', + calldata: CallData.compile({ + observation_payment_gjuels: 1, + transmission_payment_gjuels: 1, + gas_base: 1, + gas_per_signature: 1, + }), + }) + + // Gets the config details as hex encoded strings: + // + // result[0] = config_count + // result[1] = block_number + // result[2] = config_digest + // + const [, blockNumber, configDigest] = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'latest_config_details', + }) + console.log(`Config digest: ${configDigest}`) + config_digest = configDigest // Immitate the fetch done by relay to confirm latest_config_details_works - let block = await starknet.getBlock({ blockNumber: response.block_number }) - let events = block.transaction_receipts[0].events + const block = await provider.getBlock(parseInt(blockNumber, 16)) + const txHash = block.transactions.at(0) + if (txHash == null) { + assert.fail('unexpectedly found no transacitons') + } - assert.isNotEmpty(events) - assert.equal(events.length, 2) - console.log("Log raw 'ConfigSet' event: %O", events[0]) + // Gets the transaction receipt + const receipt = await provider.waitForTransaction(txHash) + + // Checks that the receipt has events to decode + const events = receipt.events + const event = events.at(0) + if (event == null) { + assert.fail('unexpectedly received no events') + } else { + console.log("Log raw 'ConfigSet' event: %O", event) + } - const decodedEvents = aggregator.decodeEvents(events) - assert.isNotEmpty(decodedEvents) - assert.equal(decodedEvents.length, 1) - console.log("Log decoded 'ConfigSet' event: %O", decodedEvents[0]) + // Decodes the events + const { abi: aggregatorAbi } = await provider.getClassByHash(aggregator.declare.class_hash) + const aggregatorContract = new Contract(aggregatorAbi, aggregator.deploy.address, provider) + const decodedEvents = aggregatorContract.parseEvents(receipt) + const decodedEvent = decodedEvents.at(0) + if (decodedEvent == null) { + assert.fail('unexpectedly received no decoded events') + } else { + console.log("Log decoded 'ConfigSet' event: %O", decodedEvent) + } - let e = decodedEvents[0] - assert.equal(e.name, 'ConfigSet') + // Double checks that the ConfigSet event exists in the decoded event payload + assert.isTrue(Object.prototype.hasOwnProperty.call(decodedEvent, 'ConfigSet')) }) describe('OCR aggregator behavior', function () { - let transmit = async (epoch_and_round: number, answer: num.BigNumberish): Promise => { - let extra_hash = 1 - let observation_timestamp = 1 - let juels_per_fee_coin = 1 - let gas_price = 1 - - let observers_buf = Buffer.alloc(31) - let observations = [] - - for (const [index, _] of oracles.entries()) { - observers_buf[index] = index + const transmit = async (epochAndRound: number, answer: num.BigNumberish) => { + // Defines helper variables + const observations = new Array() + const observersBuf = Buffer.alloc(31) + const observationTimestamp = 1 + const juelsPerFeeCoin = 1 + const extraHash = 1 + const gasPrice = 1 + + // Updates the observer state + for (let i = 0; i < oracles.length; i++) { + observersBuf[i] = i observations.push(answer) } - // convert to a single value that will be decoded by toBN - let observers = 
`0x${observers_buf.toString('hex')}` + // Converts observersBuf to a single value that will be decoded by toBN + const observers = `0x${observersBuf.toString('hex')}` assert.equal(observers, OBSERVERS_HEX) + // Defines report data const reportData = [ // report_context config_digest, - epoch_and_round, - extra_hash, + epochAndRound, + extraHash, // raw_report - observation_timestamp, + observationTimestamp, observers, observations.length, ...observations, - juels_per_fee_coin, - gas_price, + juelsPerFeeCoin, + gasPrice, ] - console.log('report data:', reportData) + + // Hashes the report data const reportDigest = hash.computeHashOnElements(reportData) console.log('Report data: %O', reportData) console.log(`Report digest: ${reportDigest}`) + // Generates report signatures console.log('Report signatures - START') const signatures = [] - for (let { signer } of oracles.slice(0, f + 1)) { + for (const { signer } of oracles.slice(0, f + 1)) { const signature = ec.starkCurve.sign(reportDigest, signer) const { r, s } = signature const starkKey = ec.starkCurve.getStarkKey(signer) @@ -188,65 +251,113 @@ describe('Aggregator', function () { } console.log('Report signatures - END\n') - const transmitter = oracles[0].transmitter - return await transmitter.invoke(aggregator, 'transmit', { - report_context: { - config_digest, - epoch_and_round, - extra_hash, + // Gets the first transmitter + const transmitter = oracles.at(0)?.transmitter + if (transmitter == null) { + assert.fail('no oracles exist') + } + + // Executes the transmit function on the aggregator contract + return await transmitter.execute([ + { + contractAddress: aggregator.deploy.address, + entrypoint: 'transmit', + calldata: CallData.compile({ + report_context: { + config_digest, + epoch_and_round: epochAndRound, + extra_hash: extraHash, + }, + observation_timestamp: observationTimestamp, + observers, + observations, + juels_per_fee_coin: juelsPerFeeCoin, + gas_price: gasPrice, + signatures, + }), }, - observation_timestamp, - observers, - observations, - juels_per_fee_coin, - gas_price, - signatures, - }) + ]) } it("should emit 'NewTransmission' event on transmit", async () => { - const txHash = await transmit(1, 99) - const receipt = await starknet.getTransactionReceipt(txHash) + // Calls the transmit function + const { transaction_hash } = await transmit(1, 99) + const receipt = await provider.getTransactionReceipt(transaction_hash) + // Double checks that some events were emitted assert.isNotEmpty(receipt.events) console.log("Log raw 'NewTransmission' event: %O", receipt.events[0]) - const decodedEvents = aggregator.decodeEvents(receipt.events) - assert.isNotEmpty(decodedEvents) - console.log("Log decoded 'NewTransmission' event: %O", decodedEvents[0]) - - const e = decodedEvents[0] - const transmitter = oracles[0].transmitter.address + // Decodes the events + const { abi: aggregatorAbi } = await provider.getClassByHash(aggregator.declare.class_hash) + const aggregatorContract = new Contract(aggregatorAbi, aggregator.deploy.address, provider) + const decodedEvents = aggregatorContract.parseEvents(receipt) + const decodedEvent = decodedEvents.at(0) + if (decodedEvent == null) { + assert.fail('unexpectedly received no decoded events') + } else { + console.log("Log decoded 'NewTransmission' event: %O", decodedEvent) + } - assert.equal(e.name, 'NewTransmission') - assert.equal(e.data.round_id, 1n) - assert.equal(e.data.observation_timestamp, 1n) - assert.equal(e.data.epoch_and_round, 1n) + // Validates the decoded event + const e = 
decodedEvent['NewTransmission'] + assert.isTrue(Object.prototype.hasOwnProperty.call(decodedEvent, 'NewTransmission')) + assert.equal(e.round_id, 1n) + assert.equal(e.observation_timestamp, 1n) + assert.equal(e.epoch_and_round, 1n) // assert.equal(e.data.reimbursement, 0n) - const len = 32 * 2 // 32 bytes (hex) - // NOTICE: Leading zeros are trimmed for an encoded felt (number). // To decode, the raw felt needs to be start padded up to max felt size (252 bits or < 32 bytes). - const hexPadStart = (data: number | bigint, len: number) => { + const hexPadStart = ( + data: BigNumberish | Uint256 | ParsedStruct | BigNumberish[], + len: number, + ) => { return `0x${data.toString(16).padStart(len, '0')}` } - expect(hexPadStart(e.data.transmitter, len)).to.hexEqual(transmitter) + // Validates the transmitter + const transmitterAddr = oracles[0].transmitter.address + const len = 32 * 2 // 32 bytes (hex) + expect(hexPadStart(e.transmitter, len)).to.hexEqual(transmitterAddr) + // Validates the observers and observations const lenObservers = OBSERVERS_MAX * 2 // 31 bytes (hex) - assert.equal(hexPadStart(e.data.observers, lenObservers), OBSERVERS_HEX) - assert.equal(e.data.observations.length, 4n) + assert.equal(hexPadStart(e.observers, lenObservers), OBSERVERS_HEX) + if (Array.isArray(e.observations)) { + assert.equal(e.observations.length, 4) + } else { + assert.fail( + `property 'observations' on NewTransmission event is not an array: ${JSON.stringify( + e, + null, + 2, + )}`, + ) + } - assert.equal(hexPadStart(e.data.config_digest, len), hexPadStart(config_digest, len)) + // Validates the config digest + const normalizedConfigDigest = '0x'.concat(config_digest.slice(2).padStart(len, '0')) + assert.equal(hexPadStart(e.config_digest, len), normalizedConfigDigest) }) it('should transmit correctly', async () => { await transmit(2, 99) - let { response: round } = await aggregator.call('latest_round_data') - assert.equal(round.round_id, 2) - assert.equal(round.answer, 99) + // Gets the latest round details as hex encoded strings: + // + // result[0] = round ID + // result[1] = answer + // result[2] = block_num + // result[3] = started_at + // result[4] = updated_at + // + const round = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'latest_round_data', + }) + assert.equal(parseInt(round[0], 16), 2) + assert.equal(parseInt(round[1], 16), 99) // await transmit(3, -10) // TODO: toFelt() to correctly wrap negative ints // ;({ round } = await aggregator.call('latest_round_data')) @@ -256,70 +367,109 @@ describe('Aggregator', function () { try { await transmit(4, 1) expect.fail() - } catch (err: any) { + } catch (err) { // Round should be unchanged - let { response: new_round } = await aggregator.call('latest_round_data') - assert.deepEqual(round, new_round) + const newRound = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'latest_round_data', + }) + assert.deepEqual(round, newRound) } }) it('should transmit with max u128 value correctly', async () => { - await expectInvokeError(transmit(4, UINT128_MAX), 'median is out of min-max range') + try { + await transmit(4, UINT128_MAX) + assert.fail('expected an error') + } catch (err) { + if (err instanceof LibraryError) { + expect(err.message).to.contain('median is out of min-max range') + } else { + assert.fail('expected a starknet LibraryError') + } + } }) it('payments and withdrawals', async () => { // set up payees - let payees = oracles.map((oracle) => ({ - transmitter: 
oracle.transmitter.starknetContract.address, - payee: oracle.transmitter.starknetContract.address, // reusing transmitter acocunts as payees for simplicity - })) - await owner.invoke(aggregator, 'set_payees', { payees }) - - let oracle = oracles[0] - let payee = oracle.transmitter - let { response: owed } = await aggregator.call('owed_payment', { - transmitter: oracle.transmitter.starknetContract.address, + await owner.execute({ + contractAddress: aggregator.deploy.address, + entrypoint: 'set_payees', + calldata: CallData.compile({ + payees: oracles.map((oracle) => ({ + transmitter: oracle.transmitter.address, + payee: oracle.transmitter.address, // reusing transmitter acocunts as payees for simplicity + })), + }), }) + // several rounds happened so we are owed payment - assert.ok(owed > 0) + const payee = oracles[0].transmitter + const [owed1] = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'owed_payment', + calldata: CallData.compile({ + transmitter: payee.address, + }), + }) + assert.ok(parseInt(owed1, 16) > 0) const availableToValue = ([is_negative, abs_difference]: [boolean, bigint]): bigint => { return is_negative ? -abs_difference : abs_difference } // no funds on contract, so no LINK available for payment - let { response: available } = await aggregator.call('link_available_for_payment') - assert.ok(availableToValue(available) < 0) // should be negative: we owe payments + let [isNegative, absDiff] = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'link_available_for_payment', + }) + assert.ok(availableToValue([isNegative === '0x1', BigInt(absDiff)]) < 0) // should be negative: we owe payments // deposit LINK to contract - await owner.invoke(token, 'transfer', { - recipient: aggregator.address, - amount: 100_000_000_000, + await owner.execute({ + contractAddress: token.deploy.address, + entrypoint: 'transfer', + calldata: CallData.compile({ + recipient: aggregator.deploy.address, + amount: cairo.uint256(100_000_000_000n), + }), }) // we have enough funds available now - available = (await aggregator.call('link_available_for_payment')).response - assert.ok(availableToValue(available) > 0) + ;[isNegative, absDiff] = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'link_available_for_payment', + }) + assert.ok(availableToValue([isNegative === '0x1', BigInt(absDiff)]) > 0) // attempt to withdraw the payment - await payee.invoke(aggregator, 'withdraw_payment', { - transmitter: oracle.transmitter.starknetContract.address, + await payee.execute({ + contractAddress: aggregator.deploy.address, + entrypoint: 'withdraw_payment', + calldata: CallData.compile({ + transmitter: payee.address, + }), }) // balance as transferred to payee - let { response: balance } = await token.call('balance_of', { - account: payee.starknetContract.address, + const [balance] = await provider.callContract({ + contractAddress: token.deploy.address, + entrypoint: 'balance_of', + calldata: CallData.compile({ + account: payee.address, + }), }) - - assert.ok(owed === balance) + assert.ok(owed1 === balance) // owed payment is now zero - { - let { response: owed } = await aggregator.call('owed_payment', { - transmitter: oracle.transmitter.starknetContract.address, - }) - assert.ok(owed == 0) - } + const [owed2] = await provider.callContract({ + contractAddress: aggregator.deploy.address, + entrypoint: 'owed_payment', + calldata: CallData.compile({ + transmitter: payee.address, + }), + }) + 
assert.ok(parseInt(owed2, 16) === 0) }) }) }) diff --git a/contracts/test/setup.ts b/contracts/test/setup.ts index 8c9c516ef..0ca69128c 100644 --- a/contracts/test/setup.ts +++ b/contracts/test/setup.ts @@ -1,12 +1,12 @@ -import * as fs from 'fs' -import * as path from 'path' +import * as path from 'node:path' +import * as fs from 'node:fs' function findCommonPrefix(path1: string, path2: string): string { const segments1 = path1.split(path.sep) const segments2 = path2.split(path.sep) const minLength = Math.min(segments1.length, segments2.length) - let commonSegments = [] + const commonSegments = [] for (let i = 0; i < minLength; i++) { if (segments1[i] === segments2[i]) { diff --git a/contracts/test/utils.ts b/contracts/test/utils.ts new file mode 100644 index 000000000..c26a5f23e --- /dev/null +++ b/contracts/test/utils.ts @@ -0,0 +1,58 @@ +import { STARKNET_DEVNET_URL } from './constants' +import { execSync } from 'node:child_process' +import { Account } from 'starknet' +import * as path from 'node:path' +import { json } from 'starknet' +import * as fs from 'node:fs' + +export type FetchStarknetAccountParams = Readonly<{ + accountIndex?: number +}> + +export const fetchStarknetAccount = async (params?: FetchStarknetAccountParams) => { + const response = await fetch(`${STARKNET_DEVNET_URL}/predeployed_accounts`) + const accounts = await response.json() + const accIndex = params?.accountIndex ?? 0 + + const account = accounts.at(accIndex) + if (account == null) { + throw new Error(`no account available at index ${accIndex}`) + } + + return new Account( + { + nodeUrl: STARKNET_DEVNET_URL, + }, + account.address, + account.private_key, + ) +} + +export const getStarknetContractArtifacts = (name: string) => { + const root = getRootDir() + return { + contract: getStarknetContractArtifactPath(root, name, false), + casm: getStarknetContractArtifactPath(root, name, true), + } +} + +const getRootDir = () => { + const result = execSync('git rev-parse --show-toplevel').toString() + return result.replace(/\n/g, '') +} + +const getStarknetContractArtifactPath = (root: string, name: string, casm: boolean) => { + return json.parse( + fs + .readFileSync( + path.join( + root, + 'contracts', + 'target', + 'release', + `chainlink_${name}.${casm ? 'compiled_' : ''}contract_class.json`, + ), + ) + .toString('ascii'), + ) +} diff --git a/ops/scripts/devnet-hardhat.sh b/ops/scripts/devnet-hardhat.sh index 80e728984..4a84fb843 100755 --- a/ops/scripts/devnet-hardhat.sh +++ b/ops/scripts/devnet-hardhat.sh @@ -5,63 +5,70 @@ set -euo pipefail # cpu_struct=`arch`; # echo $cpu_struct; -cpu_struct="linux"; +cpu_struct="linux" # Clean up first -bash "$(dirname -- "$0";)/devnet-hardhat-down.sh" +bash "$(dirname -- "$0")/devnet-hardhat-down.sh" echo "Checking CPU structure..." -if [[ $cpu_struct == *"arm"* ]] -then - echo "Starting arm devnet container..." - container_version="d7c168ac53da3e9d717ed3ff8dad665ccade43e0-arm" +if [[ $cpu_struct == *"arm"* ]]; then + echo "Starting arm devnet container..." + container_version="d7c168ac53da3e9d717ed3ff8dad665ccade43e0-arm" else - echo "Starting i386 devnet container..." - container_version="d7c168ac53da3e9d717ed3ff8dad665ccade43e0" + echo "Starting i386 devnet container..." + container_version="d7c168ac53da3e9d717ed3ff8dad665ccade43e0" fi echo "Starting starknet-devnet" # we need to replace the entrypoint because starknet-devnet's docker builds at 0.5.1 don't include cargo or gcc. 
docker run \ - -p 127.0.0.1:5050:5050 \ - -p 127.0.0.1:8545:8545 \ - -d \ - -e RUST_LOG=debug \ - --name chainlink-starknet.starknet-devnet \ - "shardlabs/starknet-devnet-rs:${container_version}" \ - --seed 0 \ - --gas-price 1 \ - --account-class cairo1 + -p 127.0.0.1:5050:5050 \ + -p 127.0.0.1:8545:8545 \ + -d \ + -e RUST_LOG=debug \ + --name chainlink-starknet.starknet-devnet \ + "shardlabs/starknet-devnet-rs:${container_version}" \ + --seed 0 \ + --gas-price 1 \ + --account-class cairo1 echo "Starting hardhat..." docker run --net container:chainlink-starknet.starknet-devnet -d --name chainlink-starknet.hardhat ethereumoptimism/hardhat-node:nightly -# starknet-devnet startup is slow and requires compiling cairo. -echo "Waiting for starknet-devnet to become ready.." -start_time=$(date +%s) -prev_output="" -while true -do - output=$(docker logs chainlink-starknet.starknet-devnet 2>&1) - if [[ "${output}" != "${prev_output}" ]]; then - echo -n "${output#$prev_output}" - prev_output="${output}" - fi +wait_for_container() { + local container_name="$1" + local ready_log="$2" + local start_time=$(date +%s) + local prev_output="" + + echo "Waiting for container $container_name to become ready.." + while true; do + output=$(docker logs "$container_name" 2>&1) + if [[ "${output}" != "${prev_output}" ]]; then + echo -n "${output#$prev_output}" + prev_output="${output}" + fi - if [[ $output == *"listening"* ]]; then - echo "" - echo "starknet-devnet is ready." - exit 0 - fi + if [[ $output == *"$ready_log"* ]]; then + echo "" + echo "container $container_name is ready." + return + fi - current_time=$(date +%s) - elapsed_time=$((current_time - start_time)) + current_time=$(date +%s) + elapsed_time=$((current_time - start_time)) + if ((elapsed_time > 600)); then + echo "Error: Command did not become ready within 600 seconds" + exit 1 + fi - if (( elapsed_time > 600 )); then - echo "Error: Command did not become ready within 600 seconds" - exit 1 - fi + sleep 3 + done +} + +# starknet-devnet startup is slow and requires compiling cairo. +wait_for_container "chainlink-starknet.starknet-devnet" "listening" - sleep 3 -done +# ethereumoptimism/hardhat-node is also slow and should be online before l1-l2 messaging tests are run +wait_for_container "chainlink-starknet.hardhat" "Any funds sent to them on Mainnet or any other live network WILL BE LOST." 
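
Note: the two containers started above are what the new test constants point at (starknet-devnet on 127.0.0.1:5050, the hardhat node on 127.0.0.1:8545). A rough sketch of how a test could sanity-check both endpoints and then drive the postman helpers added in contracts/test/l1-l2-messaging.ts — `devnetSmokeTest` and the plain ethers provider are illustrative assumptions, not part of this change:

import { RpcProvider } from 'starknet'
import { ethers } from 'ethers'
import { STARKNET_DEVNET_URL, ETH_DEVNET_URL } from './constants'
import * as l1l2messaging from './l1-l2-messaging'

// Assumed helper (not part of this patch): confirms both containers answer and
// exercises the postman endpoints once, given an already deployed L1 messaging contract address.
export async function devnetSmokeTest(mockMessagingAddress: string) {
  // starknet-devnet container (port 5050)
  const l2 = new RpcProvider({ nodeUrl: STARKNET_DEVNET_URL })
  console.log('starknet-devnet chain id:', await l2.getChainId())

  // hardhat-node container (port 8545, shares the devnet container's network namespace)
  const l1 = new ethers.providers.JsonRpcProvider(ETH_DEVNET_URL)
  console.log('hardhat block number:', await l1.getBlockNumber())

  // Wire devnet's postman to the L1 messaging contract, then relay any pending messages.
  const { messaging_contract_address } = await l1l2messaging.loadL1MessagingContract({
    address: mockMessagingAddress,
  })
  console.log('postman wired to:', messaging_contract_address)

  const flushed = await l1l2messaging.flush()
  console.log('L1 -> L2 messages relayed:', flushed.messages_to_l2.length)
  console.log('L2 -> L1 messages relayed:', flushed.messages_to_l1.length)
}
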
diff --git a/yarn.lock b/yarn.lock index 7b88c04ac..fd07a0024 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2482,6 +2482,16 @@ abi-wan-kanabi@^1.0.1, abi-wan-kanabi@^1.0.3: typescript "^4.9.5" yargs "^17.7.2" +abi-wan-kanabi@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/abi-wan-kanabi/-/abi-wan-kanabi-2.2.1.tgz#367050c57b9e66a7cf977453d85579ad1fd8af36" + integrity sha512-W3RNuu2tG10W4AY63uq89JX/MsZSOxvpmsitQ3pbdVn3e8RxXR2oegN0QmGpgfyT0KlPdreydHsqq/u+2Pt2PQ== + dependencies: + ansicolors "^0.3.2" + cardinal "^2.1.1" + fs-extra "^10.0.0" + yargs "^17.7.2" + abstract-level@^1.0.0, abstract-level@^1.0.2, abstract-level@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.3.tgz#78a67d3d84da55ee15201486ab44c09560070741" @@ -2606,6 +2616,11 @@ ansi-styles@^6.1.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== +ansicolors@^0.3.2, ansicolors@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" + integrity sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg== + antlr4ts@^0.5.0-alpha.4: version "0.5.0-alpha.4" resolved "https://registry.yarnpkg.com/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz#71702865a87478ed0b40c0709f422cf14d51652a" @@ -3080,6 +3095,14 @@ caniuse-lite@^1.0.30001400: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001422.tgz#f2d7c6202c49a8359e6e35add894d88ef93edba1" integrity sha512-hSesn02u1QacQHhaxl/kNMZwqVG35Sz/8DgvmgedxSH8z9UUpcDYSPYgsj3x5dQNRcNp6BwpSfQfVzYUTm+fog== +cardinal@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-2.1.1.tgz#7cc1055d822d212954d07b085dea251cc7bc5505" + integrity sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw== + dependencies: + ansicolors "~0.3.2" + redeyed "~2.1.0" + case@^1.6.3: version "1.6.3" resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9" @@ -3846,7 +3869,7 @@ esprima@2.7.x, esprima@^2.7.1: resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" integrity sha512-OarPfz0lFCiW4/AV2Oy1Rp9qu0iusTKqykwTspGCZtPxmF81JR4MmIebvF1F9+UOKth2ZubLQ4XGGaU+hSn99A== -esprima@^4.0.0: +esprima@^4.0.0, esprima@~4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== @@ -4114,6 +4137,14 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" +fetch-cookie@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-3.0.1.tgz#6a77f7495e1a639ae019db916a234db8c85d5963" + integrity sha512-ZGXe8Y5Z/1FWqQ9q/CrJhkUD73DyBU9VF0hBQmEO/wPHe4A9PKTjplFDLeFX8aOsYypZUcX5Ji/eByn3VCVO3Q== + dependencies: + set-cookie-parser "^2.4.8" + tough-cookie "^4.0.0" + file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -5724,6 +5755,11 @@ lossless-json@^2.0.8: resolved "https://registry.yarnpkg.com/lossless-json/-/lossless-json-2.0.11.tgz#3137684c93fd99481c6f99c985efc9c9c5cc76a5" integrity 
sha512-BP0vn+NGYvzDielvBZaFain/wgeJ1hTvURCqtKvhr1SCPePdaaTanmmcplrHfEJSJOUql7hk4FHwToNJjWRY3g== +lossless-json@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lossless-json/-/lossless-json-4.0.1.tgz#d45229e3abb213a0235812780ca894ea8c5b2c6b" + integrity sha512-l0L+ppmgPDnb+JGxNLndPtJZGNf6+ZmVaQzoxQm3u6TXmhdnsA+YtdVR8DjzZd/em58686CQhOFDPewfJ4l7MA== + loupe@^2.3.1, loupe@^2.3.6: version "2.3.7" resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.7.tgz#6e69b7d4db7d3ab436328013d37d1c8c3540c697" @@ -6536,6 +6572,11 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== +psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + pump@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954" @@ -6552,11 +6593,16 @@ pump@^3.0.0: end-of-stream "^1.1.0" once "^1.3.1" -punycode@^2.1.0: +punycode@^2.1.0, punycode@^2.1.1: version "2.3.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + queue-microtask@^1.2.2, queue-microtask@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -6694,6 +6740,13 @@ redent@^3.0.0: indent-string "^4.0.0" strip-indent "^3.0.0" +redeyed@~2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-2.1.1.tgz#8984b5815d99cb220469c99eeeffe38913e6cc0b" + integrity sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ== + dependencies: + esprima "~4.0.0" + regenerator-runtime@^0.14.0: version "0.14.1" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" @@ -6723,6 +6776,11 @@ require-main-filename@^2.0.0: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -6972,6 +7030,11 @@ set-blocking@^2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== +set-cookie-parser@^2.4.8: 
+ version "2.6.0" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz#131921e50f62ff1a66a461d7d62d7b21d5d15a51" + integrity sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ== + set-function-length@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.0.tgz#2f81dc6c16c7059bda5ab7c82c11f03a515ed8e1" @@ -7225,6 +7288,22 @@ stacktrace-parser@^0.1.10: dependencies: type-fest "^0.7.1" +starknet@6.4.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/starknet/-/starknet-6.4.0.tgz#38c3e9a741968a919b81cd22ac89876742613a29" + integrity sha512-sJb7Q02kUAf0Rmxc+adWQXdcSnJ08ngqEicYxNqSV5R56ndpnqRRPiLCGmTflBzGYuI6QFsHb7uF5ZEi602WQQ== + dependencies: + "@noble/curves" "~1.3.0" + "@scure/base" "~1.1.3" + "@scure/starknet" "~1.0.0" + abi-wan-kanabi "^2.2.1" + fetch-cookie "^3.0.0" + isomorphic-fetch "^3.0.0" + lossless-json "^4.0.1" + pako "^2.0.4" + ts-mixer "^6.0.3" + url-join "^4.0.1" + starknet@^3.5.1: version "3.19.0" resolved "https://registry.yarnpkg.com/starknet/-/starknet-3.19.0.tgz#b3ce7fc87a3062f126b7dd92f38d6f70e509f192" @@ -7597,6 +7676,16 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +tough-cookie@^4.0.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" + integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" @@ -7835,6 +7924,11 @@ universalify@^0.1.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + universalify@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" @@ -7865,6 +7959,14 @@ url-join@^4.0.1: resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== +url-parse@^1.5.3: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + usb@2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/usb/-/usb-2.9.0.tgz#8ae3b175f93bee559400bff33491eee63406b6a2"
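
The helpers added in contracts/test/utils.ts are meant to be combined the way the updated tests do: fetch a predeployed devnet account, then declare-and-deploy one of the release artifacts through it. A minimal sketch, assuming the devnet from ops/scripts/devnet-hardhat.sh is running and contracts/target/release has been built (`deployLinkToken` is illustrative, not part of this patch):

import { CallData } from 'starknet'
import { fetchStarknetAccount, getStarknetContractArtifacts } from './utils'

export async function deployLinkToken() {
  // Predeployed devnet account 0 (fetched from the devnet's /predeployed_accounts endpoint).
  const owner = await fetchStarknetAccount()

  // Declare + deploy chainlink_LinkToken.contract_class.json via the UDC.
  const token = await owner.declareAndDeploy({
    ...getStarknetContractArtifacts('LinkToken'),
    constructorCalldata: CallData.compile({
      minter: owner.address,
      owner: owner.address,
    }),
  })

  console.log('LinkToken deployed at:', token.deploy.address)
  return token
}
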