diff --git a/audits/Curta_Plonky2x_Audit_Report_KALOS.md b/audits/Curta_Plonky2x_Audit_Report_KALOS.md
index 53ce51ee0..7c8db9d83 100644
--- a/audits/Curta_Plonky2x_Audit_Report_KALOS.md
+++ b/audits/Curta_Plonky2x_Audit_Report_KALOS.md
@@ -775,7 +775,7 @@ In a way, the byte lookup digest formula `opcode + 256 * a + 256^2 * b + 256^3 *
 
 First, even when `ByteRegister`s are allocated, they are not directly range checked via decomposition into 8 bits. Therefore, one can prove that given `a, b`, one can prove that `a AND b = c` where `OPCODE::AND + 256 * a + 256^2 * b + 256^3 * c = OPCODE::XOR + 256 * 1 + 256^2 * 1 + 256^3 * 0`, **even in the case where `c` is not within byte range**.
 
-In a way, this issue comes from the fact that the coefficients for the linear combination of `(opcode, a, b, c)` used to compute the digest is a fixed constant. In a standard vector lookup, a challenge $\gamma$ is derived via Fiat-Shamir (after commiting to all relevant lookup instances) then the linear combination is done with consecutive powers of $\gamma$. Implementing the byte operation lookups in this fashion would resolve the issue without additional range checks.
+In a way, this issue comes from the fact that the coefficients for the linear combination of `(opcode, a, b, c)` used to compute the digest are fixed constants. In a standard vector lookup, a challenge $\gamma$ is derived via Fiat-Shamir (after committing to all relevant lookup instances), then the linear combination is done with consecutive powers of $\gamma$. Implementing the byte operation lookups in this fashion would resolve the issue without additional range checks.
 
 The other method to resolve this issue is to strictly enforce that all `ByteRegister` go through a range check. This range check should not be based on the lookup table (as the lookup itself assumes values to be within byte range), but should be done via bitwise decomposition.
 
@@ -1049,7 +1049,7 @@ In other words, it asserts that on the starting bit the `bit_accumulator` is equ
 // chip/ec/scalar.rs
 impl AirConstraint for LimbBitInstruction {
     fn eval(&self, parser: &mut AP) {
-        // Assert the initial valuen of `bit_accumulator` at the begining of each cycle. As the bits
+        // Assert the initial value of `bit_accumulator` at the beginning of each cycle. As the bits
         // are presented in little-endian order, the initial value of `bit_accumulator` is the value
         // of the limb register at the beginning of the cycle. This translates to the constraint:
         // `start_bit * (bit_accumulator - limb) = 0`
@@ -1823,7 +1823,7 @@ pub(crate) const fn num_ops(config: &CircuitConfig) -> usize {
 }
 ```
 
-Thankfully, the stadard configuration has `num_routed_wires = 80`.
+Thankfully, the standard configuration has `num_routed_wires = 80`.
 
 ```rust=
 pub const fn standard_recursion_config() -> Self {
diff --git a/contracts/README.md b/contracts/README.md
index e4ddb44a7..0fb7a1f81 100644
--- a/contracts/README.md
+++ b/contracts/README.md
@@ -67,7 +67,7 @@ Ensure your `.env` file is correctly filled out with the necessary environment v
 PRIVATE_KEY= # The private key of the deployer account
 CREATE2_SALT= # The salt to be used for CREATE2 deployments
 GUARDIAN= # The 'owner' of the contracts, recommended to be an EOA
-PROVER= # The default prover to fullfill requests for Functions that have not opted for a different prover
+PROVER= # The default prover to fulfill requests for Functions that have not opted for a different prover
 RPC_{CHAIN_ID}= # RPC URLs for each chain you want to deploy to
 ETHERSCAN_API_KEY_{CHAIN_ID}= # Etherscan API keys for each chain you want to deploy to
 ```
diff --git a/contracts/script/misc/Base.s.sol b/contracts/script/misc/Base.s.sol
index 56f810144..2122ff4c0 100644
--- a/contracts/script/misc/Base.s.sol
+++ b/contracts/script/misc/Base.s.sol
@@ -193,7 +193,7 @@ abstract contract BaseScript is Script {
 
     /// @notice Use 'cast wallet sign' to sign a message.
    /// @dev Needed because internal vm.sign has needs access to the private key directly,
-    /// which is unavailable for hardward wallets.
+    /// which is unavailable for hardware wallets.
     ///
     /// Keep in mind cast wallet sign uses EIP-191 eth_sign: https://eips.ethereum.org/EIPS/eip-191
     /// with the message prefixed with "\x19Ethereum Signed Message:\n" + message.length. To work
diff --git a/contracts/test/verifiers/VerifierPlonk.sol b/contracts/test/verifiers/VerifierPlonk.sol
index 1cbbe70ba..5b655672f 100644
--- a/contracts/test/verifiers/VerifierPlonk.sol
+++ b/contracts/test/verifiers/VerifierPlonk.sol
@@ -439,7 +439,7 @@ contract PlonkVerifier {
 
        // BEGINNING compute_pi -------------------------------------------------
 
-       // public input (not comming from the commit api) contribution
+       // public input (not coming from the commit api) contribution
        // ins, n are the public inputs and number of public inputs respectively
        function sum_pi_wo_api_commit(ins, n, mPtr) -> pi_wo_commit {
          let state := mload(0x40)
@@ -541,7 +541,7 @@ contract PlonkVerifier {
        let mPtr := add(state, state_last_mem)
 
        // here the random is not a challenge, hence no need to use Fiat Shamir, we just
-       // need an unpredictible result.
+       // need an unpredictable result.
        let random := mod(keccak256(state, 0x20), r_mod)
        let folded_quotients := mPtr
 
diff --git a/contracts/test/verifiers/VerifierPlonkRangeCheck.sol b/contracts/test/verifiers/VerifierPlonkRangeCheck.sol
index 8117974c2..6f51b16d6 100644
--- a/contracts/test/verifiers/VerifierPlonkRangeCheck.sol
+++ b/contracts/test/verifiers/VerifierPlonkRangeCheck.sol
@@ -290,7 +290,7 @@ contract PlonkVerifier {
     }
 
     /// Called when one the openings is bigger than r
-    /// The openings are the claimed evalutions of a polynomial
+    /// The openings are the claimed evaluations of a polynomial
     /// in a Kzg proof.
     function error_proof_openings_size() {
       let ptError := mload(0x40)
diff --git a/gnarkx/succinct/circuit.go b/gnarkx/succinct/circuit.go
index 9537a99df..2d97aea2b 100644
--- a/gnarkx/succinct/circuit.go
+++ b/gnarkx/succinct/circuit.go
@@ -63,7 +63,7 @@ func (f *CircuitFunction) SetWitness(inputBytes []byte) {
 // and h(outputBytes) == outputHash.
 func (f *CircuitFunction) Define(baseApi frontend.API) error {
 	// Define the circuit using the Gnark standard API. Ideally, we would pass in builder.API
-	// but we can't becaues this is handled by Gnark internally.
+	// but we can't because this is handled by Gnark internally.
 	f.Circuit.Define(baseApi)
 
 	// Automatically handle the input and output hashes and assert that they must be consistent.
diff --git a/plonky2x/core/src/backend/circuit/witness.rs b/plonky2x/core/src/backend/circuit/witness.rs
index 611fe8bfc..8c9f45ef1 100644
--- a/plonky2x/core/src/backend/circuit/witness.rs
+++ b/plonky2x/core/src/backend/circuit/witness.rs
@@ -148,7 +148,7 @@ fn fill_witness_values<'a, L: PlonkParameters, const D: usize>(
             continue;
         }
 
-        // Run the generator, depending on whether it is an asyncronous or not.
+        // Run the generator, depending on whether it is asynchronous or not.
         if let Some(async_gen) = async_generators.get_mut(&generator_idx) {
             // Check if the hint handler has returned an error, and if so, return it.
             if let Ok(e) = rx_handler_error.try_recv() {
diff --git a/plonky2x/core/src/backend/prover/service.rs b/plonky2x/core/src/backend/prover/service.rs
index b09959bb1..f06ac9221 100644
--- a/plonky2x/core/src/backend/prover/service.rs
+++ b/plonky2x/core/src/backend/prover/service.rs
@@ -26,11 +26,11 @@ const GET_PROOF_REQUEST_ROUTE: &str = "/api/proof";
 /// The endpoint for getting the status of a proof request.
 const GET_PROOF_BATCH_REQUEST_ROUTE: &str = "/api/proof/batch/status";
 
-/// A UUID V4 identifer for a proof request.
+/// A UUID V4 identifier for a proof request.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
 pub struct ProofId(pub Uuid);
 
-/// A UUID V4 identifer for a proof request.
+/// A UUID V4 identifier for a proof request.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
 pub struct BatchProofId(pub Uuid);
 
diff --git a/plonky2x/core/src/backend/wrapper/wrap.rs b/plonky2x/core/src/backend/wrapper/wrap.rs
index 590c262e5..90bf98199 100644
--- a/plonky2x/core/src/backend/wrapper/wrap.rs
+++ b/plonky2x/core/src/backend/wrapper/wrap.rs
@@ -236,16 +236,16 @@ impl, const D: usize> WrappedOutput {
         }
         let common_data_file = File::create(path.as_ref().join("common_circuit_data.json"))?;
         serde_json::to_writer(&common_data_file, &self.common_data)?;
-        info!("Succesfully wrote common circuit data to common_circuit_data.json");
+        info!("Successfully wrote common circuit data to common_circuit_data.json");
 
         let verifier_data_file =
             File::create(path.as_ref().join("verifier_only_circuit_data.json"))?;
         serde_json::to_writer(&verifier_data_file, &self.verifier_data)?;
-        info!("Succesfully wrote verifier data to verifier_only_circuit_data.json");
+        info!("Successfully wrote verifier data to verifier_only_circuit_data.json");
 
         let proof_file = File::create(path.as_ref().join("proof_with_public_inputs.json"))?;
         serde_json::to_writer(&proof_file, &self.proof)?;
-        info!("Succesfully wrote proof to proof_with_public_inputs.json");
+        info!("Successfully wrote proof to proof_with_public_inputs.json");
 
         Ok(())
     }
diff --git a/plonky2x/core/src/frontend/builder/mod.rs b/plonky2x/core/src/frontend/builder/mod.rs
index 53f685ff5..9e7131a5c 100644
--- a/plonky2x/core/src/frontend/builder/mod.rs
+++ b/plonky2x/core/src/frontend/builder/mod.rs
@@ -131,7 +131,7 @@ impl, const D: usize> CircuitBuilder {
         self.beacon_client = Some(client);
     }
 
-    /// Adds all the constraints nedded before building the circuit and registering hints.
+    /// Adds all the constraints needed before building the circuit and registering hints.
     fn pre_build(&mut self) {
         let blake2b_accelerator = self.blake2b_accelerator.clone();
         if let Some(accelerator) = blake2b_accelerator {
diff --git a/plonky2x/core/src/frontend/ecc/curve25519/curta/stark.rs b/plonky2x/core/src/frontend/ecc/curve25519/curta/stark.rs
index 7391489dd..b4ad85899 100644
--- a/plonky2x/core/src/frontend/ecc/curve25519/curta/stark.rs
+++ b/plonky2x/core/src/frontend/ecc/curve25519/curta/stark.rs
@@ -194,7 +194,7 @@ impl, const D: usize> Ed25519Stark {
         let mut writer = writer_data.public_writer();
         self.write_input(&mut writer, input);
 
-        debug!("Writing EC execusion trace");
+        debug!("Writing EC execution trace");
         self.stark.air_data.write_global_instructions(&mut writer);
         writer_data.chunks_par(256).for_each(|mut chunk| {
             for i in 0..256 {
diff --git a/plonky2x/core/src/frontend/eth/mpt/utils.rs b/plonky2x/core/src/frontend/eth/mpt/utils.rs
index 20842bc74..caa815304 100644
--- a/plonky2x/core/src/frontend/eth/mpt/utils.rs
+++ b/plonky2x/core/src/frontend/eth/mpt/utils.rs
@@ -37,7 +37,7 @@ fn generate_fixtures() {
     let storage_result: EIP1186ProofResponse = get_proof_closure();
     let serialized = serde_json::to_string(&storage_result).unwrap();
     info!("{}", serialized);
-    // TODO: save this to fixtures/example.json programatically instead of copy-paste
+    // TODO: save this to fixtures/example.json programmatically instead of copy-paste
 }
 
 #[allow(dead_code)] // We allow dead_code since this is used in tests
diff --git a/plonky2x/core/src/frontend/eth/rlp/utils.rs b/plonky2x/core/src/frontend/eth/rlp/utils.rs
index e989478ca..b81f4953f 100644
--- a/plonky2x/core/src/frontend/eth/rlp/utils.rs
+++ b/plonky2x/core/src/frontend/eth/rlp/utils.rs
@@ -184,9 +184,9 @@ pub fn verify_decoded_list(
         claim_poly *= random.pow(3);
         claim_poly += 0xf9;
 
-        // Most signficant byte.
+        // Most significant byte.
         claim_poly += (sum_of_rlp_encoding_length / 256) * random.clone();
-        // Lease siginificant byte.
+        // Least significant byte.
         claim_poly += (sum_of_rlp_encoding_length % 256) * random.pow(2);
     }
 
diff --git a/plonky2x/core/src/frontend/fold/mod.rs b/plonky2x/core/src/frontend/fold/mod.rs
index 24320b576..167f05684 100644
--- a/plonky2x/core/src/frontend/fold/mod.rs
+++ b/plonky2x/core/src/frontend/fold/mod.rs
@@ -60,7 +60,7 @@ impl, const D: usize> FoldBuilderMethods for Circuit
 
         // Build and save inner circuit.
         let inner_circuit = build_inner::(None);
-        debug!("succesfully built circuit: id={}", inner_circuit.id());
+        debug!("successfully built circuit: id={}", inner_circuit.id());
 
         let gate_serializer = Serializer::gate_registry::();
         let generator_serializer = Serializer::generator_registry::();
diff --git a/plonky2x/core/src/frontend/hash/curta/mod.rs b/plonky2x/core/src/frontend/hash/curta/mod.rs
index f3b80b7a7..733361049 100644
--- a/plonky2x/core/src/frontend/hash/curta/mod.rs
+++ b/plonky2x/core/src/frontend/hash/curta/mod.rs
@@ -96,7 +96,7 @@ pub trait Hash<
         builder: &mut CircuitBuilder,
         accelerator: HashAccelerator,
     ) -> HashInputData {
-        // Initialze the data struictures of `SHAInputData`.
+        // Initialize the data structures of `SHAInputData`.
         let mut t_values: Option> = None;
         if HAS_T_VALUES {
             t_values = Some(Vec::new());
@@ -146,7 +146,7 @@ pub trait Hash<
         // Store the end_bit values. The end bit indicates the end of message chunks.
         end_bit_values.extend_from_slice(&vec![false; total_number_of_chunks - 1]);
         end_bit_values.push(true);
-        // The chunk index is given by the currenty index plus the chunk index we got from
+        // The chunk index is given by the current index plus the chunk index we got from
         // the request.
         let current_chunk_index_variable = builder
             .constant::(L::Field::from_canonical_usize(current_chunk_index));
diff --git a/plonky2x/core/src/frontend/hash/curta/request.rs b/plonky2x/core/src/frontend/hash/curta/request.rs
index 37827ad5a..595e92a8f 100644
--- a/plonky2x/core/src/frontend/hash/curta/request.rs
+++ b/plonky2x/core/src/frontend/hash/curta/request.rs
@@ -13,7 +13,7 @@ pub enum HashRequestType {
 
 pub enum HashRequest {
     /// A message of fixed length.
     Fixed(Vec),
-    /// A message of variable length, represented by a tuple `(total_message, lengh, last_chunk)`.
+    /// A message of variable length, represented by a tuple `(total_message, length, last_chunk)`.
     Variable(Vec, U32Variable, U32Variable),
 }
diff --git a/plonky2x/core/src/frontend/hint/asynchronous/hint.rs b/plonky2x/core/src/frontend/hint/asynchronous/hint.rs
index 6bfdfbc41..4fc86e4fa 100644
--- a/plonky2x/core/src/frontend/hint/asynchronous/hint.rs
+++ b/plonky2x/core/src/frontend/hint/asynchronous/hint.rs
@@ -13,7 +13,7 @@ use crate::frontend::vars::ValueStream;
 ///
 /// ## Example
 /// The following example shows how to use an asynchronous hint that gets an input byte, sleeps
-/// for the number of miliseconds specified by the byte, and then outputs the byte.
+/// for the number of milliseconds specified by the byte, and then outputs the byte.
 /// ```
 /// # use async_trait::async_trait;
 /// # use serde::{Deserialize, Serialize};
@@ -62,7 +62,7 @@ pub trait AsyncHint, const D: usize>:
     /// a unique identifier for this hint.
     ///
-    /// By default, this is the type name of the hint. This function should be overwriten in case
+    /// By default, this is the type name of the hint. This function should be overwritten in case
     /// type names vary between compilation units.
     fn id() -> String {
         std::any::type_name::().to_string()
diff --git a/plonky2x/core/src/frontend/hint/simple/hint.rs b/plonky2x/core/src/frontend/hint/simple/hint.rs
index 756bc5bd0..e5bfa21be 100644
--- a/plonky2x/core/src/frontend/hint/simple/hint.rs
+++ b/plonky2x/core/src/frontend/hint/simple/hint.rs
@@ -44,7 +44,7 @@ pub trait Hint, const D: usize>:
     /// a unique identifier for this hint.
     ///
-    /// By default, this is the type name of the hint. This function should be overwriten in case
+    /// By default, this is the type name of the hint. This function should be overwritten in case
     /// type names vary between compilation units.
     fn id() -> String {
         std::any::type_name::().to_string()
diff --git a/plonky2x/core/src/frontend/mapreduce/mod.rs b/plonky2x/core/src/frontend/mapreduce/mod.rs
index 8eb35935c..4f65521d3 100644
--- a/plonky2x/core/src/frontend/mapreduce/mod.rs
+++ b/plonky2x/core/src/frontend/mapreduce/mod.rs
@@ -12,7 +12,7 @@
 //! we can pass in a block hash as the dynamic data to the map and reduce functions to say that
 //! in each map call we want to grab the storage slot at slot i, which is a compile time constant.
 //!
-//! Under the hood, we compute each map in a seperate proof and perform the reductions by generating
+//! Under the hood, we compute each map in a separate proof and perform the reductions by generating
 //! a proof for each reduction between two proofs until we have a single proof.
 
 pub mod generator;
@@ -194,7 +194,7 @@ impl, const D: usize> CircuitBuilder {
         // Build a map circuit which maps from I -> O using the closure `m`.
         debug!("building map");
         let map_circuit = self.build_map(&map_fn);
-        debug!("succesfully built map circuit: id={}", map_circuit.id());
+        debug!("successfully built map circuit: id={}", map_circuit.id());
 
         // Save map circuit and map circuit input target to build folder.
         let map_circuit_id = map_circuit.id();
@@ -221,7 +221,7 @@ impl, const D: usize> CircuitBuilder {
                 &generator_serializer,
             );
             reduce_circuits.push(reduce_circuit);
-            debug!("succesfully built reduce circuit: id={}", reduce_circuit_id);
+            debug!("successfully built reduce circuit: id={}", reduce_circuit_id);
         }
 
         // Create generator to generate map and reduce proofs for each layer.
@@ -291,7 +291,7 @@ impl, const D: usize> CircuitBuilder {
 
         // Build a map circuit which maps from I -> O using the closure `m`.
         let map_circuit = self.build_map(&map_fn);
-        debug!("succesfully built map circuit: id={}", map_circuit.id());
+        debug!("successfully built map circuit: id={}", map_circuit.id());
 
         // Save map circuit and map circuit input target to build folder.
         let map_circuit_id = map_circuit.id();
@@ -318,7 +318,7 @@ impl, const D: usize> CircuitBuilder {
                 &generator_serializer,
             );
             reduce_circuits.push(reduce_circuit);
-            debug!("succesfully built reduce circuit: id={}", reduce_circuit_id);
+            debug!("successfully built reduce circuit: id={}", reduce_circuit_id);
         }
 
         // Create generator to generate map and reduce proofs for each layer.
diff --git a/plonky2x/core/src/frontend/uint/num/biguint/mod.rs b/plonky2x/core/src/frontend/uint/num/biguint/mod.rs
index 9e0cfdd45..1013461ab 100644
--- a/plonky2x/core/src/frontend/uint/num/biguint/mod.rs
+++ b/plonky2x/core/src/frontend/uint/num/biguint/mod.rs
@@ -264,7 +264,7 @@ impl, const D: usize> CircuitBuilderBiguint
     fn mul_biguint_by_bool(&mut self, a: &BigUintTarget, b: BoolTarget) -> BigUintTarget {
         let t = b.target;
 
-        // Each limb will be multipled by 0 or 1, which will have a product that is within
+        // Each limb will be multiplied by 0 or 1, which will have a product that is within
         // U32Target's range.
         BigUintTarget {
             limbs: a
diff --git a/plonky2x/core/src/frontend/vars/mod.rs b/plonky2x/core/src/frontend/vars/mod.rs
index 26c44be25..e9df7235c 100644
--- a/plonky2x/core/src/frontend/vars/mod.rs
+++ b/plonky2x/core/src/frontend/vars/mod.rs
@@ -106,7 +106,7 @@ pub trait CircuitVariable: Debug + Clone + Sized + Sync + Send + 'static {
         self.variables().into_iter().map(|v| v.0).collect()
     }
 
-    /// Deserializes a variable from a list of targets. It does not do any validaty checks (e.g.
+    /// Deserializes a variable from a list of targets. It does not do any validity checks (e.g.
     /// range checks).
     fn from_targets(targets: &[Target]) -> Self {
         Self::from_variables_unsafe(&targets.iter().map(|t| Variable(*t)).collect_vec())
diff --git a/plonky2x/derive/src/lib.rs b/plonky2x/derive/src/lib.rs
index 93cc0d7d2..1fd3192b1 100644
--- a/plonky2x/derive/src/lib.rs
+++ b/plonky2x/derive/src/lib.rs
@@ -52,7 +52,7 @@ pub fn derive_circuit_variable(input: proc_macro::TokenStream) -> proc_macro::To
                     value_derive.push(ident.clone());
                     Ok(())
                 })
-                .expect("Could not parse value_derive atrributes");
+                .expect("Could not parse value_derive attributes");
             }
             Meta::NameValue(_) => panic!("value_derive cannot be a named value"),
         }
diff --git a/rustx/examples/eth_call.rs b/rustx/examples/eth_call.rs
index f853c796f..510b64160 100644
--- a/rustx/examples/eth_call.rs
+++ b/rustx/examples/eth_call.rs
@@ -36,7 +36,7 @@ impl Program for EthCall {
         let (chain_id, block_number, from_address, to_address, calldata) =
             EthCallRequestTuple::abi_decode_sequence(&input_bytes, true).unwrap();
 
-        // Get relevant enviroment variables and initialize the HTTP provider.
+        // Get relevant environment variables and initialize the HTTP provider.
         dotenv::dotenv().ok();
         let rpc_url = env::var(format!("RPC_{}", chain_id))
             .unwrap_or_else(|_| panic!("RPC_{} environment variable was not found", chain_id));