Skip to content

Commit

Permalink
Merge pull request #117 from plume-sig/arkworks04
Browse files Browse the repository at this point in the history
Bump to the recent `arkworks` (`v0.5`)
  • Loading branch information
skaunov authored Dec 15, 2024
2 parents c835718 + 1f52416 commit 5a5c11f
Show file tree
Hide file tree
Showing 17 changed files with 1,424 additions and 332 deletions.
7 changes: 0 additions & 7 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,3 @@
resolver = "2"

members = ["rust-arkworks", "rust-k256", "javascript"]

[patch.crates-io]
ark-ec = { git = "https://github.com/FindoraNetwork/ark-algebra" }
ark-ff = { git = "https://github.com/FindoraNetwork/ark-algebra" }
ark-serialize = { git = "https://github.com/FindoraNetwork/ark-algebra" }
ark-algebra-test-templates = { git = "https://github.com/FindoraNetwork/ark-algebra" }
ark-std = { git = "https://github.com/FindoraNetwork/ark-std" }
27 changes: 15 additions & 12 deletions rust-arkworks/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,24 +11,27 @@ keywords = ["nullifier", "zero-knowledge", "ECDSA", "PLUME"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
ark-ec = "~0.3.0"
ark-ff = "~0.3.0"
ark-std = "~0.3.0"
ark-serialize = "~0.3.0"
ark-serialize-derive = "~0.3.0"
secp256k1 = { git = "https://github.com/geometryresearch/ark-secp256k1.git", version = "0.1.0" }
ark-ec = "~0.5.0"
ark-ff = "~0.5.0"
ark-std = "~0.5.0"
ark-serialize = "~0.5.0"
ark-serialize-derive = "~0.5.0"
rand_core = { version = "0.6", default-features = false, features = [
"getrandom",
] }
rand = "0.8.4"
tiny-keccak = { version = "2.0.2", features = ["shake"] }
sha2 = "0.10.2"
elliptic-curve = { version = "0.12.2", features = ["arithmetic"] }
k256 = { version = "0.11.3", features = [
"arithmetic",
"hash2curve",
"expose-field",
"sha2",
] }
generic-array = { version = "0.14", default-features = false }
hex = "0.4.3"

# #standinDependencies
## the curve internalization
ark-ff-macros = "~0.5.0"
## field hasher fix
arrayvec = { version = "0.7", default-features = false }

[dev-dependencies]
num-bigint = "*"
num-traits = "*"
4 changes: 1 addition & 3 deletions rust-arkworks/README.MD
Original file line number Diff line number Diff line change
@@ -1,3 +1 @@
https://github.com/plume-sig/zk-nullifier-sig/blob/main/README.md
# HAZMAT
Please note that until `v0.1.0` this is very much a preview crate: it offers a preliminary look at the crate's structure and at the reference-implementation approach, and its API should not be considered stable.
https://github.com/plume-sig/zk-nullifier-sig/blob/main/README.md
31 changes: 0 additions & 31 deletions rust-arkworks/src/error.rs

This file was deleted.

135 changes: 135 additions & 0 deletions rust-arkworks/src/fixed_hasher/expander.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
// The below implementation is a rework of https://github.com/armfazh/h2c-rust-ref
// With some optimisations

use core::marker::PhantomData;

use ark_std::vec::*;

use arrayvec::ArrayVec;
use sha2::digest::{ExtendableOutput, FixedOutputReset, Update};

/// Derives `length` uniformly pseudorandom bytes from `msg`, in the style of
/// the `expand_message` routines of the IRTF CFRG hash-to-curve draft (§5.3).
pub trait Expander {
    fn expand(&self, msg: &[u8], length: usize) -> Vec<u8>;
}
/// Longest domain-separation tag usable directly; longer DSTs are hashed down.
const MAX_DST_LENGTH: usize = 255;

/// Prefix prepended when reducing an oversized DST (draft §5.3.3).
const LONG_DST_PREFIX: &[u8; 17] = b"H2C-OVERSIZE-DST-";

/// Domain-separation tag, held inline (at most [`MAX_DST_LENGTH`] bytes).
///
/// Implements section [5.3.3](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-16#section-5.3.3)
/// "Using DSTs longer than 255 bytes" of the
/// [IRTF CFRG hash-to-curve draft #16](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-16#section-5.3.3).
pub struct DST(arrayvec::ArrayVec<u8, MAX_DST_LENGTH>);

impl DST {
    /// Builds the DST for XMD expansion. An oversized tag is reduced to
    /// `H("H2C-OVERSIZE-DST-" || dst)` as required by draft §5.3.3; a tag of
    /// at most 255 bytes is stored verbatim.
    pub fn new_xmd<H: FixedOutputReset + Default>(dst: &[u8]) -> DST {
        if dst.len() <= MAX_DST_LENGTH {
            return DST(ArrayVec::try_from(dst).unwrap());
        }
        let mut hasher = H::default();
        hasher.update(&LONG_DST_PREFIX[..]);
        hasher.update(dst);
        let digest = hasher.finalize_fixed();
        DST(ArrayVec::try_from(digest.as_ref()).unwrap())
    }

    /// Builds the DST for XOF expansion. An oversized tag is reduced to
    /// ceil(2k / 8) bytes of XOF output over the prefixed tag, where `k` is
    /// the target security level in bits (draft §5.3.3).
    #[allow(dead_code)]
    pub fn new_xof<H: ExtendableOutput + Default>(dst: &[u8], k: usize) -> DST {
        if dst.len() <= MAX_DST_LENGTH {
            return DST(ArrayVec::try_from(dst).unwrap());
        }
        let mut xofer = H::default();
        xofer.update(&LONG_DST_PREFIX[..]);
        xofer.update(dst);
        // The reduced tag is ceil((2 * k) / 8) bytes long.
        let out_len = (2 * k + 7) >> 3;
        let mut reduced = [0u8; MAX_DST_LENGTH];
        xofer.finalize_xof_into(&mut reduced[..out_len]);
        DST(ArrayVec::try_from(&reduced[..out_len]).unwrap())
    }

    /// Absorbs `DST_prime = DST || I2OSP(len(DST), 1)` into `h`
    /// (I2OSP per https://www.rfc-editor.org/rfc/rfc8017.txt).
    pub fn update<H: Update>(&self, h: &mut H) {
        h.update(self.0.as_ref());
        h.update(&[self.0.len() as u8]);
    }
}

/// `expand_message_xof` expander backed by an extendable-output function
/// (draft §5.3.2). Currently unused in this crate, hence `dead_code`.
#[allow(dead_code)]
pub(super) struct ExpanderXof<H: ExtendableOutput + Clone + Default> {
    /// Marker for the XOF type; no hasher state is kept between calls.
    pub(super) xofer: PhantomData<H>,
    /// Domain-separation tag (may exceed 255 bytes; reduced on use).
    pub(super) dst: Vec<u8>,
    /// Target security level in bits (e.g. 128).
    pub(super) k: usize,
}

impl<H: ExtendableOutput + Clone + Default> Expander for ExpanderXof<H> {
    /// `expand_message_xof` (draft §5.3.2): absorbs
    /// `msg || I2OSP(n, 2) || DST_prime` and squeezes `n` bytes.
    fn expand(&self, msg: &[u8], n: usize) -> Vec<u8> {
        let mut hasher = H::default();
        hasher.update(msg);
        // I2OSP(len, 2) per https://www.rfc-editor.org/rfc/rfc8017.txt
        hasher.update(&(n as u16).to_be_bytes());
        DST::new_xof::<H>(self.dst.as_ref(), self.k).update(&mut hasher);
        hasher.finalize_boxed(n).into_vec()
    }
}

/// `expand_message_xmd` expander backed by a fixed-output hash such as
/// SHA-256 (draft §5.3.1).
pub(super) struct ExpanderXmd<H: FixedOutputReset + Default + Clone> {
    /// Marker for the hash type; no hasher state is kept between calls.
    pub(super) hasher: PhantomData<H>,
    /// Domain-separation tag (may exceed 255 bytes; reduced on use).
    pub(super) dst: Vec<u8>,
    /// Input block size of `H` in bytes; the first `block_size` bytes of
    /// `Z_PAD` are absorbed ahead of the message (draft §5.3.1, `Z_pad`).
    pub(super) block_size: usize,
}

/// `Z_pad`: zero prefix absorbed before the message so the first compression
/// call of `b_0` is message-independent. 256 bytes covers every supported
/// block size.
static Z_PAD: [u8; 256] = [0u8; 256];

impl<H: FixedOutputReset + Default + Clone> Expander for ExpanderXmd<H> {
    /// `expand_message_xmd` (draft §5.3.1): produces `n` uniform bytes as
    /// `b_1 || b_2 || ... || b_ell` truncated to `n`, where
    /// `b_0 = H(Z_pad || msg || I2OSP(n, 2) || 0x00 || DST_prime)` and
    /// `b_i = H(strxor(b_0, b_{i-1}) || I2OSP(i, 1) || DST_prime)`.
    ///
    /// # Panics
    /// If more than 255 hash blocks would be needed, or `n >= 2^16`.
    fn expand(&self, msg: &[u8], n: usize) -> Vec<u8> {
        use sha2::digest::typenum::Unsigned;
        // output size of the hash function, e.g. 32 bytes = 256 bits for sha2::Sha256
        let b_len = H::OutputSize::to_usize();
        // ell = ceil(n / b_len): number of hash outputs needed to cover `n` bytes
        let ell = (n + (b_len - 1)) / b_len;
        assert!(
            ell <= 255,
            "The ratio of desired output to the output size of hash function is too large!"
        );

        let dst_prime = DST::new_xmd::<H>(self.dst.as_ref());
        // Represent `len_in_bytes` as a 2-byte array.
        // As per I2OSP method outlined in https://tools.ietf.org/pdf/rfc8017.pdf,
        // The program should abort if integer that we're trying to convert is too large.
        assert!(n < (1 << 16), "Length should be smaller than 2^16");
        let lib_str: [u8; 2] = (n as u16).to_be_bytes();

        // b_0 = H(Z_pad || msg || l_i_b_str || 0x00 || DST_prime)
        let mut hasher = H::default();
        hasher.update(&Z_PAD[0..self.block_size]);
        hasher.update(msg);
        hasher.update(&lib_str);
        hasher.update(&[0u8]);
        dst_prime.update(&mut hasher);
        let b0 = hasher.finalize_fixed_reset();

        // b_1 = H(b_0 || 0x01 || DST_prime)
        hasher.update(&b0);
        hasher.update(&[1u8]);
        dst_prime.update(&mut hasher);
        let mut bi = hasher.finalize_fixed_reset();

        let mut uniform_bytes: Vec<u8> = Vec::with_capacity(n);
        uniform_bytes.extend_from_slice(&bi);
        // b_i = H(strxor(b_0, b_{i-1}) || I2OSP(i, 1) || DST_prime), i = 2..ell
        for i in 2..=ell {
            // update the hasher with xor of b_0 and b_i elements
            for (l, r) in b0.iter().zip(bi.iter()) {
                hasher.update(&[*l ^ *r]);
            }
            hasher.update(&[i as u8]);
            dst_prime.update(&mut hasher);
            bi = hasher.finalize_fixed_reset();
            uniform_bytes.extend_from_slice(&bi);
        }
        // Drop the surplus of the final block beyond the requested `n` bytes.
        uniform_bytes.truncate(n);
        uniform_bytes
    }
}
62 changes: 62 additions & 0 deletions rust-arkworks/src/fixed_hasher/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
mod expander;
use ark_ff::{field_hashers::HashToField, Field, PrimeField};
use core::marker::PhantomData;
use expander::{Expander, ExpanderXmd};
use sha2::digest::{core_api::BlockSizeUser, FixedOutputReset};

/// Hashes arbitrary byte strings to field elements via `expand_message_xmd`.
// NOTE(review): appears intended as a local replacement for `ark_ff`'s
// default field hasher (the workspace tags this module "field hasher fix")
// — confirm against upstream before relying on exact output equality.
pub struct FixedFieldHasher<H: FixedOutputReset + Default + Clone, const SEC_PARAM: usize = 128> {
    // XMD expander carrying the DST and the hash's input block size.
    expander: ExpanderXmd<H>,
    // Bytes sampled per base-prime-field element; see `get_len_per_elem`.
    len_per_base_elem: usize,
}

impl<F: Field, H: FixedOutputReset + BlockSizeUser + Default + Clone, const SEC_PARAM: usize>
    HashToField<F> for FixedFieldHasher<H, SEC_PARAM>
{
    /// Creates a hasher bound to the domain-separation tag `dst`.
    fn new(dst: &[u8]) -> Self {
        // The final output of `hash_to_field` will be an array of field
        // elements from F::BaseField, each of size `len_per_elem`.
        let len_per_base_elem = get_len_per_elem::<F, SEC_PARAM>();

        let expander = ExpanderXmd {
            hasher: PhantomData,
            dst: dst.to_vec(),
            block_size: H::block_size(),
        };

        FixedFieldHasher {
            expander,
            len_per_base_elem,
        }
    }

    /// Hashes `message` to `N` elements of `F`: a single expansion supplies
    /// `N * m * len_per_base_elem` bytes, and each disjoint window of
    /// `len_per_base_elem` bytes is reduced (big-endian) modulo the base
    /// prime field.
    fn hash_to_field<const N: usize>(&self, message: &[u8]) -> [F; N] {
        // m: extension degree of F over its base prime field (1 for prime fields).
        let m = F::extension_degree() as usize;

        // The user requests `N` of elements of F_p^m to output per input msg,
        // each field element comprising `m` BasePrimeField elements.
        let len_in_bytes = N * m * self.len_per_base_elem;
        let uniform_bytes = self.expander.expand(message, len_in_bytes);

        // i-th output element, assembled coordinate-by-coordinate.
        let cb = |i| {
            // j-th base-prime-field coordinate, read from its byte window.
            let base_prime_field_elem = |j| {
                let elm_offset = self.len_per_base_elem * (j + i * m);
                F::BasePrimeField::from_be_bytes_mod_order(
                    &uniform_bytes[elm_offset..][..self.len_per_base_elem],
                )
            };
            F::from_base_prime_field_elems((0..m).map(base_prime_field_elem)).unwrap()
        };
        ark_std::array::from_fn::<F, N, _>(cb)
    }
}

/// Bytes to sample per base-prime-field element:
/// `ceil((ceil(log2(p)) + SEC_PARAM) / 8)`.
///
/// The extra `SEC_PARAM` bits of oversampling make the bias introduced by
/// the subsequent modular reduction negligible (on the order of
/// `2^-SEC_PARAM`).
const fn get_len_per_elem<F: Field, const SEC_PARAM: usize>() -> usize {
    // ceil(log2(p))
    let base_field_size_in_bits = F::BasePrimeField::MODULUS_BIT_SIZE as usize;
    // Add the security margin, then round up to whole bytes.
    // (The original took a pointless detour through `u64` here; the whole
    // computation fits in `usize`.)
    (base_field_size_in_bits + SEC_PARAM + 7) / 8
}
87 changes: 0 additions & 87 deletions rust-arkworks/src/hash_to_curve.rs

This file was deleted.

Loading

0 comments on commit 5a5c11f

Please sign in to comment.