-
Notifications
You must be signed in to change notification settings - Fork 23
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #117 from plume-sig/arkworks04
Bump to the recent `arkworks` (`v0.5`)
- Loading branch information
Showing
17 changed files
with
1,424 additions
and
332 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,3 +1 @@ | ||
https://github.com/plume-sig/zk-nullifier-sig/blob/main/README.md | ||
# HAZMAT | ||
Please note that until `v0.1.0` this is very much a preview crate which lets you have some preliminary feel of the structure and the reference implementation approach. | ||
https://github.com/plume-sig/zk-nullifier-sig/blob/main/README.md |
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,135 @@ | ||
// The below implementation is a rework of https://github.com/armfazh/h2c-rust-ref | ||
// With some optimisations | ||
|
||
use core::marker::PhantomData; | ||
|
||
use ark_std::vec::*; | ||
|
||
use arrayvec::ArrayVec; | ||
use sha2::digest::{ExtendableOutput, FixedOutputReset, Update}; | ||
|
||
/// Produces a pseudo-random byte string from a message, as required by the
/// `expand_message` routines of the hash-to-curve draft (RFC 9380).
pub trait Expander {
    /// Expands `msg` into `length` uniformly pseudo-random bytes.
    fn expand(&self, msg: &[u8], length: usize) -> Vec<u8>;
}
/// Maximum allowed byte length of a domain-separation tag (DST); the
/// hash-to-curve draft (§5.3.3) requires longer tags to be hashed down first.
const MAX_DST_LENGTH: usize = 255;

/// Prefix prepended before hashing an oversized DST, per §5.3.3 of the draft.
const LONG_DST_PREFIX: &[u8; 17] = b"H2C-OVERSIZE-DST-";
|
||
/// Implements section [5.3.3](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-16#section-5.3.3)
/// "Using DSTs longer than 255 bytes" of the
/// [IRTF CFRG hash-to-curve draft #16](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-16#section-5.3.3).
///
/// Holds the (possibly hashed-down) domain-separation tag inline, capped at
/// `MAX_DST_LENGTH` (255) bytes, so no heap allocation is needed.
pub struct DST(arrayvec::ArrayVec<u8, MAX_DST_LENGTH>);
|
||
impl DST { | ||
pub fn new_xmd<H: FixedOutputReset + Default>(dst: &[u8]) -> DST { | ||
let array = if dst.len() > MAX_DST_LENGTH { | ||
let mut long = H::default(); | ||
long.update(&LONG_DST_PREFIX[..]); | ||
long.update(&dst); | ||
ArrayVec::try_from(long.finalize_fixed().as_ref()).unwrap() | ||
} else { | ||
ArrayVec::try_from(dst).unwrap() | ||
}; | ||
DST(array) | ||
} | ||
|
||
#[allow(dead_code)] | ||
pub fn new_xof<H: ExtendableOutput + Default>(dst: &[u8], k: usize) -> DST { | ||
let array = if dst.len() > MAX_DST_LENGTH { | ||
let mut long = H::default(); | ||
long.update(&LONG_DST_PREFIX[..]); | ||
long.update(&dst); | ||
|
||
let mut new_dst = [0u8; MAX_DST_LENGTH]; | ||
let new_dst = &mut new_dst[0..((2 * k + 7) >> 3)]; | ||
long.finalize_xof_into(new_dst); | ||
ArrayVec::try_from(&*new_dst).unwrap() | ||
} else { | ||
ArrayVec::try_from(dst).unwrap() | ||
}; | ||
DST(array) | ||
} | ||
|
||
pub fn update<H: Update>(&self, h: &mut H) { | ||
h.update(self.0.as_ref()); | ||
// I2OSP(len,1) https://www.rfc-editor.org/rfc/rfc8017.txt | ||
h.update(&[self.0.len() as u8]); | ||
} | ||
} | ||
|
||
#[allow(dead_code)]
/// `expand_message_xof` backend (hash-to-curve draft §5.3.2) built on an
/// extendable-output function such as SHAKE.
pub(super) struct ExpanderXof<H: ExtendableOutput + Clone + Default> {
    /// Type-level marker for the XOF; no hasher state is stored.
    pub(super) xofer: PhantomData<H>,
    /// Domain-separation tag.
    pub(super) dst: Vec<u8>,
    /// Security parameter in bits (sizes a reduced oversize DST).
    pub(super) k: usize,
}
|
||
impl<H: ExtendableOutput + Clone + Default> Expander for ExpanderXof<H> { | ||
fn expand(&self, msg: &[u8], n: usize) -> Vec<u8> { | ||
let mut xofer = H::default(); | ||
xofer.update(msg); | ||
|
||
// I2OSP(len,2) https://www.rfc-editor.org/rfc/rfc8017.txt | ||
let lib_str = (n as u16).to_be_bytes(); | ||
xofer.update(&lib_str); | ||
|
||
DST::new_xof::<H>(self.dst.as_ref(), self.k).update(&mut xofer); | ||
xofer.finalize_boxed(n).into_vec() | ||
} | ||
} | ||
|
||
/// `expand_message_xmd` backend (hash-to-curve draft §5.3.1) built on a
/// fixed-output hash such as SHA-256.
pub(super) struct ExpanderXmd<H: FixedOutputReset + Default + Clone> {
    /// Type-level marker for the hash; no hasher state is stored.
    pub(super) hasher: PhantomData<H>,
    /// Domain-separation tag.
    pub(super) dst: Vec<u8>,
    /// Input block size of `H` in bytes (sizes the Z_pad prefix).
    pub(super) block_size: usize,
}
|
||
/// Zero block prepended to the message in expand_message_xmd (`Z_pad` in the
/// draft, §5.3.1); sliced down to the hash's actual `block_size` when used.
static Z_PAD: [u8; 256] = [0u8; 256];
|
||
impl<H: FixedOutputReset + Default + Clone> Expander for ExpanderXmd<H> {
    /// `expand_message_xmd` (hash-to-curve draft §5.3.1): derives `n` uniform
    /// bytes from `msg` by chaining fixed-output hash calls b_0, b_1, ..., b_ell
    /// and concatenating b_1..b_ell. The exact order of `update` calls below
    /// defines the absorbed byte stream and must not be altered.
    fn expand(&self, msg: &[u8], n: usize) -> Vec<u8> {
        use sha2::digest::typenum::Unsigned;
        // output size of the hash function, e.g. 32 bytes = 256 bits for sha2::Sha256
        let b_len = H::OutputSize::to_usize();
        // ell = ceil(n / b_len): number of hash blocks needed to cover n bytes.
        let ell = (n + (b_len - 1)) / b_len;
        assert!(
            ell <= 255,
            "The ratio of desired output to the output size of hash function is too large!"
        );

        // DST_prime = DST || I2OSP(len(DST), 1), appended to every hash input.
        let dst_prime = DST::new_xmd::<H>(self.dst.as_ref());
        // Represent `len_in_bytes` as a 2-byte array.
        // As per I2OSP method outlined in https://tools.ietf.org/pdf/rfc8017.pdf,
        // The program should abort if integer that we're trying to convert is too large.
        assert!(n < (1 << 16), "Length should be smaller than 2^16");
        let lib_str: [u8; 2] = (n as u16).to_be_bytes();

        // b_0 = H(Z_pad || msg || l_i_b_str || 0x00 || DST_prime)
        let mut hasher = H::default();
        hasher.update(&Z_PAD[0..self.block_size]);
        hasher.update(msg);
        hasher.update(&lib_str);
        hasher.update(&[0u8]);
        dst_prime.update(&mut hasher);
        let b0 = hasher.finalize_fixed_reset();

        // b_1 = H(b_0 || 0x01 || DST_prime)
        hasher.update(&b0);
        hasher.update(&[1u8]);
        dst_prime.update(&mut hasher);
        let mut bi = hasher.finalize_fixed_reset();

        // b_i = H((b_0 XOR b_{i-1}) || I2OSP(i, 1) || DST_prime) for i in 2..=ell
        let mut uniform_bytes: Vec<u8> = Vec::with_capacity(n);
        uniform_bytes.extend_from_slice(&bi);
        for i in 2..=ell {
            // update the hasher with xor of b_0 and b_i elements
            for (l, r) in b0.iter().zip(bi.iter()) {
                hasher.update(&[*l ^ *r]);
            }
            hasher.update(&[i as u8]);
            dst_prime.update(&mut hasher);
            bi = hasher.finalize_fixed_reset();
            uniform_bytes.extend_from_slice(&bi);
        }
        // Truncate b_1 || ... || b_ell to exactly the requested n bytes.
        uniform_bytes.truncate(n);
        uniform_bytes
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
mod expander; | ||
use ark_ff::{field_hashers::HashToField, Field, PrimeField}; | ||
use core::marker::PhantomData; | ||
use expander::{Expander, ExpanderXmd}; | ||
use sha2::digest::{core_api::BlockSizeUser, FixedOutputReset}; | ||
|
||
/// Hash-to-field implementation over a fixed-output hash `H` using
/// expand_message_xmd, with `SEC_PARAM` bits of statistical security against
/// the bias introduced by modular reduction.
pub struct FixedFieldHasher<H: FixedOutputReset + Default + Clone, const SEC_PARAM: usize = 128> {
    // expand_message_xmd instance carrying the DST and hash block size.
    expander: ExpanderXmd<H>,
    // Bytes sampled per base-prime-field element; see `get_len_per_elem`.
    len_per_base_elem: usize,
}
|
||
impl<F: Field, H: FixedOutputReset + BlockSizeUser + Default + Clone, const SEC_PARAM: usize> | ||
HashToField<F> for FixedFieldHasher<H, SEC_PARAM> | ||
{ | ||
fn new(dst: &[u8]) -> Self { | ||
// The final output of `hash_to_field` will be an array of field | ||
// elements from F::BaseField, each of size `len_per_elem`. | ||
let len_per_base_elem = get_len_per_elem::<F, SEC_PARAM>(); | ||
|
||
let expander = ExpanderXmd { | ||
hasher: PhantomData, | ||
dst: dst.to_vec(), | ||
block_size: H::block_size(), | ||
}; | ||
|
||
FixedFieldHasher { | ||
expander, | ||
len_per_base_elem, | ||
} | ||
} | ||
|
||
fn hash_to_field<const N: usize>(&self, message: &[u8]) -> [F; N] { | ||
let m = F::extension_degree() as usize; | ||
|
||
// The user requests `N` of elements of F_p^m to output per input msg, | ||
// each field element comprising `m` BasePrimeField elements. | ||
let len_in_bytes = N * m * self.len_per_base_elem; | ||
let uniform_bytes = self.expander.expand(message, len_in_bytes); | ||
|
||
let cb = |i| { | ||
let base_prime_field_elem = |j| { | ||
let elm_offset = self.len_per_base_elem * (j + i * m); | ||
F::BasePrimeField::from_be_bytes_mod_order( | ||
&uniform_bytes[elm_offset..][..self.len_per_base_elem], | ||
) | ||
}; | ||
F::from_base_prime_field_elems((0..m).map(base_prime_field_elem)).unwrap() | ||
}; | ||
ark_std::array::from_fn::<F, N, _>(cb) | ||
} | ||
} | ||
|
||
const fn get_len_per_elem<F: Field, const SEC_PARAM: usize>() -> usize { | ||
// ceil(log(p)) | ||
let base_field_size_in_bits = F::BasePrimeField::MODULUS_BIT_SIZE as usize; | ||
// ceil(log(p)) + security_parameter | ||
let base_field_size_with_security_padding_in_bits = base_field_size_in_bits + SEC_PARAM; | ||
// ceil( (ceil(log(p)) + security_parameter) / 8) | ||
let bytes_per_base_field_elem = | ||
((base_field_size_with_security_padding_in_bits + 7) / 8) as u64; | ||
bytes_per_base_field_elem as usize | ||
} |
This file was deleted.
Oops, something went wrong.
Oops, something went wrong.