diff --git a/filecoin-hashers/src/generic/blake2s.rs b/filecoin-hashers/src/generic/blake2s.rs new file mode 100644 index 000000000..b686ce895 --- /dev/null +++ b/filecoin-hashers/src/generic/blake2s.rs @@ -0,0 +1,577 @@ +use std::cmp::Ordering; +use std::fmt::{self, Debug, Formatter}; +use std::marker::PhantomData; + +use bellperson::{ + gadgets::{ + blake2s::blake2s as blake2s_circuit, boolean::Boolean, multipack, num::AllocatedNum, + }, + ConstraintSystem, SynthesisError, +}; +use blake2s_simd::{Hash as Blake2sHash, Params as Blake2sBuilder, State}; +use blstrs::Scalar as Fr; +use ff::PrimeField; +use merkletree::{ + hash::{Algorithm, Hashable}, + merkle::Element, +}; +use pasta_curves::{Fp, Fq}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use crate::{Domain, Hasher, HashFunction}; + +#[derive(Copy, Clone, PartialEq, Eq, Default)] +pub struct Blake2sDomain> { + pub state: [u8; 32], + _f: PhantomData, +} + +impl> Debug for Blake2sDomain { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "Blake2sDomain({})", hex::encode(&self.state)) + } +} + +// Don't blanket `impl From for Blake2sDomain where F: PrimeField` because `PrimeField` is +// an externally defined trait which can have external implementors (which can result in conflicting +// implementations), e.g. if we blanket impl `impl From<[u8; 32]> for Blake2sDomain where F: +// PrimeField` because `[u8; 32]` can implement `PrimeField`. +impl From for Blake2sDomain { + fn from(f: Fr) -> Self { + Blake2sDomain { + state: f.to_repr(), + _f: PhantomData, + } + } +} +impl From for Blake2sDomain { + fn from(f: Fp) -> Self { + Blake2sDomain { + state: f.to_repr(), + _f: PhantomData, + } + } +} +impl From for Blake2sDomain { + fn from(f: Fq) -> Self { + Blake2sDomain { + state: f.to_repr(), + _f: PhantomData, + } + } +} + +impl Into for Blake2sDomain { + fn into(self) -> Fr { + Fr::from_repr_vartime(self.state).expect("from_repr failure") + } +} +impl Into for Blake2sDomain { + fn into(self) -> Fp { + Fp::from_repr_vartime(self.state).expect("from_repr failure") + } +} +impl Into for Blake2sDomain { + fn into(self) -> Fq { + Fq::from_repr_vartime(self.state).expect("from_repr failure") + } +} + +impl From for Blake2sDomain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::Blake2sDomain (Pallas)") + } +} +impl From for Blake2sDomain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::Blake2sDomain (Vesta)") + } +} + +impl Into for Blake2sDomain { + fn into(self) -> Fr { + panic!("cannot convert halo::Blake2sDomain (Pallas) into BLS12-381 scalar"); + } +} +impl Into for Blake2sDomain { + fn into(self) -> Fr { + panic!("cannot convert halo::Blake2sDomain (Vesta) into BLS12-381 scalar"); + } +} + +impl> From<[u8; 32]> for Blake2sDomain { + fn from(bytes: [u8; 32]) -> Self { + Blake2sDomain { + state: bytes, + _f: PhantomData, + } + } +} + +impl> From for Blake2sDomain { + fn from(digest: Blake2sHash) -> Self { + let mut domain = Blake2sDomain { + state: *digest.as_array(), + _f: PhantomData, + }; + domain.trim_to_fr32(); + domain + } +} + +impl> AsRef<[u8]> for Blake2sDomain { + fn as_ref(&self) -> &[u8] { + &self.state + } +} + +impl> AsRef for Blake2sDomain { + fn as_ref(&self) -> &Self { + self + } +} + +// Impl `PartialOrd` and `Ord` by hand because we can't derive them due to `F: PrimeField` not +// implementing them. 
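Before the ordering impls that follow, a note on the definitions above: the angle-bracketed generic parameters were lost when this patch was flattened into single lines. The sketch below is a reconstruction under that assumption, not part of the original patch; it shows how the field-generic domain and one pair of the per-field conversions presumably read, and why `From` is implemented per concrete field rather than blanketed over every `PrimeField` (it reuses the imports already at the top of this file).

// Field-generic 32-byte domain element; `F` only records which ~255-bit field the bytes
// belong to (the stripped bound is presumably `F: PrimeField<Repr = [u8; 32]>`).
#[derive(Copy, Clone, PartialEq, Eq, Default)]
pub struct Blake2sDomain<F: PrimeField<Repr = [u8; 32]>> {
    pub state: [u8; 32],
    _f: PhantomData<F>,
}

// One `From`/`Into` pair per concrete field (`Fr`, `Fp`, `Fq`) rather than a blanket
// `impl<F: PrimeField> From<F>`, which could conflict with `From<[u8; 32]>` if a foreign
// `[u8; 32]`-like type ever implemented `PrimeField`.
impl From<Fr> for Blake2sDomain<Fr> {
    fn from(f: Fr) -> Self {
        Blake2sDomain {
            state: f.to_repr(),
            _f: PhantomData,
        }
    }
}

#[allow(clippy::from_over_into)]
impl Into<Fr> for Blake2sDomain<Fr> {
    fn into(self) -> Fr {
        Fr::from_repr_vartime(self.state).expect("from_repr failure")
    }
}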
+impl> PartialOrd for Blake2sDomain { + fn partial_cmp(&self, other: &Self) -> Option { + self.state.partial_cmp(&other.state) + } +} +impl> Ord for Blake2sDomain { + fn cmp(&self, other: &Self) -> Ordering { + self.state.cmp(&other.state) + } +} + +impl> Element for Blake2sDomain { + fn byte_len() -> usize { + 32 + } + + fn from_slice(bytes: &[u8]) -> Self { + assert_eq!(bytes.len(), Self::byte_len(), "invalid number of bytes"); + let mut state = [0u8; 32]; + state.copy_from_slice(bytes); + state.into() + } + + fn copy_to_slice(&self, bytes: &mut [u8]) { + bytes.copy_from_slice(&self.state); + } +} + +impl> std::hash::Hash for Blake2sDomain { + fn hash(&self, hasher: &mut H) { + std::hash::Hash::hash(&self.state, hasher); + } +} + +// Implement `Serialize` and `Deserialize` by hand because we can't derive them due to `F: +// PrimeField` not implementing them. +impl> Serialize for Blake2sDomain { + fn serialize(&self, s: S) -> Result { + self.state.serialize(s) + } +} +impl<'de, F: PrimeField> Deserialize<'de> for Blake2sDomain { + fn deserialize>(d: D) -> Result { + <[u8; 32]>::deserialize(d).map(Into::into) + } +} + +impl Domain for Blake2sDomain { + type Field = Fr; +} + +impl Domain for Blake2sDomain { + type Field = Fp; +} + +impl Domain for Blake2sDomain { + type Field = Fq; +} + +impl> Blake2sDomain { + pub fn trim_to_fr32(&mut self) { + // Strip the last (most-signifigant) two bits to ensure that we state within the ~256-bit + // field `F`; note the fields `Fr`, `Fp`, and `Fq` are each 255-bit fields and fully utilize + // 254 bits, i.e. `254 < log2(field_modulus) < 255`. + self.state[31] &= 0b0011_1111; + } +} + +#[derive(Clone)] +pub struct Blake2sFunction +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + hasher: State, + _f: PhantomData, +} + +impl Default for Blake2sFunction +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + fn default() -> Self { + Blake2sFunction { + hasher: Blake2sBuilder::new().hash_length(32).to_state(), + _f: PhantomData, + } + } +} + +// TODO (jake): do we need `PartialEq` on a hash function? Checking for equality using `Debug` isn't +// (afaik) doing what we think it is... 
+/* +impl> PartialEq for Blake2sFunction { + fn eq(&self, other: &Self) -> bool { + format!("{:?}", self.inner) == format!("{:?}", other.inner) + } +} + +impl Eq for Blake2sFunction {} +*/ + +impl Debug for Blake2sFunction +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "Blake2sFunction({:?})", self.hasher) + } +} + +impl std::hash::Hasher for Blake2sFunction +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + fn write(&mut self, msg: &[u8]) { + self.hasher.update(msg); + } + + fn finish(&self) -> u64 { + unreachable!("unused by Function -- should never be called") + } +} + +impl Hashable> for Blake2sDomain +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + fn hash(&self, hasher: &mut Blake2sFunction) { + as std::hash::Hasher>::write(hasher, self.as_ref()); + } +} + +impl Algorithm> for Blake2sFunction +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + fn hash(&mut self) -> Blake2sDomain { + self.hasher.clone().finalize().into() + } + + fn reset(&mut self) { + self.hasher = Blake2sBuilder::new().hash_length(32).to_state(); + } + + fn leaf(&mut self, leaf: Blake2sDomain) -> Blake2sDomain { + leaf + } + + fn node( + &mut self, + left: Blake2sDomain, + right: Blake2sDomain, + _height: usize, + ) -> Blake2sDomain { + left.hash(self); + right.hash(self); + self.hash() + } + + fn multi_node(&mut self, parts: &[Blake2sDomain], _height: usize) -> Blake2sDomain { + for part in parts { + part.hash(self); + } + self.hash() + } +} + +// Specialized implementation of `HashFunction` over the BLS12-381 scalar field `Fr` because that +// field is the only one which is compatible with `HashFunction`'s circuit inferfaces. +impl HashFunction> for Blake2sFunction { + fn hash(data: &[u8]) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(data) + .finalize() + .into() + } + + fn hash2(a: &Blake2sDomain, b: &Blake2sDomain) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(a.as_ref()) + .update(b.as_ref()) + .finalize() + .into() + } + + fn hash_multi_leaf_circuit>( + mut cs: CS, + leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + let mut bits = Vec::with_capacity(leaves.len() * Fr::CAPACITY as usize); + for (i, leaf) in leaves.iter().enumerate() { + bits.extend_from_slice( + &leaf.to_bits_le(cs.namespace(|| format!("{}_num_into_bits", i)))?, + ); + while bits.len() % 8 != 0 { + bits.push(Boolean::Constant(false)); + } + } + Self::hash_circuit(cs, &bits) + } + + fn hash_leaf_bits_circuit>( + cs: CS, + left: &[Boolean], + right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + let mut preimage: Vec = vec![]; + + preimage.extend_from_slice(left); + while preimage.len() % 8 != 0 { + preimage.push(Boolean::Constant(false)); + } + + preimage.extend_from_slice(right); + while preimage.len() % 8 != 0 { + preimage.push(Boolean::Constant(false)); + } + + Self::hash_circuit(cs, &preimage[..]) + } + + fn hash_circuit>( + mut cs: CS, + bits: &[Boolean], + ) -> Result, SynthesisError> { + let personalization = vec![0u8; 8]; + let alloc_bits = blake2s_circuit(cs.namespace(|| "hash"), bits, &personalization)?; + + multipack::pack_bits(cs.namespace(|| "pack"), &alloc_bits) + } + + fn hash2_circuit>( + mut cs: CS, + a_num: &AllocatedNum, + b_num: &AllocatedNum, + ) -> Result, SynthesisError> { + // Allocate as booleans + let a = a_num.to_bits_le(cs.namespace(|| "a_bits"))?; + let b = b_num.to_bits_le(cs.namespace(|| 
"b_bits"))?; + + let mut preimage: Vec = vec![]; + + preimage.extend_from_slice(&a); + while preimage.len() % 8 != 0 { + preimage.push(Boolean::Constant(false)); + } + + preimage.extend_from_slice(&b); + while preimage.len() % 8 != 0 { + preimage.push(Boolean::Constant(false)); + } + + Self::hash_circuit(cs, &preimage[..]) + } +} + +// Specialized implementation of `HashFunction` over the Pasta scalar fields `Fp` and `Fq` because +// those fields are incompatible with `HashFunction`'s circuit inferfaces. +impl HashFunction> for Blake2sFunction { + fn hash(data: &[u8]) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(data) + .finalize() + .into() + } + + fn hash2(a: &Blake2sDomain, b: &Blake2sDomain) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(a.as_ref()) + .update(b.as_ref()) + .finalize() + .into() + } + + fn hash_leaf_circuit>( + mut _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + mut _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + mut _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash2_circuit>( + mut _cs: CS, + _a_num: &AllocatedNum, + _b_num: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } +} +impl HashFunction> for Blake2sFunction { + fn hash(data: &[u8]) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(data) + .finalize() + .into() + } + + fn hash2(a: &Blake2sDomain, b: &Blake2sDomain) -> Blake2sDomain { + Blake2sBuilder::new() + .hash_length(32) + .to_state() + .update(a.as_ref()) + .update(b.as_ref()) + .finalize() + .into() + } + + fn hash_leaf_circuit>( + mut _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + mut _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + mut _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } + + fn 
hash2_circuit>( + mut _cs: CS, + _a_num: &AllocatedNum, + _b_num: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::Blake2sFunction cannot be used within Groth16 circuits") + } +} + +#[derive(Default, Copy, Clone, PartialEq, Eq, Debug)] +pub struct Blake2sHasher +where + F: PrimeField, + Blake2sDomain: From + Into, +{ + _f: PhantomData, +} + +impl Hasher for Blake2sHasher { + type Domain = Blake2sDomain; + type Function = Blake2sFunction; + + fn name() -> String { + "Blake2sHasher".into() + } +} + +impl Hasher for Blake2sHasher { + type Domain = Blake2sDomain; + type Function = Blake2sFunction; + + fn name() -> String { + "Blake2sHasher_pallas".into() + } +} + +impl Hasher for Blake2sHasher { + type Domain = Blake2sDomain; + type Function = Blake2sFunction; + + fn name() -> String { + "Blake2sHasher_vesta".into() + } +} diff --git a/filecoin-hashers/src/generic/mod.rs b/filecoin-hashers/src/generic/mod.rs new file mode 100644 index 000000000..ef8d72172 --- /dev/null +++ b/filecoin-hashers/src/generic/mod.rs @@ -0,0 +1,91 @@ +#[cfg(feature = "blake2s")] +pub mod blake2s; +#[cfg(feature = "poseidon")] +pub mod poseidon; +#[cfg(feature = "sha256")] +pub mod sha256; + +#[cfg(feature = "blake2s")] +pub use blake2s::{Blake2sDomain, Blake2sFunction, Blake2sHasher}; +#[cfg(feature = "poseidon")] +pub use poseidon::{PoseidonDomain, PoseidonFunction, PoseidonHasher}; +#[cfg(feature = "sha256")] +pub use sha256::{Sha256Domain, Sha256Function, Sha256Hasher}; + +// Rexport each hasher over the field `Fr` which is compatible with Groth16. +pub mod groth { + #[cfg(any(feature = "blake2s", feature = "poseidon", feature = "sha256"))] + use blstrs::Scalar as Fr; + + // BLS12-381 + #[cfg(feature = "blake2s")] + pub type Blake2sDomain = super::Blake2sDomain; + #[cfg(feature = "blake2s")] + pub type Blake2sFunction = super::Blake2sFunction; + #[cfg(feature = "blake2s")] + pub type Blake2sHasher = super::Blake2sHasher; + + #[cfg(feature = "poseidon")] + pub type PoseidonDomain = super::PoseidonDomain; + #[cfg(feature = "poseidon")] + pub type PoseidonFunction = super::PoseidonFunction; + #[cfg(feature = "poseidon")] + pub type PoseidonHasher = super::PoseidonHasher; + + #[cfg(feature = "sha256")] + pub type Sha256Domain = super::Sha256Domain; + #[cfg(feature = "sha256")] + pub type Sha256Function = super::Sha256Function; + #[cfg(feature = "sha256")] + pub type Sha256Hasher = super::Sha256Hasher; +} + +// Rexport each hasher over the fields `Fp` and `Fq` which are compatible with Halo2. 
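The field parameters were likewise stripped from the alias definitions above; as a reading aid only (not part of the original patch), the `groth` aliases presumably read along these lines, with the `halo` module below following the same pattern over the Pasta fields:

// In `groth`, each alias pins the field to the BLS12-381 scalar field `Fr`:
pub type PoseidonHasher = super::PoseidonHasher<Fr>;
pub type Sha256Domain = super::Sha256Domain<Fr>;

// In the `halo` module below, the same types are pinned to a Pasta field; in this crate `Fp`
// backs the Pallas-suffixed hashers and `Fq` the Vesta-suffixed ones:
pub type PoseidonHasherPallas = super::PoseidonHasher<Fp>;
pub type PoseidonHasherVesta = super::PoseidonHasher<Fq>;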
+pub mod halo { + #[cfg(any(feature = "blake2s", feature = "poseidon", feature = "sha256"))] + use pasta_curves::{Fp, Fq}; + + // Pallas + #[cfg(feature = "blake2s")] + pub type Blake2sDomainPallas = super::Blake2sDomain; + #[cfg(feature = "blake2s")] + pub type Blake2sFunctionPallas = super::Blake2sFunction; + #[cfg(feature = "blake2s")] + pub type Blake2sHasherPallas = super::Blake2sHasher; + + #[cfg(feature = "poseidon")] + pub type PoseidonDomainPallas = super::PoseidonDomain; + #[cfg(feature = "poseidon")] + pub type PoseidonFunctionPallas = super::PoseidonFunction; + #[cfg(feature = "poseidon")] + pub type PoseidonHasherPallas = super::PoseidonHasher; + + #[cfg(feature = "sha256")] + pub type Sha256DomainPallas = super::Sha256Domain; + #[cfg(feature = "sha256")] + pub type Sha256FunctionPallas = super::Sha256Function; + #[cfg(feature = "sha256")] + pub type Sha256HasherPallas = super::Sha256Hasher; + + // Vesta + #[cfg(feature = "blake2s")] + pub type Blake2sDomainVesta = super::Blake2sDomain; + #[cfg(feature = "blake2s")] + pub type Blake2sFunctionVesta = super::Blake2sFunction; + #[cfg(feature = "blake2s")] + pub type Blake2sHasherVesta = super::Blake2sHasher; + + #[cfg(feature = "poseidon")] + pub type PoseidonDomainVesta = super::PoseidonDomain; + #[cfg(feature = "poseidon")] + pub type PoseidonFunctionVesta = super::PoseidonFunction; + #[cfg(feature = "poseidon")] + pub type PoseidonHasherVesta = super::PoseidonHasher; + + #[cfg(feature = "sha256")] + pub type Sha256DomainVesta = super::Sha256Domain; + #[cfg(feature = "sha256")] + pub type Sha256FunctionVesta = super::Sha256Function; + #[cfg(feature = "sha256")] + pub type Sha256HasherVesta = super::Sha256Hasher; +} diff --git a/filecoin-hashers/src/generic/poseidon.rs b/filecoin-hashers/src/generic/poseidon.rs new file mode 100644 index 000000000..b5c8c9dfb --- /dev/null +++ b/filecoin-hashers/src/generic/poseidon.rs @@ -0,0 +1,688 @@ +use std::cmp::Ordering; +use std::marker::PhantomData; + +use bellperson::{ + gadgets::{boolean::Boolean, num::AllocatedNum}, + ConstraintSystem, SynthesisError, +}; +use blstrs::Scalar as Fr; +use ff::{Field, PrimeField}; +use generic_array::typenum::{Unsigned, U11, U2, U4, U8}; +use lazy_static::lazy_static; +use merkletree::{ + hash::{Algorithm, Hashable}, + merkle::Element, +}; +use neptune::{circuit::poseidon_hash, poseidon::PoseidonConstants, Arity, Poseidon}; +use pasta_curves::{Fp, Fq}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use typemap::ShareMap; + +use crate::{Domain, HashFunction, Hasher, PoseidonArity, PoseidonMDArity}; + +lazy_static! 
{ + pub static ref POSEIDON_CONSTANTS_2: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_4: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_8: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_11: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_MD_CONSTANTS: PoseidonConstants:: = + PoseidonConstants::new(); + + pub static ref POSEIDON_CONSTANTS_2_PALLAS: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_4_PALLAS: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_8_PALLAS: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_11_PALLAS: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_MD_CONSTANTS_PALLAS: PoseidonConstants:: = + PoseidonConstants::new(); + + pub static ref POSEIDON_CONSTANTS_2_VESTA: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_4_VESTA: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_8_VESTA: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_CONSTANTS_11_VESTA: PoseidonConstants:: = PoseidonConstants::new(); + pub static ref POSEIDON_MD_CONSTANTS_VESTA: PoseidonConstants:: = + PoseidonConstants::new(); + + pub static ref POSEIDON_CONSTANTS: ShareMap = { + let mut tm = ShareMap::custom(); + + tm.insert::>(&*POSEIDON_CONSTANTS_2); + tm.insert::>(&*POSEIDON_CONSTANTS_4); + tm.insert::>(&*POSEIDON_CONSTANTS_8); + tm.insert::>(&*POSEIDON_CONSTANTS_11); + tm.insert::>(&*POSEIDON_MD_CONSTANTS); + + tm.insert::>(&*POSEIDON_CONSTANTS_2_PALLAS); + tm.insert::>(&*POSEIDON_CONSTANTS_4_PALLAS); + tm.insert::>(&*POSEIDON_CONSTANTS_8_PALLAS); + tm.insert::>(&*POSEIDON_CONSTANTS_11_PALLAS); + tm.insert::>(&*POSEIDON_MD_CONSTANTS_PALLAS); + + tm.insert::>(&*POSEIDON_CONSTANTS_2_VESTA); + tm.insert::>(&*POSEIDON_CONSTANTS_4_VESTA); + tm.insert::>(&*POSEIDON_CONSTANTS_8_VESTA); + tm.insert::>(&*POSEIDON_CONSTANTS_11_VESTA); + tm.insert::>(&*POSEIDON_MD_CONSTANTS_VESTA); + + tm + }; +} + +pub struct FieldArity(PhantomData<(F, A)>) +where + F: PrimeField, + A: Arity; + +impl typemap::Key for FieldArity +where + F: PrimeField, + A: Arity, +{ + type Value = &'static PoseidonConstants; +} + +#[derive(Default, Copy, Clone, Debug, PartialEq, Eq)] +pub struct PoseidonDomain>(pub ::Repr); + +// Don't blanket `impl From for PoseidonDomain where F: PrimeField` because `PrimeField` is +// an externally defined trait which can have external implementors (which can result in conflicting +// implementations), e.g. if we blanket impl `impl From<[u8; 32]> for PoseidonDomain where F: +// PrimeField` because `[u8; 32]` can implement `PrimeField`. 
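The per-field conversions that follow mirror the Blake2s ones. Stepping back to the `FieldArity` key and `POSEIDON_CONSTANTS` map defined above (their generics were also stripped in this flattened view): the map is what lets field-generic code such as `shared_hash_frs` pick the right `PoseidonConstants` for a `(field, arity)` pair. A hedged sketch, reconstructed from the surrounding remnants and reusing this file's imports and statics; the helper name `hash2_pallas` is illustrative only:

// Reconstruction of the stripped `typemap::Key` impl above: each (field, arity) key stores a
// reference to the matching lazy-static constants.
impl<F, A> typemap::Key for FieldArity<F, A>
where
    F: PrimeField,
    A: Arity<F>,
{
    type Value = &'static PoseidonConstants<F, A>;
}

// Illustrative helper showing how field-generic code selects constants at runtime.
fn hash2_pallas(a: Fp, b: Fp) -> Fp {
    // `get` returns `None` if no constants were registered for this (field, arity) pair.
    let consts = *POSEIDON_CONSTANTS
        .get::<FieldArity<Fp, U2>>()
        .expect("arity-2 Poseidon constants not found for field");
    Poseidon::new_with_preimage(&[a, b], consts).hash()
}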
+impl From for PoseidonDomain { + fn from(f: Fr) -> Self { + PoseidonDomain(f.to_repr()) + } +} +impl From for PoseidonDomain { + fn from(f: Fp) -> Self { + PoseidonDomain(f.to_repr()) + } +} +impl From for PoseidonDomain { + fn from(f: Fq) -> Self { + PoseidonDomain(f.to_repr()) + } +} + +impl Into for PoseidonDomain { + fn into(self) -> Fr { + Fr::from_repr_vartime(self.0).expect("from_repr failure") + } +} +impl Into for PoseidonDomain { + fn into(self) -> Fp { + Fp::from_repr_vartime(self.0).expect("from_repr failure") + } +} +impl Into for PoseidonDomain { + fn into(self) -> Fq { + Fq::from_repr_vartime(self.0).expect("from_repr failure") + } +} + +impl From for PoseidonDomain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::PoseidonDomain (Pallas)") + } +} +impl From for PoseidonDomain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::PoseidonDomain (Vesta)") + } +} + +impl Into for PoseidonDomain { + fn into(self) -> Fr { + panic!("cannot convert halo::PoseidonDomain (Pallas) into BLS12-381 scalar"); + } +} +impl Into for PoseidonDomain { + fn into(self) -> Fr { + panic!("cannot convert halo::PoseidonDomain (Vesta) into BLS12-381 scalar"); + } +} + +impl> From<[u8; 32]> for PoseidonDomain { + fn from(bytes: [u8; 32]) -> Self { + PoseidonDomain(bytes) + } +} + +impl> AsRef<[u8]> for PoseidonDomain { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl> AsRef for PoseidonDomain { + fn as_ref(&self) -> &Self { + self + } +} + +// Impl `PartialOrd` and `Ord` by hand because we can't derive them due to `F: PrimeField` not +// implementing them. +impl> PartialOrd for PoseidonDomain { + fn partial_cmp(&self, other: &Self) -> Option { + self.0.partial_cmp(&other.0) + } +} +impl> Ord for PoseidonDomain { + fn cmp(&self, other: &Self) -> Ordering { + self.0.cmp(&other.0) + } +} + +impl> Element for PoseidonDomain { + fn byte_len() -> usize { + 32 + } + + fn from_slice(bytes: &[u8]) -> Self { + assert_eq!(bytes.len(), Self::byte_len(), "invalid number of bytes"); + let mut repr = [0u8; 32]; + repr.copy_from_slice(bytes); + repr.into() + } + + fn copy_to_slice(&self, bytes: &mut [u8]) { + bytes.copy_from_slice(&self.0); + } +} + +impl> std::hash::Hash for PoseidonDomain { + fn hash(&self, hasher: &mut H) { + std::hash::Hash::hash(&self.0, hasher); + } +} + +// Implement `Serialize` and `Deserialize` by hand because we can't derive them due to `F: +// PrimeField` not implementing them. +impl> Serialize for PoseidonDomain { + fn serialize(&self, s: S) -> Result { + self.0.serialize(s) + } +} +impl<'de, F: PrimeField> Deserialize<'de> for PoseidonDomain { + fn deserialize>(d: D) -> Result { + <[u8; 32]>::deserialize(d).map(Into::into) + } +} + +impl Domain for PoseidonDomain { + type Field = Fr; +} + +impl Domain for PoseidonDomain { + type Field = Fp; +} + +impl Domain for PoseidonDomain { + type Field = Fq; +} + +fn shared_hash(data: &[u8]) -> PoseidonDomain +where + F: PrimeField, + PoseidonDomain: From, +{ + // FIXME: We shouldn't unwrap here, but doing otherwise will require an interface change. + // We could truncate so `bytes_into_frs` cannot fail, then ensure `data` is always `fr_safe`. 
+ let preimage: Vec = data + .chunks(32) + .map(|chunk| { + let mut repr = [0u8; 32]; + repr.copy_from_slice(chunk); + F::from_repr_vartime(repr).expect("from_repr failure") + }) + .collect(); + + shared_hash_frs(&preimage).into() +} + +fn shared_hash_frs>(preimage: &[F]) -> F { + match preimage.len() { + 2 => { + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("arity-2 Poseidon constants not found for field"); + Poseidon::new_with_preimage(preimage, consts).hash() + } + 4 => { + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("arity-4 Poseidon constants not found for field"); + Poseidon::new_with_preimage(preimage, consts).hash() + } + 8 => { + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("arity-8 Poseidon constants not found for field"); + Poseidon::new_with_preimage(preimage, consts).hash() + } + n => panic!("unsupported arity for Poseidon hasher: {}", n), + } +} + +#[derive(Default, Clone, Debug)] +pub struct PoseidonFunction(F) +where + F: PrimeField, + PoseidonDomain: From + Into; + +impl std::hash::Hasher for PoseidonFunction +where + F: PrimeField, + PoseidonDomain: From + Into, +{ + fn write(&mut self, preimage: &[u8]) { + self.0 = shared_hash(preimage).into(); + } + + fn finish(&self) -> u64 { + unreachable!("unused by Function -- should never be called") + } +} + +impl Hashable> for PoseidonDomain +where + F: PrimeField, + PoseidonDomain: From + Into, +{ + fn hash(&self, hasher: &mut PoseidonFunction) { + as std::hash::Hasher>::write(hasher, self.as_ref()) + } +} + +// We can't blanket impl `Hashable for F where F: PrimeField` because both `Hashable` and `PrimeField` +// are external traits, therefore we must implement `Hashable` by hand for each Pasta field. +impl Hashable> for Fr { + fn hash(&self, hasher: &mut PoseidonFunction) { + as std::hash::Hasher>::write(hasher, &self.to_repr()) + } +} +impl Hashable> for Fp { + fn hash(&self, hasher: &mut PoseidonFunction) { + as std::hash::Hasher>::write(hasher, &self.to_repr()) + } +} +impl Hashable> for Fq { + fn hash(&self, hasher: &mut PoseidonFunction) { + as std::hash::Hasher>::write(hasher, &self.to_repr()) + } +} + +impl Algorithm> for PoseidonFunction +where + F: PrimeField, + PoseidonDomain: From + Into, +{ + fn hash(&mut self) -> PoseidonDomain { + self.0.into() + } + + fn reset(&mut self) { + self.0 = F::zero(); + } + + fn leaf(&mut self, leaf: PoseidonDomain) -> PoseidonDomain { + leaf + } + + fn node( + &mut self, + left: PoseidonDomain, + right: PoseidonDomain, + _height: usize, + ) -> PoseidonDomain { + shared_hash_frs(&[left.into(), right.into()]).into() + } + + fn multi_node(&mut self, preimage: &[PoseidonDomain], _height: usize) -> PoseidonDomain { + match preimage.len() { + 2 | 4 | 8 => { + let preimage: Vec = preimage + .iter() + .map(|domain| (*domain).into()) + .collect(); + shared_hash_frs(&preimage).into() + } + arity => panic!("unsupported Halo Poseidon hasher arity: {}", arity), + } + } +} + +impl HashFunction> for PoseidonFunction { + fn hash(data: &[u8]) -> PoseidonDomain { + shared_hash(data) + } + + fn hash2(a: &PoseidonDomain, b: &PoseidonDomain) -> PoseidonDomain { + let preimage = [(*a).into(), (*b).into()]; + Poseidon::new_with_preimage(&preimage, &*POSEIDON_CONSTANTS_2).hash().into() + } + + fn hash_md(input: &[PoseidonDomain]) -> PoseidonDomain { + assert!( + input.len() > 1, + "hash_md preimage must contain more than one element" + ); + + let arity = PoseidonMDArity::to_usize(); + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("Poseidon constants not found for field 
and arity-MD"); + + let mut p = Poseidon::new(consts); + + let fr_input: Vec = input.iter().map(|domain| (*domain).into()).collect(); + + fr_input[1..] + .chunks(arity - 1) + .fold(fr_input[0], |acc, frs| { + p.reset(); + // Calling `.expect()` will panic iff we call `.input()` more that `arity` number + // of times prior to reseting the hasher (i.e. if we exceed the arity of the + // Poseidon constants) or if `preimge.len() == 1`; we prevent both scenarios. + p.input(acc).expect("input failure"); + for fr in frs { + p.input(*fr).expect("input failure"); + } + p.hash() + }) + .into() + } + + fn hash_leaf_circuit>( + cs: CS, + left: &AllocatedNum, + right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + Self::hash2_circuit(cs, left, right) + } + + fn hash_multi_leaf_circuit>( + cs: CS, + leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect(&format!( + "arity-{} Poseidon constants not found for field", + Arity::to_usize(), + )); + poseidon_hash::(cs, leaves.to_vec(), consts) + } + + fn hash_md_circuit>( + cs: &mut CS, + elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + let consts = &*POSEIDON_MD_CONSTANTS; + let arity = PoseidonMDArity::to_usize(); + + let mut hash = elements[0].clone(); + let mut preimage = vec![hash.clone(); arity]; // Allocate. This will be overwritten. + for (hash_num, elts) in elements[1..].chunks(arity - 1).enumerate() { + preimage[0] = hash; + for (i, elt) in elts.iter().enumerate() { + preimage[i + 1] = elt.clone(); + } + // any terminal padding + #[allow(clippy::needless_range_loop)] + for i in (elts.len() + 1)..arity { + preimage[i] = + AllocatedNum::alloc(cs.namespace(|| format!("padding {}", i)), || { + Ok(Fr::zero()) + }) + .expect("alloc failure"); + } + let cs = cs.namespace(|| format!("hash md {}", hash_num)); + hash = poseidon_hash::<_, Fr, PoseidonMDArity>(cs, preimage.clone(), consts)?.clone(); + } + + Ok(hash) + } + + fn hash_circuit>( + _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!(); + } + + fn hash2_circuit( + cs: CS, + a: &AllocatedNum, + b: &AllocatedNum, + ) -> Result, SynthesisError> + where + CS: ConstraintSystem, + { + let preimage = vec![a.clone(), b.clone()]; + poseidon_hash::(cs, preimage, &*POSEIDON_CONSTANTS_2) + } +} + +impl HashFunction> for PoseidonFunction { + fn hash(data: &[u8]) -> PoseidonDomain { + shared_hash(data) + } + + fn hash2(a: &PoseidonDomain, b: &PoseidonDomain) -> PoseidonDomain { + let preimage = [(*a).into(), (*b).into()]; + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("Poseidon constants not found for field and arity-2"); + Poseidon::new_with_preimage(&preimage, consts).hash().into() + } + + fn hash_md(input: &[PoseidonDomain]) -> PoseidonDomain { + assert!( + input.len() > 1, + "hash_md preimage must contain more than one element" + ); + + let arity = PoseidonMDArity::to_usize(); + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("Poseidon constants not found for field and arity-MD"); + + let mut p = Poseidon::new(consts); + + let fr_input: Vec = input.iter().map(|domain| (*domain).into()).collect(); + + fr_input[1..] + .chunks(arity - 1) + .fold(fr_input[0], |acc, frs| { + p.reset(); + // Calling `.expect()` will panic iff we call `.input()` more that `arity` number + // of times prior to reseting the hasher (i.e. if we exceed the arity of the + // Poseidon constants) or if `preimge.len() == 1`; we prevent both scenarios. 
+ p.input(acc).expect("input failure"); + for fr in frs { + p.input(*fr).expect("input failure"); + } + p.hash() + }) + .into() + } + + fn hash_leaf_circuit>( + _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash2_circuit>( + _cs: CS, + _a: &AllocatedNum, + _b: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } +} + +impl HashFunction> for PoseidonFunction { + fn hash(data: &[u8]) -> PoseidonDomain { + shared_hash(data) + } + + fn hash2(a: &PoseidonDomain, b: &PoseidonDomain) -> PoseidonDomain { + let preimage = [(*a).into(), (*b).into()]; + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("Poseidon constants not found for field and arity-2"); + Poseidon::new_with_preimage(&preimage, consts).hash().into() + } + + fn hash_md(input: &[PoseidonDomain]) -> PoseidonDomain { + assert!( + input.len() > 1, + "hash_md preimage must contain more than one element" + ); + + let arity = PoseidonMDArity::to_usize(); + let consts = &POSEIDON_CONSTANTS + .get::>() + .expect("Poseidon constants not found for field and arity-MD"); + + let mut p = Poseidon::new(consts); + + let fr_input: Vec = input.iter().map(|domain| (*domain).into()).collect(); + + fr_input[1..] + .chunks(arity - 1) + .fold(fr_input[0], |acc, frs| { + p.reset(); + // Calling `.expect()` will panic iff we call `.input()` more that `arity` number + // of times prior to reseting the hasher (i.e. if we exceed the arity of the + // Poseidon constants) or if `preimge.len() == 1`; we prevent both scenarios. 
+ p.input(acc).expect("input failure"); + for fr in frs { + p.input(*fr).expect("input failure"); + } + p.hash() + }) + .into() + } + + fn hash_leaf_circuit>( + _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } + + fn hash2_circuit>( + _cs: CS, + _a: &AllocatedNum, + _b: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::PoseidonFunction cannot be used within Groth16 circuits") + } +} + +#[derive(Default, Copy, Clone, Debug, PartialEq, Eq)] +pub struct PoseidonHasher +where + F: PrimeField, + PoseidonDomain: From + Into, +{ + _f: PhantomData, +} + +impl Hasher for PoseidonHasher { + type Domain = PoseidonDomain; + type Function = PoseidonFunction; + + fn name() -> String { + "poseidon_hasher".into() + } +} + +impl Hasher for PoseidonHasher { + type Domain = PoseidonDomain; + type Function = PoseidonFunction; + + fn name() -> String { + "poseidon_pallas_hasher".into() + } +} + +impl Hasher for PoseidonHasher { + type Domain = PoseidonDomain; + type Function = PoseidonFunction; + + fn name() -> String { + "poseidon_vesta_hasher".into() + } +} diff --git a/filecoin-hashers/src/generic/sha256.rs b/filecoin-hashers/src/generic/sha256.rs new file mode 100644 index 000000000..df71d65c7 --- /dev/null +++ b/filecoin-hashers/src/generic/sha256.rs @@ -0,0 +1,582 @@ +use std::cmp::Ordering; +use std::fmt::{self, Debug, Formatter}; +use std::marker::PhantomData; + +use bellperson::{ + gadgets::{boolean::Boolean, multipack, num::AllocatedNum, sha256::sha256 as sha256_circuit}, + ConstraintSystem, SynthesisError, +}; +use blstrs::Scalar as Fr; +use ff::PrimeField; +use merkletree::{ + hash::{Algorithm, Hashable}, + merkle::Element, +}; +use pasta_curves::{Fp, Fq}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use sha2::{Digest, Sha256}; + +use crate::{Domain, Hasher, HashFunction}; + +#[derive(Copy, Clone, PartialEq, Eq, Default)] +pub struct Sha256Domain> { + pub state: [u8; 32], + _f: PhantomData, +} + +impl> Debug for Sha256Domain { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "Sha256Domain({})", hex::encode(&self.state)) + } +} + +// Don't blanket `impl From for Sha256Domain where F: PrimeField` because `PrimeField` is +// an externally defined trait which can have external implementors (which can result in conflicting +// implementations), e.g. if we blanket impl `impl From<[u8; 32]> for Sha256Domain where F: +// PrimeField` because `[u8; 32]` can implement `PrimeField`. 
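The per-field conversions below mirror the Blake2s and Poseidon ones. As elsewhere, the generics were stripped in this flattened view; the sketch below (a reconstruction, not part of the original patch, reusing this file's imports) shows the presumable shape of the field-generic `Sha256Hasher` and how each concrete `Hasher` impl pins the field, which is what the `groth`/`halo` aliases in `generic/mod.rs` rely on:

pub struct Sha256Hasher<F>
where
    F: PrimeField,
    Sha256Domain<F>: From<F> + Into<F>,
{
    _f: PhantomData<F>,
}

// One `Hasher` impl per field, e.g. the Pallas instantiation exposed as `halo::Sha256HasherPallas`:
impl Hasher for Sha256Hasher<Fp> {
    type Domain = Sha256Domain<Fp>;
    type Function = Sha256Function<Fp>;

    fn name() -> String {
        "sha256_pallas_hasher".into()
    }
}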
+impl From for Sha256Domain { + fn from(f: Fr) -> Self { + Sha256Domain { + state: f.to_repr(), + _f: PhantomData, + } + } +} +impl From for Sha256Domain { + fn from(f: Fp) -> Self { + Sha256Domain { + state: f.to_repr(), + _f: PhantomData, + } + } +} +impl From for Sha256Domain { + fn from(f: Fq) -> Self { + Sha256Domain { + state: f.to_repr(), + _f: PhantomData, + } + } +} + +impl Into for Sha256Domain { + fn into(self) -> Fr { + Fr::from_repr_vartime(self.state).expect("from_repr failure") + } +} +impl Into for Sha256Domain { + fn into(self) -> Fp { + Fp::from_repr_vartime(self.state).expect("from_repr failure") + } +} +impl Into for Sha256Domain { + fn into(self) -> Fq { + Fq::from_repr_vartime(self.state).expect("from_repr failure") + } +} + +impl From for Sha256Domain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::Sha256Domain (Pallas)") + } +} +impl From for Sha256Domain { + fn from(_f: Fr) -> Self { + panic!("cannot convert BLS12-381 scalar to halo::Sha256Domain (Vesta)") + } +} + +impl Into for Sha256Domain { + fn into(self) -> Fr { + panic!("cannot convert halo::Sha256Domain (Pallas) into BLS12-381 scalar"); + } +} +impl Into for Sha256Domain { + fn into(self) -> Fr { + panic!("cannot convert halo::Sha256Domain (Vesta) into BLS12-381 scalar"); + } +} + +impl> From<[u8; 32]> for Sha256Domain { + fn from(bytes: [u8; 32]) -> Self { + Sha256Domain { + state: bytes, + _f: PhantomData, + } + } +} + +impl> AsRef<[u8]> for Sha256Domain { + fn as_ref(&self) -> &[u8] { + &self.state + } +} + +impl> AsRef for Sha256Domain { + fn as_ref(&self) -> &Self { + self + } +} + +// Impl `PartialOrd` and `Ord` by hand because we can't derive them due to `F: PrimeField` not +// implementing them. +impl> PartialOrd for Sha256Domain { + fn partial_cmp(&self, other: &Self) -> Option { + self.state.partial_cmp(&other.state) + } +} +impl> Ord for Sha256Domain { + fn cmp(&self, other: &Self) -> Ordering { + self.state.cmp(&other.state) + } +} + +impl> Element for Sha256Domain { + fn byte_len() -> usize { + 32 + } + + fn from_slice(bytes: &[u8]) -> Self { + assert_eq!(bytes.len(), Self::byte_len(), "invalid number of bytes"); + let mut state = [0u8; 32]; + state.copy_from_slice(bytes); + state.into() + } + + fn copy_to_slice(&self, bytes: &mut [u8]) { + bytes.copy_from_slice(&self.state); + } +} + +impl> std::hash::Hash for Sha256Domain { + fn hash(&self, hasher: &mut H) { + std::hash::Hash::hash(&self.state, hasher); + } +} + +// Implement `Serialize` and `Deserialize` by hand because we can't derive them due to `F: +// PrimeField` not implementing them. +impl> Serialize for Sha256Domain { + fn serialize(&self, s: S) -> Result { + self.state.serialize(s) + } +} +impl<'de, F: PrimeField> Deserialize<'de> for Sha256Domain { + fn deserialize>(d: D) -> Result { + <[u8; 32]>::deserialize(d).map(Into::into) + } +} + +/* +impl Domain for Sha256Domain +where + Self: From + Into, + F: PrimeField, +{ + type Field = F; +} +*/ +impl Domain for Sha256Domain { + type Field = Fr; +} + +impl Domain for Sha256Domain { + type Field = Fp; +} + +impl Domain for Sha256Domain { + type Field = Fq; +} + +impl> Sha256Domain { + fn trim_to_fr32(&mut self) { + // Strip the last (most-signifigant) two bits to ensure that we state within the ~256-bit + // field `F`; note the fields `Fr`, `Fp`, and `Fq` are each 255-bit fields and fully utilize + // 254 bits, i.e. `254 < log2(field_modulus) < 255`. 
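        // With the little-endian `Repr` used by `Fr`, `Fp`, and `Fq`, byte 31 holds bits
        // 248..=255, so masking it with 0b0011_1111 clears bits 254 and 255 and leaves a value
        // below 2^254, i.e. strictly below each field's modulus.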
+ self.state[31] &= 0b0011_1111; + } +} + +#[derive(Default, Clone, Debug)] +pub struct Sha256Function +where + F: PrimeField, + Sha256Domain: From + Into, +{ + hasher: Sha256, + _f: PhantomData, +} + +impl std::hash::Hasher for Sha256Function +where + F: PrimeField, + Sha256Domain: From + Into, +{ + fn write(&mut self, msg: &[u8]) { + self.hasher.update(msg); + } + + fn finish(&self) -> u64 { + unreachable!("unused by Function -- should never be called"); + } +} + +impl Hashable> for Sha256Domain +where + F: PrimeField, + Sha256Domain: From + Into, +{ + fn hash(&self, hasher: &mut Sha256Function) { + as std::hash::Hasher>::write(hasher, self.as_ref()); + } +} + +impl Algorithm> for Sha256Function +where + F: PrimeField, + Sha256Domain: From + Into, +{ + fn hash(&mut self) -> Sha256Domain { + let mut digest = [0u8; 32]; + digest.copy_from_slice(self.hasher.clone().finalize().as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn reset(&mut self) { + self.hasher.reset(); + } + + fn leaf(&mut self, leaf: Sha256Domain) -> Sha256Domain { + leaf + } + + fn node( + &mut self, + left: Sha256Domain, + right: Sha256Domain, + _height: usize, + ) -> Sha256Domain { + left.hash(self); + right.hash(self); + self.hash() + } + + fn multi_node(&mut self, parts: &[Sha256Domain], _height: usize) -> Sha256Domain { + for part in parts { + part.hash(self); + } + self.hash() + } +} + +// Specialized implementation of `HashFunction` over the BLS12-381 scalar field `Fr` because that +// field is the only one which is compatible with `HashFunction`'s circuit inferfaces. +impl HashFunction> for Sha256Function { + fn hash(data: &[u8]) -> Sha256Domain { + let mut digest = [0u8; 32]; + digest.copy_from_slice(Sha256::digest(data).as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash2(a: &Sha256Domain, b: &Sha256Domain) -> Sha256Domain { + let mut digest = [0u8; 32]; + let hasher = Sha256::new() + .chain(AsRef::<[u8]>::as_ref(a)) + .chain(AsRef::<[u8]>::as_ref(b)); + digest.copy_from_slice(hasher.finalize().as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash_multi_leaf_circuit>( + mut cs: CS, + leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + let mut bits = Vec::with_capacity(leaves.len() * Fr::CAPACITY as usize); + for (i, leaf) in leaves.iter().enumerate() { + let mut padded = leaf.to_bits_le(cs.namespace(|| format!("{}_num_into_bits", i)))?; + while padded.len() % 8 != 0 { + padded.push(Boolean::Constant(false)); + } + + bits.extend( + padded + .chunks_exact(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned(), + ); + } + Self::hash_circuit(cs, &bits) + } + + fn hash_leaf_bits_circuit>( + cs: CS, + left: &[Boolean], + right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + let mut preimage: Vec = vec![]; + + let mut left_padded = left.to_vec(); + while left_padded.len() % 8 != 0 { + left_padded.push(Boolean::Constant(false)); + } + + preimage.extend( + left_padded + .chunks_exact(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned(), + ); + + let mut right_padded = right.to_vec(); + while right_padded.len() % 8 != 0 { + right_padded.push(Boolean::Constant(false)); + } + + preimage.extend( + right_padded + .chunks_exact(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned(), + ); + + Self::hash_circuit(cs, &preimage[..]) + } + + fn hash_circuit>( + mut cs: CS, + bits: &[Boolean], + ) -> Result, 
SynthesisError> { + let be_bits = sha256_circuit(cs.namespace(|| "hash"), bits)?; + let le_bits = be_bits + .chunks(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned() + .take(Fr::CAPACITY as usize) + .collect::>(); + multipack::pack_bits(cs.namespace(|| "pack_le"), &le_bits) + } + + fn hash2_circuit( + mut cs: CS, + a_num: &AllocatedNum, + b_num: &AllocatedNum, + ) -> Result, SynthesisError> + where + CS: ConstraintSystem, + { + // Allocate as booleans + let a = a_num.to_bits_le(cs.namespace(|| "a_bits"))?; + let b = b_num.to_bits_le(cs.namespace(|| "b_bits"))?; + + let mut preimage: Vec = vec![]; + + let mut a_padded = a.to_vec(); + while a_padded.len() % 8 != 0 { + a_padded.push(Boolean::Constant(false)); + } + + preimage.extend( + a_padded + .chunks_exact(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned(), + ); + + let mut b_padded = b.to_vec(); + while b_padded.len() % 8 != 0 { + b_padded.push(Boolean::Constant(false)); + } + + preimage.extend( + b_padded + .chunks_exact(8) + .flat_map(|chunk| chunk.iter().rev()) + .cloned(), + ); + + Self::hash_circuit(cs, &preimage[..]) + } +} + +// Specialized implementation of `HashFunction` over the Pasta scalar fields `Fp` and `Fq` because +// those fields are incompatible with `HashFunction`'s circuit inferfaces. +impl HashFunction> for Sha256Function { + fn hash(data: &[u8]) -> Sha256Domain { + let mut digest = [0u8; 32]; + digest.copy_from_slice(Sha256::digest(data).as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash2(a: &Sha256Domain, b: &Sha256Domain) -> Sha256Domain { + let mut digest = [0u8; 32]; + let hasher = Sha256::new() + .chain(AsRef::<[u8]>::as_ref(a)) + .chain(AsRef::<[u8]>::as_ref(b)); + digest.copy_from_slice(hasher.finalize().as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash_leaf_circuit>( + mut _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + mut _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + mut _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash2_circuit>( + mut _cs: CS, + _a_num: &AllocatedNum, + _b_num: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } +} +impl HashFunction> for Sha256Function { + fn hash(data: &[u8]) -> Sha256Domain { + let mut digest = [0u8; 32]; + digest.copy_from_slice(Sha256::digest(data).as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash2(a: &Sha256Domain, b: &Sha256Domain) -> Sha256Domain { + let mut digest = [0u8; 32]; + let hasher = Sha256::new() + .chain(AsRef::<[u8]>::as_ref(a)) + 
.chain(AsRef::<[u8]>::as_ref(b)); + digest.copy_from_slice(hasher.finalize().as_ref()); + let mut trimmed: Sha256Domain = digest.into(); + trimmed.trim_to_fr32(); + trimmed + } + + fn hash_leaf_circuit>( + mut _cs: CS, + _left: &AllocatedNum, + _right: &AllocatedNum, + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_multi_leaf_circuit>( + mut _cs: CS, + _leaves: &[AllocatedNum], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_md_circuit>( + _cs: &mut CS, + _elements: &[AllocatedNum], + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_leaf_bits_circuit>( + _cs: CS, + _left: &[Boolean], + _right: &[Boolean], + _height: usize, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash_circuit>( + mut _cs: CS, + _bits: &[Boolean], + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } + + fn hash2_circuit>( + mut _cs: CS, + _a_num: &AllocatedNum, + _b_num: &AllocatedNum, + ) -> Result, SynthesisError> { + unimplemented!("halo::Sha256Function cannot be used within Groth16 circuits") + } +} + +#[derive(Default, Copy, Clone, Debug, PartialEq, Eq)] +pub struct Sha256Hasher +where + F: PrimeField, + Sha256Domain: From + Into, +{ + _f: PhantomData, +} + +impl Hasher for Sha256Hasher { + type Domain = Sha256Domain; + type Function = Sha256Function; + + fn name() -> String { + "sha256_hasher".into() + } +} + +impl Hasher for Sha256Hasher { + type Domain = Sha256Domain; + type Function = Sha256Function; + + fn name() -> String { + "sha256_pallas_hasher".into() + } +} + +impl Hasher for Sha256Hasher { + type Domain = Sha256Domain; + type Function = Sha256Function; + + fn name() -> String { + "sha256_vesta_hasher".into() + } +} diff --git a/filecoin-hashers/src/lib.rs b/filecoin-hashers/src/lib.rs index be76206d5..7dd24746b 100644 --- a/filecoin-hashers/src/lib.rs +++ b/filecoin-hashers/src/lib.rs @@ -6,6 +6,7 @@ #[cfg(feature = "blake2s")] pub mod blake2s; +pub mod generic; pub mod halo; #[cfg(feature = "poseidon")] pub mod poseidon; diff --git a/filecoin-hashers/src/types.rs b/filecoin-hashers/src/types.rs index 83cf3e570..311d0411b 100644 --- a/filecoin-hashers/src/types.rs +++ b/filecoin-hashers/src/types.rs @@ -4,6 +4,7 @@ use std::hash::Hash as StdHash; #[cfg(feature = "poseidon")] pub use crate::poseidon_types::*; +use anyhow::ensure; use bellperson::{ gadgets::{boolean::Boolean, num::AllocatedNum}, ConstraintSystem, SynthesisError, @@ -27,12 +28,15 @@ pub trait Domain: + Eq + Send + Sync + // Maintainig conversions to/from `Fr` limits interface breaking in other `rust-fil-proofs` + // member crates. + From - + From<::Repr> + Into + // Note that `Self::Field` may be `Fr`, in which case the `From` and + // `Into` trait bounds are redundant. 
+ From - + From<[u8; 32]> + Into + + From<[u8; 32]> + Serialize + DeserializeOwned + Element @@ -41,10 +45,24 @@ pub trait Domain: type Field: PrimeField; #[allow(clippy::wrong_self_convention)] - fn into_bytes(&self) -> Vec; - fn try_from_bytes(raw: &[u8]) -> anyhow::Result; + fn into_bytes(&self) -> Vec { + self.as_ref().to_vec() + } + + fn try_from_bytes(bytes: &[u8]) -> anyhow::Result { + ensure!(bytes.len() == Self::byte_len(), "invalid number of bytes"); + let mut array = [0u8; 32]; + array.copy_from_slice(bytes); + Ok(array.into()) + } + /// Write itself into the given slice, LittleEndian bytes. - fn write_bytes(&self, _: &mut [u8]) -> anyhow::Result<()>; + fn write_bytes(&self, dest: &mut [u8]) -> anyhow::Result<()> { + let n = Self::byte_len(); + ensure!(dest.len() >= n, "invalid number of bytes"); + dest[..n].copy_from_slice(self.as_ref()); + Ok(()) + } fn random(rng: &mut R) -> Self { // Generating a field element then converting it ensures that we stay within the field. @@ -52,26 +70,6 @@ pub trait Domain: } } -/* -pub trait GrothDomain: Domain {} - -impl GrothDomain for D -where - D: Domain, -{} - -pub trait GrothHasher: Hasher -where - Self::Domain: GrothDomain + Domain, -{} - -impl GrothHasher for H -where - H: Hasher, - H::Domain: GrothDomain + Domain, -{} -*/ - pub trait HashFunction: Clone + Debug + Send + Sync + LightAlgorithm { fn hash(data: &[u8]) -> T; fn hash2(a: &T, b: &T) -> T; diff --git a/storage-proofs-core/benches/drgraph.rs b/storage-proofs-core/benches/drgraph.rs index a26a59853..fa649a4f9 100644 --- a/storage-proofs-core/benches/drgraph.rs +++ b/storage-proofs-core/benches/drgraph.rs @@ -1,29 +1,27 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use filecoin_hashers::{halo, poseidon::PoseidonHasher}; -use pasta_curves::{Fp, Fq}; +use filecoin_hashers::{generic::{groth, halo}, Hasher}; use storage_proofs_core::{ api_version::ApiVersion, drgraph::{BucketGraph, Graph, BASE_DEGREE}, }; -// DRG parent-gen for the first and second nodes (node-indexes `0` and `1`) is different than -// parent-gen for all other nodes (node-indexes `>= 2`). -const CHILD_NODE: usize = 2; - -#[allow(clippy::unit_arg)] -fn drgraph(c: &mut Criterion) { +fn bench_for_hasher(c: &mut Criterion, hasher_name: &str) { + // Graph sizes to bench. let nodes = vec![12, 24, 128, 1024]; + // The node to generate parents for; DRG parent-gen for the first and second nodes (node + // indexes `0` and `1`) is different than parent-gen for all other nodes (node-indexes `>= 2`). 
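    // (The type parameter stripped from this function's signature is presumably `<H: Hasher>`,
    // with the graph below built as `BucketGraph::<H>`; `drgraph` further down then presumably
    // calls this helper with the groth and halo Poseidon hashers, e.g.
    // `bench_for_hasher::<groth::PoseidonHasher>(c, "bls")`, matching the hashers used by the
    // benches this patch replaces.)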
+ let child: usize = 2; + let mut group = c.benchmark_group("drg-parent-gen"); for n in nodes { - group.bench_function(format!("deg={}-nodes={}-bls12", BASE_DEGREE, n), |b| { + group.bench_function(format!("deg={}-nodes={}-{}", BASE_DEGREE, n, hasher_name), |b| { let graph = - BucketGraph::::new(n, BASE_DEGREE, 0, [32; 32], ApiVersion::V1_1_0) - .unwrap(); + BucketGraph::::new(n, BASE_DEGREE, 0, [32; 32], ApiVersion::V1_1_0).unwrap(); b.iter(|| { let mut parents = vec![0; BASE_DEGREE]; - black_box(graph.parents(CHILD_NODE, &mut parents).unwrap()); + black_box(graph.parents(child, &mut parents).unwrap()); }) }); } @@ -32,49 +30,11 @@ fn drgraph(c: &mut Criterion) { } #[allow(clippy::unit_arg)] -fn drgraph_halo(c: &mut Criterion) { - let nodes = vec![12, 24, 128, 1024]; - - let mut group = c.benchmark_group("drg-parent-gen"); - - for n in &nodes { - group.bench_function(format!("deg={}-nodes={}-pallas", BASE_DEGREE, n), |b| { - let graph = BucketGraph::>::new( - *n, - BASE_DEGREE, - 0, - [32; 32], - ApiVersion::V1_1_0, - ) - .unwrap(); - - b.iter(|| { - let mut parents = vec![0; BASE_DEGREE]; - black_box(graph.parents(CHILD_NODE, &mut parents).unwrap()); - }) - }); - } - - for n in nodes { - group.bench_function(format!("deg={}-nodes={}-vesta", BASE_DEGREE, n), |b| { - let graph = BucketGraph::>::new( - n, - BASE_DEGREE, - 0, - [32; 32], - ApiVersion::V1_1_0, - ) - .unwrap(); - - b.iter(|| { - let mut parents = vec![0; BASE_DEGREE]; - black_box(graph.parents(CHILD_NODE, &mut parents).unwrap()); - }) - }); - } - - group.finish(); +fn drgraph(c: &mut Criterion) { + bench_for_hasher::(c, "bls"); + bench_for_hasher::(c, "pallas"); + bench_for_hasher::(c, "vesta"); } -criterion_group!(benches, drgraph, drgraph_halo); +criterion_group!(benches, drgraph); criterion_main!(benches); diff --git a/storage-proofs-core/benches/merkle.rs b/storage-proofs-core/benches/merkle.rs index 826012035..2341056b8 100644 --- a/storage-proofs-core/benches/merkle.rs +++ b/storage-proofs-core/benches/merkle.rs @@ -1,13 +1,9 @@ -use anyhow::Result; use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use filecoin_hashers::{ - halo, poseidon::PoseidonDomain, poseidon::PoseidonHasher, sha256::Sha256Hasher, Domain, -}; -use pasta_curves::{Fp, Fq}; -use rand::{thread_rng, Rng}; +use filecoin_hashers::{generic::{groth, halo}, Domain, Hasher}; +use rand::thread_rng; use storage_proofs_core::merkle::{create_base_merkle_tree, BinaryMerkleTree}; -fn merkle_benchmark_sha256(c: &mut Criterion) { +fn bench_with_hasher(c: &mut Criterion, hasher_name: &str) { let params = if cfg!(feature = "big-sector-sizes-bench") { vec![128, 1024, 1_048_576] } else { @@ -16,13 +12,14 @@ fn merkle_benchmark_sha256(c: &mut Criterion) { let mut group = c.benchmark_group("merkletree-binary"); for n_nodes in params { - group.bench_function(format!("sha256-{}", n_nodes), |b| { + group.bench_function(format!("nodes={}-{}", n_nodes, hasher_name), |b| { let mut rng = thread_rng(); - let data: Vec = (0..32 * n_nodes).map(|_| rng.gen()).collect(); + let data: Vec = (0..n_nodes) + .flat_map(|_| H::Domain::random(&mut rng).into_bytes()) + .collect(); b.iter(|| { black_box( - create_base_merkle_tree::>(None, n_nodes, &data) - .unwrap(), + create_base_merkle_tree::>(None, n_nodes, &data).unwrap(), ) }) }); @@ -31,160 +28,15 @@ fn merkle_benchmark_sha256(c: &mut Criterion) { group.finish(); } -fn merkle_benchmark_poseidon(c: &mut Criterion) { - let params = if cfg!(feature = "big-sector-sizes-bench") { - vec![64, 128, 1024, 1_048_576] - } else 
{ - vec![64, 128, 1024] - }; - - let mut group = c.benchmark_group("merkletree-binary"); - for n_nodes in params { - group.bench_function(format!("poseidon-{}", n_nodes), |b| { - let mut rng = thread_rng(); - let mut data: Vec = Vec::with_capacity(32 * n_nodes); - (0..n_nodes) - .into_iter() - .try_for_each(|_| -> Result<()> { - let node = PoseidonDomain::random(&mut rng); - data.extend(node.into_bytes()); - Ok(()) - }) - .expect("failed to generate data"); - - b.iter(|| { - black_box( - create_base_merkle_tree::>( - None, n_nodes, &data, - ) - .unwrap(), - ) - }) - }); - } - - group.finish(); -} +fn merkle_benchmark(c: &mut Criterion) { + bench_with_hasher::(c, "sha256-bls"); + bench_with_hasher::(c, "sha256-pallas"); + bench_with_hasher::(c, "sha256-vesta"); -fn merkle_benchmark_sha256_halo(c: &mut Criterion) { - let params = if cfg!(feature = "big-sector-sizes-bench") { - vec![64, 128, 1024, 1_048_576] - } else { - vec![64, 128, 1024] - }; - - let mut group = c.benchmark_group("merkletree-binary"); - for n_nodes in params { - group.bench_function(format!("sha256-pallas-{}", n_nodes), |b| { - let mut rng = thread_rng(); - let mut data: Vec = Vec::with_capacity(32 * n_nodes); - (0..n_nodes) - .into_iter() - .try_for_each(|_| -> Result<()> { - let node = halo::Sha256Domain::::random(&mut rng); - data.extend(node.into_bytes()); - Ok(()) - }) - .expect("failed to generate data"); - - b.iter(|| { - black_box( - create_base_merkle_tree::>>( - None, n_nodes, &data, - ) - .unwrap(), - ) - }) - }); - - group.bench_function(format!("sha256-vesta-{}", n_nodes), |b| { - let mut rng = thread_rng(); - let mut data: Vec = Vec::with_capacity(32 * n_nodes); - (0..n_nodes) - .into_iter() - .try_for_each(|_| -> Result<()> { - let node = halo::Sha256Domain::::random(&mut rng); - data.extend(node.into_bytes()); - Ok(()) - }) - .expect("failed to generate data"); - - b.iter(|| { - black_box( - create_base_merkle_tree::>>( - None, n_nodes, &data, - ) - .unwrap(), - ) - }) - }); - } - - group.finish(); -} - -fn merkle_benchmark_poseidon_halo(c: &mut Criterion) { - let params = if cfg!(feature = "big-sector-sizes-bench") { - vec![64, 128, 1024, 1_048_576] - } else { - vec![64, 128, 1024] - }; - - let mut group = c.benchmark_group("merkletree-binary"); - for n_nodes in params { - group.bench_function(format!("poseidon-pallas-{}", n_nodes), |b| { - let mut rng = thread_rng(); - let mut data: Vec = Vec::with_capacity(32 * n_nodes); - (0..n_nodes) - .into_iter() - .try_for_each(|_| -> Result<()> { - let node = halo::PoseidonDomain::::random(&mut rng); - data.extend(node.into_bytes()); - Ok(()) - }) - .expect("failed to generate data"); - - b.iter(|| { - black_box( - create_base_merkle_tree::>>( - None, n_nodes, &data, - ) - .unwrap(), - ) - }) - }); - - group.bench_function(format!("poseidon-vesta-{}", n_nodes), |b| { - let mut rng = thread_rng(); - let mut data: Vec = Vec::with_capacity(32 * n_nodes); - (0..n_nodes) - .into_iter() - .try_for_each(|_| -> Result<()> { - let node = halo::PoseidonDomain::::random(&mut rng); - data.extend(node.into_bytes()); - Ok(()) - }) - .expect("failed to generate data"); - - b.iter(|| { - black_box( - create_base_merkle_tree::>>( - None, n_nodes, &data, - ) - .unwrap(), - ) - }) - }); - } - - group.finish(); + bench_with_hasher::(c, "poseidon-bls"); + bench_with_hasher::(c, "poseidon-pallas"); + bench_with_hasher::(c, "poseidon-vesta"); } -criterion_group!( - benches, - merkle_benchmark_sha256, - merkle_benchmark_poseidon, - merkle_benchmark_sha256_halo, - 
diff --git a/storage-proofs-core/src/drgraph.rs b/storage-proofs-core/src/drgraph.rs
index 3bc6ff365..78296b5cd 100644
--- a/storage-proofs-core/src/drgraph.rs
+++ b/storage-proofs-core/src/drgraph.rs
@@ -255,13 +255,10 @@ pub fn derive_drg_seed(porep_id: PoRepID) -> [u8; 28] {
 mod tests {
     use super::*;
 
-    use filecoin_hashers::{
-        blake2s::Blake2sHasher, halo, poseidon::PoseidonHasher, sha256::Sha256Hasher,
-    };
+    use filecoin_hashers::generic::{groth, halo};
     use generic_array::typenum::{U0, U2, U4, U8};
     use memmap::{MmapMut, MmapOptions};
     use merkletree::store::StoreConfig;
-    use pasta_curves::{Fp, Fq};
 
     use crate::merkle::{
         create_base_merkle_tree, DiskStore, MerkleProofTrait, MerkleTreeTrait, MerkleTreeWrapper,
@@ -352,18 +349,16 @@ mod tests {
 
     #[test]
     fn graph_bucket_sha256() {
-        graph_bucket::();
+        graph_bucket::();
+        graph_bucket::();
+        graph_bucket::();
     }
 
     #[test]
     fn graph_bucket_blake2s() {
-        graph_bucket::();
-    }
-
-    #[test]
-    fn graph_bucket_sha256_halo() {
-        graph_bucket::>();
-        graph_bucket::>();
+        graph_bucket::();
+        graph_bucket::();
+        graph_bucket::();
     }
 
     fn gen_proof(config: Option) {
@@ -388,66 +383,50 @@ mod tests {
 
     #[test]
     fn gen_proof_poseidon_binary() {
-        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_sha256_binary() {
-        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_blake2s_binary() {
-        gen_proof::(None);
-    }
-
-    #[test]
-    fn gen_proof_poseidon_binary_halo() {
-        gen_proof::, U2>(None);
-        gen_proof::, U2>(None);
-    }
-
-    #[test]
-    fn gen_proof_sha256_binary_halo() {
-        gen_proof::, U2>(None);
-        gen_proof::, U2>(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_poseidon_quad() {
-        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_sha256_quad() {
-        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_blake2s_quad() {
-        gen_proof::(None);
-    }
-
-    #[test]
-    fn gen_proof_poseidon_quad_halo() {
-        gen_proof::, U4>(None);
-        gen_proof::, U4>(None);
-    }
-
-    #[test]
-    fn gen_proof_sha256_quad_halo() {
-        gen_proof::, U4>(None);
-        gen_proof::, U4>(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 
     #[test]
     fn gen_proof_poseidon_oct() {
-        gen_proof::(None);
-    }
-
-    #[test]
-    fn gen_proof_poseidon_oct_halo() {
-        gen_proof::, U8>(None);
-        gen_proof::, U8>(None);
+        gen_proof::(None);
+        gen_proof::(None);
+        gen_proof::(None);
     }
 }
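In the test module above, the turbofish arguments (for example `graph_bucket::();` and `gen_proof::(None);`) and the helpers' generic parameter lists were stripped during extraction. A sketch of the presumed helper shapes; the exact bounds, the `PoseidonArity` arity parameter, and the illustrative `graph_bucket` body are assumptions rather than text from the patch:

use filecoin_hashers::{Hasher, PoseidonArity};
use merkletree::store::StoreConfig;
use storage_proofs_core::{
    api_version::ApiVersion,
    drgraph::{BucketGraph, Graph, BASE_DEGREE},
};

// Presumed: generic over the hasher only; exercises parent generation for a few sizes.
fn graph_bucket<H: Hasher>() {
    for nodes in [16usize, 64, 256] {
        let graph =
            BucketGraph::<H>::new(nodes, BASE_DEGREE, 0, [32; 32], ApiVersion::V1_1_0).unwrap();
        let mut parents = vec![0u32; BASE_DEGREE];
        graph.parents(2, &mut parents).unwrap();
    }
}

// Presumed: generic over the hasher and the tree's base arity.
fn gen_proof<H: Hasher + 'static, U: PoseidonArity>(_config: Option<StoreConfig>) {
    // Build a tree of arity `U` over `H`, generate a merkle proof for one leaf, and
    // verify it, as the test module above does.
    todo!()
}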
diff --git a/storage-proofs-core/src/merkle/proof.rs b/storage-proofs-core/src/merkle/proof.rs
index 0ebb509b4..9476b08ab 100644
--- a/storage-proofs-core/src/merkle/proof.rs
+++ b/storage-proofs-core/src/merkle/proof.rs
@@ -700,10 +700,13 @@ mod tests {
     use super::*;
 
     use filecoin_hashers::{
-        blake2s::Blake2sHasher, halo, poseidon::PoseidonHasher, sha256::Sha256Hasher, Domain,
+        generic::{
+            groth::{Blake2sHasher, PoseidonHasher, Sha256Hasher},
+            halo,
+        },
+        Domain,
     };
     use generic_array::typenum::{U2, U4, U8};
-    use pasta_curves::{Fp, Fq};
     use rand::thread_rng;
 
     use crate::merkle::{
@@ -905,221 +908,92 @@ mod tests {
 
     #[test]
     fn merklepath_poseidon_2_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U0,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U0,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U2, U0, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_poseidon_4_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U4,
-                U0,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U4,
-                U0,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U4, U0, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_poseidon_8_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U0,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U0,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U8, U0, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_poseidon_8_2_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U2,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U2,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U8, U2, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_poseidon_8_4_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U4,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U4,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U8, U4, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_poseidon_8_4_2_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U4,
-                U2,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::PoseidonHasher,
-                DiskStore< as Hasher>::Domain>,
-                U8,
-                U4,
-                U2,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U8, U4, U2>;
+        merklepath::>();
+        merklepath::>();
    }
 
     #[test]
     fn merklepath_sha256_2_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U0,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U0,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U2, U0, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_sha256_4_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U4,
-                U0,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U4,
-                U0,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U4, U0, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
     fn merklepath_sha256_2_4_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U4,
-                U0,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U4,
-                U0,
-            >,
-        >();
+        type Tree = MerkleTreeWrapper::Domain>, U2, U4, U0>;
+        merklepath::>();
+        merklepath::>();
     }
 
     #[test]
-    fn merklepath_sha256_top_2_4_2_halo() {
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U4,
-                U2,
-            >,
-        >();
-        merklepath::<
-            MerkleTreeWrapper<
-                halo::Sha256Hasher,
-                DiskStore< as Hasher>::Domain>,
-                U2,
-                U4,
-                U2,
-            >,
-        >();
+    fn merklepath_sha256_2_4_2_halo() {
+        type Tree = MerkleTreeWrapper::Domain>, U2, U4, U2>;
+        merklepath::>();
+        merklepath::>();
+    }
+
+    #[test]
+    fn merklepath_blake2s_2_halo() {
+        type Tree = MerkleTreeWrapper::Domain>, U2, U0, U0>;
+        merklepath::>();
+        merklepath::>();
+    }
+
+    #[test]
+    fn merklepath_blake2s_4_halo() {
+        type Tree = MerkleTreeWrapper::Domain>, U4, U0, U0>;
+        merklepath::>();
+        merklepath::>();
+    }
+
+    #[test]
+    fn merklepath_blake2s_8_4_2_halo() {
+        type Tree = MerkleTreeWrapper::Domain>, U8, U4, U2>;
+        merklepath::>();
+        merklepath::>();
     }
 }
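In the consolidated `merklepath_*_halo` tests, both the local `type Tree` alias and the `merklepath::<...>()` type arguments lost their parameters in extraction. The presumed intent, assuming the alias is generic over the pasta field and that the field types (written `Fp`/`Fq` here, e.g. via a re-export) remain in scope:

// Presumed reconstruction of one consolidated test; the field parameter, the
// `halo::PoseidonHasher<F>` path, and the `Fp`/`Fq` names are assumptions.
#[test]
fn merklepath_poseidon_2_halo() {
    type Tree<F> = MerkleTreeWrapper<
        halo::PoseidonHasher<F>,
        DiskStore<<halo::PoseidonHasher<F> as Hasher>::Domain>,
        U2,
        U0,
        U0,
    >;
    merklepath::<Tree<Fp>>();
    merklepath::<Tree<Fq>>();
}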
diff --git a/storage-proofs-core/tests/por_vanilla.rs b/storage-proofs-core/tests/por_vanilla.rs
index b887a1a00..ad092d76e 100644
--- a/storage-proofs-core/tests/por_vanilla.rs
+++ b/storage-proofs-core/tests/por_vanilla.rs
@@ -1,10 +1,13 @@
 use std::convert::Into;
 
 use filecoin_hashers::{
-    blake2s::Blake2sHasher, halo, poseidon::PoseidonHasher, sha256::Sha256Hasher, Domain, Hasher,
+    generic::{
+        groth::{Blake2sHasher, PoseidonHasher, Sha256Hasher},
+        halo,
+    },
+    Domain, Hasher,
 };
 use generic_array::typenum::{U0, U2, U4};
-use pasta_curves::{Fp, Fq};
 use rand::SeedableRng;
 use rand_xorshift::XorShiftRng;
 use storage_proofs_core::{
@@ -36,14 +39,14 @@ fn test_por_blake2s_base_2() {
 
 #[test]
 fn test_por_poseidon_base_2_halo() {
-    test_por::, U2>>();
-    test_por::, U2>>();
+    test_por::>();
+    test_por::>();
 }
 
 #[test]
 fn test_por_sha256_base_2_halo() {
-    test_por::, U2>>();
-    test_por::, U2>>();
+    test_por::>();
+    test_por::>();
 }
 
 #[test]
@@ -63,14 +66,14 @@ fn test_por_blake2s_base_4() {
 
 #[test]
 fn test_por_poseidon_base_4_halo() {
-    test_por::, U4>>();
-    test_por::, U4>>();
+    test_por::>();
+    test_por::>();
 }
 
 #[test]
 fn test_por_sha256_base_4_halo() {
-    test_por::, U4>>();
-    test_por::, U4>>();
+    test_por::>();
+    test_por::>();
 }
 
 fn test_por() {
@@ -128,14 +131,14 @@ fn test_por_validates_proof_poseidon_base_2() {
 
 #[test]
 fn test_por_validates_proof_sha256_base_2_halo() {
-    test_por_validates_proof::, U2>>();
-    test_por_validates_proof::, U2>>();
+    test_por_validates_proof::>();
+    test_por_validates_proof::>();
 }
 
 #[test]
 fn test_por_validates_proof_poseidon_base_2_halo() {
-    test_por_validates_proof::, U2>>();
-    test_por_validates_proof::, U2>>();
+    test_por_validates_proof::>();
+    test_por_validates_proof::>();
 }
 
 #[test]
@@ -155,14 +158,14 @@ fn test_por_validates_proof_poseidon_base_4() {
 
 #[test]
 fn test_por_validates_proof_sha256_base_4_halo() {
-    test_por_validates_proof::, U4>>();
-    test_por_validates_proof::, U4>>();
+    test_por_validates_proof::>();
+    test_por_validates_proof::>();
 }
 
 #[test]
 fn test_por_validates_proof_poseidon_base_4_halo() {
-    test_por_validates_proof::, U4>>();
-    test_por_validates_proof::, U4>>();
+    test_por_validates_proof::>();
+    test_por_validates_proof::>();
 }
 
 fn test_por_validates_proof() {
@@ -206,6 +209,7 @@ fn test_por_validates_proof() {
     // Invalidate the proof.
     let bad_proof = {
+        /*
         let mut bad_data = [0u8; 32];
         bad_data.copy_from_slice(good_proof.data.as_ref());
         bad_data[0] = bad_data[0].wrapping_add(1);
@@ -213,6 +217,13 @@ fn test_por_validates_proof() {
             data: bad_data.into(),
             proof: good_proof.proof,
         }
+        */
+        use ff::Field;
+        let mut proof = good_proof;
+        let mut bad_leaf: <::Domain as Domain>::Field = proof.data.into();
+        bad_leaf += <::Domain as Domain>::Field::one();
+        proof.data = bad_leaf.into();
+        proof
     };
 
     let verified =
@@ -238,14 +249,14 @@ fn test_por_validates_challenge_poseidon_base_2() {
 
 #[test]
 fn test_por_validates_challenge_sha256_base_2_halo() {
-    test_por_validates_challenge::, U2>>();
-    test_por_validates_challenge::, U2>>();
+    test_por_validates_challenge::>();
+    test_por_validates_challenge::>();
 }
 
 #[test]
 fn test_por_validates_challenge_poseidon_base_2_halo() {
-    test_por_validates_challenge::, U2>>();
-    test_por_validates_challenge::, U2>>();
+    test_por_validates_challenge::>();
+    test_por_validates_challenge::>();
 }
 
 #[test]
@@ -265,14 +276,14 @@ fn test_por_validates_challenge_poseidon_base_4() {
 
 #[test]
 fn test_por_validates_challenge_sha256_base_4_halo() {
-    test_por_validates_challenge::, U4>>();
-    test_por_validates_challenge::, U4>>();
+    test_por_validates_challenge::>();
+    test_por_validates_challenge::>();
 }
 
 #[test]
 fn test_por_validates_challenge_poseidon_base_4_halo() {
-    test_por_validates_challenge::, U4>>();
-    test_por_validates_challenge::, U4>>();
+    test_por_validates_challenge::>();
+    test_por_validates_challenge::>();
 }
 
 fn test_por_validates_challenge() {
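The rewritten invalidation block above perturbs the leaf in its field rather than flipping a raw byte, so the corrupted value is still a canonical field element. A hypothetical helper (not part of the patch) expressing the same pattern, assuming the domain type converts to and from its associated field as the generic domains in this change do:

use ff::Field;
use filecoin_hashers::Domain;

// Hypothetical: corrupt a leaf by adding one in the field, keeping the result canonical.
fn corrupt_leaf<D>(leaf: D) -> D
where
    D: Domain + Into<D::Field> + From<D::Field>,
    D::Field: ff::PrimeField,
{
    let mut repr: D::Field = leaf.into();
    repr += D::Field::one();
    D::from(repr)
}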