Merge branch 'main' of https://github.com/WizardOfMenlo/whir

yczhangsjtu committed Nov 27, 2024
2 parents 34ccf0f + b2c5996 commit 03b2bcf

Showing 30 changed files with 392 additions and 1,932 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -7,4 +7,5 @@ outputs/temp/
*.pdf
scripts/__pycache__/
.DS_Store
outputs/
outputs/
.idea
35 changes: 0 additions & 35 deletions Cargo.lock

Generated file; diff not rendered by default.

13 changes: 3 additions & 10 deletions Cargo.toml
@@ -7,7 +7,7 @@ edition = "2021"
default-run = "main"

[dependencies]
ark-std = {version = "0.5", features = ["std"]}
ark-std = { version = "0.5", features = ["std"] }
ark-ff = { version = "0.5", features = ["asm", "std"] }
ark-serialize = "0.5"
ark-crypto-primitives = { version = "0.5", features = ["merkle_tree"] }
@@ -23,7 +23,7 @@ clap = { version = "4.4.17", features = ["derive"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
nimue = { git = "https://github.com/arkworks-rs/nimue", features = ["ark"] }
nimue-pow = { git = "https://github.com/arkworks-rs/nimue"}
nimue-pow = { git = "https://github.com/arkworks-rs/nimue" }
lazy_static = "1.4"
rayon = { version = "1.10.0", optional = true }

@@ -41,11 +41,4 @@ parallel = [
]
rayon = ["dep:rayon"]

[patch.crates-io]
ark-std = { git = "https://github.com/arkworks-rs/std" }
ark-crypto-primitives = { git = "https://github.com/arkworks-rs/crypto-primitives" }
ark-test-curves = { git = "https://github.com/WizardOfMenlo/algebra", branch = "fft_extensions" }
ark-ff = { git = "https://github.com/WizardOfMenlo/algebra", branch = "fft_extensions" }
ark-poly = { git = "https://github.com/WizardOfMenlo/algebra", branch = "fft_extensions" }
ark-serialize = { git = "https://github.com/WizardOfMenlo/algebra", branch = "fft_extensions" }
ark-ec = { git = "https://github.com/WizardOfMenlo/algebra", branch = "fft_extensions" }

19 changes: 14 additions & 5 deletions src/bin/benchmark.rs
@@ -19,7 +19,7 @@ use whir::{
},
parameters::*,
poly_utils::coeffs::CoefficientList,
whir::Statement,
whir::{iopattern::WhirIOPattern, Statement},
};

use serde::Serialize;
@@ -228,6 +228,7 @@ fn run_whir<F, MerkleConfig>(
let mv_params = MultivariateParameters::<F>::new(num_variables);

let whir_params = WhirParameters::<MerkleConfig, PowStrategy> {
initial_statement: true,
security_level,
pow_bits,
folding_factor,
@@ -253,11 +254,15 @@ whir_ldt_verifier_hashes,
whir_ldt_verifier_hashes,
) = {
// Run LDT
use whir::whir_ldt::{
use whir::whir::{
committer::Committer, iopattern::WhirIOPattern, parameters::WhirConfig, prover::Prover,
verifier::Verifier, whir_proof_size,
};

let whir_params = WhirParameters::<MerkleConfig, PowStrategy> {
initial_statement: false,
..whir_params.clone()
};
let params =
WhirConfig::<F, MerkleConfig, PowStrategy>::new(mv_params, whir_params.clone());
if !params.check_pow_bits() {
@@ -280,7 +285,9 @@

let prover = Prover(params.clone());

let proof = prover.prove(&mut merlin, witness).unwrap();
let proof = prover
.prove(&mut merlin, Statement::default(), witness)
.unwrap();

let whir_ldt_prover_time = whir_ldt_prover_time.elapsed();
let whir_ldt_argument_size = whir_proof_size(merlin.transcript(), &proof);
@@ -293,7 +300,9 @@
let whir_ldt_verifier_time = Instant::now();
for _ in 0..reps {
let mut arthur = io.to_arthur(merlin.transcript());
verifier.verify(&mut arthur, &proof).unwrap();
verifier
.verify(&mut arthur, &Statement::default(), &proof)
.unwrap();
}

let whir_ldt_verifier_time = whir_ldt_verifier_time.elapsed();
@@ -339,7 +348,7 @@ fn run_whir<F, MerkleConfig>(
.collect();
let evaluations = points
.iter()
.map(|point| polynomial.evaluate_at_extension(&point))
.map(|point| polynomial.evaluate_at_extension(point))
.collect();
let statement = Statement {
points,
35 changes: 24 additions & 11 deletions src/bin/main.rs
@@ -15,6 +15,7 @@ use whir::{
},
parameters::*,
poly_utils::{coeffs::CoefficientList, MultilinearPoint},
whir::Statement,
};

use nimue_pow::blake3::Blake3PoW;
@@ -199,9 +200,9 @@ fn run_whir_as_ldt<F, MerkleConfig>(
MerkleConfig: Config<Leaf = [F]> + Clone,
MerkleConfig::InnerDigest: AsRef<[u8]> + From<[u8; 32]>,
{
use whir::whir_ldt::{
use whir::whir::{
committer::Committer, iopattern::WhirIOPattern, parameters::WhirConfig, prover::Prover,
verifier::Verifier, whir_proof_size,
verifier::Verifier,
};

// Runs as a LDT
@@ -223,6 +224,7 @@
let mv_params = MultivariateParameters::<F>::new(num_variables);

let whir_params = WhirParameters::<MerkleConfig, PowStrategy> {
initial_statement: false,
security_level,
pow_bits,
folding_factor,
@@ -234,12 +236,11 @@
starting_log_inv_rate: starting_rate,
};

let params = WhirConfig::<F, MerkleConfig, PowStrategy>::new(mv_params, whir_params);
let params = WhirConfig::<F, MerkleConfig, PowStrategy>::new(mv_params, whir_params.clone());

let io = IOPattern::<DefaultHash>::new("🌪️")
.commit_statement(&params)
.add_whir_proof(&params)
.clone();
.add_whir_proof(&params);

let mut merlin = io.to_merlin();

@@ -265,19 +266,30 @@

let prover = Prover(params.clone());

let proof = prover.prove(&mut merlin, witness).unwrap();
let proof = prover
.prove(&mut merlin, Statement::default(), witness)
.unwrap();

dbg!(whir_prover_time.elapsed());
dbg!(whir_proof_size(merlin.transcript(), &proof));

// Serialize proof
let transcript = merlin.transcript().to_vec();
let mut proof_bytes = vec![];
proof.serialize_compressed(&mut proof_bytes).unwrap();

let proof_size = transcript.len() + proof_bytes.len();
dbg!(proof_size);

// Just not to count that initial inversion (which could be precomputed)
let verifier = Verifier::new(params);
let verifier = Verifier::new(params.clone());

HashCounter::reset();
let whir_verifier_time = Instant::now();
for _ in 0..reps {
let mut arthur = io.to_arthur(merlin.transcript());
verifier.verify(&mut arthur, &proof).unwrap();
let mut arthur = io.to_arthur(&transcript);
verifier
.verify(&mut arthur, &Statement::default(), &proof)
.unwrap();
}
dbg!(whir_verifier_time.elapsed() / reps as u32);
dbg!(HashCounter::get() as f64 / reps as f64);
@@ -317,6 +329,7 @@ fn run_whir_pcs<F, MerkleConfig>(
let mv_params = MultivariateParameters::<F>::new(num_variables);

let whir_params = WhirParameters::<MerkleConfig, PowStrategy> {
initial_statement: true,
security_level,
pow_bits,
folding_factor,
@@ -356,7 +369,7 @@
.collect();
let evaluations = points
.iter()
.map(|point| polynomial.evaluate_at_extension(&point))
.map(|point| polynomial.evaluate_at_extension(point))
.collect();

let statement = Statement {
6 changes: 3 additions & 3 deletions src/crypto/merkle_tree/blake3.rs
@@ -122,8 +122,8 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();
<LeafH<F> as CRHScheme>::setup(rng).unwrap();
<CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();

(leaf_hash_params, two_to_one_params)
((), ())
}
10 changes: 5 additions & 5 deletions src/crypto/merkle_tree/keccak.rs
@@ -83,8 +83,8 @@ impl TwoToOneCRHScheme for KeccakTwoToOneCRHScheme {
right_input: T,
) -> Result<Self::Output, ark_crypto_primitives::Error> {
let mut h = sha3::Keccak256::new();
h.update(&left_input.borrow().0);
h.update(&right_input.borrow().0);
h.update(left_input.borrow().0);
h.update(right_input.borrow().0);
let mut output = [0; 32];
output.copy_from_slice(&h.finalize()[..]);
HashCounter::add();
@@ -123,8 +123,8 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();
<LeafH<F> as CRHScheme>::setup(rng).unwrap();
<CompressH as TwoToOneCRHScheme>::setup(rng).unwrap();

(leaf_hash_params, two_to_one_params)
((), ())
}
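Note on the compression hunk above: feeding the left and right children through two update calls hashes their concatenation. A minimal, hedged sketch of the same two-to-one pattern using the sha3 crate directly (standalone, outside the CRHScheme trait; the fixed 32-byte child inputs are an assumption of this sketch):

    use sha3::{Digest, Keccak256};

    // Keccak-256 over left || right, copied out as a 32-byte node digest,
    // mirroring the compress body shown in the diff above.
    fn compress(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
        let mut h = Keccak256::new();
        h.update(left);
        h.update(right);
        let mut output = [0u8; 32];
        output.copy_from_slice(&h.finalize()[..]);
        output
    }

    fn main() {
        let parent = compress(&[0u8; 32], &[1u8; 32]);
        println!("{:02x?}", &parent[..8]);
    }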
12 changes: 7 additions & 5 deletions src/crypto/merkle_tree/mock.rs
@@ -58,10 +58,12 @@ pub fn default_config<F: CanonicalSerialize + Send>(
<LeafH<F> as CRHScheme>::Parameters,
<CompressH as TwoToOneCRHScheme>::Parameters,
) {
let leaf_hash_params = <LeafH<F> as CRHScheme>::setup(rng).unwrap();
let two_to_one_params = <CompressH as TwoToOneCRHScheme>::setup(rng)
.unwrap()
.clone();
<LeafH<F> as CRHScheme>::setup(rng).unwrap();
{
<CompressH as TwoToOneCRHScheme>::setup(rng)
.unwrap();

};

(leaf_hash_params, two_to_one_params)
((), ())
}
1 change: 0 additions & 1 deletion src/lib.rs
@@ -8,4 +8,3 @@ pub mod poly_utils; // Utils for polynomials
pub mod sumcheck; // Sumcheck specialised
pub mod utils; // Utils in general
pub mod whir; // The real prover
pub mod whir_ldt; // Whir as a LDT // Shared parameters
12 changes: 6 additions & 6 deletions src/ntt/transpose.rs
@@ -54,9 +54,9 @@ fn transpose_copy<F: Sized + Copy + Send>(src: MatrixMut<F>, dst: MatrixMut<F>)

/// Sets `dst` to the transpose of `src`. This will panic if the sizes of `src` and `dst` are not compatible.
#[cfg(feature = "parallel")]
fn transpose_copy_parallel<'a, 'b, F: Sized + Copy + Send>(
src: MatrixMut<'a, F>,
mut dst: MatrixMut<'b, F>,
fn transpose_copy_parallel<F: Sized + Copy + Send>(
src: MatrixMut<'_, F>,
mut dst: MatrixMut<'_, F>,
) {
assert_eq!(src.rows(), dst.cols());
assert_eq!(src.cols(), dst.rows());
@@ -85,9 +85,9 @@ fn transpose_copy_parallel<'a, 'b, F: Sized + Copy + Send>(

/// Sets `dst` to the transpose of `src`. This will panic if the sizes of `src` and `dst` are not compatible.
/// This is the non-parallel version
fn transpose_copy_not_parallel<'a, 'b, F: Sized + Copy>(
src: MatrixMut<'a, F>,
mut dst: MatrixMut<'b, F>,
fn transpose_copy_not_parallel<F: Sized + Copy>(
src: MatrixMut<'_, F>,
mut dst: MatrixMut<'_, F>,
) {
assert_eq!(src.rows(), dst.cols());
assert_eq!(src.cols(), dst.rows());
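Note on the two transpose hunks above: the parallel and non-parallel versions share the same contract (panic on incompatible shapes), and the change here only elides the explicit lifetimes. As a reminder of that contract, a minimal std-only sketch of a transpose-copy over flat row-major slices (names hypothetical, unrelated to MatrixMut):

    // Copy the transpose of a rows x cols row-major matrix into dst,
    // panicking, like the functions above, when the shapes are incompatible.
    fn transpose_copy_flat<F: Copy>(src: &[F], rows: usize, cols: usize, dst: &mut [F]) {
        assert_eq!(src.len(), rows * cols);
        assert_eq!(dst.len(), rows * cols);
        for r in 0..rows {
            for c in 0..cols {
                // dst is cols x rows, so element (c, r) lives at index c * rows + r.
                dst[c * rows + r] = src[r * cols + c];
            }
        }
    }

    fn main() {
        let src = [1, 2, 3, 4, 5, 6]; // 2 x 3
        let mut dst = [0; 6]; // becomes the 3 x 2 transpose
        transpose_copy_flat(&src, 2, 3, &mut dst);
        assert_eq!(dst, [1, 4, 2, 5, 3, 6]);
    }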
1 change: 0 additions & 1 deletion src/ntt/utils.rs
@@ -142,7 +142,6 @@ mod tests {
);
let should_not_work = std::panic::catch_unwind(|| {
as_chunks_exact_mut::<_, 2>(&mut [1, 2, 3]);
return;
});
assert!(should_not_work.is_err())
}
1 change: 1 addition & 0 deletions src/parameters.rs
@@ -101,6 +101,7 @@ pub struct WhirParameters<MerkleConfig, PowStrategy>
where
MerkleConfig: Config,
{
pub initial_statement: bool,
pub starting_log_inv_rate: usize,
pub folding_factor: usize,
pub soundness_type: SoundnessType,
6 changes: 3 additions & 3 deletions src/poly_utils/fold.rs
@@ -179,7 +179,7 @@ mod tests {

// Evaluate the polynomial on the domain
let domain_evaluations: Vec<_> = (0..domain_size)
.map(|w| root_of_unity.pow([w as u64]))
.map(|w| root_of_unity.pow([w]))
.map(|point| {
poly.evaluate(&MultilinearPoint::expand_from_univariate(
point,
@@ -199,10 +199,10 @@
);

let num = domain_size / folding_factor_exp;
let coset_gen_inv = root_of_unity_inv.pow(&[num]);
let coset_gen_inv = root_of_unity_inv.pow([num]);

for index in 0..num {
let offset_inv = root_of_unity_inv.pow(&[index]);
let offset_inv = root_of_unity_inv.pow([index]);
let span =
(index * folding_factor_exp) as usize..((index + 1) * folding_factor_exp) as usize;

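Note on the coset indexing in the folding test above (coset_gen_inv = ω^{-(N/2^k)}, offset_inv = ω^{-i}): a domain of size N generated by a root of unity ω splits into N / 2^k cosets of size 2^k. A self-contained, std-only sketch over a toy prime field — p = 17 and ω = 3 are illustrative constants, not taken from the crate, and inverses are omitted for brevity:

    // Modular exponentiation over a toy prime field.
    fn pow_mod(mut base: u64, mut exp: u64, p: u64) -> u64 {
        let mut acc = 1;
        while exp > 0 {
            if exp & 1 == 1 {
                acc = acc * base % p;
            }
            base = base * base % p;
            exp >>= 1;
        }
        acc
    }

    fn main() {
        let (p, omega) = (17u64, 3u64); // 3 generates the multiplicative subgroup of order 16
        let domain_size = 16u64;
        let folding_factor_exp = 4u64; // 2^folding_factor points folded together
        let num = domain_size / folding_factor_exp;

        // Coset `index` is { omega^index * (omega^num)^j : j in 0..folding_factor_exp },
        // matching the offset / coset-generator pair used in the test.
        let coset_gen = pow_mod(omega, num, p); // has order folding_factor_exp
        for index in 0..num {
            let offset = pow_mod(omega, index, p);
            let coset: Vec<u64> = (0..folding_factor_exp)
                .map(|j| offset * pow_mod(coset_gen, j, p) % p)
                .collect();
            println!("coset {index}: {coset:?}");
        }
    }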
3 changes: 2 additions & 1 deletion src/poly_utils/sequential_lag_poly.rs
@@ -6,6 +6,7 @@ use super::{hypercube::BinaryHypercubePoint, MultilinearPoint};

/// There is an alternative (possibly more efficient) implementation that iterates over the x in Gray code ordering.
///
/// LagrangePolynomialIterator for a given multilinear n-dimensional `point` iterates over pairs (x, y)
/// where x ranges over all possible {0,1}^n
/// and y equals the product y_1 * ... * y_n where
Expand Down Expand Up @@ -60,7 +61,7 @@ impl<F: Field> Iterator for LagrangePolynomialIterator<F> {
// Iterator implementation for the struct
fn next(&mut self) -> Option<Self::Item> {
// a) Check if this is the first iteration
if self.last_position == None {
if self.last_position.is_none() {
// Initialize last position
self.last_position = Some(0);
// Return the top of the stack
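Note on the doc comment in the hunk above: the iterator walks pairs (x, y) with x ranging over {0,1}^n and y the product of the per-coordinate Lagrange factors of `point`. A minimal std-only sketch of that basis (plain f64 arithmetic instead of the crate's Field trait; the bit ordering chosen here is an assumption of the sketch, not necessarily the iterator's):

    // Enumerate the multilinear Lagrange weights eq(x, point) for all x in {0,1}^n:
    // the factor for coordinate i is point[i] when x_i = 1 and 1 - point[i] when x_i = 0.
    fn lagrange_basis(point: &[f64]) -> Vec<(usize, f64)> {
        let n = point.len();
        (0..1usize << n)
            .map(|x| {
                let y = (0..n).fold(1.0, |acc, i| {
                    let bit = (x >> (n - 1 - i)) & 1; // coordinate i, most significant first
                    acc * if bit == 1 { point[i] } else { 1.0 - point[i] }
                });
                (x, y)
            })
            .collect()
    }

    fn main() {
        let basis = lagrange_basis(&[0.25, 0.75]);
        // The 2^n weights form a partition of unity.
        let total: f64 = basis.iter().map(|(_, y)| y).sum();
        println!("{basis:?} sum = {total}");
    }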
2 changes: 1 addition & 1 deletion src/poly_utils/streaming_evaluation_helper.rs
@@ -37,7 +37,7 @@ impl<F: Field> Iterator for TermPolynomialIterator<F> {
// Iterator implementation for the struct
fn next(&mut self) -> Option<Self::Item> {
// a) Check if this is the first iteration
if self.last_position == None {
if self.last_position.is_none() {
// Initialize last position
self.last_position = Some(0);
// Return the top of the stack