diff --git a/benches/Cargo.toml b/benches/Cargo.toml index de60e03..37654f6 100644 --- a/benches/Cargo.toml +++ b/benches/Cargo.toml @@ -21,8 +21,7 @@ ark-bw6-761 = "0.3" ark-relations = "0.3" ark-r1cs-std = "0.3" -digest = "0.9" -blake2 = "0.9" +merlin = "3" csv = "1" ark-inner-products = { path = "../inner_products" } diff --git a/benches/benches/gipa.rs b/benches/benches/gipa.rs index 0851de9..8bf194a 100644 --- a/benches/benches/gipa.rs +++ b/benches/benches/gipa.rs @@ -13,14 +13,12 @@ use ark_inner_products::{ use ark_ip_proofs::gipa::GIPA; use ark_std::rand::{rngs::StdRng, Rng, SeedableRng}; -use blake2::Blake2b; -use digest::Digest; +use merlin::Transcript; use std::{ops::MulAssign, time::Instant}; -fn bench_gipa(rng: &mut R, len: usize) +fn bench_gipa(rng: &mut R, len: usize) where - D: Digest, IP: InnerProduct< LeftMessage = LMC::Message, RightMessage = RMC::Message, @@ -45,13 +43,15 @@ where r.push(::rand(rng)); } - let (ck_l, ck_r, ck_t) = GIPA::::setup(rng, len).unwrap(); + let (ck_l, ck_r, ck_t) = GIPA::::setup(rng, len).unwrap(); let com_l = LMC::commit(&ck_l, &l).unwrap(); let com_r = RMC::commit(&ck_r, &r).unwrap(); let t = vec![IP::inner_product(&l, &r).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"GIPA-bench"); let mut start = Instant::now(); - let proof = GIPA::::prove( + let proof = GIPA::::prove( + &mut proof_transcript, (&l, &r, &t[0]), (&ck_l, &ck_r, &ck_t), (&com_l, &com_r, &com_t), @@ -59,9 +59,16 @@ where .unwrap(); let mut bench = start.elapsed().as_millis(); println!("\t proving time: {} ms", bench); + + let mut verif_transcript = Transcript::new(b"GIPA-bench"); start = Instant::now(); - GIPA::::verify((&ck_l, &ck_r, &ck_t), (&com_l, &com_r, &com_t), &proof) - .unwrap(); + GIPA::::verify( + &mut verif_transcript, + (&ck_l, &ck_r, &ck_t), + (&com_l, &com_r, &com_t), + &proof, + ) + .unwrap(); bench = start.elapsed().as_millis(); println!("\t verification time: {} ms", bench); } @@ -81,7 +88,6 @@ fn main() { GC1, GC2, IdentityCommitment, ::Fr>, - Blake2b, StdRng, >(&mut rng, LEN); @@ -94,7 +100,6 @@ fn main() { ::G1Projective, ::Fr, >, - Blake2b, StdRng, >(&mut rng, LEN); } diff --git a/benches/benches/groth16_aggregation/bench.rs b/benches/benches/groth16_aggregation/bench.rs index fc1fec4..e48d8ea 100644 --- a/benches/benches/groth16_aggregation/bench.rs +++ b/benches/benches/groth16_aggregation/bench.rs @@ -23,9 +23,8 @@ use ark_ip_proofs::applications::groth16_aggregation::{ aggregate_proofs, setup_inner_product, verify_aggregate_proof, }; -use ark_std::rand::{rngs::StdRng, SeedableRng}; -use blake2::Blake2b; use csv::Writer; +use merlin::Transcript; use std::{io::stdout, time::Instant}; @@ -248,7 +247,7 @@ fn main() { csv_writer.flush().unwrap(); let mut start; let mut time; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); // Compute hashes let mut hash_inputs = vec![]; @@ -344,7 +343,7 @@ fn main() { // Benchmark aggregation via IPA { start = Instant::now(); - let srs = setup_inner_product::(&mut rng, num_proofs).unwrap(); + let srs = setup_inner_product::(&mut rng, num_proofs).unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -359,9 +358,10 @@ fn main() { let v_srs = srs.get_verifier_key(); for i in 1..=num_trials { + let mut proof_transcript = Transcript::new(b"Groth16-agg-bench"); start = Instant::now(); let aggregate_proof = - aggregate_proofs::(&srs, &proofs).unwrap(); + aggregate_proofs::(&mut proof_transcript, &srs, 
&proofs).unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -374,8 +374,10 @@ fn main() { .unwrap(); csv_writer.flush().unwrap(); + let mut verif_transcript = Transcript::new(b"Groth16-agg-bench"); start = Instant::now(); let result = verify_aggregate_proof( + &mut verif_transcript, &v_srs, &hash_circuit_parameters.0.vk, &hash_outputs @@ -533,7 +535,7 @@ pub fn batch_verify_proof( public_inputs: &[Vec], proofs: &[Proof], ) -> Result { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let mut r_powers = Vec::with_capacity(proofs.len()); for _ in 0..proofs.len() { let challenge: E::Fr = u128::rand(&mut rng).into(); diff --git a/benches/benches/inner_products.rs b/benches/benches/inner_products.rs index 54838ed..1be5a80 100644 --- a/benches/benches/inner_products.rs +++ b/benches/benches/inner_products.rs @@ -3,7 +3,7 @@ use ark_ec::PairingEngine; use ark_ff::UniformRand; use ark_inner_products::{InnerProduct, MultiexponentiationInnerProduct, PairingInnerProduct}; -use ark_std::rand::{rngs::StdRng, Rng, SeedableRng}; +use ark_std::rand::{rngs::StdRng, Rng}; use std::time::Instant; @@ -26,7 +26,8 @@ where fn main() { const LEN: usize = 16; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + println!("Benchmarking inner products with vector length: {}", LEN); println!("1) Pairing inner product..."); diff --git a/benches/benches/poly_commit.rs b/benches/benches/poly_commit.rs index 90facbd..e9789f4 100644 --- a/benches/benches/poly_commit.rs +++ b/benches/benches/poly_commit.rs @@ -11,8 +11,8 @@ use ark_poly::polynomial::{ use ark_std::rand::{rngs::StdRng, SeedableRng}; use csv::Writer; +use merlin::Transcript; -use blake2::Blake2b; use std::{ io::stdout, time::{Duration, Instant}, @@ -51,7 +51,8 @@ fn main() { for degree in (0..num_data_points).map(|i| 4_usize.pow((i + 1) as u32) - 1) { // Benchmark KZG { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + start = Instant::now(); let (g_alpha_powers, v_srs) = KZG::::setup(&mut rng, degree).unwrap(); time = start.elapsed().as_millis(); @@ -124,7 +125,7 @@ fn main() { { let mut rng = StdRng::seed_from_u64(0u64); start = Instant::now(); - let srs = IPA::::setup(&mut rng, degree).unwrap(); + let srs = IPA::::setup(&mut rng, degree).unwrap(); let v_srs = srs.0.get_verifier_key(); time = start.elapsed().as_millis(); csv_writer @@ -144,8 +145,7 @@ fn main() { // Commit start = Instant::now(); - let (com, prover_aux) = - IPA::::commit(&srs, &polynomial).unwrap(); + let (com, prover_aux) = IPA::::commit(&srs, &polynomial).unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -158,9 +158,16 @@ fn main() { .unwrap(); // Open + let mut proof_transcript = Transcript::new(b"IPA-bench"); start = Instant::now(); - let proof = IPA::::open(&srs, &polynomial, &prover_aux, &point) - .unwrap(); + let proof = IPA::::open( + &mut proof_transcript, + &srs, + &polynomial, + &prover_aux, + &point, + ) + .unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -174,10 +181,17 @@ fn main() { // Verify std::thread::sleep(Duration::from_millis(5000)); + let verif_transcript = Transcript::new(b"IPA-bench"); start = Instant::now(); for _ in 0..50 { - let is_valid = IPA::::verify( - &v_srs, degree, &com, &point, &eval, &proof, + let is_valid = IPA::::verify( + &mut verif_transcript.clone(), + &v_srs, + degree, + &com, + &point, + &eval, + &proof, ) .unwrap(); assert!(is_valid); @@ -199,7 +213,7 @@ fn main() { { 
let mut rng = StdRng::seed_from_u64(0u64); start = Instant::now(); - let ck = TransparentIPA::::setup(&mut rng, degree).unwrap(); + let ck = TransparentIPA::::setup(&mut rng, degree).unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -219,7 +233,7 @@ fn main() { // Commit start = Instant::now(); let (com, prover_aux) = - TransparentIPA::::commit(&ck, &polynomial).unwrap(); + TransparentIPA::::commit(&ck, &polynomial).unwrap(); time = start.elapsed().as_millis(); csv_writer .write_record(&[ @@ -232,8 +246,10 @@ fn main() { .unwrap(); // Open + let mut proof_transcript = Transcript::new(b"IPA_transparent-bench"); start = Instant::now(); - let proof = TransparentIPA::::open( + let proof = TransparentIPA::::open( + &mut proof_transcript, &ck, &polynomial, &prover_aux, @@ -252,11 +268,17 @@ fn main() { .unwrap(); // Verify + let verif_transcript = Transcript::new(b"IPA_transparent-bench"); std::thread::sleep(Duration::from_millis(5000)); start = Instant::now(); for _ in 0..50 { - let is_valid = TransparentIPA::::verify( - &ck, &com, &point, &eval, &proof, + let is_valid = TransparentIPA::::verify( + &mut verif_transcript.clone(), + &ck, + &com, + &point, + &eval, + &proof, ) .unwrap(); assert!(is_valid); diff --git a/benches/benches/tipa.rs b/benches/benches/tipa.rs index 0ae6709..85a6ff7 100644 --- a/benches/benches/tipa.rs +++ b/benches/benches/tipa.rs @@ -14,16 +14,14 @@ use ark_ip_proofs::tipa::{ structured_scalar_message::{structured_scalar_power, TIPAWithSSM}, TIPACompatibleSetup, TIPA, }; +use merlin::Transcript; -use ark_std::rand::{rngs::StdRng, Rng, SeedableRng}; -use blake2::Blake2b; -use digest::Digest; +use ark_std::rand::{rngs::StdRng, Rng}; use std::{ops::MulAssign, time::Instant}; -fn bench_tipa(rng: &mut R, len: usize) +fn bench_tipa(rng: &mut R, len: usize) where - D: Digest, P: PairingEngine, IP: InnerProduct< LeftMessage = LMC::Message, @@ -52,28 +50,42 @@ where r.push(::rand(rng)); } - let (srs, ck_t) = TIPA::::setup(rng, len).unwrap(); + let (srs, ck_t) = TIPA::::setup(rng, len).unwrap(); let (ck_l, ck_r) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); let com_l = LMC::commit(&ck_l, &l).unwrap(); let com_r = RMC::commit(&ck_r, &r).unwrap(); let t = vec![IP::inner_product(&l, &r).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + + let mut proof_transcript = Transcript::new(b"TIPA-bench"); let mut start = Instant::now(); - let proof = - TIPA::::prove(&srs, (&l, &r), (&ck_l, &ck_r, &ck_t)).unwrap(); + let proof = TIPA::::prove( + &mut proof_transcript, + &srs, + (&l, &r), + (&ck_l, &ck_r, &ck_t), + ) + .unwrap(); let mut bench = start.elapsed().as_millis(); println!("\t proving time: {} ms", bench); + + let mut verif_transcript = Transcript::new(b"TIPA-bench"); start = Instant::now(); - TIPA::::verify(&v_srs, &ck_t, (&com_l, &com_r, &com_t), &proof) - .unwrap(); + TIPA::::verify( + &mut verif_transcript, + &v_srs, + &ck_t, + (&com_l, &com_r, &com_t), + &proof, + ) + .unwrap(); bench = start.elapsed().as_millis(); println!("\t verification time: {} ms", bench); } -fn bench_tipa_srs_shift(rng: &mut R, len: usize) +fn bench_tipa_srs_shift(rng: &mut R, len: usize) where - D: Digest, P: PairingEngine, IP: InnerProduct< LeftMessage = LMC::Message, @@ -102,7 +114,7 @@ where r.push(::rand(rng)); } - let (srs, ck_t) = TIPA::::setup(rng, len).unwrap(); + let (srs, ck_t) = TIPA::::setup(rng, len).unwrap(); let (ck_l, ck_r) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); let com_l = LMC::commit(&ck_l, 
&l).unwrap(); @@ -122,8 +134,10 @@ where let t = vec![IP::inner_product(&l_a, &r).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA_SRS_shift-bench"); let mut start = Instant::now(); - let proof = TIPA::::prove_with_srs_shift( + let proof = TIPA::::prove_with_srs_shift( + &mut proof_transcript, &srs, (&l_a, &r), (&ck_l_a, &ck_r, &ck_t), @@ -132,8 +146,11 @@ where .unwrap(); let mut bench = start.elapsed().as_millis(); println!("\t proving time: {} ms", bench); + + let mut verif_transcript = Transcript::new(b"TIPA_SRS_shift-bench"); start = Instant::now(); - TIPA::::verify_with_srs_shift( + TIPA::::verify_with_srs_shift( + &mut verif_transcript, &v_srs, &ck_t, (&com_l, &com_r, &com_t), @@ -145,9 +162,8 @@ where println!("\t verification time: {} ms", bench); } -fn bench_tipa_ssm(rng: &mut R, len: usize) +fn bench_tipa_ssm(rng: &mut R, len: usize) where - D: Digest, P: PairingEngine, IP: InnerProduct, LMC: DoublyHomomorphicCommitment + TIPACompatibleSetup, @@ -168,14 +184,16 @@ where let scalar = ::rand(rng); let r = structured_scalar_power(len, &scalar); - let (srs, ck_t) = TIPAWithSSM::::setup(rng, len).unwrap(); + let (srs, ck_t) = TIPAWithSSM::::setup(rng, len).unwrap(); let (ck_l, _) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); let com_l = LMC::commit(&ck_l, &l).unwrap(); let t = vec![IP::inner_product(&l, &r).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA_SSM-bench"); let mut start = Instant::now(); - let proof = TIPAWithSSM::::prove_with_structured_scalar_message( + let proof = TIPAWithSSM::::prove_with_structured_scalar_message( + &mut proof_transcript, &srs, (&l, &r), (&ck_l, &ck_t), @@ -183,8 +201,11 @@ where .unwrap(); let mut bench = start.elapsed().as_millis(); println!("\t proving time: {} ms", bench); + + let mut verif_transcript = Transcript::new(b"TIPA_SSM-bench"); start = Instant::now(); - TIPAWithSSM::::verify_with_structured_scalar_message( + TIPAWithSSM::::verify_with_structured_scalar_message( + &mut verif_transcript, &v_srs, &ck_t, (&com_l, &com_t), @@ -201,7 +222,7 @@ fn main() { type GC1 = AFGHOCommitmentG1; type GC2 = AFGHOCommitmentG2; type SC1 = PedersenCommitment<::G1Projective>; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); println!("Benchmarking TIPA with vector length: {}", LEN); @@ -212,7 +233,6 @@ fn main() { GC2, IdentityCommitment, ::Fr>, Bls12_381, - Blake2b, StdRng, >(&mut rng, LEN); @@ -226,7 +246,6 @@ fn main() { ::Fr, >, Bls12_381, - Blake2b, StdRng, >(&mut rng, LEN); @@ -237,7 +256,6 @@ fn main() { GC2, IdentityCommitment, ::Fr>, Bls12_381, - Blake2b, StdRng, >(&mut rng, LEN); @@ -250,7 +268,6 @@ fn main() { ::Fr, >, Bls12_381, - Blake2b, StdRng, >(&mut rng, LEN); } diff --git a/benches/examples/groth16_aggregation.rs b/benches/examples/groth16_aggregation.rs index d6c0211..2f964aa 100644 --- a/benches/examples/groth16_aggregation.rs +++ b/benches/examples/groth16_aggregation.rs @@ -10,9 +10,7 @@ use ark_ff::UniformRand; use ark_groth16::Groth16; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}; - -use ark_std::rand::{rngs::StdRng, SeedableRng}; -use blake2::Blake2b; +use merlin::Transcript; #[derive(Clone)] struct TestCircuit { @@ -42,7 +40,7 @@ impl ConstraintSynthesizer for TestCircuit { fn main() { const NUM_PUBLIC_INPUTS: usize = 4; const 
NUM_PROOFS_TO_AGGREGATE: usize = 1024; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); // Generate parameters for Groth16 let test_circuit = TestCircuit { @@ -53,7 +51,7 @@ fn main() { let parameters = Groth16::::setup(test_circuit, &mut rng).unwrap(); // Generate parameters for inner product aggregation - let srs = setup_inner_product::<_, Blake2b, _>(&mut rng, NUM_PROOFS_TO_AGGREGATE).unwrap(); + let srs = setup_inner_product(&mut rng, NUM_PROOFS_TO_AGGREGATE).unwrap(); // Generate proofs println!("Generating {} Groth16 proofs...", NUM_PROOFS_TO_AGGREGATE); @@ -87,14 +85,18 @@ fn main() { let generation_time = start.elapsed().as_millis(); // Aggregate proofs using inner product proofs + let mut proof_transcript = Transcript::new(b"Groth16-agg-bench"); start = Instant::now(); println!("Aggregating {} Groth16 proofs...", NUM_PROOFS_TO_AGGREGATE); - let aggregate_proof = aggregate_proofs::(&srs, &proofs).unwrap(); + let aggregate_proof = + aggregate_proofs::(&mut proof_transcript, &srs, &proofs).unwrap(); let prover_time = start.elapsed().as_millis(); println!("Verifying aggregated proof..."); + let mut verif_transcript = Transcript::new(b"Groth16-agg-bench"); start = Instant::now(); let result = verify_aggregate_proof( + &mut verif_transcript, &srs.get_verifier_key(), ¶meters.0.vk, &statements, diff --git a/ip_proofs/Cargo.toml b/ip_proofs/Cargo.toml index a54ec4e..25a865c 100644 --- a/ip_proofs/Cargo.toml +++ b/ip_proofs/Cargo.toml @@ -20,10 +20,10 @@ ark-poly = "0.3" ark-serialize = { version = "0.3", features = [ "derive" ] } ark-std = "0.3" ark-groth16 = "0.3" -digest = "0.9" num-traits = "0.2" itertools = "0.10" rayon = { version = "1", optional = true } +merlin = "3" ark-inner-products = { path = "../inner_products" } ark-dh-commitments = { path = "../dh_commitments" } @@ -31,10 +31,8 @@ ark-dh-commitments = { path = "../dh_commitments" } [dev-dependencies] ark-bls12-381 = { version = "0.3", features = [ "curve" ] } ark-ed-on-bls12-381 = "0.3" - ark-relations = "0.3" ark-r1cs-std = "0.3" -blake2 = "0.9" [features] default = [ "parallel" ] diff --git a/ip_proofs/src/applications/groth16_aggregation.rs b/ip_proofs/src/applications/groth16_aggregation.rs index 67d26db..27c3ee2 100644 --- a/ip_proofs/src/applications/groth16_aggregation.rs +++ b/ip_proofs/src/applications/groth16_aggregation.rs @@ -1,17 +1,9 @@ -use ark_ec::{group::Group, AffineCurve, PairingEngine}; -use ark_ff::{to_bytes, Field, One}; -use ark_groth16::{Proof, VerifyingKey}; - -use std::ops::AddAssign; - -use ark_std::rand::Rng; -use digest::Digest; - use crate::{ tipa::{ structured_scalar_message::{structured_scalar_power, TIPAWithSSM, TIPAWithSSMProof}, TIPAProof, VerifierSRS, SRS, TIPA, }, + util::TranscriptProtocol, Error, }; use ark_dh_commitments::{ @@ -22,68 +14,76 @@ use ark_inner_products::{ ExtensionFieldElement, InnerProduct, MultiexponentiationInnerProduct, PairingInnerProduct, ScalarInnerProduct, }; +use ark_std::rand::Rng; + +use std::ops::AddAssign; + +use ark_ec::{group::Group, AffineCurve, PairingEngine}; +use ark_ff::{Field, One}; +use ark_groth16::{Proof, VerifyingKey}; +use merlin::Transcript; + +const DOMAIN_SEP: &[u8] = b"ip_proofs-Groth16_agg"; -type PairingInnerProductAB = TIPA< +type PairingInnerProductAB

-type PairingInnerProductAB<P, D> = TIPA<
+type PairingInnerProductAB<P> = TIPA<
     PairingInnerProduct<P>,
     AFGHOCommitmentG1<P>,
     AFGHOCommitmentG2<P>,
     IdentityCommitment<ExtensionFieldElement<P>, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

-type PairingInnerProductABProof<P, D> = TIPAProof<
+type PairingInnerProductABProof<P> = TIPAProof<
     PairingInnerProduct<P>,
     AFGHOCommitmentG1<P>,
     AFGHOCommitmentG2<P>,
     IdentityCommitment<ExtensionFieldElement<P>, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

-type MultiExpInnerProductC<P, D> = TIPAWithSSM<
+type MultiExpInnerProductC<P> = TIPAWithSSM<
     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

-type MultiExpInnerProductCProof<P, D> = TIPAWithSSMProof<
+type MultiExpInnerProductCProof<P> = TIPAWithSSMProof<
     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

-pub struct AggregateProof<P: PairingEngine, D: Digest> {
+pub struct AggregateProof<P: PairingEngine> {
     com_a: ExtensionFieldElement<P>,
     com_b: ExtensionFieldElement<P>,
     com_c: ExtensionFieldElement<P>,
     ip_ab: ExtensionFieldElement<P>,
     agg_c: P::G1Projective,
-    tipa_proof_ab: PairingInnerProductABProof<P, D>,
-    tipa_proof_c: MultiExpInnerProductCProof<P, D>,
+    tipa_proof_ab: PairingInnerProductABProof<P>,
+    tipa_proof_c: MultiExpInnerProductCProof<P>,
 }

-pub fn setup_inner_product<P, D, R: Rng>(rng: &mut R, size: usize) -> Result<SRS<P>, Error>
+pub fn setup_inner_product<P, R: Rng>(rng: &mut R, size: usize) -> Result<SRS<P>, Error>
 where
     P: PairingEngine,
-    D: Digest,
 {
-    let (srs, _) = PairingInnerProductAB::<P, D>::setup(rng, size)?;
+    let (srs, _) = PairingInnerProductAB::<P>::setup(rng, size)?;
     Ok(srs)
 }
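Editorial note: the aggregation code below relies on `transcript.append_serializable(...)` and `transcript.challenge_scalar(...)` from the new `util::TranscriptProtocol` extension trait (`mod util;` is added to `ip_proofs/src/lib.rs` later in this diff), but `ip_proofs/src/util.rs` itself is not part of this excerpt. The following is only a minimal sketch of what such a trait could look like on top of merlin 3 and the arkworks 0.3 traits; the method bodies, buffer size, and error type are assumptions, not the crate's actual implementation.

```rust
// Hypothetical sketch of `util::TranscriptProtocol`; not taken from this diff.
use ark_ff::PrimeField;
use ark_serialize::{CanonicalSerialize, SerializationError};
use merlin::Transcript;

pub trait TranscriptProtocol {
    /// Append any CanonicalSerialize-able value to the transcript under `label`.
    fn append_serializable<S: CanonicalSerialize>(
        &mut self,
        label: &'static [u8],
        item: &S,
    ) -> Result<(), SerializationError>;

    /// Derive a field-element challenge bound to the current transcript state.
    fn challenge_scalar<F: PrimeField>(&mut self, label: &'static [u8]) -> F;
}

impl TranscriptProtocol for Transcript {
    fn append_serializable<S: CanonicalSerialize>(
        &mut self,
        label: &'static [u8],
        item: &S,
    ) -> Result<(), SerializationError> {
        // Serialize with ark-serialize, then absorb the bytes into the transcript.
        let mut buf = Vec::new();
        item.serialize(&mut buf)?;
        self.append_message(label, &buf);
        Ok(())
    }

    fn challenge_scalar<F: PrimeField>(&mut self, label: &'static [u8]) -> F {
        // Squeeze 64 uniform bytes and reduce them modulo the field order.
        let mut bytes = [0u8; 64];
        self.challenge_bytes(label, &mut bytes);
        F::from_le_bytes_mod_order(&bytes)
    }
}
```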

-pub fn aggregate_proofs<P, D>(
+pub fn aggregate_proofs<P>(
+    transcript: &mut Transcript,
     ip_srs: &SRS<P>,
     proofs: &[Proof<P>],
-) -> Result<AggregateProof<P, D>, Error>
+) -> Result<AggregateProof<P>, Error>
 where
     P: PairingEngine,
-    D: Digest,
 {
+    // Domain-separate this protocol
+    transcript.append_message(b"dom-sep", DOMAIN_SEP);
+
     let a = proofs
         .iter()
         .map(|proof| proof.a.into_projective())
@@ -103,18 +103,13 @@ where
     let com_b = PairingInnerProduct::<P>::inner_product(&ck_2, &b)?;
     let com_c = PairingInnerProduct::<P>::inner_product(&c, &ck_1)?;

+    // Update the transcript
+    transcript.append_serializable(b"com_a", &com_a)?;
+    transcript.append_serializable(b"com_b", &com_b)?;
+    transcript.append_serializable(b"com_c", &com_c)?;
+
     // Random linear combination of proofs
-    let mut counter_nonce: usize = 0;
-    let r = loop {
-        let mut hash_input = Vec::new();
-        hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]);
-        //TODO: Should use CanonicalSerialize instead of ToBytes
-        hash_input.extend_from_slice(&to_bytes![com_a, com_b, com_c]?);
-        if let Some(r) = <P::Fr>::from_random_bytes(&D::digest(&hash_input)) {
-            break r;
-        };
-        counter_nonce += 1;
-    };
+    let r = transcript.challenge_scalar(b"r");
     let r_vec = structured_scalar_power(proofs.len(), &r);

     let a_r = a
@@ -136,14 +131,16 @@ where
         PairingInnerProduct::<P>::inner_product(&a_r, &ck_1_r)?
     );

-    let tipa_proof_ab = PairingInnerProductAB::<P, D>::prove_with_srs_shift(
+    let tipa_proof_ab = PairingInnerProductAB::<P>::prove_with_srs_shift(
+        transcript,
         &ip_srs,
         (&a_r, &b),
         (&ck_1_r, &ck_2, &HomomorphicPlaceholderValue),
         &r,
     )?;

-    let tipa_proof_c = MultiExpInnerProductC::<P, D>::prove_with_structured_scalar_message(
+    let tipa_proof_c = MultiExpInnerProductC::<P>::prove_with_structured_scalar_message(
+        transcript,
         &ip_srs,
         (&c, &r_vec),
         (&ck_1, &HomomorphicPlaceholderValue),
@@ -160,31 +157,30 @@ where
     })
 }
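For context: `structured_scalar_power` (imported above from `tipa::structured_scalar_message` and not defined in this excerpt) is what expands the single transcript challenge `r` into the weights of the random linear combination. A hypothetical stand-alone sketch of the assumed semantics, returning `[1, r, r^2, ..., r^(n-1)]`:

```rust
// Assumed behaviour of `structured_scalar_power`; illustrative only.
use ark_ff::Field;

fn structured_scalar_power<F: Field>(num: usize, s: &F) -> Vec<F> {
    let mut powers = vec![F::one()];
    for i in 1..num {
        powers.push(powers[i - 1] * s);
    }
    powers
}
```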

-pub fn verify_aggregate_proof<P, D>(
+pub fn verify_aggregate_proof<P>(
+    transcript: &mut Transcript,
     ip_verifier_srs: &VerifierSRS<P>,
     vk: &VerifyingKey<P>,
     public_inputs: &Vec<Vec<P::Fr>>, //TODO: Should use ToConstraintField instead
-    proof: &AggregateProof<P, D>,
+    proof: &AggregateProof<P>,
 ) -> Result<bool, Error>
 where
     P: PairingEngine,
-    D: Digest,
 {
+    // Domain-separate this protocol
+    transcript.append_message(b"dom-sep", DOMAIN_SEP);
+
+    // Update the transcript
+    transcript.append_serializable(b"com_a", &proof.com_a)?;
+    transcript.append_serializable(b"com_b", &proof.com_b)?;
+    transcript.append_serializable(b"com_c", &proof.com_c)?;
+
     // Random linear combination of proofs
-    let mut counter_nonce: usize = 0;
-    let r = loop {
-        let mut hash_input = Vec::new();
-        hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]);
-        //TODO: Should use CanonicalSerialize instead of ToBytes
-        hash_input.extend_from_slice(&to_bytes![proof.com_a, proof.com_b, proof.com_c]?);
-        if let Some(r) = <P::Fr>::from_random_bytes(&D::digest(&hash_input)) {
-            break r;
-        };
-        counter_nonce += 1;
-    };
+    let r = transcript.challenge_scalar(b"r");

     // Check TIPA proofs
-    let tipa_proof_ab_valid = PairingInnerProductAB::<P, D>::verify_with_srs_shift(
+    let tipa_proof_ab_valid = PairingInnerProductAB::<P>::verify_with_srs_shift(
+        transcript,
         ip_verifier_srs,
         &HomomorphicPlaceholderValue,
         (
@@ -195,7 +191,8 @@ where
         &proof.tipa_proof_ab,
         &r,
     )?;
-    let tipa_proof_c_valid = MultiExpInnerProductC::<P, D>::verify_with_structured_scalar_message(
+    let tipa_proof_c_valid = MultiExpInnerProductC::<P>::verify_with_structured_scalar_message(
+        transcript,
         ip_verifier_srs,
         &HomomorphicPlaceholderValue,
         (&proof.com_c, &IdentityOutput(vec![proof.agg_c.clone()])),
diff --git a/ip_proofs/src/applications/poly_commit/mod.rs b/ip_proofs/src/applications/poly_commit/mod.rs
index a0c3fc8..bbf716f 100644
--- a/ip_proofs/src/applications/poly_commit/mod.rs
+++ b/ip_proofs/src/applications/poly_commit/mod.rs
@@ -1,15 +1,3 @@
-use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve};
-use ark_ff::{Field, One, PrimeField, UniformRand, Zero};
-use ark_poly::polynomial::{
-    univariate::DensePolynomial as UnivariatePolynomial, Polynomial, UVPolynomial,
-};
-
-use ark_std::{end_timer, start_timer};
-use std::marker::PhantomData;
-
-use ark_std::rand::Rng;
-use digest::Digest;
-
 use crate::{
     tipa::{
         structured_generators_scalar_power,
@@ -25,22 +13,33 @@ use ark_dh_commitments::{
 };
 use ark_inner_products::{ExtensionFieldElement, MultiexponentiationInnerProduct};

+use std::marker::PhantomData;
+
+use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve};
+use ark_ff::{Field, One, PrimeField, UniformRand, Zero};
+use ark_poly::polynomial::{
+    univariate::DensePolynomial as UnivariatePolynomial, Polynomial, UVPolynomial,
+};
+use ark_std::{end_timer, rand::Rng, start_timer};
+use merlin::Transcript;
+
 pub mod transparent;

-type PolynomialEvaluationSecondTierIPA<P, D> = TIPAWithSSM<
+const UNIVARIATE_DOMAIN_SEP: &[u8] = b"ip_proofs-Univariate_KZG";
+const BIVARIATE_DOMAIN_SEP: &[u8] = b"ip_proofs-Bivariate_KZG";
+
+type PolynomialEvaluationSecondTierIPA<P> = TIPAWithSSM<

     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

-type PolynomialEvaluationSecondTierIPAProof<P, D> = TIPAWithSSMProof<
+type PolynomialEvaluationSecondTierIPAProof<P> = TIPAWithSSMProof<
     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
     P,
-    D,
 >;

 pub struct KZG<P: PairingEngine> {
@@ -142,18 +141,17 @@ impl<F: Field> BivariatePolynomial<F> {
     }
 }

-pub struct OpeningProof<P: PairingEngine, D: Digest> {
-    ip_proof: PolynomialEvaluationSecondTierIPAProof<P, D>,
+pub struct OpeningProof<P: PairingEngine> {
+    ip_proof: PolynomialEvaluationSecondTierIPAProof<P>,
     y_eval_comm: P::G1Projective,
     kzg_proof: P::G1Projective,
 }

-pub struct BivariatePolynomialCommitment<P: PairingEngine, D: Digest> {
+pub struct BivariatePolynomialCommitment<P: PairingEngine> {
     _pairing: PhantomData<P>,
-    _digest: PhantomData<D>,
 }

-impl<P: PairingEngine, D: Digest> BivariatePolynomialCommitment<P, D> {
+impl<P: PairingEngine> BivariatePolynomialCommitment<P>
{ pub fn setup( rng: &mut R, x_degree: usize, @@ -200,11 +198,15 @@ impl BivariatePolynomialCommitment { } pub fn open( + transcript: &mut Transcript, srs: &(SRS

, Vec), bivariate_polynomial: &BivariatePolynomial, y_polynomial_comms: &Vec, point: &(P::Fr, P::Fr), - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", BIVARIATE_DOMAIN_SEP); + let (x, y) = point; let (ip_srs, kzg_srs) = srs; let (ck_1, _) = ip_srs.get_commitment_keys(); @@ -247,7 +249,8 @@ impl BivariatePolynomialCommitment { let ipa_time = start_timer!(|| "Computing IPA proof"); let ip_proof = - PolynomialEvaluationSecondTierIPA::::prove_with_structured_scalar_message( + PolynomialEvaluationSecondTierIPA::

::prove_with_structured_scalar_message( + transcript, &ip_srs, (y_polynomial_comms, &powers_of_x), (&ck_1, &HomomorphicPlaceholderValue), @@ -269,15 +272,20 @@ impl BivariatePolynomialCommitment { } pub fn verify( + transcript: &mut Transcript, v_srs: &VerifierSRS

, com: &ExtensionFieldElement

, point: &(P::Fr, P::Fr), eval: &P::Fr, - proof: &OpeningProof, + proof: &OpeningProof

, ) -> Result { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", BIVARIATE_DOMAIN_SEP); + let (x, y) = point; let ip_proof_valid = - PolynomialEvaluationSecondTierIPA::::verify_with_structured_scalar_message( + PolynomialEvaluationSecondTierIPA::

::verify_with_structured_scalar_message( + transcript, v_srs, &HomomorphicPlaceholderValue, (com, &IdentityOutput(vec![proof.y_eval_comm.clone()])), @@ -290,12 +298,11 @@ impl BivariatePolynomialCommitment { } } -pub struct UnivariatePolynomialCommitment { +pub struct UnivariatePolynomialCommitment { _pairing: PhantomData

, - _digest: PhantomData, } -impl UnivariatePolynomialCommitment { +impl UnivariatePolynomialCommitment

{ fn bivariate_degrees(univariate_degree: usize) -> (usize, usize) { //(((univariate_degree + 1) as f64).sqrt().ceil() as usize).next_power_of_two() - 1; let sqrt = (((univariate_degree + 1) as f64).sqrt().ceil() as usize).next_power_of_two(); @@ -337,7 +344,7 @@ impl UnivariatePolynomialCommitment { pub fn setup(rng: &mut R, degree: usize) -> Result<(SRS

, Vec), Error> { let (x_degree, y_degree) = Self::bivariate_degrees(degree); - BivariatePolynomialCommitment::::setup(rng, x_degree, y_degree) + BivariatePolynomialCommitment::

::setup(rng, x_degree, y_degree) } pub fn commit( @@ -345,22 +352,27 @@ impl UnivariatePolynomialCommitment { polynomial: &UnivariatePolynomial, ) -> Result<(ExtensionFieldElement

, Vec), Error> { let bivariate_degrees = Self::parse_bivariate_degrees_from_srs(srs); - BivariatePolynomialCommitment::::commit( + BivariatePolynomialCommitment::

::commit( srs, &Self::bivariate_form(bivariate_degrees, polynomial), ) } pub fn open( + transcript: &mut Transcript, srs: &(SRS

, Vec), polynomial: &UnivariatePolynomial, y_polynomial_comms: &Vec, point: &P::Fr, - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", UNIVARIATE_DOMAIN_SEP); + let (x_degree, y_degree) = Self::parse_bivariate_degrees_from_srs(srs); let y = point.clone(); let x = point.pow(&vec![(y_degree + 1) as u64]); BivariatePolynomialCommitment::open( + transcript, srs, &Self::bivariate_form((x_degree, y_degree), polynomial), y_polynomial_comms, @@ -369,17 +381,21 @@ impl UnivariatePolynomialCommitment { } pub fn verify( + transcript: &mut Transcript, v_srs: &VerifierSRS

, max_degree: usize, com: &ExtensionFieldElement

, point: &P::Fr, eval: &P::Fr, - proof: &OpeningProof, + proof: &OpeningProof

, ) -> Result { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", UNIVARIATE_DOMAIN_SEP); + let (_, y_degree) = Self::bivariate_degrees(max_degree); let y = point.clone(); let x = y.pow(&vec![(y_degree + 1) as u64]); - BivariatePolynomialCommitment::verify(v_srs, com, &(x, y), eval, proof) + BivariatePolynomialCommitment::verify(transcript, v_srs, com, &(x, y), eval, proof) } } @@ -387,8 +403,6 @@ impl UnivariatePolynomialCommitment { mod tests { use super::*; use ark_bls12_381::Bls12_381; - use ark_std::rand::{rngs::StdRng, SeedableRng}; - use blake2::Blake2b; const BIVARIATE_X_DEGREE: usize = 7; const BIVARIATE_Y_DEGREE: usize = 7; @@ -396,12 +410,13 @@ mod tests { const UNIVARIATE_DEGREE: usize = 65535; //const UNIVARIATE_DEGREE: usize = 1048575; - type TestBivariatePolyCommitment = BivariatePolynomialCommitment; - type TestUnivariatePolyCommitment = UnivariatePolynomialCommitment; + type TestBivariatePolyCommitment = BivariatePolynomialCommitment; + type TestUnivariatePolyCommitment = UnivariatePolynomialCommitment; #[test] fn bivariate_poly_commit_test() { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + let srs = TestBivariatePolyCommitment::setup(&mut rng, BIVARIATE_X_DEGREE, BIVARIATE_Y_DEGREE) .unwrap(); @@ -425,7 +440,9 @@ mod tests { // Evaluate at challenge point let point = (UniformRand::rand(&mut rng), UniformRand::rand(&mut rng)); + let mut open_transcript = Transcript::new(b"Bivariate-test"); let eval_proof = TestBivariatePolyCommitment::open( + &mut open_transcript, &srs, &bivariate_polynomial, &y_polynomial_comms, @@ -435,16 +452,24 @@ mod tests { let eval = bivariate_polynomial.evaluate(&point); // Verify proof - assert!( - TestBivariatePolyCommitment::verify(&v_srs, &com, &point, &eval, &eval_proof).unwrap() - ); + let mut verif_transcript = Transcript::new(b"Bivariate-test"); + assert!(TestBivariatePolyCommitment::verify( + &mut verif_transcript, + &v_srs, + &com, + &point, + &eval, + &eval_proof + ) + .unwrap()); } // `cargo test univariate_poly_commit_test --release --features print-trace -- --ignored --nocapture` #[ignore] #[test] fn univariate_poly_commit_test() { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + let srs = TestUnivariatePolyCommitment::setup(&mut rng, UNIVARIATE_DEGREE).unwrap(); let v_srs = srs.0.get_verifier_key(); @@ -460,13 +485,21 @@ mod tests { // Evaluate at challenge point let point = UniformRand::rand(&mut rng); - let eval_proof = - TestUnivariatePolyCommitment::open(&srs, &polynomial, &y_polynomial_comms, &point) - .unwrap(); + let mut open_transcript = Transcript::new(b"Univariate-test"); + let eval_proof = TestUnivariatePolyCommitment::open( + &mut open_transcript, + &srs, + &polynomial, + &y_polynomial_comms, + &point, + ) + .unwrap(); let eval = polynomial.evaluate(&point); // Verify proof + let mut verif_transcript = Transcript::new(b"Univariate-test"); assert!(TestUnivariatePolyCommitment::verify( + &mut verif_transcript, &v_srs, UNIVARIATE_DEGREE, &com, diff --git a/ip_proofs/src/applications/poly_commit/transparent.rs b/ip_proofs/src/applications/poly_commit/transparent.rs index 71f11c6..2ed5857 100644 --- a/ip_proofs/src/applications/poly_commit/transparent.rs +++ b/ip_proofs/src/applications/poly_commit/transparent.rs @@ -1,15 +1,3 @@ -use ark_ec::PairingEngine; -use ark_ff::{Field, Zero}; -use ark_poly::polynomial::{ - univariate::DensePolynomial as UnivariatePolynomial, Polynomial, UVPolynomial, -}; - -use ark_std::{end_timer, 
start_timer}; -use std::marker::PhantomData; - -use ark_std::rand::Rng; -use digest::Digest; - use crate::{ gipa::GIPAProof, tipa::structured_scalar_message::{ @@ -17,44 +5,54 @@ use crate::{ }, Error, }; + +use std::marker::PhantomData; + use ark_dh_commitments::{ afgho16::AFGHOCommitmentG1, identity::{HomomorphicPlaceholderValue, IdentityCommitment, IdentityOutput}, pedersen::PedersenCommitment, DoublyHomomorphicCommitment, }; +use ark_ec::PairingEngine; +use ark_ff::{Field, Zero}; use ark_inner_products::{ ExtensionFieldElement, MultiexponentiationInnerProduct, ScalarInnerProduct, }; +use ark_poly::polynomial::{ + univariate::DensePolynomial as UnivariatePolynomial, Polynomial, UVPolynomial, +}; +use ark_std::rand::Rng; +use ark_std::{end_timer, start_timer}; +use merlin::Transcript; + +const UNIVARIATE_DOMAIN_SEP: &[u8] = b"ip_proofs-Univariate_KZG_transparent"; +const BIVARIATE_DOMAIN_SEP: &[u8] = b"ip_proofs-Bivariate_KZG_transparent"; -type PolynomialEvaluationSecondTierIPA = GIPAWithSSM< +type PolynomialEvaluationSecondTierIPA

-type PolynomialEvaluationSecondTierIPA<P, D> = GIPAWithSSM<
+type PolynomialEvaluationSecondTierIPA<P> = GIPAWithSSM<
     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
-    D,
 >;

-type PolynomialEvaluationSecondTierIPAProof<P, D> = GIPAProof<
+type PolynomialEvaluationSecondTierIPAProof<P> = GIPAProof<
     MultiexponentiationInnerProduct<<P as PairingEngine>::G1Projective>,
     AFGHOCommitmentG1<P>,
     SSMPlaceholderCommitment<<P as PairingEngine>::Fr>,
     IdentityCommitment<<P as PairingEngine>::G1Projective, <P as PairingEngine>::Fr>,
-    D,
 >;

-type PolynomialEvaluationFirstTierIPA<P, D> = GIPAWithSSM<
+type PolynomialEvaluationFirstTierIPA<P> = GIPAWithSSM<
     ScalarInnerProduct<<P as PairingEngine>::Fr>,
     PedersenCommitment<<P as PairingEngine>::G1Projective>,
     IdentityCommitment<<P as PairingEngine>::Fr, <P as PairingEngine>::Fr>,
-    D,
 >;

-type PolynomialEvaluationFirstTierIPAProof<P, D> = GIPAProof<
+type PolynomialEvaluationFirstTierIPAProof<P> = GIPAProof<
     ScalarInnerProduct<<P as PairingEngine>::Fr>,
     PedersenCommitment<<P as PairingEngine>::G1Projective>,
     SSMPlaceholderCommitment<<P as PairingEngine>::Fr>,
     IdentityCommitment<<P as PairingEngine>::Fr, <P as PairingEngine>::Fr>,
-    D,
 >;

 pub struct BivariatePolynomial<F: Field> {
@@ -78,25 +76,24 @@ impl<F: Field> BivariatePolynomial<F> {
     }
 }

-pub struct OpeningProof<P: PairingEngine, D: Digest> {
-    second_tier_ip_proof: PolynomialEvaluationSecondTierIPAProof<P, D>,
+pub struct OpeningProof<P: PairingEngine> {
+    second_tier_ip_proof: PolynomialEvaluationSecondTierIPAProof<P>,
     y_eval_comm: P::G1Projective,
-    first_tier_ip_proof: PolynomialEvaluationFirstTierIPAProof<P, D>,
+    first_tier_ip_proof: PolynomialEvaluationFirstTierIPAProof<P>,
 }

-pub struct BivariatePolynomialCommitment<P: PairingEngine, D: Digest> {
+pub struct BivariatePolynomialCommitment<P: PairingEngine> {
     _pairing: PhantomData<P>,
-    _digest: PhantomData<D>,
 }

-impl<P: PairingEngine, D: Digest> BivariatePolynomialCommitment<P, D> {
+impl<P: PairingEngine> BivariatePolynomialCommitment<P>
{ pub fn setup( rng: &mut R, x_degree: usize, y_degree: usize, ) -> Result<(Vec, Vec), Error> { - let first_tier_ck = PolynomialEvaluationFirstTierIPA::::setup(rng, y_degree + 1)?.0; - let second_tier_ck = PolynomialEvaluationSecondTierIPA::::setup(rng, x_degree + 1)?.0; + let first_tier_ck = PolynomialEvaluationFirstTierIPA::

::setup(rng, y_degree + 1)?.0; + let second_tier_ck = PolynomialEvaluationSecondTierIPA::

::setup(rng, x_degree + 1)?.0; Ok((first_tier_ck, second_tier_ck)) } @@ -132,11 +129,15 @@ impl BivariatePolynomialCommitment { } pub fn open( + transcript: &mut Transcript, ck: &(Vec, Vec), bivariate_polynomial: &BivariatePolynomial, y_polynomial_comms: &Vec, point: &(P::Fr, P::Fr), - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", BIVARIATE_DOMAIN_SEP); + let (x, y) = point; let (first_tier_ck, second_tier_ck) = ck; assert!(second_tier_ck.len() >= bivariate_polynomial.y_polynomials.len()); @@ -170,7 +171,8 @@ impl BivariatePolynomialCommitment { let ipa_time = start_timer!(|| "Computing second tier IPA opening proof"); let second_tier_ip_proof = - PolynomialEvaluationSecondTierIPA::::prove_with_structured_scalar_message( + PolynomialEvaluationSecondTierIPA::

::prove_with_structured_scalar_message( + transcript, (y_polynomial_comms, &powers_of_x), (second_tier_ck, &HomomorphicPlaceholderValue), )?; @@ -179,7 +181,8 @@ impl BivariatePolynomialCommitment { let powers_of_y = structured_scalar_power(first_tier_ck.len(), y); let first_tier_ip_proof = - PolynomialEvaluationFirstTierIPA::::prove_with_structured_scalar_message( + PolynomialEvaluationFirstTierIPA::

::prove_with_structured_scalar_message( + transcript, (&y_eval_coeffs, &powers_of_y), (first_tier_ck, &HomomorphicPlaceholderValue), )?; @@ -193,23 +196,29 @@ impl BivariatePolynomialCommitment { } pub fn verify( + transcript: &mut Transcript, ck: &(Vec, Vec), com: &ExtensionFieldElement

, point: &(P::Fr, P::Fr), eval: &P::Fr, - proof: &OpeningProof, + proof: &OpeningProof

, ) -> Result { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", BIVARIATE_DOMAIN_SEP); + let (first_tier_ck, second_tier_ck) = ck; let (x, y) = point; let second_tier_ip_proof_valid = - PolynomialEvaluationSecondTierIPA::::verify_with_structured_scalar_message( + PolynomialEvaluationSecondTierIPA::

::verify_with_structured_scalar_message( + transcript, (second_tier_ck, &HomomorphicPlaceholderValue), (com, &IdentityOutput(vec![proof.y_eval_comm.clone()])), x, &proof.second_tier_ip_proof, )?; let first_tier_ip_proof_valid = - PolynomialEvaluationFirstTierIPA::::verify_with_structured_scalar_message( + PolynomialEvaluationFirstTierIPA::

::verify_with_structured_scalar_message( + transcript, (first_tier_ck, &HomomorphicPlaceholderValue), (&proof.y_eval_comm, &IdentityOutput(vec![eval.clone()])), y, @@ -219,12 +228,11 @@ impl BivariatePolynomialCommitment { } } -pub struct UnivariatePolynomialCommitment { +pub struct UnivariatePolynomialCommitment { _pairing: PhantomData

, - _digest: PhantomData, } -impl UnivariatePolynomialCommitment { +impl UnivariatePolynomialCommitment

{ fn bivariate_degrees(univariate_degree: usize) -> (usize, usize) { //(((univariate_degree + 1) as f64).sqrt().ceil() as usize).next_power_of_two() - 1; let sqrt = (((univariate_degree + 1) as f64).sqrt().ceil() as usize).next_power_of_two(); @@ -271,7 +279,7 @@ impl UnivariatePolynomialCommitment { degree: usize, ) -> Result<(Vec, Vec), Error> { let (x_degree, y_degree) = Self::bivariate_degrees(degree); - BivariatePolynomialCommitment::::setup(rng, x_degree, y_degree) + BivariatePolynomialCommitment::

::setup(rng, x_degree, y_degree) } pub fn commit( @@ -279,22 +287,27 @@ impl UnivariatePolynomialCommitment { polynomial: &UnivariatePolynomial, ) -> Result<(ExtensionFieldElement

, Vec), Error> { let bivariate_degrees = Self::parse_bivariate_degrees_from_ck(ck); - BivariatePolynomialCommitment::::commit( + BivariatePolynomialCommitment::

::commit( ck, &Self::bivariate_form(bivariate_degrees, polynomial), ) } pub fn open( + transcript: &mut Transcript, ck: &(Vec, Vec), polynomial: &UnivariatePolynomial, y_polynomial_comms: &Vec, point: &P::Fr, - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", UNIVARIATE_DOMAIN_SEP); + let (x_degree, y_degree) = Self::parse_bivariate_degrees_from_ck(ck); let y = point.clone(); let x = point.pow(&vec![(y_degree + 1) as u64]); BivariatePolynomialCommitment::open( + transcript, ck, &Self::bivariate_form((x_degree, y_degree), polynomial), y_polynomial_comms, @@ -303,16 +316,20 @@ impl UnivariatePolynomialCommitment { } pub fn verify( + transcript: &mut Transcript, ck: &(Vec, Vec), com: &ExtensionFieldElement

, point: &P::Fr, eval: &P::Fr, - proof: &OpeningProof, + proof: &OpeningProof

, ) -> Result { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", UNIVARIATE_DOMAIN_SEP); + let (_, y_degree) = Self::parse_bivariate_degrees_from_ck(ck); let y = point.clone(); let x = y.pow(&vec![(y_degree + 1) as u64]); - BivariatePolynomialCommitment::verify(ck, com, &(x, y), eval, proof) + BivariatePolynomialCommitment::verify(transcript, ck, com, &(x, y), eval, proof) } } @@ -322,8 +339,6 @@ mod tests { use ark_bls12_381::Bls12_381; use ark_ec::PairingEngine; use ark_ff::UniformRand; - use ark_std::rand::{rngs::StdRng, SeedableRng}; - use blake2::Blake2b; const BIVARIATE_X_DEGREE: usize = 7; const BIVARIATE_Y_DEGREE: usize = 7; @@ -331,12 +346,13 @@ mod tests { const UNIVARIATE_DEGREE: usize = 65535; //const UNIVARIATE_DEGREE: usize = 1048575; - type TestBivariatePolyCommitment = BivariatePolynomialCommitment; - type TestUnivariatePolyCommitment = UnivariatePolynomialCommitment; + type TestBivariatePolyCommitment = BivariatePolynomialCommitment; + type TestUnivariatePolyCommitment = UnivariatePolynomialCommitment; #[test] fn transparent_bivariate_poly_commit_test() { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + let ck = TestBivariatePolyCommitment::setup(&mut rng, BIVARIATE_X_DEGREE, BIVARIATE_Y_DEGREE) .unwrap(); @@ -359,7 +375,9 @@ mod tests { // Evaluate at challenge point let point = (UniformRand::rand(&mut rng), UniformRand::rand(&mut rng)); + let mut open_transcript = Transcript::new(b"Transparent-bivariate-test"); let eval_proof = TestBivariatePolyCommitment::open( + &mut open_transcript, &ck, &bivariate_polynomial, &y_polynomial_comms, @@ -369,16 +387,24 @@ mod tests { let eval = bivariate_polynomial.evaluate(&point); // Verify proof - assert!( - TestBivariatePolyCommitment::verify(&ck, &com, &point, &eval, &eval_proof).unwrap() - ); + let mut verif_transcript = Transcript::new(b"Transparent-bivariate-test"); + assert!(TestBivariatePolyCommitment::verify( + &mut verif_transcript, + &ck, + &com, + &point, + &eval, + &eval_proof + ) + .unwrap()); } // `cargo test transparent_univariate_poly_commit_test --release --features print-trace -- --ignored --nocapture` #[ignore] #[test] fn transparent_univariate_poly_commit_test() { - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); + let ck = TestUnivariatePolyCommitment::setup(&mut rng, UNIVARIATE_DEGREE).unwrap(); let mut polynomial_coeffs = vec![]; @@ -393,14 +419,27 @@ mod tests { // Evaluate at challenge point let point = UniformRand::rand(&mut rng); - let eval_proof = - TestUnivariatePolyCommitment::open(&ck, &polynomial, &y_polynomial_comms, &point) - .unwrap(); + let mut open_transcript = Transcript::new(b"Transparent-univariate-test"); + let eval_proof = TestUnivariatePolyCommitment::open( + &mut open_transcript, + &ck, + &polynomial, + &y_polynomial_comms, + &point, + ) + .unwrap(); let eval = polynomial.evaluate(&point); // Verify proof - assert!( - TestUnivariatePolyCommitment::verify(&ck, &com, &point, &eval, &eval_proof).unwrap() - ); + let mut verif_transcript = Transcript::new(b"Transparent-univariate-test"); + assert!(TestUnivariatePolyCommitment::verify( + &mut verif_transcript, + &ck, + &com, + &point, + &eval, + &eval_proof + ) + .unwrap()); } } diff --git a/ip_proofs/src/gipa.rs b/ip_proofs/src/gipa.rs index 7b5df29..dce5da7 100644 --- a/ip_proofs/src/gipa.rs +++ b/ip_proofs/src/gipa.rs @@ -1,30 +1,31 @@ -use ark_ff::{to_bytes, Field, One}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, 
SerializationError, Write}; -use ark_std::rand::Rng; -use ark_std::{end_timer, start_timer}; -use digest::Digest; -use std::{convert::TryInto, marker::PhantomData, ops::MulAssign}; +use crate::{mul_helper, util::TranscriptProtocol, Error, InnerProductArgumentError}; + +use std::{marker::PhantomData, ops::MulAssign}; -use crate::{mul_helper, Error, InnerProductArgumentError}; use ark_dh_commitments::DoublyHomomorphicCommitment; +use ark_ff::{Field, One}; use ark_inner_products::InnerProduct; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; use ark_std::cfg_iter; +use ark_std::rand::Rng; +use ark_std::{end_timer, start_timer}; +use merlin::Transcript; #[cfg(feature = "parallel")] use rayon::prelude::*; -pub struct GIPA { +const GIPA_DOMAIN_SEP: &[u8] = b"ip_proofs-v0.3-GIPA"; + +pub struct GIPA { _inner_product: PhantomData, _left_commitment: PhantomData, _right_commitment: PhantomData, _inner_product_commitment: PhantomData, - _digest: PhantomData, } #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct GIPAProof +pub struct GIPAProof where - D: Digest, IP: InnerProduct< LeftMessage = LMC::Message, RightMessage = RMC::Message, @@ -45,13 +46,12 @@ where (LMC::Output, RMC::Output, IPC::Output), )>, pub(crate) r_base: (LMC::Message, RMC::Message), - _gipa: PhantomData>, + _gipa: PhantomData>, } #[derive(Clone)] -pub struct GIPAAux +pub struct GIPAAux where - D: Digest, IP: InnerProduct< LeftMessage = LMC::Message, RightMessage = RMC::Message, @@ -67,16 +67,15 @@ where RMC::Output: MulAssign, IPC::Output: MulAssign, { - pub(crate) r_transcript: Vec, + pub(crate) r_challenges: Vec, pub(crate) ck_base: (LMC::Key, RMC::Key), - _gipa: PhantomData>, + _gipa: PhantomData>, } //TODO: Can extend GIPA to support "identity commitments" in addition to "compact commitments", i.e. for SIPP -impl GIPA +impl GIPA where - D: Digest, IP: InnerProduct< LeftMessage = LMC::Message, RightMessage = RMC::Message, @@ -104,10 +103,11 @@ where } pub fn prove( + transcript: &mut Transcript, values: (&[IP::LeftMessage], &[IP::RightMessage], &IP::Output), ck: (&[LMC::Key], &[RMC::Key], &IPC::Key), com: (&LMC::Output, &RMC::Output, &IPC::Output), - ) -> Result, Error> { + ) -> Result, Error> { if IP::inner_product(values.0, values.1)? 
!= values.2.clone() { return Err(Box::new(InnerProductArgumentError::InnerProductInvalid)); } @@ -125,15 +125,19 @@ where return Err(Box::new(InnerProductArgumentError::InnerProductInvalid)); } - let (proof, _) = - Self::prove_with_aux((values.0, values.1), (ck.0, ck.1, &vec![ck.2.clone()]))?; + let (proof, _) = Self::prove_with_aux( + transcript, + (values.0, values.1), + (ck.0, ck.1, &vec![ck.2.clone()]), + )?; Ok(proof) } pub fn verify( + transcript: &mut Transcript, ck: (&[LMC::Key], &[RMC::Key], &IPC::Key), com: (&LMC::Output, &RMC::Output, &IPC::Output), - proof: &GIPAProof, + proof: &GIPAProof, ) -> Result { if ck.0.len().count_ones() != 1 || ck.0.len() != ck.1.len() { // Power of 2 length @@ -142,13 +146,14 @@ where ck.1.len(), ))); } - // Calculate base commitment and transcript - let (base_com, transcript) = Self::_compute_recursive_challenges( + // Calculate base commitment and round challenges + let (base_com, r_challenges) = Self::_compute_recursive_challenges( + transcript, (com.0.clone(), com.1.clone(), com.2.clone()), proof, )?; // Calculate base commitment keys - let (ck_a_base, ck_b_base) = Self::_compute_final_commitment_keys(ck, &transcript)?; + let (ck_a_base, ck_b_base) = Self::_compute_final_commitment_keys(ck, &r_challenges)?; // Verify base commitment Self::_verify_base_commitment( (&ck_a_base, &ck_b_base, &vec![ck.2.clone()]), @@ -158,18 +163,14 @@ where } pub fn prove_with_aux( + transcript: &mut Transcript, values: (&[IP::LeftMessage], &[IP::RightMessage]), ck: (&[LMC::Key], &[RMC::Key], &[IPC::Key]), - ) -> Result< - ( - GIPAProof, - GIPAAux, - ), - Error, - > { + ) -> Result<(GIPAProof, GIPAAux), Error> { let (m_a, m_b) = values; let (ck_a, ck_b, ck_t) = ck; Self::_prove( + transcript, (m_a.to_vec(), m_b.to_vec()), (ck_a.to_vec(), ck_b.to_vec(), ck_t.to_vec()), ) @@ -177,20 +178,22 @@ where // Returns vector of recursive commitments and transcripts in reverse order fn _prove( + transcript: &mut Transcript, values: (Vec, Vec), ck: (Vec, Vec, Vec), - ) -> Result< - ( - GIPAProof, - GIPAAux, - ), - Error, - > { + ) -> Result<(GIPAProof, GIPAAux), Error> { let (mut m_a, mut m_b) = values; let (mut ck_a, mut ck_b, ck_t) = ck; - let mut r_commitment_steps = Vec::new(); - let mut r_transcript = Vec::new(); assert!(m_a.len().is_power_of_two()); + + // For the proof, we collect commitments at every recurisve step. For the auxiliary output, + // we collect the Fiat-Shamir challenges. 
+ let mut r_commitment_steps = Vec::new(); + let mut r_challenges = Vec::new(); + + // Domain-separate this protocol + transcript.append_message(b"dom-sep", GIPA_DOMAIN_SEP); + let (m_base, ck_base) = 'recurse: loop { let recurse = start_timer!(|| format!("Recurse round size {}", m_a.len())); if m_a.len() == 1 { @@ -229,33 +232,19 @@ where ); end_timer!(cr); + // Update the transcript + transcript.append_serializable(b"com_1", &com_1)?; + transcript.append_serializable(b"com_2", &com_2)?; + // Fiat-Shamir challenge - let mut counter_nonce: usize = 0; - let default_transcript = Default::default(); - let transcript = r_transcript.last().unwrap_or(&default_transcript); - let (c, c_inv) = 'challenge: loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - //TODO: Should use CanonicalSerialize instead of ToBytes - hash_input.extend_from_slice(&to_bytes![ - transcript, com_1.0, com_1.1, com_1.2, com_2.0, com_2.1, com_2.2 - ]?); - let c: LMC::Scalar = u128::from_be_bytes( - D::digest(&hash_input).as_slice()[0..16].try_into().unwrap(), - ) - .into(); - if let Some(c_inv) = c.inverse() { - // Optimization for multiexponentiation to rescale G2 elements with 128-bit challenge - // Swap 'c' and 'c_inv' since can't control bit size of c_inv - break 'challenge (c_inv, c); - } - counter_nonce += 1; - }; + let chal: LMC::Scalar = transcript.challenge_scalar(b"c"); + // Don't worry about accidentally sampling 0. The probability is negligible. + let chal_inv = chal.inverse().unwrap(); // Set up values for next step of recursion let rescale_m1 = start_timer!(|| "Rescale M1"); m_a = cfg_iter!(m_a_1) - .map(|a| mul_helper(a, &c)) + .map(|a| mul_helper(a, &chal)) .zip(m_a_2) .map(|(a_1, a_2)| a_1 + a_2.clone()) .collect::>(); @@ -263,7 +252,7 @@ where let rescale_m2 = start_timer!(|| "Rescale M2"); m_b = cfg_iter!(m_b_2) - .map(|b| mul_helper(b, &c_inv)) + .map(|b| mul_helper(b, &chal_inv)) .zip(m_b_1) .map(|(b_1, b_2)| b_1 + b_2.clone()) .collect::>(); @@ -271,7 +260,7 @@ where let rescale_ck1 = start_timer!(|| "Rescale CK1"); ck_a = cfg_iter!(ck_a_2) - .map(|a| mul_helper(a, &c_inv)) + .map(|a| mul_helper(a, &chal_inv)) .zip(ck_a_1) .map(|(a_1, a_2)| a_1 + a_2.clone()) .collect::>(); @@ -279,19 +268,21 @@ where let rescale_ck2 = start_timer!(|| "Rescale CK2"); ck_b = cfg_iter!(ck_b_1) - .map(|b| mul_helper(b, &c)) + .map(|b| mul_helper(b, &chal)) .zip(ck_b_2) .map(|(b_1, b_2)| b_1 + b_2.clone()) .collect::>(); end_timer!(rescale_ck2); r_commitment_steps.push((com_1, com_2)); - r_transcript.push(c); + r_challenges.push(chal); end_timer!(recurse); } }; - r_transcript.reverse(); + r_commitment_steps.reverse(); + r_challenges.reverse(); + Ok(( GIPAProof { r_commitment_steps, @@ -299,7 +290,7 @@ where _gipa: PhantomData, }, GIPAAux { - r_transcript, + r_challenges, ck_base, _gipa: PhantomData, }, @@ -308,54 +299,52 @@ where // Helper function used to calculate recursive challenges from proof execution (transcript in reverse) pub fn verify_recursive_challenge_transcript( + transcript: &mut Transcript, com: (&LMC::Output, &RMC::Output, &IPC::Output), - proof: &GIPAProof, + proof: &GIPAProof, ) -> Result<((LMC::Output, RMC::Output, IPC::Output), Vec), Error> { - Self::_compute_recursive_challenges((com.0.clone(), com.1.clone(), com.2.clone()), proof) + Self::_compute_recursive_challenges( + transcript, + (com.0.clone(), com.1.clone(), com.2.clone()), + proof, + ) } fn _compute_recursive_challenges( + transcript: &mut Transcript, com: (LMC::Output, RMC::Output, 
IPC::Output), - proof: &GIPAProof, + proof: &GIPAProof, ) -> Result<((LMC::Output, RMC::Output, IPC::Output), Vec), Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", GIPA_DOMAIN_SEP); + + // Keep track of the challenges from each round + let mut r_challenges = Vec::new(); + let (mut com_a, mut com_b, mut com_t) = com; - let mut r_transcript = Vec::new(); for (com_1, com_2) in proof.r_commitment_steps.iter().rev() { + // Update the transcript + transcript.append_serializable(b"com_1", com_1)?; + transcript.append_serializable(b"com_2", com_2)?; + // Fiat-Shamir challenge - let mut counter_nonce: usize = 0; - let default_transcript = Default::default(); - let transcript = r_transcript.last().unwrap_or(&default_transcript); - let (c, c_inv) = 'challenge: loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - hash_input.extend_from_slice(&to_bytes![ - transcript, com_1.0, com_1.1, com_1.2, com_2.0, com_2.1, com_2.2 - ]?); - let c: LMC::Scalar = u128::from_be_bytes( - D::digest(&hash_input).as_slice()[0..16].try_into().unwrap(), - ) - .into(); - if let Some(c_inv) = c.inverse() { - // Optimization for multiexponentiation to rescale G2 elements with 128-bit challenge - // Swap 'c' and 'c_inv' since can't control bit size of c_inv - break 'challenge (c_inv, c); - } - counter_nonce += 1; - }; - - com_a = mul_helper(&com_1.0, &c) + com_a.clone() + mul_helper(&com_2.0, &c_inv); - com_b = mul_helper(&com_1.1, &c) + com_b.clone() + mul_helper(&com_2.1, &c_inv); - com_t = mul_helper(&com_1.2, &c) + com_t.clone() + mul_helper(&com_2.2, &c_inv); - - r_transcript.push(c); + let chal: LMC::Scalar = transcript.challenge_scalar(b"c"); + // Don't worry about accidentally sampling 0. The probability is negligible. 
+ let chal_inv = chal.inverse().unwrap(); + + com_a = mul_helper(&com_1.0, &chal) + com_a.clone() + mul_helper(&com_2.0, &chal_inv); + com_b = mul_helper(&com_1.1, &chal) + com_b.clone() + mul_helper(&com_2.1, &chal_inv); + com_t = mul_helper(&com_1.2, &chal) + com_t.clone() + mul_helper(&com_2.2, &chal_inv); + + r_challenges.push(chal); } - r_transcript.reverse(); - Ok(((com_a, com_b, com_t), r_transcript)) + r_challenges.reverse(); + Ok(((com_a, com_b, com_t), r_challenges)) } pub(crate) fn _compute_final_commitment_keys( ck: (&[LMC::Key], &[RMC::Key], &IPC::Key), - transcript: &Vec, + r_challenges: &Vec, ) -> Result<(LMC::Key, RMC::Key), Error> { // Calculate base commitment keys let (ck_a, ck_b, _) = ck; @@ -363,11 +352,11 @@ where let mut ck_a_agg_challenge_exponents = vec![LMC::Scalar::one()]; let mut ck_b_agg_challenge_exponents = vec![LMC::Scalar::one()]; - for (i, c) in transcript.iter().enumerate() { - let c_inv = c.inverse().unwrap(); + for (i, chal) in r_challenges.iter().enumerate() { + let chal_inv = chal.inverse().unwrap(); for j in 0..(2_usize).pow(i as u32) { - ck_a_agg_challenge_exponents.push(ck_a_agg_challenge_exponents[j] * &c_inv); - ck_b_agg_challenge_exponents.push(ck_b_agg_challenge_exponents[j] * c); + ck_a_agg_challenge_exponents.push(ck_a_agg_challenge_exponents[j] * &chal_inv); + ck_b_agg_challenge_exponents.push(ck_b_agg_challenge_exponents[j] * chal); } } assert_eq!(ck_a_agg_challenge_exponents.len(), ck_a.len()); @@ -392,7 +381,7 @@ where pub(crate) fn _verify_base_commitment( base_ck: (&LMC::Key, &RMC::Key, &Vec), base_com: (LMC::Output, RMC::Output, IPC::Output), - proof: &GIPAProof, + proof: &GIPAProof, ) -> Result { let (com_a, com_b, com_t) = base_com; let (ck_a_base, ck_b_base, ck_t) = base_ck; @@ -406,9 +395,8 @@ where } } -impl Clone for GIPAProof +impl Clone for GIPAProof where - D: Digest, IP: InnerProduct< LeftMessage = LMC::Message, RightMessage = RMC::Message, @@ -439,8 +427,6 @@ mod tests { use ark_bls12_381::Bls12_381; use ark_ec::PairingEngine; use ark_ff::UniformRand; - use ark_std::rand::{rngs::StdRng, SeedableRng}; - use blake2::Blake2b; use ark_dh_commitments::{ afgho16::{AFGHOCommitmentG1, AFGHOCommitmentG2}, @@ -464,9 +450,9 @@ mod tests { type IP = PairingInnerProduct; type IPC = IdentityCommitment, ::Fr>; - type PairingGIPA = GIPA; + type PairingGIPA = GIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (ck_a, ck_b, ck_t) = PairingGIPA::setup(&mut rng, TEST_SIZE).unwrap(); let m_a = random_generators(&mut rng, TEST_SIZE); let m_b = random_generators(&mut rng, TEST_SIZE); @@ -475,16 +461,23 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"GIPA-test"); let proof = PairingGIPA::prove( + &mut proof_transcript, (&m_a, &m_b, &t[0]), (&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), ) .unwrap(); - assert!( - PairingGIPA::verify((&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), &proof,).unwrap() - ); + let mut verif_transcript = Transcript::new(b"GIPA-test"); + assert!(PairingGIPA::verify( + &mut verif_transcript, + (&ck_a, &ck_b, &ck_t), + (&com_a, &com_b, &com_t), + &proof, + ) + .unwrap()); } #[test] @@ -494,9 +487,9 @@ mod tests { ::G1Projective, ::Fr, >; - type MultiExpGIPA = GIPA; + type MultiExpGIPA = GIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (ck_a, ck_b, ck_t) = MultiExpGIPA::setup(&mut rng, TEST_SIZE).unwrap(); let m_a = 
random_generators(&mut rng, TEST_SIZE); let mut m_b = Vec::new(); @@ -508,16 +501,23 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"GIPA-test"); let proof = MultiExpGIPA::prove( + &mut proof_transcript, (&m_a, &m_b, &t[0]), (&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), ) .unwrap(); - assert!( - MultiExpGIPA::verify((&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), &proof,).unwrap() - ); + let mut verif_transcript = Transcript::new(b"GIPA-test"); + assert!(MultiExpGIPA::verify( + &mut verif_transcript, + (&ck_a, &ck_b, &ck_t), + (&com_a, &com_b, &com_t), + &proof, + ) + .unwrap()); } #[test] @@ -525,9 +525,9 @@ mod tests { type IP = ScalarInnerProduct<::Fr>; type IPC = IdentityCommitment<::Fr, ::Fr>; - type ScalarGIPA = GIPA; + type ScalarGIPA = GIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (ck_a, ck_b, ck_t) = ScalarGIPA::setup(&mut rng, TEST_SIZE).unwrap(); let mut m_a = Vec::new(); let mut m_b = Vec::new(); @@ -540,15 +540,22 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); + let mut proof_transcript = Transcript::new(b"GIPA-test"); let proof = ScalarGIPA::prove( + &mut proof_transcript, (&m_a, &m_b, &t[0]), (&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), ) .unwrap(); - assert!( - ScalarGIPA::verify((&ck_a, &ck_b, &ck_t), (&com_a, &com_b, &com_t), &proof,).unwrap() - ); + let mut verif_transcript = Transcript::new(b"GIPA-test"); + assert!(ScalarGIPA::verify( + &mut verif_transcript, + (&ck_a, &ck_b, &ck_t), + (&com_a, &com_b, &com_t), + &proof, + ) + .unwrap()); } } diff --git a/ip_proofs/src/lib.rs b/ip_proofs/src/lib.rs index d152847..137e5fe 100644 --- a/ip_proofs/src/lib.rs +++ b/ip_proofs/src/lib.rs @@ -8,6 +8,7 @@ use std::{ pub mod applications; pub mod gipa; pub mod tipa; +mod util; pub type Error = Box; diff --git a/ip_proofs/src/tipa/mod.rs b/ip_proofs/src/tipa/mod.rs index 4b9cefc..2c0f42d 100644 --- a/ip_proofs/src/tipa/mod.rs +++ b/ip_proofs/src/tipa/mod.rs @@ -1,15 +1,6 @@ -use ark_ec::{msm::FixedBaseMSM, PairingEngine, ProjectiveCurve}; -use ark_ff::{to_bytes, Field, One, PrimeField, UniformRand, Zero}; -use ark_poly::polynomial::{univariate::DensePolynomial, UVPolynomial}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; -use ark_std::rand::Rng; -use ark_std::{end_timer, start_timer}; -use digest::Digest; -use itertools::Itertools; -use std::{marker::PhantomData, ops::MulAssign}; - use crate::{ gipa::{GIPAProof, GIPA}, + util::TranscriptProtocol, Error, }; use ark_dh_commitments::{ @@ -19,29 +10,42 @@ use ark_dh_commitments::{ }; use ark_inner_products::{InnerProduct, MultiexponentiationInnerProduct}; +use std::{marker::PhantomData, ops::MulAssign}; + +use ark_ec::{msm::FixedBaseMSM, PairingEngine, ProjectiveCurve}; +use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; +use ark_poly::polynomial::{univariate::DensePolynomial, UVPolynomial}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; +use ark_std::rand::Rng; +use ark_std::{end_timer, start_timer}; +use itertools::Itertools; +use merlin::Transcript; + pub mod structured_scalar_message; -//TODO: Could generalize: Don't need TIPA over G1 and G2, would work with G1 and G1 or over different pairing engines +const TIPA_DOMAIN_SEP: &[u8] = b"ip_proofs-v0.3-TIPA"; + +// TODO: 
Could generalize: Don't need TIPA over G1 and G2, would work with G1 and G1 or over
+// different pairing engines
 pub trait TIPACompatibleSetup {}
 impl<G: ProjectiveCurve> TIPACompatibleSetup for PedersenCommitment<G> {}
 impl<P: PairingEngine> TIPACompatibleSetup for AFGHOCommitmentG1<P> {}
 impl<P: PairingEngine> TIPACompatibleSetup for AFGHOCommitmentG2<P>
{} -//TODO: May need to add "reverse" MultiexponentiationInnerProduct to allow for MIP with G2 messages (because TIP hard-coded G1 left and G2 right) -pub struct TIPA { +// TODO: May need to add "reverse" MultiexponentiationInnerProduct to allow for MIP with G2 messages +// (because TIP hard-coded G1 left and G2 right) +pub struct TIPA { _inner_product: PhantomData, _left_commitment: PhantomData, _right_commitment: PhantomData, _inner_product_commitment: PhantomData, _pair: PhantomData
<P>
, - _digest: PhantomData, } #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct TIPAProof +pub struct TIPAProof where - D: Digest, P: PairingEngine, IP: InnerProduct< LeftMessage = LMC::Message, @@ -58,15 +62,14 @@ where RMC::Output: MulAssign, IPC::Output: MulAssign, { - gipa_proof: GIPAProof, + gipa_proof: GIPAProof, final_ck: (LMC::Key, RMC::Key), final_ck_proof: (P::G2Projective, P::G1Projective), _pair: PhantomData
<P>
, } -impl Clone for TIPAProof +impl Clone for TIPAProof where - D: Digest, P: PairingEngine, IP: InnerProduct< LeftMessage = LMC::Message, @@ -127,9 +130,8 @@ impl SRS
<P>
{ } } -impl TIPA +impl TIPA where - D: Digest, P: PairingEngine, IP: InnerProduct< LeftMessage = LMC::Message, @@ -165,62 +167,54 @@ where } pub fn prove( + transcript: &mut Transcript, srs: &SRS
<P>
, values: (&[IP::LeftMessage], &[IP::RightMessage]), ck: (&[LMC::Key], &[RMC::Key], &IPC::Key), - ) -> Result, Error> { - Self::prove_with_srs_shift(srs, values, ck, &::one()) + ) -> Result, Error> { + Self::prove_with_srs_shift(transcript, srs, values, ck, &::one()) } // Shifts KZG proof for left message by scalar r (used for efficient composition with aggregation protocols) // LMC commitment key should already be shifted before being passed as input pub fn prove_with_srs_shift( + transcript: &mut Transcript, srs: &SRS
<P>
, values: (&[IP::LeftMessage], &[IP::RightMessage]), ck: (&[LMC::Key], &[RMC::Key], &IPC::Key), r_shift: &P::Fr, - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", TIPA_DOMAIN_SEP); + // Run GIPA - let (proof, aux) = >::prove_with_aux( + let (proof, aux) = >::prove_with_aux( + transcript, values, (ck.0, ck.1, &vec![ck.2.clone()]), )?; // Prove final commitment keys are wellformed let (ck_a_final, ck_b_final) = aux.ck_base; - let transcript = aux.r_transcript; - let transcript_inverse = transcript.iter().map(|x| x.inverse().unwrap()).collect(); + let r_challenges = aux.r_challenges; + let r_chal_inverses = r_challenges.iter().map(|x| x.inverse().unwrap()).collect(); let r_inverse = r_shift.inverse().unwrap(); // KZG challenge point - let mut counter_nonce: usize = 0; - let c = loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - //TODO: Should use CanonicalSerialize instead of ToBytes - hash_input.extend_from_slice(&to_bytes![ - transcript.first().unwrap(), - ck_a_final, - ck_b_final - ]?); - if let Some(c) = LMC::Scalar::from_random_bytes(&D::digest(&hash_input)) { - break c; - }; - counter_nonce += 1; - }; + let kzg_chal: LMC::Scalar = transcript.challenge_scalar(b"kzg_chal"); // Complete KZG proofs let ck_a_kzg_opening = prove_commitment_key_kzg_opening( &srs.h_beta_powers, - &transcript_inverse, + &r_chal_inverses, &r_inverse, - &c, + &kzg_chal, )?; let ck_b_kzg_opening = prove_commitment_key_kzg_opening( &srs.g_alpha_powers, - &transcript, + &r_challenges, &::one(), - &c, + &kzg_chal, )?; Ok(TIPAProof { @@ -232,61 +226,52 @@ where } pub fn verify( + transcript: &mut Transcript, v_srs: &VerifierSRS
<P>
, ck_t: &IPC::Key, com: (&LMC::Output, &RMC::Output, &IPC::Output), - proof: &TIPAProof, + proof: &TIPAProof, ) -> Result { - Self::verify_with_srs_shift(v_srs, ck_t, com, proof, &::one()) + Self::verify_with_srs_shift(transcript, v_srs, ck_t, com, proof, &::one()) } pub fn verify_with_srs_shift( + transcript: &mut Transcript, v_srs: &VerifierSRS
<P>
, ck_t: &IPC::Key, com: (&LMC::Output, &RMC::Output, &IPC::Output), - proof: &TIPAProof, + proof: &TIPAProof, r_shift: &P::Fr, ) -> Result { - let (base_com, transcript) = - GIPA::verify_recursive_challenge_transcript(com, &proof.gipa_proof)?; - let transcript_inverse = transcript.iter().map(|x| x.inverse().unwrap()).collect(); + // Domain-separate this protocol + transcript.append_message(b"dom-sep", TIPA_DOMAIN_SEP); + + let (base_com, r_challenges) = + GIPA::verify_recursive_challenge_transcript(transcript, com, &proof.gipa_proof)?; + let r_chal_inverses = r_challenges.iter().map(|x| x.inverse().unwrap()).collect(); // Verify commitment keys wellformed let (ck_a_final, ck_b_final) = &proof.final_ck; let (ck_a_proof, ck_b_proof) = &proof.final_ck_proof; // KZG challenge point - let mut counter_nonce: usize = 0; - let c = loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - //TODO: Should use CanonicalSerialize instead of ToBytes - hash_input.extend_from_slice(&to_bytes![ - transcript.first().unwrap(), - ck_a_final, - ck_b_final - ]?); - if let Some(c) = LMC::Scalar::from_random_bytes(&D::digest(&hash_input)) { - break c; - }; - counter_nonce += 1; - }; + let kzg_chal: LMC::Scalar = transcript.challenge_scalar(b"kzg_chal"); let ck_a_valid = verify_commitment_key_g2_kzg_opening( v_srs, &ck_a_final, &ck_a_proof, - &transcript_inverse, + &r_chal_inverses, &r_shift.inverse().unwrap(), - &c, + &kzg_chal, )?; let ck_b_valid = verify_commitment_key_g1_kzg_opening( v_srs, &ck_b_final, &ck_b_proof, - &transcript, + &r_challenges, &::one(), - &c, + &kzg_chal, )?; // Verify base inner product commitment @@ -304,25 +289,25 @@ where pub fn prove_commitment_key_kzg_opening( srs_powers: &Vec, - transcript: &Vec, + r_challenges: &Vec, r_shift: &G::ScalarField, - kzg_challenge: &G::ScalarField, + kzg_chal: &G::ScalarField, ) -> Result { let ck_polynomial = DensePolynomial::from_coefficients_slice( - &polynomial_coefficients_from_transcript(transcript, r_shift), + &polynomial_coefficients_from_challenges(r_challenges, r_shift), ); assert_eq!(srs_powers.len(), ck_polynomial.coeffs.len()); let eval = start_timer!(|| "polynomial eval"); let ck_polynomial_c_eval = - polynomial_evaluation_product_form_from_transcript(&transcript, kzg_challenge, &r_shift); + polynomial_evaluation_product_form_from_challenges(&r_challenges, kzg_chal, &r_shift); end_timer!(eval); let quotient = start_timer!(|| "polynomial quotient"); let quotient_polynomial = &(&ck_polynomial - &DensePolynomial::from_coefficients_vec(vec![ck_polynomial_c_eval])) / &(DensePolynomial::from_coefficients_vec(vec![ - -kzg_challenge.clone(), + -kzg_chal.clone(), ::one(), ])); end_timer!(quotient); @@ -342,17 +327,17 @@ pub fn verify_commitment_key_g2_kzg_opening( v_srs: &VerifierSRS
<P>
, ck_final: &P::G2Projective, ck_opening: &P::G2Projective, - transcript: &Vec, + r_challenges: &Vec, r_shift: &P::Fr, - kzg_challenge: &P::Fr, + kzg_chal: &P::Fr, ) -> Result { let ck_polynomial_c_eval = - polynomial_evaluation_product_form_from_transcript(transcript, kzg_challenge, r_shift); + polynomial_evaluation_product_form_from_challenges(r_challenges, kzg_chal, r_shift); Ok(P::pairing( v_srs.g, *ck_final - &v_srs.h.mul(ck_polynomial_c_eval.into_repr()), ) == P::pairing( - v_srs.g_beta - &v_srs.g.mul(kzg_challenge.into_repr()), + v_srs.g_beta - &v_srs.g.mul(kzg_chal.into_repr()), *ck_opening, )) } @@ -361,18 +346,18 @@ pub fn verify_commitment_key_g1_kzg_opening( v_srs: &VerifierSRS
<P>
, ck_final: &P::G1Projective, ck_opening: &P::G1Projective, - transcript: &Vec, + r_challenges: &Vec, r_shift: &P::Fr, - kzg_challenge: &P::Fr, + kzg_chal: &P::Fr, ) -> Result { let ck_polynomial_c_eval = - polynomial_evaluation_product_form_from_transcript(transcript, kzg_challenge, r_shift); + polynomial_evaluation_product_form_from_challenges(r_challenges, kzg_chal, r_shift); Ok(P::pairing( *ck_final - &v_srs.g.mul(ck_polynomial_c_eval.into_repr()), v_srs.h, ) == P::pairing( *ck_opening, - v_srs.h_alpha - &v_srs.h.mul(kzg_challenge.into_repr()), + v_srs.h_alpha - &v_srs.h.mul(kzg_chal.into_repr()), )) } @@ -398,24 +383,24 @@ pub fn structured_generators_scalar_power( powers_of_g } -fn polynomial_evaluation_product_form_from_transcript( - transcript: &Vec, +fn polynomial_evaluation_product_form_from_challenges( + r_challenges: &Vec, z: &F, r_shift: &F, ) -> F { let mut power_2_zr = (z.clone() * z) * r_shift; let mut product_form = Vec::new(); - for x in transcript.iter() { + for x in r_challenges.iter() { product_form.push(F::one() + (x.clone() * &power_2_zr)); power_2_zr *= power_2_zr; } product_form.iter().product() } -fn polynomial_coefficients_from_transcript(transcript: &Vec, r_shift: &F) -> Vec { +fn polynomial_coefficients_from_challenges(r_challenges: &Vec, r_shift: &F) -> Vec { let mut coefficients = vec![F::one()]; let mut power_2_r = r_shift.clone(); - for (i, x) in transcript.iter().enumerate() { + for (i, x) in r_challenges.iter().enumerate() { for j in 0..(2_usize).pow(i as u32) { coefficients.push(coefficients[j] * &(x.clone() * &power_2_r)); } @@ -433,8 +418,6 @@ fn polynomial_coefficients_from_transcript(transcript: &Vec, r_shif mod tests { use super::*; use ark_bls12_381::Bls12_381; - use ark_std::rand::{rngs::StdRng, SeedableRng}; - use blake2::Blake2b; use crate::tipa::structured_scalar_message::structured_scalar_power; use ark_dh_commitments::{ @@ -460,9 +443,9 @@ mod tests { type IP = PairingInnerProduct; type IPC = IdentityCommitment, ::Fr>; - type PairingTIPA = TIPA; + type PairingTIPA = TIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (srs, ck_t) = PairingTIPA::setup(&mut rng, TEST_SIZE).unwrap(); let (ck_a, ck_b) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); @@ -473,9 +456,24 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); - let proof = PairingTIPA::prove(&srs, (&m_a, &m_b), (&ck_a, &ck_b, &ck_t)).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA-test"); + let proof = PairingTIPA::prove( + &mut proof_transcript, + &srs, + (&m_a, &m_b), + (&ck_a, &ck_b, &ck_t), + ) + .unwrap(); - assert!(PairingTIPA::verify(&v_srs, &ck_t, (&com_a, &com_b, &com_t), &proof).unwrap()); + let mut verif_transcript = Transcript::new(b"TIPA-test"); + assert!(PairingTIPA::verify( + &mut verif_transcript, + &v_srs, + &ck_t, + (&com_a, &com_b, &com_t), + &proof + ) + .unwrap()); } #[test] @@ -485,9 +483,9 @@ mod tests { ::G1Projective, ::Fr, >; - type MultiExpTIPA = TIPA; + type MultiExpTIPA = TIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (srs, ck_t) = MultiExpTIPA::setup(&mut rng, TEST_SIZE).unwrap(); let (ck_a, ck_b) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); @@ -501,9 +499,24 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); - let proof = MultiExpTIPA::prove(&srs, (&m_a, &m_b), (&ck_a, &ck_b, 
&ck_t)).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA-test"); + let proof = MultiExpTIPA::prove( + &mut proof_transcript, + &srs, + (&m_a, &m_b), + (&ck_a, &ck_b, &ck_t), + ) + .unwrap(); - assert!(MultiExpTIPA::verify(&v_srs, &ck_t, (&com_a, &com_b, &com_t), &proof).unwrap()); + let mut verif_transcript = Transcript::new(b"TIPA-test"); + assert!(MultiExpTIPA::verify( + &mut verif_transcript, + &v_srs, + &ck_t, + (&com_a, &com_b, &com_t), + &proof + ) + .unwrap()); } #[test] @@ -511,9 +524,9 @@ mod tests { type IP = ScalarInnerProduct<::Fr>; type IPC = IdentityCommitment<::Fr, ::Fr>; - type ScalarTIPA = TIPA; + type ScalarTIPA = TIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (srs, ck_t) = ScalarTIPA::setup(&mut rng, TEST_SIZE).unwrap(); let (ck_a, ck_b) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); @@ -528,9 +541,24 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); - let proof = ScalarTIPA::prove(&srs, (&m_a, &m_b), (&ck_a, &ck_b, &ck_t)).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA-test"); + let proof = ScalarTIPA::prove( + &mut proof_transcript, + &srs, + (&m_a, &m_b), + (&ck_a, &ck_b, &ck_t), + ) + .unwrap(); - assert!(ScalarTIPA::verify(&v_srs, &ck_t, (&com_a, &com_b, &com_t), &proof).unwrap()); + let mut verif_transcript = Transcript::new(b"TIPA-test"); + assert!(ScalarTIPA::verify( + &mut verif_transcript, + &v_srs, + &ck_t, + (&com_a, &com_b, &com_t), + &proof + ) + .unwrap()); } #[test] @@ -538,9 +566,9 @@ mod tests { type IP = PairingInnerProduct; type IPC = IdentityCommitment, ::Fr>; - type PairingTIPA = TIPA; + type PairingTIPA = TIPA; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (srs, ck_t) = PairingTIPA::setup(&mut rng, TEST_SIZE).unwrap(); let (ck_a, ck_b) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); @@ -568,7 +596,9 @@ mod tests { assert_eq!(com_a, IP::inner_product(&m_a_r, &ck_a_r).unwrap()); + let mut proof_transcript = Transcript::new(b"TIPA-test"); let proof = PairingTIPA::prove_with_srs_shift( + &mut proof_transcript, &srs, (&m_a_r, &m_b), (&ck_a_r, &ck_b, &ck_t), @@ -576,7 +606,9 @@ mod tests { ) .unwrap(); + let mut verif_transcript = Transcript::new(b"TIPA-test"); assert!(PairingTIPA::verify_with_srs_shift( + &mut verif_transcript, &v_srs, &ck_t, (&com_a, &com_b, &com_t), diff --git a/ip_proofs/src/tipa/structured_scalar_message.rs b/ip_proofs/src/tipa/structured_scalar_message.rs index 4fc459e..c92a922 100644 --- a/ip_proofs/src/tipa/structured_scalar_message.rs +++ b/ip_proofs/src/tipa/structured_scalar_message.rs @@ -1,25 +1,30 @@ -use ark_ec::{group::Group, PairingEngine, ProjectiveCurve}; -use ark_ff::{to_bytes, Field, One, PrimeField, UniformRand, Zero}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; -use ark_std::{cfg_iter, rand::Rng}; -use ark_std::{end_timer, start_timer}; -use digest::Digest; -use std::{marker::PhantomData, ops::MulAssign}; - -#[cfg(feature = "parallel")] -use rayon::prelude::*; - use crate::{ gipa::{GIPAProof, GIPA}, tipa::{ prove_commitment_key_kzg_opening, structured_generators_scalar_power, verify_commitment_key_g2_kzg_opening, TIPACompatibleSetup, VerifierSRS, SRS, }, + util::TranscriptProtocol, Error, }; use ark_dh_commitments::{identity::HomomorphicPlaceholderValue, DoublyHomomorphicCommitment}; use ark_inner_products::InnerProduct; +use 
std::{marker::PhantomData, ops::MulAssign}; + +use ark_ec::{group::Group, PairingEngine, ProjectiveCurve}; +use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; +use ark_std::{cfg_iter, rand::Rng}; +use ark_std::{end_timer, start_timer}; +use merlin::Transcript; + +#[cfg(feature = "parallel")] +use rayon::prelude::*; + +const GIPA_SSM_DOMAIN_SEP: &[u8] = b"ip_proofs-v0.3-GIPA_SSM"; +const TIPA_SSM_DOMAIN_SEP: &[u8] = b"ip_proofs-v0.3-TIPA_SSM"; + //TODO: Properly generalize the non-committed message approach of SIPP and MIPP to GIPA //TODO: Structured message is a special case of the non-committed message and does not rely on TIPA //TODO: Can support structured group element messages as well as structured scalar messages @@ -46,16 +51,14 @@ impl DoublyHomomorphicCommitment for SSMPlaceholderCommitment } } -pub struct GIPAWithSSM { +pub struct GIPAWithSSM { _inner_product: PhantomData, _left_commitment: PhantomData, _inner_product_commitment: PhantomData, - _digest: PhantomData, } -impl GIPAWithSSM +impl GIPAWithSSM where - D: Digest, IP: InnerProduct, LMC: DoublyHomomorphicCommitment, IPC: DoublyHomomorphicCommitment, @@ -67,12 +70,18 @@ where Ok((LMC::setup(rng, size)?, IPC::setup(rng, 1)?.pop().unwrap())) } + /// Proves an inner product of `m_a` and `m_b`, where `m_b` is of the form pub fn prove_with_structured_scalar_message( + transcript: &mut Transcript, values: (&[IP::LeftMessage], &[IP::RightMessage]), ck: (&[LMC::Key], &IPC::Key), - ) -> Result, IPC, D>, Error> { + ) -> Result, IPC>, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", GIPA_SSM_DOMAIN_SEP); + let (proof, _) = - , IPC, D>>::prove_with_aux( + , IPC>>::prove_with_aux( + transcript, values, ( ck.0, @@ -84,19 +93,24 @@ where } pub fn verify_with_structured_scalar_message( + transcript: &mut Transcript, ck: (&[LMC::Key], &IPC::Key), com: (&LMC::Output, &IPC::Output), scalar_b: &LMC::Scalar, - proof: &GIPAProof, IPC, D>, + proof: &GIPAProof, IPC>, ) -> Result { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", GIPA_SSM_DOMAIN_SEP); + // Calculate base commitments and recursive transcript //TODO: Scalar b not included in generating challenges let (base_com, transcript) = GIPA::verify_recursive_challenge_transcript( + transcript, (com.0, &LMC::Scalar::zero(), com.1), proof, )?; // Calculate base commitment keys - let (ck_a_base, ck_b_base) = GIPA::, IPC, D>::_compute_final_commitment_keys( + let (ck_a_base, ck_b_base) = GIPA::, IPC>::_compute_final_commitment_keys( (&ck.0, &vec![HomomorphicPlaceholderValue {}; ck.0.len()], &ck.1), &transcript, )?; @@ -127,18 +141,16 @@ where } } -pub struct TIPAWithSSM { +pub struct TIPAWithSSM { _inner_product: PhantomData, _left_commitment: PhantomData, _inner_product_commitment: PhantomData, _pair: PhantomData
<P>
, - _digest: PhantomData, } #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct TIPAWithSSMProof +pub struct TIPAWithSSMProof where - D: Digest, P: PairingEngine, IP: InnerProduct, LMC: DoublyHomomorphicCommitment + TIPACompatibleSetup, @@ -149,15 +161,14 @@ where LMC::Message: MulAssign, LMC::Output: MulAssign, { - gipa_proof: GIPAProof, IPC, D>, + gipa_proof: GIPAProof, IPC>, final_ck: LMC::Key, final_ck_proof: P::G2Projective, _pairing: PhantomData
<P>
, } -impl Clone for TIPAWithSSMProof +impl Clone for TIPAWithSSMProof where - D: Digest, P: PairingEngine, IP: InnerProduct, LMC: DoublyHomomorphicCommitment + TIPACompatibleSetup, @@ -178,9 +189,8 @@ where } } -impl TIPAWithSSM +impl TIPAWithSSM where - D: Digest, P: PairingEngine, IP: InnerProduct, LMC: DoublyHomomorphicCommitment + TIPACompatibleSetup, @@ -209,50 +219,44 @@ where } pub fn prove_with_structured_scalar_message( + transcript: &mut Transcript, srs: &SRS
<P>
, values: (&[IP::LeftMessage], &[IP::RightMessage]), ck: (&[LMC::Key], &IPC::Key), - ) -> Result, Error> { + ) -> Result, Error> { + // Domain-separate this protocol + transcript.append_message(b"dom-sep", TIPA_SSM_DOMAIN_SEP); + // Run GIPA let gipa = start_timer!(|| "GIPA"); - let (proof, aux) = - , IPC, D>>::prove_with_aux( - values, - ( - ck.0, - &vec![HomomorphicPlaceholderValue {}; values.1.len()], - &vec![ck.1.clone()], - ), - )?; + let (proof, aux) = , IPC>>::prove_with_aux( + transcript, + values, + ( + ck.0, + &vec![HomomorphicPlaceholderValue {}; values.1.len()], + &vec![ck.1.clone()], + ), + )?; end_timer!(gipa); // Prove final commitment key is wellformed let ck_kzg = start_timer!(|| "Prove commitment key"); let (ck_a_final, _) = aux.ck_base; - let transcript = aux.r_transcript; - let transcript_inverse = cfg_iter!(transcript) + let r_challenges = aux.r_challenges; + let r_chal_inverses = cfg_iter!(r_challenges) .map(|x| x.inverse().unwrap()) .collect(); // KZG challenge point - let mut counter_nonce: usize = 0; - let c = loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - //TODO: Should use CanonicalSerialize instead of ToBytes - hash_input.extend_from_slice(&to_bytes![transcript.first().unwrap(), ck_a_final]?); - if let Some(c) = LMC::Scalar::from_random_bytes(&D::digest(&hash_input)) { - break c; - }; - counter_nonce += 1; - }; + let kzg_chal = transcript.challenge_scalar(b"kzg_chal"); // Complete KZG proof let ck_a_kzg_opening = prove_commitment_key_kzg_opening( &srs.h_beta_powers, - &transcript_inverse, + &r_chal_inverses, &::one(), - &c, + &kzg_chal, )?; end_timer!(ck_kzg); @@ -265,17 +269,22 @@ where } pub fn verify_with_structured_scalar_message( + transcript: &mut Transcript, v_srs: &VerifierSRS
<P>
, ck_t: &IPC::Key, com: (&LMC::Output, &IPC::Output), scalar_b: &P::Fr, - proof: &TIPAWithSSMProof, + proof: &TIPAWithSSMProof, ) -> Result { - let (base_com, transcript) = GIPA::verify_recursive_challenge_transcript( + // Domain-separate this protocol + transcript.append_message(b"dom-sep", TIPA_SSM_DOMAIN_SEP); + + let (base_com, r_challenges) = GIPA::verify_recursive_challenge_transcript( + transcript, (com.0, scalar_b, com.1), &proof.gipa_proof, )?; - let transcript_inverse = cfg_iter!(transcript) + let r_chal_inverses = cfg_iter!(r_challenges) .map(|x| x.inverse().unwrap()) .collect(); @@ -283,32 +292,22 @@ where let ck_a_proof = &proof.final_ck_proof; // KZG challenge point - let mut counter_nonce: usize = 0; - let c = loop { - let mut hash_input = Vec::new(); - hash_input.extend_from_slice(&counter_nonce.to_be_bytes()[..]); - //TODO: Should use CanonicalSerialize instead of ToBytes - hash_input.extend_from_slice(&to_bytes![transcript.first().unwrap(), ck_a_final]?); - if let Some(c) = LMC::Scalar::from_random_bytes(&D::digest(&hash_input)) { - break c; - }; - counter_nonce += 1; - }; + let kzg_chal = transcript.challenge_scalar(b"kzg_chal"); // Check commitment key let ck_a_valid = verify_commitment_key_g2_kzg_opening( v_srs, &ck_a_final, &ck_a_proof, - &transcript_inverse, + &r_chal_inverses, &::one(), - &c, + &kzg_chal, )?; // Compute final scalar let mut power_2_b = scalar_b.clone(); let mut product_form = Vec::new(); - for x in transcript.iter() { + for x in r_challenges.iter() { product_form.push(::one() + &(x.inverse().unwrap() * &power_2_b)); power_2_b *= &power_2_b.clone(); } @@ -325,6 +324,7 @@ where } } +/// Returns `(1, s^1, s^2, ..., s^(num-1))` pub fn structured_scalar_power(num: usize, s: &F) -> Vec { let mut powers = vec![F::one()]; for i in 1..num { @@ -337,8 +337,6 @@ pub fn structured_scalar_power(num: usize, s: &F) -> Vec { mod tests { use super::*; use ark_bls12_381::Bls12_381; - use ark_std::rand::{rngs::StdRng, SeedableRng}; - use blake2::Blake2b; use ark_dh_commitments::{ afgho16::AFGHOCommitmentG1, identity::IdentityCommitment, pedersen::PedersenCommitment, @@ -358,9 +356,9 @@ mod tests { ::G1Projective, ::Fr, >; - type MultiExpTIPA = TIPAWithSSM; + type MultiExpTIPA = TIPAWithSSM; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (srs, ck_t) = MultiExpTIPA::setup(&mut rng, TEST_SIZE).unwrap(); let (ck_a, _) = srs.get_commitment_keys(); let v_srs = srs.get_verifier_key(); @@ -371,11 +369,18 @@ mod tests { let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); - let proof = - MultiExpTIPA::prove_with_structured_scalar_message(&srs, (&m_a, &m_b), (&ck_a, &ck_t)) - .unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA_SSM-test"); + let proof = MultiExpTIPA::prove_with_structured_scalar_message( + &mut proof_transcript, + &srs, + (&m_a, &m_b), + (&ck_a, &ck_t), + ) + .unwrap(); + let mut verif_transcript = Transcript::new(b"TIPA_SSM-test"); assert!(MultiExpTIPA::verify_with_structured_scalar_message( + &mut verif_transcript, &v_srs, &ck_t, (&com_a, &com_t), @@ -390,9 +395,9 @@ mod tests { type IP = ScalarInnerProduct<::Fr>; type IPC = IdentityCommitment<::Fr, ::Fr>; - type ScalarGIPA = GIPAWithSSM; + type ScalarGIPA = GIPAWithSSM; - let mut rng = StdRng::seed_from_u64(0u64); + let mut rng = ark_std::test_rng(); let (ck_a, ck_t) = ScalarGIPA::setup(&mut rng, TEST_SIZE).unwrap(); let mut m_a = Vec::new(); for _ in 0..TEST_SIZE { @@ -404,10 +409,17 @@ mod tests { 
let t = vec![IP::inner_product(&m_a, &m_b).unwrap()]; let com_t = IPC::commit(&vec![ck_t.clone()], &t).unwrap(); - let proof = - ScalarGIPA::prove_with_structured_scalar_message((&m_a, &m_b), (&ck_a, &ck_t)).unwrap(); + let mut proof_transcript = Transcript::new(b"TIPA_SSM-test"); + let proof = ScalarGIPA::prove_with_structured_scalar_message( + &mut proof_transcript, + (&m_a, &m_b), + (&ck_a, &ck_t), + ) + .unwrap(); + let mut verif_transcript = Transcript::new(b"TIPA_SSM-test"); assert!(ScalarGIPA::verify_with_structured_scalar_message( + &mut verif_transcript, (&ck_a, &ck_t), (&com_a, &com_t), &b, diff --git a/ip_proofs/src/util.rs b/ip_proofs/src/util.rs new file mode 100644 index 0000000..7a2f417 --- /dev/null +++ b/ip_proofs/src/util.rs @@ -0,0 +1,42 @@ +use crate::Error; + +use ark_ff::PrimeField; +use ark_serialize::CanonicalSerialize; +use ark_std::rand::{rngs::StdRng, SeedableRng}; + +// Convenience functions for generateing Fiat-Shamir challenges +pub(crate) trait TranscriptProtocol { + /// Appends a CanonicalSerialize-able element to the transcript + fn append_serializable(&mut self, label: &'static [u8], val: &S) -> Result<(), Error> + where + S: CanonicalSerialize + ?Sized; + + /// Produces a pseudorandom field element from the current transcript + fn challenge_scalar(&mut self, label: &'static [u8]) -> F; +} + +impl TranscriptProtocol for merlin::Transcript { + /// Appends a CanonicalSerialize-able element to the transcript + fn append_serializable(&mut self, label: &'static [u8], val: &S) -> Result<(), Error> + where + S: CanonicalSerialize + ?Sized, + { + // Serialize the input and give it to the transcript + let mut buf = Vec::new(); + val.serialize(&mut buf)?; + self.append_message(label, &buf); + + Ok(()) + } + + /// Produces a pseudorandom field element from the current transcript + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + // Fill a buf with random bytes + let mut buf = <::Seed as Default>::default(); + self.challenge_bytes(label, &mut buf); + + // Use the buf to make an RNG. 
Then use that RNG to generate a field element + let mut rng = StdRng::from_seed(buf); + F::rand(&mut rng) + } +} diff --git a/sipp/Cargo.toml b/sipp/Cargo.toml index 53a3a02..731e743 100644 --- a/sipp/Cargo.toml +++ b/sipp/Cargo.toml @@ -23,14 +23,13 @@ edition = "2018" [dependencies] ark-ec = "0.3" ark-ff = "0.3" +ark-serialize = "0.3" ark-std = "0.3" rayon = "1" rand_core = "0.6" -rand_chacha = "0.3" -digest = "0.9" +merlin = "3" [dev-dependencies] -blake2 = "0.9" csv = "1" serde = { version = "1", features = [ "derive" ] } ark-bls12-377 = { version = "0.3", features = [ "curve" ] } diff --git a/sipp/examples/scaling-ipp.rs b/sipp/examples/scaling-ipp.rs index 09ffeb1..bfd230d 100644 --- a/sipp/examples/scaling-ipp.rs +++ b/sipp/examples/scaling-ipp.rs @@ -2,12 +2,12 @@ use ark_bls12_377::*; use ark_ec::ProjectiveCurve; use ark_ff::UniformRand; -use ark_sipp::{rng::FiatShamirRng, SIPP}; -use ark_std::rand::seq::SliceRandom; -use blake2::Blake2s; +use ark_sipp::SIPP; +use ark_std::rand::{rngs::StdRng, seq::SliceRandom, SeedableRng}; use std::time::Instant; -type ExampleSIPP = SIPP; +type ExampleSIPP = SIPP; +use merlin::Transcript; use serde::Serialize; #[derive(Debug, Serialize)] @@ -37,7 +37,8 @@ fn main() { .expect("The environment variable `RAYON_NUM_THREADS` must be an integer") }); - let mut rng = FiatShamirRng::::from_seed(b"falafel"); + let mut rng = StdRng::from_entropy(); + let g = G1Projective::rand(&mut rng); let h = G2Projective::rand(&mut rng); let mut a_s = Vec::new(); @@ -73,12 +74,18 @@ fn main() { let z = ark_sipp::product_of_pairings_with_coeffs::(a_s, b_s, &r_s); direct_time += (start.elapsed().as_millis() as f64) / 1_000.0; + let mut proof_transcript = Transcript::new(b"SIPP-bench"); let start = Instant::now(); - let proof = ExampleSIPP::prove(a_s, b_s, &r_s, z.clone()).unwrap(); + let proof = + ExampleSIPP::prove(&mut proof_transcript, a_s, b_s, &r_s, z.clone()).unwrap(); prover_time += (start.elapsed().as_millis() as f64) / 1_000.0; + let mut verif_transcript = Transcript::new(b"SIPP-bench"); let start = Instant::now(); - assert!(ExampleSIPP::verify(a_s, b_s, &r_s, z.clone(), &proof).unwrap()); + assert!( + ExampleSIPP::verify(&mut verif_transcript, a_s, b_s, &r_s, z.clone(), &proof) + .unwrap() + ); verifier_time += (start.elapsed().as_millis() as f64) / 1_000.0; } let num_iters = num_iters as f64; @@ -86,9 +93,9 @@ fn main() { "=== Benchmarking SIPP over Bls12-377 with {} input(s) and {} thread(s) ====", m, num_threads, ); - println!("Direct time: {:?} seconds", direct_time / num_iters); - println!("Prover time: {:?} seconds", prover_time / num_iters); - println!("Verifier time: {:?} seconds", verifier_time / num_iters); + println!("Direct time: {:0.3} seconds", direct_time / num_iters); + println!("Prover time: {:0.3} seconds", prover_time / num_iters); + println!("Verifier time: {:0.3} seconds", verifier_time / num_iters); println!(); let d = ProfileData { size: m, diff --git a/sipp/src/lib.rs b/sipp/src/lib.rs index 266e348..b58a502 100644 --- a/sipp/src/lib.rs +++ b/sipp/src/lib.rs @@ -2,22 +2,22 @@ #![deny(warnings, unused, missing_docs)] #![forbid(unsafe_code)] -use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; -use ark_ff::{to_bytes, Field, One, PrimeField, UniformRand}; -use digest::Digest; -use rayon::prelude::*; +mod util; +use util::TranscriptProtocol; + use std::marker::PhantomData; -/// Fiat-Shamir Rng -pub mod rng; +use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; +use ark_ff::{Field, 
One, PrimeField}; +use merlin::Transcript; +use rayon::prelude::*; -use rng::FiatShamirRng; +const SIPP_DOMAIN_SEP: &[u8] = b"sipp-v0.3-SIPP"; /// SIPP is a inner-pairing product proof that allows a verifier to check an /// inner-pairing product over `n` elements with only a single pairing. -pub struct SIPP { +pub struct SIPP { _engine: PhantomData, - _digest: PhantomData, } /// `Proof` contains the GT elements produced by the prover. @@ -26,9 +26,10 @@ pub struct Proof { gt_elems: Vec<(E::Fqk, E::Fqk)>, } -impl SIPP { +impl SIPP { /// Produce a proof of the inner pairing product. pub fn prove( + transcript: &mut Transcript, a: &[E::G1Affine], b: &[E::G2Affine], r: &[E::Fr], @@ -41,8 +42,16 @@ impl SIPP { assert_eq!(length, b.len()); assert_eq!(length.count_ones(), 1); let mut proof_vec = Vec::new(); - // TODO(psi): should we also input a succinct bilinear group description to the rng? - let mut rng = FiatShamirRng::::from_seed(&to_bytes![a, b, r, value].unwrap()); + + // TODO(psi): should we also input a succinct bilinear group description to the transcript? + transcript.append_message(b"dom-sep", SIPP_DOMAIN_SEP); + + // Update transcript with first values + transcript.append_serializable(b"a", a); + transcript.append_serializable(b"b", b); + transcript.append_serializable(b"r", r); + transcript.append_serializable(b"value", &value); + let a = a .into_par_iter() .zip(r) @@ -62,26 +71,31 @@ impl SIPP { let z_l = product_of_pairings::(a_r, b_l); let z_r = product_of_pairings::(a_l, b_r); proof_vec.push((z_l, z_r)); - rng.absorb(&to_bytes![z_l, z_r].unwrap()); - let x: E::Fr = u128::rand(&mut rng).into(); + + // Update transcript + transcript.append_serializable(b"z_l", &z_l); + transcript.append_serializable(b"z_r", &z_r); + + // Get a challenge + let chal: E::Fr = transcript.challenge_scalar(b"x"); let a_proj = a_l .par_iter() .zip(a_r) .map(|(a_l, a_r)| { - let mut temp = a_r.mul(x); + let mut temp = a_r.mul(chal); temp.add_assign_mixed(a_l); temp }) .collect::>(); a = E::G1Projective::batch_normalization_into_affine(&a_proj); - let x_inv = x.inverse().unwrap(); + let chal_inv = chal.inverse().unwrap(); let b_proj = b_l .par_iter() .zip(b_r) .map(|(b_l, b_r)| { - let mut temp = b_r.mul(x_inv); + let mut temp = b_r.mul(chal_inv); temp.add_assign_mixed(b_l); temp }) @@ -96,6 +110,7 @@ impl SIPP { /// Verify an inner-pairing-product proof. pub fn verify( + transcript: &mut Transcript, a: &[E::G1Affine], b: &[E::G2Affine], r: &[E::Fr], @@ -111,41 +126,48 @@ impl SIPP { let proof_len = proof.gt_elems.len(); assert_eq!(proof_len as f32, f32::log2(length as f32)); - // TODO(psi): should we also input a succinct bilinear group description to the rng? - let mut rng = FiatShamirRng::::from_seed(&to_bytes![a, b, r, claimed_value].unwrap()); + // TODO(psi): should we also input a succinct bilinear group description to the transcript? 
+ transcript.append_message(b"dom-sep", SIPP_DOMAIN_SEP); - let x_s = proof + // Update transcript with first values + transcript.append_serializable(b"a", a); + transcript.append_serializable(b"b", b); + transcript.append_serializable(b"r", r); + transcript.append_serializable(b"value", &claimed_value); + + // Get all the challenges by running through the transcript + let chals = proof .gt_elems .iter() .map(|(z_l, z_r)| { - rng.absorb(&to_bytes![z_l, z_r].unwrap()); - let x: E::Fr = u128::rand(&mut rng).into(); - x + transcript.append_serializable(b"z_l", z_l); + transcript.append_serializable(b"z_r", z_r); + transcript.challenge_scalar(b"x") }) - .collect::>(); + .collect::>(); - let mut x_invs = x_s.clone(); - ark_ff::batch_inversion(&mut x_invs); + let mut chal_invs = chals.clone(); + ark_ff::batch_inversion(&mut chal_invs); let z_prime = claimed_value * &proof .gt_elems .par_iter() - .zip(&x_s) - .zip(&x_invs) - .map(|(((z_l, z_r), x), x_inv)| { - z_l.pow(x.into_repr()) * &z_r.pow(x_inv.into_repr()) + .zip(&chals) + .zip(&chal_invs) + .map(|(((z_l, z_r), chal), chal_inv)| { + z_l.pow(chal.into_repr()) * &z_r.pow(chal_inv.into_repr()) }) .reduce(|| E::Fqk::one(), |a, b| a * &b); let mut s: Vec = vec![E::Fr::one(); length]; let mut s_invs: Vec = vec![E::Fr::one(); length]; // TODO(psi): batch verify - for (j, (x, x_inv)) in x_s.into_iter().zip(x_invs).enumerate() { + for (j, (chal, chal_inv)) in chals.into_iter().zip(chal_invs).enumerate() { for i in 0..length { if i & (1 << (proof_len - j - 1)) != 0 { - s[i] *= &x; - s_invs[i] *= &x_inv; + s[i] *= &chal; + s_invs[i] *= &chal_inv; } } } @@ -210,11 +232,12 @@ pub fn product_of_pairings(a: &[E::G1Affine], b: &[E::G2Affine mod tests { use super::*; use ark_bls12_377::{Bls12_377, Fr, G1Projective, G2Projective}; - use blake2::Blake2s; + use ark_std::UniformRand; #[test] fn prove_and_verify_base_case() { - let mut rng = FiatShamirRng::::from_seed(&to_bytes![b"falafel"].unwrap()); + let mut rng = ark_std::test_rng(); + let mut a = Vec::with_capacity(32); let mut b = Vec::with_capacity(32); let mut r = Vec::with_capacity(32); @@ -226,11 +249,13 @@ mod tests { let z = product_of_pairings_with_coeffs::(&a, &b, &r); - let proof = SIPP::::prove(&a, &b, &r, z); + let mut proof_transcript = Transcript::new(b"SIPP-test"); + let proof = SIPP::::prove(&mut proof_transcript, &a, &b, &r, z); assert!(proof.is_ok()); let proof = proof.unwrap(); - let accept = SIPP::::verify(&a, &b, &r, z, &proof); + let mut verif_transcript = Transcript::new(b"SIPP-test"); + let accept = SIPP::::verify(&mut verif_transcript, &a, &b, &r, z, &proof); assert!(accept.is_ok()); assert!(accept.unwrap()); } diff --git a/sipp/src/rng.rs b/sipp/src/rng.rs deleted file mode 100644 index 7e22576..0000000 --- a/sipp/src/rng.rs +++ /dev/null @@ -1,67 +0,0 @@ -use ark_ff::{FromBytes, ToBytes}; -use ark_std::rand::{RngCore, SeedableRng}; -use digest::{generic_array::GenericArray, Digest}; -use rand_chacha::ChaChaRng; -use std::marker::PhantomData; - -/// A `SeedableRng` that refreshes its seed by hashing together the previous seed -/// and the new seed material. 
-// TODO: later: re-evaluate decision about ChaChaRng -pub struct FiatShamirRng { - r: ChaChaRng, - seed: GenericArray, - #[doc(hidden)] - digest: PhantomData, -} - -impl RngCore for FiatShamirRng { - #[inline] - fn next_u32(&mut self) -> u32 { - self.r.next_u32() - } - - #[inline] - fn next_u64(&mut self) -> u64 { - self.r.next_u64() - } - - #[inline] - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.r.fill_bytes(dest); - } - - #[inline] - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - Ok(self.r.fill_bytes(dest)) - } -} - -impl FiatShamirRng { - /// Create a new `Self` by initialzing with a fresh seed. - /// `self.seed = H(self.seed || new_seed)`. - #[inline] - pub fn from_seed<'a, T: 'a + ToBytes>(seed: &'a T) -> Self { - let mut bytes = Vec::new(); - seed.write(&mut bytes).expect("failed to convert to bytes"); - let seed = D::digest(&bytes); - let r_seed: [u8; 32] = FromBytes::read(seed.as_ref()).expect("failed to get [u32; 8]"); - let r = ChaChaRng::from_seed(r_seed); - Self { - r, - seed, - digest: PhantomData, - } - } - - /// Refresh `self.seed` with new material. Achieved by setting - /// `self.seed = H(self.seed || new_seed)`. - #[inline] - pub fn absorb<'a, T: 'a + ToBytes>(&mut self, seed: &'a T) { - let mut bytes = Vec::new(); - seed.write(&mut bytes).expect("failed to convert to bytes"); - bytes.extend_from_slice(&self.seed); - self.seed = D::digest(&bytes); - let seed: [u8; 32] = FromBytes::read(self.seed.as_ref()).expect("failed to get [u32; 8]"); - self.r = ChaChaRng::from_seed(seed); - } -} diff --git a/sipp/src/util.rs b/sipp/src/util.rs new file mode 100644 index 0000000..1d5f7de --- /dev/null +++ b/sipp/src/util.rs @@ -0,0 +1,44 @@ +use ark_ff::PrimeField; +use ark_serialize::CanonicalSerialize; +use ark_std::rand::{rngs::StdRng, SeedableRng}; + +// Convenience functions for generateing Fiat-Shamir challenges +pub(crate) trait TranscriptProtocol { + /// Appends a CanonicalSerialize-able element to the transcript. Panics on serialization error. + fn append_serializable(&mut self, label: &'static [u8], val: &S) + where + S: CanonicalSerialize + ?Sized; + + /// Produces a pseudorandom field element from the current transcript + fn challenge_scalar(&mut self, label: &'static [u8]) -> F; +} + +impl TranscriptProtocol for merlin::Transcript { + /// Appends a CanonicalSerialize-able element to the transcript. Panics on serialization error. + fn append_serializable(&mut self, label: &'static [u8], val: &S) + where + S: CanonicalSerialize + ?Sized, + { + // Serialize the input and give it to the transcript + let mut buf = Vec::new(); + val.serialize(&mut buf).unwrap(); + self.append_message(label, &buf); + } + + /// Produces a pseudorandom field element from the current transcript + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + // Fill a buf with random bytes + let mut buf = <::Seed as Default>::default(); + self.challenge_bytes(label, &mut buf); + + // Use the buf to make an RNG. Then use that RNG to generate a field element + let mut rng = StdRng::from_seed(buf); + let f = F::rand(&mut rng); + + let elems = &[f, f, f, f, f, f, f, f, f, f, f]; + let mut buf = Vec::new(); + elems.serialize(&mut buf).unwrap(); + + f + } +}
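
The pattern every call site above follows is the same: prover and verifier each build their own `merlin::Transcript`, append identical labeled messages (starting with a domain separator), and squeeze challenges from it, so both sides derive the same Fiat-Shamir scalars. The self-contained sketch below illustrates that round trip outside the crates in this patch; the `fs_challenge` helper and the `b"example"` / `b"example-protocol"` labels are illustrative stand-ins, not APIs introduced here.

// Minimal sketch (not part of the patch): mirrored transcripts yield identical challenges.
use ark_bls12_381::Fr;
use ark_ff::PrimeField;
use ark_serialize::CanonicalSerialize;
use ark_std::rand::{rngs::StdRng, SeedableRng};
use merlin::Transcript;

// Append a serialized message under a label, then squeeze a field element,
// mirroring what the crate's `append_serializable`/`challenge_scalar` helpers do.
fn fs_challenge<F: PrimeField, S: CanonicalSerialize>(t: &mut Transcript, msg: &S) -> F {
    let mut buf = Vec::new();
    msg.serialize(&mut buf).unwrap();
    t.append_message(b"msg", &buf);

    // Squeeze 32 bytes and expand them into a field element via a seeded RNG
    let mut seed = [0u8; 32];
    t.challenge_bytes(b"chal", &mut seed);
    F::rand(&mut StdRng::from_seed(seed))
}

fn main() {
    let value = Fr::from(42u64);

    // Both sides start from the same transcript label and domain separator
    let mut prover = Transcript::new(b"example");
    let mut verifier = Transcript::new(b"example");
    prover.append_message(b"dom-sep", b"example-protocol");
    verifier.append_message(b"dom-sep", b"example-protocol");

    // Identical transcripts yield identical Fiat-Shamir challenges
    let c_p: Fr = fs_challenge(&mut prover, &value);
    let c_v: Fr = fs_challenge(&mut verifier, &value);
    assert_eq!(c_p, c_v);
}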