Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Reduce DensePoly commit time #112

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions jolt-core/src/jolt/vm/instruction_lookups.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,7 @@ where
}
}

#[tracing::instrument(skip_all, name = "InstructionPolynomials.commit")]
fn commit(batched_polys: &Self::BatchedPolynomials) -> Self::Commitment {
let (dim_read_commitment_gens, dim_read_commitment) = batched_polys
.batched_dim_read
Expand Down
11 changes: 9 additions & 2 deletions jolt-core/src/poly/commitments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,8 @@ impl<G: CurveGroup> MultiCommitGens<G> {

/// Pedersen-style commitment operations for scalar values over a curve group `G`.
pub trait Commitments<G: CurveGroup>: Sized {
    /// Commits to a single value with blinding factor `blind` using the generators in `gens_n`.
    fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens<G>) -> G;
    /// Commits to `inputs` via a multi-scalar multiplication over pre-normalized
    /// affine `bases` (no blinding term). `bases.len()` must equal `inputs.len()`.
    fn batch_commit(inputs: &[Self], bases: &[G::Affine]) -> G;
    /// Commits to `inputs` with blinding factor `blind` using the generators in
    /// `gens_n`; requires `gens_n.n == inputs.len()`.
    fn batch_commit_blinded(inputs: &[Self], blind: &G::ScalarField, gens_n: &MultiCommitGens<G>) -> G;
}

impl<G: CurveGroup> Commitments<G> for G::ScalarField {
Expand All @@ -81,7 +82,13 @@ impl<G: CurveGroup> Commitments<G> for G::ScalarField {
gens_n.G[0] * self + gens_n.h * blind
}

fn batch_commit(inputs: &[Self], blind: &G::ScalarField, gens_n: &MultiCommitGens<G>) -> G {
fn batch_commit(inputs: &[Self], bases: &[G::Affine]) -> G {
assert_eq!(bases.len(), inputs.len());

VariableBaseMSM::msm(&bases, &inputs).unwrap()
}

fn batch_commit_blinded(inputs: &[Self], blind: &G::ScalarField, gens_n: &MultiCommitGens<G>) -> G {
assert_eq!(gens_n.n, inputs.len());

let mut bases = CurveGroup::normalize_batch(gens_n.G.as_ref());
Expand Down
31 changes: 5 additions & 26 deletions jolt-core/src/poly/dense_mlpoly.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,14 @@ use ark_std::Zero;
use core::ops::Index;
use merlin::Transcript;
use std::ops::AddAssign;
use rayon::prelude::*;

#[cfg(feature = "ark-msm")]
use ark_ec::VariableBaseMSM;

#[cfg(not(feature = "ark-msm"))]
use crate::msm::VariableBaseMSM;

#[cfg(feature = "multicore")]
use rayon::prelude::*;

#[derive(Debug, Clone, PartialEq)]
pub struct DensePolynomial<F> {
Expand Down Expand Up @@ -108,7 +107,6 @@ impl<F: PrimeField> DensePolynomial<F> {
)
}

#[cfg(feature = "multicore")]
fn commit_inner<G: CurveGroup<ScalarField = F>>(
&self,
blinds: &[F],
Expand All @@ -117,34 +115,15 @@ impl<F: PrimeField> DensePolynomial<F> {
let L_size = blinds.len();
let R_size = self.Z.len() / L_size;
assert_eq!(L_size * R_size, self.Z.len());
let C = (0..L_size)
.into_par_iter()
.map(|i| {
Commitments::batch_commit(
self.Z[R_size * i..R_size * (i + 1)].as_ref(),
&blinds[i],
gens,
)
})
.collect();
PolyCommitment { C }
}

#[cfg(not(feature = "multicore"))]
fn commit_inner<G: CurveGroup<ScalarField = F>>(
&self,
blinds: &[F],
gens: &MultiCommitGens<G>,
) -> PolyCommitment<G> {
let L_size = blinds.len();
let R_size = self.Z.len() / L_size;
assert_eq!(L_size * R_size, self.Z.len());
let gens = CurveGroup::normalize_batch(&gens.G);

let C = (0..L_size)
.into_par_iter()
.map(|i| {
Commitments::batch_commit(
self.Z[R_size * i..R_size * (i + 1)].as_ref(),
&blinds[i],
gens,
&gens
)
})
.collect();
Expand Down
2 changes: 1 addition & 1 deletion jolt-core/src/poly/unipoly.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ impl<F: PrimeField> UniPoly<F> {
gens: &MultiCommitGens<G>,
blind: &F,
) -> G {
Commitments::batch_commit(&self.coeffs, blind, gens)
Commitments::batch_commit_blinded(&self.coeffs, blind, gens)
}
}

Expand Down
8 changes: 4 additions & 4 deletions jolt-core/src/subprotocols/dot_product.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,15 @@ impl<G: CurveGroup> DotProductProof<G> {
let r_delta = random_tape.random_scalar(b"r_delta");
let r_beta = random_tape.random_scalar(b"r_beta");

let Cx = Commitments::batch_commit(x_vec, blind_x, gens_n);
let Cx = Commitments::batch_commit_blinded(x_vec, blind_x, gens_n);
<Transcript as ProofTranscript<G>>::append_point(transcript, b"Cx", &Cx);

let Cy = y.commit(blind_y, gens_1);
<Transcript as ProofTranscript<G>>::append_point(transcript, b"Cy", &Cy);

<Transcript as ProofTranscript<G>>::append_scalars(transcript, b"a", a_vec);

let delta = Commitments::batch_commit(&d_vec, &r_delta, gens_n);
let delta = Commitments::batch_commit_blinded(&d_vec, &r_delta, gens_n);
<Transcript as ProofTranscript<G>>::append_point(transcript, b"delta", &delta);

let dotproduct_a_d = DotProductProof::<G>::compute_dotproduct(a_vec, &d_vec);
Expand Down Expand Up @@ -123,7 +123,7 @@ impl<G: CurveGroup> DotProductProof<G> {
let c = <Transcript as ProofTranscript<G>>::challenge_scalar(transcript, b"c");

let mut result = *Cx * c + self.delta
== Commitments::batch_commit(self.z.as_ref(), &self.z_delta, gens_n);
== Commitments::batch_commit_blinded(self.z.as_ref(), &self.z_delta, gens_n);

let dotproduct_z_a = DotProductProof::<G>::compute_dotproduct(&self.z, a);
result &= *Cy * c + self.beta == dotproduct_z_a.commit(&self.z_beta, gens_1);
Expand Down Expand Up @@ -195,7 +195,7 @@ impl<G: CurveGroup> DotProductProofLog<G> {
.collect::<Vec<(G::ScalarField, G::ScalarField)>>()
};

let Cx = Commitments::batch_commit(x_vec, blind_x, &gens.gens_n);
let Cx = Commitments::batch_commit_blinded(x_vec, blind_x, &gens.gens_n);
<Transcript as ProofTranscript<G>>::append_point(transcript, b"Cx", &Cx);

let Cy = y.commit(blind_y, &gens.gens_1);
Expand Down
Loading