Skip to content

Commit

Permalink
Commitment Scheme evaluation per size (#483)
Browse files Browse the repository at this point in the history
  • Loading branch information
spapinistarkware authored Mar 26, 2024
1 parent dd8770d commit a7e8bd1
Show file tree
Hide file tree
Showing 9 changed files with 248 additions and 168 deletions.
7 changes: 3 additions & 4 deletions src/core/backend/cpu/quotients.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ pub fn accumulate_row_quotients(
let mut row_accumlator = SecureField::zero();
for sample in samples {
let mut numerator = SecureField::zero();
for (column_index, sampled_value) in &sample.column_indices_and_values {
for (column_index, sampled_value) in &sample.columns_and_values {
let column = &columns[*column_index];
let value = column[row];
let linear_term = complex_conjugate_line(sample.point, *sampled_value, domain_point);
Expand All @@ -54,8 +54,7 @@ pub fn accumulate_row_quotients(
domain_point.into_ef(),
);

row_accumlator = row_accumlator
* random_coeff.pow(sample.column_indices_and_values.len() as u128)
row_accumlator = row_accumlator * random_coeff.pow(sample.columns_and_values.len() as u128)
+ numerator / denominator;
}
row_accumlator
Expand Down Expand Up @@ -85,7 +84,7 @@ mod tests {
coeff,
&[ColumnSampleBatch {
point,
column_indices_and_values: vec![(0, value)],
columns_and_values: vec![(0, value)],
}],
);
let quot_poly_base_field =
Expand Down
64 changes: 32 additions & 32 deletions src/core/commitment_scheme/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,21 @@ use super::super::circle::CirclePoint;
use super::super::fields::m31::BaseField;
use super::super::fields::qm31::SecureField;
use super::super::fri::{FriConfig, FriProof, FriProver};
use super::super::oods::get_pair_oods_quotient;
use super::super::poly::circle::CanonicCoset;
use super::super::poly::BitReversedOrder;
use super::super::proof_of_work::{ProofOfWork, ProofOfWorkProof};
use super::super::prover::{
LOG_BLOWUP_FACTOR, LOG_LAST_LAYER_DEGREE_BOUND, N_QUERIES, PROOF_OF_WORK_BITS,
};
use super::super::ColumnVec;
use super::quotients::{compute_fri_quotients, PointSample};
use super::utils::TreeVec;
use crate::commitment_scheme::blake2_hash::{Blake2sHash, Blake2sHasher};
use crate::commitment_scheme::merkle_input::{MerkleTreeColumnLayout, MerkleTreeInput};
use crate::commitment_scheme::mixed_degree_decommitment::MixedDecommitment;
use crate::commitment_scheme::mixed_degree_merkle_tree::MixedDegreeMerkleTree;
use crate::core::channel::Channel;
use crate::core::poly::circle::SecureEvaluation;

type MerkleHasher = Blake2sHasher;
type ProofChannel = Blake2sChannel;
Expand Down Expand Up @@ -65,43 +66,42 @@ impl CommitmentSchemeProver {

pub fn prove_values(
&self,
prove_points: TreeVec<ColumnVec<Vec<CirclePoint<SecureField>>>>,
sampled_points: TreeVec<ColumnVec<Vec<CirclePoint<SecureField>>>>,
channel: &mut ProofChannel,
) -> CommitmentSchemeProof {
// Evaluate polynomials on open points.
let proved_values =
self.polynomials()
.zip_cols(&prove_points)
.map_cols(|(poly, points)| {
points
.iter()
.map(|point| poly.eval_at_point(*point))
.collect_vec()
});
channel.mix_felts(&proved_values.clone().flatten_cols());

// Compute oods quotients for boundary constraints on prove_points.
let quotients = self
.evaluations()
.zip_cols(&proved_values)
.zip_cols(&prove_points)
.map_cols(|((evaluation, values), points)| {
zip(points, values)
.map(|(&point, &value)| {
get_pair_oods_quotient(point, value, evaluation).bit_reverse()
// Evaluate polynomials on samples points.
let samples = self
.polynomials()
.zip_cols(&sampled_points)
.map_cols(|(poly, points)| {
points
.iter()
.map(|&point| PointSample {
point,
value: poly.eval_at_point(point),
})
.collect_vec()
});
let sampled_values = samples
.as_cols_ref()
.map_cols(|x| x.iter().map(|o| o.value).collect());
channel.mix_felts(&sampled_values.clone().flatten_cols());

// Compute oods quotients for boundary constraints on the sampled points.
let columns = self.evaluations().flatten();
let quotients = compute_fri_quotients(&columns, &samples.flatten(), channel.draw_felt());

// TODO(spapini): Conversion to CircleEvaluation can be removed when FRI supports
// SecureColumn.
let quotients = quotients
.into_iter()
.map(SecureEvaluation::to_cpu)
.collect_vec();

// Run FRI commitment phase on the oods quotients.
let fri_config = FriConfig::new(LOG_LAST_LAYER_DEGREE_BOUND, LOG_BLOWUP_FACTOR, N_QUERIES);
// TODO(spapini): Remove rev() when we start accumulating by size.
// This is only done because fri demands descending sizes.
let fri_prover = FriProver::<CPUBackend, MerkleHasher>::commit(
channel,
fri_config,
&quotients.flatten_cols_rev(),
);
let fri_prover =
FriProver::<CPUBackend, MerkleHasher>::commit(channel, fri_config, &quotients);

// Proof of work.
let proof_of_work = ProofOfWork::new(PROOF_OF_WORK_BITS).prove(channel);
Expand All @@ -125,7 +125,7 @@ impl CommitmentSchemeProver {
let decommitments = decommitment_results.map(|(_, d)| d);

CommitmentSchemeProof {
proved_values,
sampled_values,
decommitments,
queried_values,
proof_of_work,
Expand All @@ -136,7 +136,7 @@ impl CommitmentSchemeProver {

#[derive(Debug)]
pub struct CommitmentSchemeProof {
pub proved_values: TreeVec<ColumnVec<Vec<SecureField>>>,
pub sampled_values: TreeVec<ColumnVec<Vec<SecureField>>>,
pub decommitments: TreeVec<MixedDecommitment<BaseField, MerkleHasher>>,
pub queried_values: TreeVec<ColumnVec<Vec<BaseField>>>,
pub proof_of_work: ProofOfWorkProof,
Expand Down
172 changes: 170 additions & 2 deletions src/core/commitment_scheme/quotients.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,21 @@
use std::cmp::Reverse;
use std::collections::BTreeMap;
use std::iter::zip;

use itertools::{izip, multiunzip, Itertools};

use crate::core::backend::cpu::quotients::accumulate_row_quotients;
use crate::core::backend::Backend;
use crate::core::circle::CirclePoint;
use crate::core::fields::m31::BaseField;
use crate::core::fields::qm31::SecureField;
use crate::core::fields::secure_column::SecureColumn;
use crate::core::poly::circle::{CircleDomain, CircleEvaluation};
use crate::core::fri::SparseCircleEvaluation;
use crate::core::poly::circle::{CanonicCoset, CircleDomain, CircleEvaluation, SecureEvaluation};
use crate::core::poly::BitReversedOrder;
use crate::core::prover::VerificationError;
use crate::core::queries::SparseSubCircleDomain;
use crate::core::utils::bit_reverse_index;

pub trait QuotientOps: Backend {
/// Accumulates the quotients of the columns at the given domain.
Expand All @@ -26,5 +37,162 @@ pub struct ColumnSampleBatch {
/// The point at which the columns are sampled.
pub point: CirclePoint<SecureField>,
/// The sampled column indices and their values at the point.
pub column_indices_and_values: Vec<(usize, SecureField)>,
pub columns_and_values: Vec<(usize, SecureField)>,
}
impl ColumnSampleBatch {
/// Groups column samples by sampled point.
/// # Arguments
/// samples: For each column, a vector of samples.
pub fn new(samples: &[&Vec<PointSample>]) -> Vec<Self> {
// Group samples by point, and create a ColumnSampleBatch for each point.
// This should keep a stable ordering.
let mut grouped_samples = BTreeMap::new();
for (column_index, samples) in samples.iter().enumerate() {
for sample in samples.iter() {
grouped_samples
.entry(sample.point)
.or_insert_with(Vec::new)
.push((column_index, sample.value));
}
}
grouped_samples
.into_iter()
.map(|(point, columns_and_values)| ColumnSampleBatch {
point,
columns_and_values,
})
.collect()
}
}

/// A sample of a single column at a single out-of-domain point.
pub struct PointSample {
    /// The point the column's polynomial was evaluated at.
    pub point: CirclePoint<SecureField>,
    /// The evaluation of the column's polynomial at `point`.
    pub value: SecureField,
}

/// Computes the FRI quotient evaluations for all columns.
///
/// Columns sharing the same domain log-size are batched into a single secure
/// evaluation; the resulting evaluations are ordered by descending log-size.
pub fn compute_fri_quotients<B: QuotientOps>(
    columns: &[&CircleEvaluation<B, BaseField, BitReversedOrder>],
    samples: &[Vec<PointSample>],
    random_coeff: SecureField,
) -> Vec<SecureEvaluation<B>> {
    // Sort by descending domain size, then accumulate per size group.
    let groups = zip(columns, samples)
        .sorted_by_key(|(column, _)| Reverse(column.domain.log_size()))
        .group_by(|(column, _)| column.domain.log_size());
    let mut quotients = Vec::new();
    for (log_size, group) in &groups {
        let (group_columns, group_samples): (Vec<_>, Vec<_>) = group.unzip();
        let domain = CanonicCoset::new(log_size).circle_domain();
        // TODO: slice.
        let sample_batches = ColumnSampleBatch::new(&group_samples);
        let values =
            B::accumulate_quotients(domain, &group_columns, random_coeff, &sample_batches);
        quotients.push(SecureEvaluation { domain, values });
    }
    quotients
}

/// Computes the FRI answers for all columns, grouped by descending column log-size.
///
/// # Errors
/// Propagates [`VerificationError`] from the per-size computation.
pub fn fri_answers(
    column_log_sizes: Vec<u32>,
    samples: &[Vec<PointSample>],
    random_coeff: SecureField,
    query_domain_per_log_size: BTreeMap<u32, SparseSubCircleDomain>,
    queried_values_per_column: &[Vec<BaseField>],
) -> Result<Vec<SparseCircleEvaluation<SecureField>>, VerificationError> {
    // Sort columns by descending log-size, then answer each size group separately.
    let groups = izip!(column_log_sizes, samples, queried_values_per_column)
        .sorted_by_key(|(log_size, ..)| Reverse(*log_size))
        .group_by(|(log_size, ..)| *log_size);
    groups
        .into_iter()
        .map(|(log_size, group)| {
            let (_, group_samples, group_queried_values): (Vec<_>, Vec<_>, Vec<_>) =
                multiunzip(group);
            fri_answers_for_log_size(
                log_size,
                &group_samples,
                random_coeff,
                &query_domain_per_log_size[&log_size],
                &group_queried_values,
            )
        })
        .collect()
}

/// Computes the FRI answers (accumulated quotient values at the queried positions) for
/// all columns of a single log-size.
///
/// # Arguments
/// * `log_size`: Log-size of the columns' commitment domain.
/// * `samples`: For each column, the out-of-domain samples it must agree with.
/// * `random_coeff`: Random coefficient used to batch the column quotients.
/// * `query_domain`: The queried subdomains of the commitment domain.
/// * `queried_values_per_column`: For each column, its values at the queried positions.
///
/// # Errors
/// Returns [`VerificationError::InvalidStructure`] when a column's queried values do not
/// exactly cover the query domain (too few or too many values).
pub fn fri_answers_for_log_size(
    log_size: u32,
    samples: &[&Vec<PointSample>],
    random_coeff: SecureField,
    query_domain: &SparseSubCircleDomain,
    queried_values_per_column: &[&Vec<BaseField>],
) -> Result<SparseCircleEvaluation<SecureField>, VerificationError> {
    let commitment_domain = CanonicCoset::new(log_size).circle_domain();
    let batched_samples = ColumnSampleBatch::new(samples);
    // Hoisted out of the validation loop: `flatten()` rebuilds the flattened domain on
    // every call, and the expected length is identical for every column.
    let n_queried_values = query_domain.flatten().len();
    for x in queried_values_per_column {
        if x.len() != n_queried_values {
            return Err(VerificationError::InvalidStructure);
        }
    }
    // Per-column cursors over the queried values; each subdomain consumes the next
    // `domain.size()` values from every column.
    let mut queried_values_per_column = queried_values_per_column
        .iter()
        .map(|q| q.iter())
        .collect_vec();

    let mut evals = Vec::new();
    for subdomain in query_domain.iter() {
        let domain = subdomain.to_circle_domain(&commitment_domain);
        // Reassemble each column's evaluation on this subdomain.
        let mut column_evals = Vec::new();
        for queried_values in queried_values_per_column.iter_mut() {
            let eval = CircleEvaluation::new(
                domain,
                queried_values.take(domain.size()).copied().collect_vec(),
            );
            column_evals.push(eval);
        }
        // TODO(spapini): bit reverse iterator.
        let mut values = Vec::new();
        for row in 0..domain.size() {
            let domain_point = domain.at(bit_reverse_index(row, log_size));
            let value = accumulate_row_quotients(
                &batched_samples,
                &column_evals.iter().collect_vec(),
                row,
                random_coeff,
                domain_point,
            );
            values.push(value);
        }
        let eval = CircleEvaluation::new(domain, values);
        evals.push(eval);
    }

    let res = SparseCircleEvaluation::new(evals);
    // All cursors must be exhausted; leftover values mean the proof structure is
    // inconsistent with the query domain.
    if !queried_values_per_column.iter().all(|x| x.is_empty()) {
        return Err(VerificationError::InvalidStructure);
    }
    Ok(res)
}

#[cfg(test)]
mod tests {
    use crate::core::backend::cpu::{CPUCircleEvaluation, CPUCirclePoly};
    use crate::core::circle::SECURE_FIELD_CIRCLE_GEN;
    use crate::core::commitment_scheme::quotients::{compute_fri_quotients, PointSample};
    use crate::core::poly::circle::CanonicCoset;
    use crate::{m31, qm31};

    // Checks that the quotient of a degree-bound polynomial by a correct sample is
    // itself low degree: interpolating the quotient evaluation must land back in the
    // FFT space of the original size.
    #[test]
    fn test_quotients_are_low_degree() {
        const LOG_SIZE: u32 = 7;
        let polynomial = CPUCirclePoly::new((0..1 << LOG_SIZE).map(|i| m31!(i)).collect());
        // Evaluate on a domain twice the polynomial's size (blowup of 1).
        let eval_domain = CanonicCoset::new(LOG_SIZE + 1).circle_domain();
        let eval = polynomial.evaluate(eval_domain);
        // Sample the polynomial at a secure-field point and quotient by that sample.
        let point = SECURE_FIELD_CIRCLE_GEN;
        let value = polynomial.eval_at_point(point);
        let coeff = qm31!(1, 2, 3, 4);
        let quot_eval =
            compute_fri_quotients(&[&eval], &[vec![PointSample { point, value }]], coeff)
                .pop()
                .unwrap();
        // Only the first secure-column coordinate is inspected here.
        let quot_poly_base_field =
            CPUCircleEvaluation::new(eval_domain, quot_eval.values.columns[0].clone())
                .interpolate();
        assert!(quot_poly_base_field.is_in_fft_space(LOG_SIZE));
    }
}
7 changes: 0 additions & 7 deletions src/core/commitment_scheme/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,11 +95,4 @@ impl<T> TreeVec<ColumnVec<Vec<T>>> {
pub fn flatten_cols(self) -> Vec<T> {
self.0.into_iter().flatten().flatten().collect()
}

// TODO(spapini): Remove after accumulating oods quotients by size.
/// Flattens a [`TreeVec<ColumVec<T>>`] of [Vec]s into a single [Vec] with all the elements
/// combined, in reverse order.
pub fn flatten_cols_rev(self) -> Vec<T> {
self.0.into_iter().flatten().flatten().rev().collect()
}
}
Loading

0 comments on commit a7e8bd1

Please sign in to comment.