diff --git a/halo2/katex-header.html b/.github/katex-header.html similarity index 99% rename from halo2/katex-header.html rename to .github/katex-header.html index 98e85904fa..32ac35a411 100644 --- a/halo2/katex-header.html +++ b/.github/katex-header.html @@ -12,4 +12,4 @@ ] }); }); - \ No newline at end of file + diff --git a/.github/scripts/run-examples.sh b/.github/scripts/run-examples.sh new file mode 100755 index 0000000000..c96c68e1d0 --- /dev/null +++ b/.github/scripts/run-examples.sh @@ -0,0 +1,22 @@ +#!/bin/sh + +# Get the list of examples from "examples" dir & Cargo.toml +EXAMPLES_WITH_FEATURES=$(awk '/^\[\[example\]\]/ { getline; name=$3; name=substr(name, 2, length(name)-2); getline; if ($1 == "required-features") { features=$NF; gsub(/["\[\]]/, "", features); print name "#" features } }' ./halo2_proofs/Cargo.toml) +EXAMPLES_WITHOUT_FEATURES=$(ls ./halo2_proofs/examples/*.rs | xargs -n1 basename -s .rs) + +# Remove examples with features listed in Cargo.toml from examples without features +EXAMPLES_WITHOUT_FEATURES=$(echo "$EXAMPLES_WITHOUT_FEATURES" | grep -vFx "$(echo "$EXAMPLES_WITH_FEATURES" | cut -d '#' -f 1)") + +# Combine examples with and without features +EXAMPLES=$(echo "$EXAMPLES_WITH_FEATURES $EXAMPLES_WITHOUT_FEATURES" | tr ' ' '\n' | sort -u | tr '\n' ' ') + +# Run the examples +for example in $EXAMPLES; do + if [ "$(echo "$example" | grep '#')" ]; then + name=$(echo $example | cut -d '#' -f 1) + features=$(echo $example | cut -d '#' -f 2) + cargo run --package halo2_proofs --example $name --features $features + else + cargo run --package halo2_proofs --example $example + fi +done diff --git a/.github/scripts/wasm-target-test-build.sh b/.github/scripts/wasm-target-test-build.sh new file mode 100755 index 0000000000..eb486aed8e --- /dev/null +++ b/.github/scripts/wasm-target-test-build.sh @@ -0,0 +1,27 @@ +#!/bin/sh + +GIT_ROOT=$(pwd) + +cd /tmp + +# create test project +cargo new foobar +cd foobar + +# set rust-toolchain same as "halo2" +cp "${GIT_ROOT}/rust-toolchain" . 
+ +# add wasm32-* targets +rustup target add wasm32-unknown-unknown wasm32-wasi + +# add dependencies +cargo add --path "${GIT_ROOT}/halo2_proofs" --features batch,dev-graph,gadget-traces,lookup-any-sanity-checks +cargo add getrandom --features js --target wasm32-unknown-unknown + +# test build for wasm32-* targets +cargo build --release --target wasm32-unknown-unknown +cargo build --release --target wasm32-wasi + +# delete test project +cd ../ +rm -rf foobar diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cdce1546c0..5d92f02ffb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,10 +17,9 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] include: - feature_set: basic - features: batch,dev-graph,gadget-traces + features: --features batch,dev-graph,gadget-traces - feature_set: all - features: batch,dev-graph,gadget-traces,test-dev-graph,thread-safe-region,sanity-checks,circuit-params - + features: --all-features steps: - uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 @@ -30,7 +29,18 @@ jobs: uses: actions-rs/cargo@v1 with: command: test - args: --verbose --release --workspace --no-default-features --features "${{ matrix.features }}" + args: --verbose --release --workspace --no-default-features ${{ matrix.features }} + + examples: + name: Run the examples + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - uses: actions-rs/toolchain@v1 + - name: Run examples + run: | + .github/scripts/run-examples.sh build: name: Build target ${{ matrix.target }} @@ -46,13 +56,12 @@ jobs: - uses: actions-rs/toolchain@v1 with: override: false + default: true - name: Add target run: rustup target add ${{ matrix.target }} - - name: cargo build - uses: actions-rs/cargo@v1 - with: - command: build - args: --no-default-features --features batch,dev-graph,gadget-traces --target ${{ matrix.target }} + - name: Run script file + run: | + .github/scripts/wasm-target-test-build.sh bitrot: name: Bitrot check diff --git a/.github/workflows/docs-ghpages.yml b/.github/workflows/docs-ghpages.yml index 358c8f2950..56dd07cbd9 100644 --- a/.github/workflows/docs-ghpages.yml +++ b/.github/workflows/docs-ghpages.yml @@ -16,9 +16,17 @@ jobs: - uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 with: - toolchain: nightly + toolchain: nightly-2024-05-17 override: true + - name: Copy the html file to workspace crates + run: | + for cargo_toml in $(find . -name Cargo.toml); do + crate_dir=$(dirname $cargo_toml) + cp .github/katex-header.html $crate_dir + echo "Copied html file to $crate_dir" + done + - name: Build latest rustdocs uses: actions-rs/cargo@v1 with: @@ -33,8 +41,16 @@ jobs: cp -R ./target/doc ./docs echo "" > ./docs/index.html + - name: Delete the html files copied to every crate + run: | + for cargo_toml in $(find . 
-name Cargo.toml); do
+            crate_dir=$(dirname $cargo_toml)
+            rm -f $crate_dir/katex-header.html
+            echo "Deleted html file in $crate_dir"
+          done
+
       - name: Deploy to GitHub Pages
-        uses: peaceiris/actions-gh-pages@v3
+        uses: peaceiris/actions-gh-pages@v4
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./docs
diff --git a/.gitignore b/.gitignore
index f2af733bf1..123de7b254 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,6 @@ Cargo.lock
 .vscode
 **/*.html
 .DS_Store
+
+layout.png
+serialization-test.pk
diff --git a/Cargo.toml b/Cargo.toml
index b44700ec43..1e75bcb87e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,4 +2,10 @@
 members = [
     "halo2",
     "halo2_proofs",
+    "halo2_frontend",
+    "halo2_middleware",
+    "halo2_backend",
+    "halo2_debug",
+    "p3_frontend",
 ]
+resolver = "2"
diff --git a/README.md b/README.md
index 3c7aa59572..da1d24b174 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,15 @@
-# halo2 [![Crates.io](https://img.shields.io/crates/v/halo2.svg)](https://crates.io/crates/halo2) #
+# halo2
 
 ## [Documentation](https://privacy-scaling-explorations.github.io/halo2/halo2_proofs)
 
+This repository contains the [halo2](https://github.com/zcash/halo2) fork from
+PSE and includes contributions from the community.
+
+We use the `main` branch for development, which means it may contain
+unstable/unfinished features. For end users, we recommend using the tagged
+releases, which can be seen as curated checkpoints with some level of
+stability guarantees.
+
 For experimental features `privacy-scaling-explorations/halo2` fork adds, please refer to [`experimental-features.md`](./book/src/user/experimental-features.md).
 
 ## Minimum Supported Rust Version
diff --git a/book/src/user/experimental-features.md b/book/src/user/experimental-features.md
index 78ea802ce4..5677fd38b9 100644
--- a/book/src/user/experimental-features.md
+++ b/book/src/user/experimental-features.md
@@ -138,3 +138,38 @@ For some use cases that want to keep configured `ConstraintSystem` unchanged the
 ## `Evaluator` and `evaluate_h`
 
 They are introduced to improve quotient computation speed and memory usage for circuit with complicated `Expression`.
+
+## Modular design (frontend-backend split)
+
+The halo2 implementation has been split into two separate parts: the frontend
+and the backend, following these definitions:
+- frontend: allows the user to specify the circuit logic and its satisfying
+  witness. It must provide a way to translate this logic into a low-level
+  arithmetization format specified in the middleware module.
+- backend: the proving system implementation that receives the middleware
+  circuit arithmetization and performs the following tasks:
+  - Generate the setup (proving and verifying keys)
+  - Generate a proof (with witness as input)
+  - Verify a proof
+
+A note on naming: "halo2" can mean different things:
+- halo2 proof system, the protocol
+- halo2 proof system implementation, the backend
+- halo2 circuit library, the frontend (includes the halo2 circuit API, the
+  layouter, the selector to fixed column transformation, etc.)
+- halo2 full-stack, the proof system full stack (the combination of the backend
+  and frontend)
+
+Currently the backend implements the "original" halo2 proof system extended
+with the features discussed in this document. Nevertheless, the public
+interface that the backend exposes is generic over plonkish arithmetization. This
+allows for alternative frontend implementations as well as alternative plonkish
+proof system implementations. 
The middleware contains the type definitions +used to connect the frontend and backend. + +Summary of crates: +- `halo2_frontend`: library used to define circuits and calculate their witness. +- `halo2_backend`: implementation of the halo2 proof system (the protocol). +- `halo2_middleware`: type definitions used to interface the backend with the frontend. +- `halo2_proofs`: legacy API built by re-exporting from the frontend and + backend as well as function wrappers. diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..ec65ebc105 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,8 @@ +ignore: + - halo2_proofs/benches + - halo2_proofs/examples + - halo2_proofs/tests + - halo2_frontend/src/dev/graph + - halo2_frontend/src/dev/graph.rs + - halo2_frontend/src/dev/costs.rs + - halo2_frontend/src/dev/cost_model.rs \ No newline at end of file diff --git a/halo2/Cargo.toml b/halo2/Cargo.toml index 5618165271..bea2227877 100644 --- a/halo2/Cargo.toml +++ b/halo2/Cargo.toml @@ -5,11 +5,11 @@ authors = [ "Jack Grigg ", ] edition = "2021" -rust-version = "1.56.1" +rust-version = "1.73.0" description = "[BETA] Fast zero-knowledge proof-carrying data implementation with no trusted setup" license = "MIT OR Apache-2.0" -repository = "https://github.com/zcash/halo2" -documentation = "https://docs.rs/halo2" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" readme = "../README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "recursive", "zkp", "zkSNARKs"] @@ -19,7 +19,7 @@ all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] [dependencies] -halo2_proofs = { version = "0.3", path = "../halo2_proofs", default-features = false } +halo2_proofs = { version = "0.4", path = "../halo2_proofs", default-features = false } [lib] bench = false diff --git a/halo2_backend/Cargo.toml b/halo2_backend/Cargo.toml new file mode 100644 index 0000000000..4ef90ae843 --- /dev/null +++ b/halo2_backend/Cargo.toml @@ -0,0 +1,62 @@ +[package] +name = "halo2_backend" +version = "0.4.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.73.0" +description = """ +Halo2 backend implementation. This package implements the halo2 proof system which includes setup (key generation), proving and verifying. 
+""" +license = "MIT OR Apache-2.0" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.7.0", default-features = false } +rand_core = { version = "0.6", default-features = false } +tracing = "0.1" +blake2b_simd = "1" +sha3 = "0.9.1" +rand_chacha = "0.3" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" +halo2_middleware = { path = "../halo2_middleware" } +num-bigint = "0.4.6" + +[dev-dependencies] +assert_matches = "1.5" +criterion = "0.3" +gumdrop = "0.8" +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["batch", "bits"] +bits = ["halo2curves/bits"] +sanity-checks = [] +batch = ["rand_core/getrandom"] +derive_serde = ["halo2curves/derive_serde"] + +[lib] +bench = false diff --git a/halo2_proofs/src/arithmetic.rs b/halo2_backend/src/arithmetic.rs similarity index 51% rename from halo2_proofs/src/arithmetic.rs rename to halo2_backend/src/arithmetic.rs index 0163e355eb..34e746f327 100644 --- a/halo2_proofs/src/arithmetic.rs +++ b/halo2_backend/src/arithmetic.rs @@ -1,19 +1,21 @@ //! This module provides common utilities, traits and structures for group, //! field and polynomial arithmetic. -use super::multicore; -pub use ff::Field; use group::{ ff::{BatchInvert, PrimeField}, - Curve, Group, GroupOpsOwned, ScalarMulOwned, + Curve, GroupOpsOwned, ScalarMulOwned, }; +pub use halo2_middleware::ff::Field; +use halo2_middleware::multicore; +use halo2curves::fft::best_fft; pub use halo2curves::{CurveAffine, CurveExt}; /// This represents an element of a group with basic operations that can be /// performed. This allows an FFT implementation (for example) to operate /// generically over either a field or elliptic curve group. 
-pub trait FftGroup: +#[allow(dead_code)] +pub(crate) trait FftGroup: Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned { } @@ -25,271 +27,8 @@ where { } -fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - - let c = if bases.len() < 4 { - 1 - } else if bases.len() < 32 { - 3 - } else { - (f64::from(bases.len() as u32)).ln().ceil() as usize - }; - - fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize { - let skip_bits = segment * c; - let skip_bytes = skip_bits / 8; - - if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 { - return 0; - } - - let mut v = [0; 8]; - for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) { - *v = *o; - } - - let mut tmp = u64::from_le_bytes(v); - tmp >>= skip_bits - (skip_bytes * 8); - tmp %= 1 << c; - - tmp as usize - } - - let segments = (C::Scalar::NUM_BITS as usize / c) + 1; - - for current_segment in (0..segments).rev() { - for _ in 0..c { - *acc = acc.double(); - } - - #[derive(Clone, Copy)] - enum Bucket { - None, - Affine(C), - Projective(C::Curve), - } - - impl Bucket { - fn add_assign(&mut self, other: &C) { - *self = match *self { - Bucket::None => Bucket::Affine(*other), - Bucket::Affine(a) => Bucket::Projective(a + *other), - Bucket::Projective(mut a) => { - a += *other; - Bucket::Projective(a) - } - } - } - - fn add(self, mut other: C::Curve) -> C::Curve { - match self { - Bucket::None => other, - Bucket::Affine(a) => { - other += a; - other - } - Bucket::Projective(a) => other + &a, - } - } - } - - let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1]; - - for (coeff, base) in coeffs.iter().zip(bases.iter()) { - let coeff = get_at::(current_segment, c, coeff); - if coeff != 0 { - buckets[coeff - 1].add_assign(base); - } - } - - // Summation by parts - // e.g. 3a + 2b + 1c = a + - // (a) + b + - // ((a) + b) + c - let mut running_sum = C::Curve::identity(); - for exp in buckets.into_iter().rev() { - running_sum = exp.add(running_sum); - *acc += &running_sum; - } - } -} - -/// Performs a small multi-exponentiation operation. -/// Uses the double-and-add algorithm with doublings shared across points. -pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect(); - let mut acc = C::Curve::identity(); - - // for byte idx - for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() { - // for bit idx - for bit_idx in (0..8).rev() { - acc = acc.double(); - // for each coeff - for coeff_idx in 0..coeffs.len() { - let byte = coeffs[coeff_idx].as_ref()[byte_idx]; - if ((byte >> bit_idx) & 1) != 0 { - acc += bases[coeff_idx]; - } - } - } - } - - acc -} - -/// Performs a multi-exponentiation operation. -/// -/// This function will panic if coeffs and bases have a different length. -/// -/// This will use multithreading if beneficial. 
-pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { - assert_eq!(coeffs.len(), bases.len()); - - let num_threads = multicore::current_num_threads(); - if coeffs.len() > num_threads { - let chunk = coeffs.len() / num_threads; - let num_chunks = coeffs.chunks(chunk).len(); - let mut results = vec![C::Curve::identity(); num_chunks]; - multicore::scope(|scope| { - let chunk = coeffs.len() / num_threads; - - for ((coeffs, bases), acc) in coeffs - .chunks(chunk) - .zip(bases.chunks(chunk)) - .zip(results.iter_mut()) - { - scope.spawn(move |_| { - multiexp_serial(coeffs, bases, acc); - }); - } - }); - results.iter().fold(C::Curve::identity(), |a, b| a + b) - } else { - let mut acc = C::Curve::identity(); - multiexp_serial(coeffs, bases, &mut acc); - acc - } -} - -/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size -/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative -/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when -/// interpreted as the coefficients of a polynomial of degree $n - 1$, is -/// transformed into the evaluations of this polynomial at each of the $n$ -/// distinct powers of $\omega$. This transformation is invertible by providing -/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element -/// by $n$. -/// -/// This will use multithreading if beneficial. -pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) { - fn bitreverse(mut n: usize, l: usize) -> usize { - let mut r = 0; - for _ in 0..l { - r = (r << 1) | (n & 1); - n >>= 1; - } - r - } - - let threads = multicore::current_num_threads(); - let log_threads = log2_floor(threads); - let n = a.len(); - assert_eq!(n, 1 << log_n); - - for k in 0..n { - let rk = bitreverse(k, log_n as usize); - if k < rk { - a.swap(rk, k); - } - } - - // precompute twiddle factors - let twiddles: Vec<_> = (0..(n / 2)) - .scan(Scalar::ONE, |w, _| { - let tw = *w; - *w *= ω - Some(tw) - }) - .collect(); - - if log_n <= log_threads { - let mut chunk = 2_usize; - let mut twiddle_chunk = n / 2; - for _ in 0..log_n { - a.chunks_mut(chunk).for_each(|coeffs| { - let (left, right) = coeffs.split_at_mut(chunk / 2); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - .enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - }); - chunk *= 2; - twiddle_chunk /= 2; - } - } else { - recursive_butterfly_arithmetic(a, n, 1, &twiddles) - } -} - -/// This perform recursive butterfly arithmetic -pub fn recursive_butterfly_arithmetic>( - a: &mut [G], - n: usize, - twiddle_chunk: usize, - twiddles: &[Scalar], -) { - if n == 2 { - let t = a[1]; - a[1] = a[0]; - a[0] += &t; - a[1] -= &t; - } else { - let (left, right) = a.split_at_mut(n / 2); - multicore::join( - || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles), - || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles), - ); - - // case when twiddle factor is one - let (a, left) = left.split_at_mut(1); - let (b, right) = right.split_at_mut(1); - let t = b[0]; - b[0] = a[0]; - a[0] += &t; - b[0] -= &t; - - left.iter_mut() - .zip(right.iter_mut()) - .enumerate() - .for_each(|(i, (a, b))| { - let mut t = *b; - t *= &twiddles[(i + 1) * twiddle_chunk]; - *b = *a; - *a += &t; - *b -= &t; - }); - } -} 
- /// Convert coefficient bases group elements to lagrange basis by inverse FFT. -pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { +pub(crate) fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec { let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]); let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV; for _ in k..C::Scalar::S { @@ -316,7 +55,7 @@ pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec } /// This evaluates a provided polynomial (in coefficient form) at `point`. -pub fn eval_polynomial(poly: &[F], point: F) -> F { +pub(crate) fn eval_polynomial(poly: &[F], point: F) -> F { fn evaluate(poly: &[F], point: F) -> F { poly.iter() .rev() @@ -346,7 +85,7 @@ pub fn eval_polynomial(poly: &[F], point: F) -> F { /// This computes the inner product of two vectors `a` and `b`. /// /// This function will panic if the two vectors are not the same size. -pub fn compute_inner_product(a: &[F], b: &[F]) -> F { +pub(crate) fn compute_inner_product(a: &[F], b: &[F]) -> F { // TODO: parallelize? assert_eq!(a.len(), b.len()); @@ -360,7 +99,7 @@ pub fn compute_inner_product(a: &[F], b: &[F]) -> F { /// Divides polynomial `a` in `X` by `X - b` with /// no remainder. -pub fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec +pub(crate) fn kate_division<'a, F: Field, I: IntoIterator>(a: I, mut b: F) -> Vec where I::IntoIter: DoubleEndedIterator + ExactSizeIterator, { @@ -433,22 +172,10 @@ pub fn parallelize(v: &mu }); } -fn log2_floor(num: usize) -> u32 { - assert!(num > 0); - - let mut pow = 0; - - while (1 << (pow + 1)) <= num { - pow += 1; - } - - pow -} - /// Returns coefficients of an n - 1 degree polynomial given a set of n points /// and their evaluations. This function will panic if two values in `points` /// are the same. -pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { +pub(crate) fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec { assert_eq!(points.len(), evals.len()); if points.len() == 1 { // Constant polynomial @@ -527,11 +254,24 @@ pub(crate) fn powers(base: F) -> impl Iterator { std::iter::successors(Some(F::ONE), move |power| Some(base * power)) } +pub(crate) fn truncate(scalar: F) -> F { + let nb_bytes = F::NUM_BITS.div_ceil(8).div_ceil(2) as usize; + let bytes = scalar.to_repr().as_ref()[..nb_bytes].to_vec(); + let bi = BigUint::from_bytes_le(&bytes); + F::from_str_vartime(&BigUint::to_string(&bi)).unwrap() +} + +pub(crate) fn truncated_powers(base: F) -> impl Iterator { + powers(base).map(truncate) +} + +use num_bigint::BigUint; + #[cfg(test)] use rand_core::OsRng; #[cfg(test)] -use crate::halo2curves::pasta::Fp; +use halo2curves::pasta::Fp; #[test] fn test_lagrange_interpolate() { diff --git a/halo2_proofs/src/helpers.rs b/halo2_backend/src/helpers.rs similarity index 84% rename from halo2_proofs/src/helpers.rs rename to halo2_backend/src/helpers.rs index faf7351a3e..ce69fd0516 100644 --- a/halo2_proofs/src/helpers.rs +++ b/halo2_backend/src/helpers.rs @@ -1,8 +1,9 @@ -use crate::poly::Polynomial; -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; use halo2curves::{serde::SerdeObject, CurveAffine}; use std::io; +use crate::poly::Polynomial; + /// This enum specifies how various types are serialized and deserialized. 
#[derive(Clone, Copy, Debug)] pub enum SerdeFormat { @@ -20,7 +21,7 @@ pub enum SerdeFormat { } // Keep this trait for compatibility with IPA serialization -pub(crate) trait CurveRead: CurveAffine { +pub trait CurveRead: CurveAffine { /// Reads a compressed element from the buffer and attempts to parse it /// using `from_bytes`. fn read(reader: &mut R) -> io::Result { @@ -36,9 +37,9 @@ pub trait SerdeCurveAffine: CurveAffine + SerdeObject { /// Reads an element from the buffer and parses it according to the `format`: /// - `Processed`: Reads a compressed curve element and decompress it /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks + /// does not perform any checks fn read(reader: &mut R, format: SerdeFormat) -> io::Result { match format { SerdeFormat::Processed => ::read(reader), @@ -69,9 +70,9 @@ impl SerdeCurveAffine for C {} pub trait SerdePrimeField: PrimeField + SerdeObject { /// Reads a field element as bytes from the buffer according to the `format`: /// - `Processed`: Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// `PrimeField` implementation, and checks that the element is less than the modulus. /// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations, - /// and checks that the element is less than the modulus. + /// and checks that the element is less than the modulus. /// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks. fn read(reader: &mut R, format: SerdeFormat) -> io::Result { match format { @@ -89,9 +90,9 @@ pub trait SerdePrimeField: PrimeField + SerdeObject { /// Writes a field element as bytes to the buffer according to the `format`: /// - `Processed`: Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. + /// `PrimeField` implementation. /// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. + /// WITHOUT performing the expensive Montgomery reduction. fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { match format { SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()), @@ -101,25 +102,6 @@ pub trait SerdePrimeField: PrimeField + SerdeObject { } impl SerdePrimeField for F {} -/// Convert a slice of `bool` into a `u8`. -/// -/// Panics if the slice has length greater than 8. -pub fn pack(bits: &[bool]) -> u8 { - let mut value = 0u8; - assert!(bits.len() <= 8); - for (bit_index, bit) in bits.iter().enumerate() { - value |= (*bit as u8) << bit_index; - } - value -} - -/// Writes the first `bits.len()` bits of a `u8` into `bits`. 
-pub fn unpack(byte: u8, bits: &mut [bool]) { - for (bit_index, bit) in bits.iter_mut().enumerate() { - *bit = (byte >> bit_index) & 1 == 1; - } -} - /// Reads a vector of polynomials from buffer pub(crate) fn read_polynomial_vec( reader: &mut R, @@ -150,5 +132,5 @@ pub(crate) fn write_polynomial_slice( /// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize { let field_len = F::default().to_repr().as_ref().len(); - 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0)) + 4 + slice.len() * (4 + field_len * slice.first().map(|poly| poly.len()).unwrap_or(0)) } diff --git a/halo2_backend/src/lib.rs b/halo2_backend/src/lib.rs new file mode 100644 index 0000000000..e11dc54525 --- /dev/null +++ b/halo2_backend/src/lib.rs @@ -0,0 +1,8 @@ +pub mod arithmetic; +pub mod helpers; +pub mod plonk; +pub mod poly; +pub mod transcript; + +// Internal re-exports +pub use halo2_middleware::multicore; diff --git a/halo2_backend/src/plonk.rs b/halo2_backend/src/plonk.rs new file mode 100644 index 0000000000..62223ab7cf --- /dev/null +++ b/halo2_backend/src/plonk.rs @@ -0,0 +1,410 @@ +//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk] +//! that is designed specifically for the polynomial commitment scheme described +//! in the [Halo][halo] paper. +//! +//! [halo]: https://eprint.iacr.org/2019/1021 +//! [plonk]: https://eprint.iacr.org/2019/953 + +use blake2b_simd::Params as Blake2bParams; +use group::ff::{Field, FromUniformBytes, PrimeField}; + +use crate::arithmetic::CurveAffine; +use crate::helpers::{ + polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, + SerdeFormat, SerdePrimeField, +}; +pub use crate::plonk::circuit::ConstraintSystemBack; +use crate::plonk::circuit::PinnedConstraintSystem; +use crate::poly::{ + Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, + Polynomial, +}; +use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; +pub use circuit::{ExpressionBack, VarBack}; +pub(crate) use evaluation::Evaluator; + +use std::io; + +mod circuit; +mod error; +mod evaluation; +pub mod keygen; +mod lookup; +mod permutation; +pub mod prover; +mod shuffle; +mod vanishing; +pub mod verifier; + +pub use error::*; + +/// This is a verifying key which allows for the verification of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct VerifyingKey { + /// Evaluation domain + domain: EvaluationDomain, + /// Commitments to fixed columns + fixed_commitments: Vec, + /// Permutation verifying key + permutation: permutation::VerifyingKey, + /// Constraint system + cs: ConstraintSystemBack, + /// Cached maximum degree of `cs` (which doesn't change after construction). + cs_degree: usize, + /// The representative of this `VerifyingKey` in transcripts. + transcript_repr: C::Scalar, +} + +// Current version of the VK +const VERSION: u8 = 0x04; + +impl VerifyingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a verifying key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. 
+    /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form
+    ///   Writes a field element into raw bytes in its internal Montgomery representation,
+    ///   WITHOUT performing the expensive Montgomery reduction.
+    pub fn write<W: io::Write>(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> {
+        // Version byte that will be checked on read.
+        writer.write_all(&[VERSION])?;
+        let k = &self.domain.k();
+        assert!(*k <= C::Scalar::S);
+        // k value fits in 1 byte
+        writer.write_all(&[*k as u8])?;
+        writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?;
+        for commitment in &self.fixed_commitments {
+            commitment.write(writer, format)?;
+        }
+        self.permutation.write(writer, format)?;
+
+        Ok(())
+    }
+
+    /// Reads a verification key from a buffer.
+    ///
+    /// Reads a curve element from the buffer and parses it according to the `format`:
+    /// - `Processed`: Reads a compressed curve element and decompresses it.
+    ///   Reads a field element in standard form, with endianness specified by the
+    ///   `PrimeField` implementation, and checks that the element is less than the modulus.
+    /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form.
+    ///   Checks that field elements are less than modulus, and then checks that the point is on the curve.
+    /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form;
+    ///   does not perform any checks
+    pub fn read<R: io::Read>(
+        reader: &mut R,
+        format: SerdeFormat,
+        cs: ConstraintSystemBack<C::Scalar>,
+    ) -> io::Result<Self> {
+        let mut version_byte = [0u8; 1];
+        reader.read_exact(&mut version_byte)?;
+        if VERSION != version_byte[0] {
+            return Err(io::Error::new(
+                io::ErrorKind::InvalidData,
+                "unexpected version byte",
+            ));
+        }
+
+        let mut k = [0u8; 1];
+        reader.read_exact(&mut k)?;
+        let k = u8::from_le_bytes(k);
+        if k as u32 > C::Scalar::S {
+            return Err(io::Error::new(
+                io::ErrorKind::InvalidData,
+                format!(
+                    "circuit size value (k): {} exceeds maximum: {}",
+                    k,
+                    C::Scalar::S
+                ),
+            ));
+        }
+        let domain = keygen::create_domain::<C>(&cs, k as u32);
+        let mut num_fixed_columns = [0u8; 4];
+        reader.read_exact(&mut num_fixed_columns)?;
+        let num_fixed_columns = u32::from_le_bytes(num_fixed_columns);
+
+        let fixed_commitments: Vec<_> = (0..num_fixed_columns)
+            .map(|_| C::read(reader, format))
+            .collect::<Result<_, _>>()?;
+
+        let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?;
+
+        Ok(Self::from_parts(domain, fixed_commitments, permutation, cs))
+    }
+
+    /// Writes a verifying key to a vector of bytes using [`Self::write`].
+    pub fn to_bytes(&self, format: SerdeFormat) -> Vec<u8> {
+        let mut bytes = Vec::<u8>::with_capacity(self.bytes_length(format));
+        Self::write(self, &mut bytes, format).expect("Writing to vector should not fail");
+        bytes
+    }
+
+    /// Reads a verification key from a slice of bytes using [`Self::read`].
+ pub fn from_bytes( + mut bytes: &[u8], + format: SerdeFormat, + cs: ConstraintSystemBack, + ) -> io::Result { + Self::read(&mut bytes, format, cs) + } +} + +impl VerifyingKey { + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + 6 // bytes used for encoding VERSION(u8), "domain.k"(u8) & num_fixed_columns(u32) + + (self.fixed_commitments.len() * C::byte_length(format)) + + self.permutation.bytes_length(format) + } + + fn from_parts( + domain: EvaluationDomain, + fixed_commitments: Vec, + permutation: permutation::VerifyingKey, + cs: ConstraintSystemBack, + ) -> Self + where + C::ScalarExt: FromUniformBytes<64>, + { + // Compute cached values. + let cs_degree = cs.degree(); + + let mut vk = Self { + domain, + fixed_commitments, + permutation, + cs, + cs_degree, + // Temporary, this is not pinned. + transcript_repr: C::Scalar::ZERO, + }; + + let mut hasher = Blake2bParams::new() + .hash_length(64) + .personal(b"Halo2-Verify-Key") + .to_state(); + + let s = format!("{:?}", vk.pinned()); + + hasher.update(&(s.len() as u64).to_le_bytes()); + hasher.update(s.as_bytes()); + + // Hash in final Blake2bState + vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); + + vk + } + + /// Hashes a verification key into a transcript. + pub fn hash_into, T: Transcript>( + &self, + transcript: &mut T, + ) -> io::Result<()> { + transcript.common_scalar(self.transcript_repr)?; + + Ok(()) + } + + /// Obtains a pinned representation of this verification key that contains + /// the minimal information necessary to reconstruct the verification key. + pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { + PinnedVerificationKey { + base_modulus: C::Base::MODULUS, + scalar_modulus: C::Scalar::MODULUS, + domain: self.domain.pinned(), + fixed_commitments: &self.fixed_commitments, + permutation: &self.permutation, + cs: self.cs.pinned(), + } + } + + /// Returns commitments of fixed polynomials + pub fn fixed_commitments(&self) -> &Vec { + &self.fixed_commitments + } + + /// Returns the permutation commitments + pub fn permutation(&self) -> &permutation::VerifyingKey { + &self.permutation + } + + /// Returns `ConstraintSystem` + pub(crate) fn cs(&self) -> &ConstraintSystemBack { + &self.cs + } + + /// Returns representative of this `VerifyingKey` in transcripts + pub fn transcript_repr(&self) -> C::Scalar { + self.transcript_repr + } +} + +/// Minimal representation of a verification key that can be used to identify +/// its active contents. +#[allow(dead_code)] +#[derive(Debug)] +pub struct PinnedVerificationKey<'a, C: CurveAffine> { + base_modulus: &'static str, + scalar_modulus: &'static str, + domain: PinnedEvaluationDomain<'a, C::Scalar>, + cs: PinnedConstraintSystem<'a, C::Scalar>, + fixed_commitments: &'a Vec, + permutation: &'a permutation::VerifyingKey, +} + +/// This is a proving key which allows for the creation of proofs for a +/// particular circuit. +#[derive(Clone, Debug)] +pub struct ProvingKey { + vk: VerifyingKey, + l0: Polynomial, + l_last: Polynomial, + l_active_row: Polynomial, + fixed_values: Vec>, + fixed_polys: Vec>, + fixed_cosets: Vec>, + permutation: permutation::ProvingKey, + ev: Evaluator, +} + +impl ProvingKey +where + C::Scalar: FromUniformBytes<64>, +{ + /// Get the underlying [`VerifyingKey`]. 
+ pub fn get_vk(&self) -> &VerifyingKey { + &self.vk + } + + /// Gets the total number of bytes in the serialization of `self` + fn bytes_length(&self, format: SerdeFormat) -> usize + where + C: SerdeCurveAffine, + { + let scalar_len = C::Scalar::default().to_repr().as_ref().len(); + self.vk.bytes_length(format) + + 12 // bytes used for encoding the length(u32) of "l0", "l_last" & "l_active_row" polys + + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) + + polynomial_slice_byte_length(&self.fixed_values) + + polynomial_slice_byte_length(&self.fixed_polys) + + polynomial_slice_byte_length(&self.fixed_cosets) + + self.permutation.bytes_length() + } +} + +impl ProvingKey +where + C::Scalar: SerdePrimeField + FromUniformBytes<64>, +{ + /// Writes a proving key to a buffer. + /// + /// Writes a curve element according to `format`: + /// - `Processed`: Writes a compressed curve element with coordinates in standard form. + /// Writes a field element in standard form, with endianness specified by the + /// `PrimeField` implementation. + /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form + /// Writes a field element into raw bytes in its internal Montgomery representation, + /// WITHOUT performing the expensive Montgomery reduction. + /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) + pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { + self.vk.write(writer, format)?; + self.l0.write(writer, format)?; + self.l_last.write(writer, format)?; + self.l_active_row.write(writer, format)?; + write_polynomial_slice(&self.fixed_values, writer, format)?; + write_polynomial_slice(&self.fixed_polys, writer, format)?; + write_polynomial_slice(&self.fixed_cosets, writer, format)?; + self.permutation.write(writer, format)?; + Ok(()) + } + + /// Reads a proving key from a buffer. + /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. + /// + /// Reads a curve element from the buffer and parses it according to the `format`: + /// - `Processed`: Reads a compressed curve element and decompresses it. + /// Reads a field element in standard form, with endianness specified by the + /// `PrimeField` implementation, and checks that the element is less than the modulus. + /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. + /// Checks that field elements are less than modulus, and then checks that the point is on the curve. + /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; + /// does not perform any checks + pub fn read( + reader: &mut R, + format: SerdeFormat, + cs: ConstraintSystemBack, + ) -> io::Result { + let vk = VerifyingKey::::read::(reader, format, cs)?; + let l0 = Polynomial::read(reader, format)?; + let l_last = Polynomial::read(reader, format)?; + let l_active_row = Polynomial::read(reader, format)?; + let fixed_values = read_polynomial_vec(reader, format)?; + let fixed_polys = read_polynomial_vec(reader, format)?; + let fixed_cosets = read_polynomial_vec(reader, format)?; + let permutation = permutation::ProvingKey::read(reader, format)?; + let ev = Evaluator::new(vk.cs()); + Ok(Self { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation, + ev, + }) + } + + /// Writes a proving key to a vector of bytes using [`Self::write`]. 
+ pub fn to_bytes(&self, format: SerdeFormat) -> Vec { + let mut bytes = Vec::::with_capacity(self.bytes_length(format)); + Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); + bytes + } + + /// Reads a proving key from a slice of bytes using [`Self::read`]. + pub fn from_bytes( + mut bytes: &[u8], + format: SerdeFormat, + cs: ConstraintSystemBack, + ) -> io::Result { + Self::read(&mut bytes, format, cs) + } +} + +impl VerifyingKey { + /// Get the underlying [`EvaluationDomain`]. + pub fn get_domain(&self) -> &EvaluationDomain { + &self.domain + } +} + +#[derive(Clone, Copy, Debug)] +pub(crate) struct Theta; +pub(crate) type ChallengeTheta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +pub(crate) struct Beta; +pub(crate) type ChallengeBeta = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +pub(crate) struct Gamma; +pub(crate) type ChallengeGamma = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +pub(crate) struct Y; +pub(crate) type ChallengeY = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +pub(crate) struct X; +pub(crate) type ChallengeX = ChallengeScalar; diff --git a/halo2_backend/src/plonk/circuit.rs b/halo2_backend/src/plonk/circuit.rs new file mode 100644 index 0000000000..ec4972e164 --- /dev/null +++ b/halo2_backend/src/plonk/circuit.rs @@ -0,0 +1,421 @@ +use group::ff::Field; +use halo2_middleware::circuit::{Any, ChallengeMid, ColumnMid, Gate}; +use halo2_middleware::expression::{Expression, Variable}; +use halo2_middleware::poly::Rotation; +use halo2_middleware::{lookup, permutation::ArgumentMid, shuffle}; + +// TODO: Reuse ColumnMid inside this. +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct QueryBack { + /// Query index + pub(crate) index: usize, + /// Column index + pub(crate) column_index: usize, + /// The type of the column. + pub(crate) column_type: Any, + /// Rotation of this query + pub(crate) rotation: Rotation, +} + +impl QueryBack { + /// Query index + pub fn index(&self) -> usize { + self.index + } + + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// The type of the column + pub fn column_type(&self) -> Any { + self.column_type + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum VarBack { + /// This is a generic column query + Query(QueryBack), + /// This is a challenge + Challenge(ChallengeMid), +} + +impl std::fmt::Display for VarBack { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self) + } +} + +impl Variable for VarBack { + fn degree(&self) -> usize { + match self { + VarBack::Query(_) => 1, + VarBack::Challenge(_) => 0, + } + } + + fn complexity(&self) -> usize { + match self { + VarBack::Query(_) => 1, + VarBack::Challenge(_) => 0, + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + write!(writer, "{}", self) + } +} + +pub type ExpressionBack = Expression; +pub(crate) type GateBack = Gate; +pub(crate) type LookupArgumentBack = lookup::Argument; +pub(crate) type ShuffleArgumentBack = shuffle::Argument; +pub type PermutationArgumentBack = ArgumentMid; + +/// This is a description of the circuit environment, such as the gate, column and permutation +/// arrangements. This type is internal to the backend and will appear in the verifying key. 
+#[derive(Debug, Clone)] +pub struct ConstraintSystemBack { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + pub(crate) gates: Vec>, + pub advice_queries: Vec<(ColumnMid, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. + pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(ColumnMid, Rotation)>, + pub fixed_queries: Vec<(ColumnMid, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: PermutationArgumentBack, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // The minimum degree required by the circuit, which can be set to a + // larger amount than actually needed. This can be used, for example, to + // force the permutation argument to involve more columns in the same set. + pub(crate) minimum_degree: Option, +} + +impl ConstraintSystemBack { + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = permutation_argument_required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| lookup_argument_required_degree(l)) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| shuffle_argument_required_degree(l)) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. + degree = std::cmp::max( + degree, + self.gates + .iter() + .map(|gate| gate.poly.degree()) + .max() + .unwrap_or(0), + ); + + std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. 
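+        // So take at least 3, matching the permutation argument's evaluations noted above.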
+ let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub(crate) fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) + + 1 // for at least one row + } + + pub(crate) fn get_any_query_index(&self, column: ColumnMid, at: Rotation) -> usize { + let queries = match column.column_type { + Any::Advice => &self.advice_queries, + Any::Fixed => &self.fixed_queries, + Any::Instance => &self.instance_queries, + }; + for (index, instance_query) in queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + panic!("get_any_query_index called for non-existent query"); + } + + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { + let max_phase = self + .advice_column_phase + .iter() + .max() + .copied() + .unwrap_or_default(); + 0..=max_phase + } + + /// Number of fixed columns + pub fn num_fixed_columns(&self) -> usize { + self.num_fixed_columns + } + /// Number of advice columns + pub fn num_advice_columns(&self) -> usize { + self.num_advice_columns + } + /// Number of instance columns + pub fn num_instance_columns(&self) -> usize { + self.num_instance_columns + } + /// Return gates of the constraint system + pub fn gates(&self) -> &Vec> { + &self.gates + } + /// Returns the instance queries + pub fn instance_queries(&self) -> &Vec<(ColumnMid, Rotation)> { + &self.instance_queries + } + // Permutation argument for performing equality constraints + pub fn permutation(&self) -> &PermutationArgumentBack { + &self.permutation + } + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub fn lookups(&self) -> &Vec> { + &self.lookups + } + + /// Obtain a pinned version of this constraint system; a structure with the + /// minimal parameters needed to determine the rest of the constraint + /// system. 
+ pub(crate) fn pinned(&self) -> PinnedConstraintSystem<'_, F> { + PinnedConstraintSystem { + num_fixed_columns: &self.num_fixed_columns, + num_advice_columns: &self.num_advice_columns, + num_instance_columns: &self.num_instance_columns, + num_challenges: &self.num_challenges, + advice_column_phase: &self.advice_column_phase, + challenge_phase: &self.challenge_phase, + gates: PinnedGates(&self.gates), + fixed_queries: &self.fixed_queries, + advice_queries: &self.advice_queries, + instance_queries: &self.instance_queries, + permutation: &self.permutation, + lookups: &self.lookups, + shuffles: &self.shuffles, + minimum_degree: &self.minimum_degree, + } + } +} + +struct PinnedGates<'a, F: Field>(&'a Vec>); + +impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + f.debug_list() + .entries(self.0.iter().map(|gate| &gate.poly)) + .finish() + } +} + +/// Represents the minimal parameters that determine a `ConstraintSystem`. +#[allow(dead_code)] +#[derive(Debug)] +pub(crate) struct PinnedConstraintSystem<'a, F: Field> { + num_fixed_columns: &'a usize, + num_advice_columns: &'a usize, + num_instance_columns: &'a usize, + num_challenges: &'a usize, + advice_column_phase: &'a Vec, + challenge_phase: &'a Vec, + gates: PinnedGates<'a, F>, + advice_queries: &'a Vec<(ColumnMid, Rotation)>, + instance_queries: &'a Vec<(ColumnMid, Rotation)>, + fixed_queries: &'a Vec<(ColumnMid, Rotation)>, + permutation: &'a PermutationArgumentBack, + lookups: &'a Vec>, + shuffles: &'a Vec>, + minimum_degree: &'a Option, +} + +// Cost functions: arguments required degree + +/// Returns the minimum circuit degree required by the permutation argument. +/// The argument may use larger degree gates depending on the actual +/// circuit's degree and how many columns are involved in the permutation. +fn permutation_argument_required_degree() -> usize { + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // We will fit as many polynomials p_i(X) as possible + // into the required degree of the circuit, so the + // following will not affect the required degree of + // this middleware. + // + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + // + // On the first sets of columns, except the first + // set, we will do + // + // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 + // + // where z'(X) is the permutation for the previous set + // of columns. + // + // On the final set of columns, we will do + // + // degree 3: + // l_last(X) * (z'(X)^2 - z'(X)) = 0 + // + // which will allow the last value to be zero to + // ensure the argument is perfectly complete. + + // There are constraints of degree 3 regardless of the + // number of columns involved. + 3 +} + +fn lookup_argument_required_degree(arg: &lookup::Argument) -> usize { + assert_eq!(arg.input_expressions.len(), arg.table_expressions.len()); + + // The first value in the permutation poly should be one. + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // The "last" value in the permutation poly should be a boolean, for + // completeness and soundness. + // degree 3: + // l_last(X) * (z(X)^2 - z(X)) = 0 + // + // Enable the permutation argument for only the rows involved. + // degree (2 + input_degree + table_degree) or 4, whichever is larger: + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + // - z(X) (\theta^{m-1} a_0(X) + ... 
+ a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + // ) = 0 + // + // The first two values of a' and s' should be the same. + // degree 2: + // l_0(X) * (a'(X) - s'(X)) = 0 + // + // Either the two values are the same, or the previous + // value of a' is the same as the current value. + // degree 3: + // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 + let mut input_degree = 1; + for expr in arg.input_expressions.iter() { + input_degree = std::cmp::max(input_degree, expr.degree()); + } + let mut table_degree = 1; + for expr in arg.table_expressions.iter() { + table_degree = std::cmp::max(table_degree, expr.degree()); + } + + // In practice because input_degree and table_degree are initialized to + // one, the latter half of this max() invocation is at least 4 always, + // rendering this call pointless except to be explicit in case we change + // the initialization of input_degree/table_degree in the future. + std::cmp::max( + // (1 - (l_last + l_blind)) z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) + 4, + // (1 - (l_last + l_blind)) z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) + 2 + input_degree + table_degree, + ) +} + +fn shuffle_argument_required_degree(arg: &shuffle::Argument) -> usize { + assert_eq!(arg.input_expressions.len(), arg.shuffle_expressions.len()); + + let mut input_degree = 1; + for expr in arg.input_expressions.iter() { + input_degree = std::cmp::max(input_degree, expr.degree()); + } + let mut shuffle_degree = 1; + for expr in arg.shuffle_expressions.iter() { + shuffle_degree = std::cmp::max(shuffle_degree, expr.degree()); + } + + // (1 - (l_last + l_blind)) (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) + std::cmp::max(2 + shuffle_degree, 2 + input_degree) +} diff --git a/halo2_backend/src/plonk/error.rs b/halo2_backend/src/plonk/error.rs new file mode 100644 index 0000000000..716e466d1b --- /dev/null +++ b/halo2_backend/src/plonk/error.rs @@ -0,0 +1,76 @@ +use std::error; +use std::fmt; +use std::io; + +use halo2_middleware::circuit::ColumnMid; + +/// This is an error that could occur during proving. +#[derive(Debug)] +pub enum Error { + /// The provided instances do not match the circuit parameters. + InvalidInstances, + /// The constraint system is not satisfied. + ConstraintSystemFailure, + /// Out of bounds index passed to a backend + BoundsFailure, + /// Opening error + Opening, + /// Transcript error + Transcript(io::Error), + /// `k` is too small for the given circuit. + NotEnoughRowsAvailable { + /// The current value of `k` being used. + current_k: u32, + }, + /// Instance provided exceeds number of available rows + InstanceTooLarge, + /// The instance sets up a copy constraint involving a column that has not been + /// included in the permutation. + ColumnNotInPermutation(ColumnMid), + /// Generic error not covered by previous cases + Other(String), +} + +impl From for Error { + fn from(error: io::Error) -> Self { + // The only place we can get io::Error from is the transcript. + Error::Transcript(error) + } +} + +impl Error { + /// Constructs an `Error::NotEnoughRowsAvailable`. 
+ pub fn not_enough_rows_available(current_k: u32) -> Self { + Error::NotEnoughRowsAvailable { current_k } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), + Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), + Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), + Error::Opening => write!(f, "Multi-opening proof was invalid"), + Error::Transcript(e) => write!(f, "Transcript error: {e}"), + Error::NotEnoughRowsAvailable { current_k } => write!( + f, + "k = {current_k} is too small for the given circuit. Try using a larger value of k", + ), + Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), + Error::ColumnNotInPermutation(column) => { + write!(f, "Column {column:?} must be included in the permutation",) + } + Error::Other(error) => write!(f, "Other: {error}"), + } + } +} + +impl error::Error for Error { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + match self { + Error::Transcript(e) => Some(e), + _ => None, + } + } +} diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs similarity index 69% rename from halo2_proofs/src/plonk/evaluation.rs rename to halo2_backend/src/plonk/evaluation.rs index 431c487c7e..09d8b452d3 100644 --- a/halo2_proofs/src/plonk/evaluation.rs +++ b/halo2_backend/src/plonk/evaluation.rs @@ -1,22 +1,31 @@ +//! This module: +//! - Evaluates the h polynomial: Evaluator::new(ConstraintSystem).evaluate_h(...) +//! - Evaluates an Expression using Lagrange basis + use crate::multicore; -use crate::plonk::{lookup, permutation, Any, ProvingKey}; -use crate::poly::Basis; +use crate::plonk::{ + circuit::{ConstraintSystemBack, ExpressionBack, VarBack}, + lookup, permutation, ProvingKey, +}; +use crate::poly::{Basis, LagrangeBasis}; use crate::{ arithmetic::{parallelize, CurveAffine}, - poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation}, + poly::{Coeff, ExtendedLagrangeCoeff, Polynomial}, }; use group::ff::{Field, PrimeField, WithSmallOrderMulGroup}; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; -use super::{shuffle, ConstraintSystem, Expression}; +use super::shuffle; /// Return the index in the polynomial of size `isize` after rotation `rot`. 
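+/// The index is reduced with `rem_euclid`, so negative rotations wrap around the polynomial of size `isize`.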
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize { (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize } -/// Value used in a calculation +/// Value used in [`Calculation`] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)] -pub enum ValueSource { +enum ValueSource { /// This is a constant value Constant(usize), /// This is an intermediate value @@ -50,7 +59,7 @@ impl Default for ValueSource { impl ValueSource { /// Get the value for this source #[allow(clippy::too_many_arguments)] - pub fn get( + fn get( &self, rotations: &[usize], constants: &[F], @@ -89,7 +98,7 @@ impl ValueSource { /// Calculation #[derive(Clone, Debug, PartialEq, Eq)] -pub enum Calculation { +enum Calculation { /// This is an addition Add(ValueSource, ValueSource), /// This is a subtraction @@ -111,7 +120,7 @@ pub enum Calculation { impl Calculation { /// Get the resulting value of this calculation #[allow(clippy::too_many_arguments)] - pub fn evaluate( + fn evaluate( &self, rotations: &[usize], constants: &[F], @@ -164,59 +173,67 @@ impl Calculation { /// Evaluator #[derive(Clone, Default, Debug)] -pub struct Evaluator { +pub(crate) struct Evaluator { /// Custom gates evalution - pub custom_gates: GraphEvaluator, + custom_gates: GraphEvaluator, /// Lookups evalution - pub lookups: Vec>, + lookups: Vec>, /// Shuffle evalution - pub shuffles: Vec>, + shuffles: Vec>, } -/// GraphEvaluator +/// The purpose of GraphEvaluator to is to collect a set of computations and compute them by making a graph of +/// its internal operations to avoid repeating computations. +/// +/// Computations can be added in two ways: +/// +/// - using [`Self::add_expression`] where expressions are added and internally turned into a graph. +/// A reference to the computation is returned in the form of [ `ValueSource::Intermediate`] reference +/// index. +/// - using [`Self::add_calculation`] where you can add only a single operation or a +/// [Horner polynomial evaluation](https://en.wikipedia.org/wiki/Horner's_method) by using +/// Calculation::Horner +/// +/// Finally, call [`Self::evaluate`] to get the result of the last calculation added. +/// #[derive(Clone, Debug)] -pub struct GraphEvaluator { +struct GraphEvaluator { /// Constants - pub constants: Vec, + constants: Vec, /// Rotations - pub rotations: Vec, + rotations: Vec, /// Calculations - pub calculations: Vec, + calculations: Vec, /// Number of intermediates - pub num_intermediates: usize, + num_intermediates: usize, } /// EvaluationData #[derive(Default, Debug)] -pub struct EvaluationData { +struct EvaluationData { /// Intermediates - pub intermediates: Vec, + intermediates: Vec, /// Rotations - pub rotations: Vec, + rotations: Vec, } -/// CaluclationInfo +/// CalculationInfo contains a calculation to perform and in [`Self::target`] the [`EvaluationData::intermediates`] where the value is going to be stored. 
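Editor's note, a standalone check of the wrap-around behaviour of `get_rotation_idx` above, copied verbatim so the `rem_euclid` handling of negative rotations can be verified in isolation.

// Copied from the diff above; rem_euclid keeps negative rotations inside the domain.
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize {
    (((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize
}

fn main() {
    let n = 8;
    // A previous-row rotation from row 0 wraps to the last row.
    assert_eq!(get_rotation_idx(0, -1, 1, n), 7);
    // A next-row rotation from the last row wraps back to row 0.
    assert_eq!(get_rotation_idx(7, 1, 1, n), 0);
    // On an extended domain the rotation is scaled (here by an illustrative factor of 4).
    assert_eq!(get_rotation_idx(2, 1, 4, 32), 6);
}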
#[derive(Clone, Debug)] -pub struct CalculationInfo { +struct CalculationInfo { /// Calculation - pub calculation: Calculation, + calculation: Calculation, /// Target - pub target: usize, + target: usize, } impl Evaluator { - /// Creates a new evaluation structure - pub fn new(cs: &ConstraintSystem) -> Self { + /// Creates a new evaluation structure from a [`ConstraintSystemBack`] + pub fn new(cs: &ConstraintSystemBack) -> Self { let mut ev = Evaluator::default(); - // Custom gates let mut parts = Vec::new(); for gate in cs.gates.iter() { - parts.extend( - gate.polynomials() - .iter() - .map(|poly| ev.custom_gates.add_expression(poly)), - ); + parts.push(ev.custom_gates.add_expression(&gate.poly)); } ev.custom_gates.add_calculation(Calculation::Horner( ValueSource::PreviousValue(), @@ -228,7 +245,7 @@ impl Evaluator { for lookup in cs.lookups.iter() { let mut graph = GraphEvaluator::default(); - let mut evaluate_lc = |expressions: &Vec>| { + let mut evaluate_lc = |expressions: &Vec>| { let parts = expressions .iter() .map(|expr| graph.add_expression(expr)) @@ -260,7 +277,8 @@ impl Evaluator { // Shuffles for shuffle in cs.shuffles.iter() { - let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| { + let evaluate_lc = |expressions: &Vec>, + graph: &mut GraphEvaluator| { let parts = expressions .iter() .map(|expr| graph.add_expression(expr)) @@ -388,10 +406,18 @@ impl Evaluator { let blinding_factors = pk.vk.cs.blinding_factors(); let last_rotation = Rotation(-((blinding_factors + 1) as i32)); let chunk_len = pk.vk.cs.degree() - 2; - let delta_start = beta * &C::Scalar::ZETA; + let delta_start = beta * C::Scalar::ZETA; + + let permutation_product_cosets: Vec< + Polynomial, + > = sets + .iter() + .map(|set| domain.coeff_to_extended(set.permutation_product_poly.clone())) + .collect(); - let first_set = sets.first().unwrap(); - let last_set = sets.last().unwrap(); + let first_set_permutation_product_coset = + permutation_product_cosets.first().unwrap(); + let last_set_permutation_product_coset = permutation_product_cosets.last().unwrap(); // Permutation constraints parallelize(&mut values, |values, start| { @@ -404,22 +430,21 @@ impl Evaluator { // Enforce only for the first set. // l_0(X) * (1 - z_0(X)) = 0 *value = *value * y - + ((one - first_set.permutation_product_coset[idx]) * l0[idx]); + + ((one - first_set_permutation_product_coset[idx]) * l0[idx]); // Enforce only for the last set. // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 *value = *value * y - + ((last_set.permutation_product_coset[idx] - * last_set.permutation_product_coset[idx] - - last_set.permutation_product_coset[idx]) + + ((last_set_permutation_product_coset[idx] + * last_set_permutation_product_coset[idx] + - last_set_permutation_product_coset[idx]) * l_last[idx]); // Except for the first set, enforce. 
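Editor's note, a standalone sketch (u64 stand-ins for field elements) of the accumulation pattern used both by `Calculation::Horner(ValueSource::PreviousValue(), parts, ValueSource::Y())` above and by the explicit `*value = *value * y + constraint` steps in the permutation section: independent constraints are folded into one value as a combination in powers of y.

// Illustrative only: (((prev*y + c_0)*y + c_1)*y + ...) = prev*y^k + c_0*y^{k-1} + ... + c_{k-1}
fn fold_constraints(prev: u64, constraints: &[u64], y: u64) -> u64 {
    constraints.iter().fold(prev, |acc, c| acc * y + c)
}

fn main() {
    let (prev, y) = (14u64, 13u64);
    let parts = [7u64, 11];
    // prev*y^2 + c_0*y + c_1
    assert_eq!(fold_constraints(prev, &parts, y), 14 * 13 * 13 + 7 * 13 + 11);
}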
// l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 - for (set_idx, set) in sets.iter().enumerate() { + for set_idx in 0..sets.len() { if set_idx != 0 { *value = *value * y - + ((set.permutation_product_coset[idx] - - permutation.sets[set_idx - 1].permutation_product_coset - [r_last]) + + ((permutation_product_cosets[set_idx][idx] + - permutation_product_cosets[set_idx - 1][r_last]) * l0[idx]); } } @@ -429,29 +454,30 @@ impl Evaluator { // - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma) // ) let mut current_delta = delta_start * beta_term; - for ((set, columns), cosets) in sets - .iter() - .zip(p.columns.chunks(chunk_len)) - .zip(pk.permutation.cosets.chunks(chunk_len)) + for ((permutation_product_coset, columns), cosets) in + permutation_product_cosets + .iter() + .zip(p.columns.chunks(chunk_len)) + .zip(pk.permutation.cosets.chunks(chunk_len)) { - let mut left = set.permutation_product_coset[r_next]; + let mut left = permutation_product_coset[r_next]; for (values, permutation) in columns .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], + .map(|&column| match column.column_type { + Any::Advice => &advice[column.index], + Any::Fixed => &fixed[column.index], + Any::Instance => &instance[column.index], }) .zip(cosets.iter()) { left *= values[idx] + beta * permutation[idx] + gamma; } - let mut right = set.permutation_product_coset[idx]; - for values in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => &advice[column.index()], - Any::Fixed => &fixed[column.index()], - Any::Instance => &instance[column.index()], + let mut right = permutation_product_coset[idx]; + for values in columns.iter().map(|&column| match column.column_type { + Any::Advice => &advice[column.index], + Any::Fixed => &fixed[column.index], + Any::Instance => &instance[column.index], }) { right *= values[idx] + current_delta + gamma; current_delta *= &C::Scalar::DELTA; @@ -670,36 +696,30 @@ impl GraphEvaluator { } /// Generates an optimized evaluation for the expression - fn add_expression(&mut self, expr: &Expression) -> ValueSource { + fn add_expression(&mut self, expr: &ExpressionBack) -> ValueSource { match expr { - Expression::Constant(scalar) => self.add_constant(scalar), - Expression::Selector(_selector) => unreachable!(), - Expression::Fixed(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Fixed( - query.column_index, - rot_idx, - ))) - } - Expression::Advice(query) => { + ExpressionBack::Constant(scalar) => self.add_constant(scalar), + ExpressionBack::Var(VarBack::Query(query)) => { let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Advice( - query.column_index, - rot_idx, - ))) - } - Expression::Instance(query) => { - let rot_idx = self.add_rotation(&query.rotation); - self.add_calculation(Calculation::Store(ValueSource::Instance( - query.column_index, - rot_idx, - ))) + match query.column_type { + Any::Fixed => self.add_calculation(Calculation::Store(ValueSource::Fixed( + query.column_index, + rot_idx, + ))), + Any::Advice => self.add_calculation(Calculation::Store(ValueSource::Advice( + query.column_index, + rot_idx, + ))), + Any::Instance => self.add_calculation(Calculation::Store( + ValueSource::Instance(query.column_index, rot_idx), + )), + } } - Expression::Challenge(challenge) => self.add_calculation(Calculation::Store( - 
ValueSource::Challenge(challenge.index()), - )), - Expression::Negated(a) => match **a { - Expression::Constant(scalar) => self.add_constant(&-scalar), + ExpressionBack::Var(VarBack::Challenge(challenge)) => self.add_calculation( + Calculation::Store(ValueSource::Challenge(challenge.index())), + ), + ExpressionBack::Negated(a) => match **a { + ExpressionBack::Constant(scalar) => self.add_constant(&-scalar), _ => { let result_a = self.add_expression(a); match result_a { @@ -708,10 +728,10 @@ impl GraphEvaluator { } } }, - Expression::Sum(a, b) => { + ExpressionBack::Sum(a, b) => { // Undo subtraction stored as a + (-b) in expressions match &**b { - Expression::Negated(b_int) => { + ExpressionBack::Negated(b_int) => { let result_a = self.add_expression(a); let result_b = self.add_expression(b_int); if result_a == ValueSource::Constant(0) { @@ -737,7 +757,7 @@ impl GraphEvaluator { } } } - Expression::Product(a, b) => { + ExpressionBack::Product(a, b) => { let result_a = self.add_expression(a); let result_b = self.add_expression(b); if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) { @@ -758,22 +778,11 @@ impl GraphEvaluator { self.add_calculation(Calculation::Mul(result_b, result_a)) } } - Expression::Scaled(a, f) => { - if *f == C::ScalarExt::ZERO { - ValueSource::Constant(0) - } else if *f == C::ScalarExt::ONE { - self.add_expression(a) - } else { - let cst = self.add_constant(f); - let result_a = self.add_expression(a); - self.add_calculation(Calculation::Mul(result_a, cst)) - } - } } } /// Creates a new evaluation structure - pub fn instance(&self) -> EvaluationData { + fn instance(&self) -> EvaluationData { EvaluationData { intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates], rotations: vec![0usize; self.rotations.len()], @@ -781,7 +790,11 @@ impl GraphEvaluator { } #[allow(clippy::too_many_arguments)] - pub fn evaluate( + /// Fills the EvaluationData + /// .intermediaries with the evaluation the calculation + /// .rotations with the indexes of the polinomials after rotations + /// returns the value of last evaluation done. 
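Editor's note, a toy model (names are illustrative, not the backend's API) of two of the simplification rules in `add_expression` above: a product with a zero operand vanishes, and a product of an expression with itself is stored as a square so the operand is only evaluated once.

#[derive(Clone, PartialEq, Debug)]
enum Node {
    Const(u64),
    Var(char),
    Square(Box<Node>),
    Mul(Box<Node>, Box<Node>),
}

// Mirrors the Product branch above: constant-fold zeros, detect squares, otherwise multiply.
fn mul(a: Node, b: Node) -> Node {
    if a == Node::Const(0) || b == Node::Const(0) {
        Node::Const(0)
    } else if a == b {
        Node::Square(Box::new(a))
    } else {
        Node::Mul(Box::new(a), Box::new(b))
    }
}

fn main() {
    assert_eq!(mul(Node::Var('a'), Node::Const(0)), Node::Const(0));
    assert_eq!(
        mul(Node::Var('a'), Node::Var('a')),
        Node::Square(Box::new(Node::Var('a')))
    );
}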
+ fn evaluate( &self, data: &mut EvaluationData, fixed: &[Polynomial], @@ -829,9 +842,9 @@ impl GraphEvaluator { } } -/// Simple evaluation of an expression -pub fn evaluate( - expression: &Expression, +/// Simple evaluation of an [`ExpressionBack`] over the provided lagrange polynomials +pub(crate) fn evaluate( + expression: &ExpressionBack, size: usize, rot_scale: i32, fixed: &[Polynomial], @@ -846,26 +859,192 @@ pub fn evaluate( let idx = start + i; *value = expression.evaluate( &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| { - fixed[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - advice[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] - }, - &|query| { - instance[query.column_index] - [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)] + &|var| match var { + VarBack::Challenge(challenge) => challenges[challenge.index()], + VarBack::Query(query) => { + let rot_idx = get_rotation_idx(idx, query.rotation.0, rot_scale, isize); + match query.column_type { + Any::Fixed => fixed[query.column_index][rot_idx], + Any::Advice => advice[query.column_index][rot_idx], + Any::Instance => instance[query.column_index][rot_idx], + } + } }, - &|challenge| challenges[challenge.index()], &|a| -a, - &|a, b| a + &b, + &|a, b| a + b, &|a, b| a * b, - &|a, scalar| a * scalar, ); } }); values } + +#[cfg(test)] +mod test { + use crate::plonk::circuit::{ExpressionBack, QueryBack, VarBack}; + use crate::poly::LagrangeCoeff; + use halo2_middleware::circuit::{Any, ChallengeMid}; + use halo2_middleware::poly::Rotation; + use halo2curves::pasta::pallas::{Affine, Scalar}; + + use super::*; + + fn check(calc: Option, expr: Option>, expected: i64) { + let lagranges = |v: &[&[u64]]| -> Vec> { + v.iter() + .map(|vv| { + Polynomial::new_lagrange_from_vec( + vv.iter().map(|v| Scalar::from(*v)).collect::>(), + ) + }) + .collect() + }; + + let mut gv = GraphEvaluator::::default(); + if let Some(expression) = expr { + gv.add_expression(&expression); + } else if let Some(calculation) = calc { + gv.add_rotation(&Rotation::cur()); + gv.add_rotation(&Rotation::next()); + gv.add_calculation(calculation); + } else { + unreachable!() + } + + let mut evaluation_data = gv.instance(); + let result = gv.evaluate( + &mut evaluation_data, + &lagranges(&[&[2, 3], &[1002, 1003]]), // fixed + &lagranges(&[&[4, 5], &[1004, 1005]]), // advice + &lagranges(&[&[6, 7], &[1006, 1007]]), // instance + &[8u64.into(), 9u64.into()], // challenges + &Scalar::from_raw([10, 0, 0, 0]), // beta + &Scalar::from_raw([11, 0, 0, 0]), // gamma + &Scalar::from_raw([12, 0, 0, 0]), // theta + &Scalar::from_raw([13, 0, 0, 0]), // y + &Scalar::from_raw([14, 0, 0, 0]), // previous value + 0, // idx + 1, // rot_scale + 32, // isize + ); + let fq_expected = if expected < 0 { + -Scalar::from(-expected as u64) + } else { + Scalar::from(expected as u64) + }; + + assert_eq!( + result, fq_expected, + "Expected {} was {:?}", + expected, result + ); + } + fn check_expr(expr: ExpressionBack, expected: i64) { + check(None, Some(expr), expected); + } + fn check_calc(calc: Calculation, expected: i64) { + check(Some(calc), None, expected); + } + + #[test] + fn graphevaluator_values() { + use VarBack::*; + // Check values + for (col, rot, expected) in [(0, 0, 2), (0, 1, 3), (1, 0, 1002), (1, 1, 1003)] { + check_expr( + ExpressionBack::Var(Query(QueryBack { + index: 0, + column_index: col, + column_type: Any::Fixed, + rotation: 
Rotation(rot), + })), + expected, + ); + } + for (col, rot, expected) in [(0, 0, 4), (0, 1, 5), (1, 0, 1004), (1, 1, 1005)] { + check_expr( + ExpressionBack::Var(Query(QueryBack { + index: 0, + column_index: col, + column_type: Any::Advice, + rotation: Rotation(rot), + })), + expected, + ); + } + for (col, rot, expected) in [(0, 0, 6), (0, 1, 7), (1, 0, 1006), (1, 1, 1007)] { + check_expr( + ExpressionBack::Var(Query(QueryBack { + index: 0, + column_index: col, + column_type: Any::Instance, + rotation: Rotation(rot), + })), + expected, + ); + } + for (ch, expected) in [(0, 8), (1, 9)] { + check_expr( + ExpressionBack::Var(Challenge(ChallengeMid { + index: ch, + phase: 0, + })), + expected, + ); + } + + check_calc(Calculation::Store(ValueSource::Beta()), 10); + check_calc(Calculation::Store(ValueSource::Gamma()), 11); + check_calc(Calculation::Store(ValueSource::Theta()), 12); + check_calc(Calculation::Store(ValueSource::Y()), 13); + check_calc(Calculation::Store(ValueSource::PreviousValue()), 14); + } + + #[test] + fn graphevaluator_expr_operations() { + use VarBack::*; + // Check expression operations + let two = || { + Box::new(ExpressionBack::::Var(Query(QueryBack { + index: 0, + column_index: 0, + column_type: Any::Fixed, + rotation: Rotation(0), + }))) + }; + + let three = || { + Box::new(ExpressionBack::::Var(Query(QueryBack { + index: 0, + column_index: 0, + column_type: Any::Fixed, + rotation: Rotation(1), + }))) + }; + + check_expr(ExpressionBack::Sum(two(), three()), 5); + check_expr(ExpressionBack::Product(two(), three()), 6); + check_expr( + ExpressionBack::Sum(ExpressionBack::Negated(two()).into(), three()), + 1, + ); + } + + #[test] + fn graphevaluator_calc_operations() { + // Check calculation operations + let two = || ValueSource::Fixed(0, 0); + let three = || ValueSource::Fixed(0, 1); + + check_calc(Calculation::Add(two(), three()), 5); + check_calc(Calculation::Double(two()), 4); + check_calc(Calculation::Mul(two(), three()), 6); + check_calc(Calculation::Square(three()), 9); + check_calc(Calculation::Negate(two()), -2); + check_calc(Calculation::Sub(three(), two()), 1); + check_calc( + Calculation::Horner(two(), vec![three(), two()], three()), + 2 + 3 * 3 + 2 * 9, + ); + } +} diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs new file mode 100644 index 0000000000..636e3ee4b0 --- /dev/null +++ b/halo2_backend/src/plonk/keygen.rs @@ -0,0 +1,394 @@ +//! This module +//! - creates the proving and verifying keys for a circuit +//! - crates a domain, constraint system, and configuration for a circuit + +#![allow(clippy::int_plus_one)] + +use group::Curve; +use halo2_middleware::ff::{Field, FromUniformBytes}; +use halo2_middleware::zal::impls::H2cEngine; + +use super::{evaluation::Evaluator, permutation, Polynomial, ProvingKey, VerifyingKey}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + plonk::circuit::{ + ConstraintSystemBack, ExpressionBack, GateBack, LookupArgumentBack, QueryBack, + ShuffleArgumentBack, VarBack, + }, + plonk::Error, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; +use halo2_middleware::circuit::{ + Any, ColumnMid, CompiledCircuit, ConstraintSystemMid, ExpressionMid, VarMid, +}; +use halo2_middleware::multicore::ParallelIterator; +use halo2_middleware::{lookup, poly::Rotation, shuffle}; +use rayon::iter::IntoParallelRefIterator; +use std::collections::HashMap; + +/// Creates a domain, constraint system, and configuration for a circuit. 
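Editor's note, the Horner expectation in `graphevaluator_calc_operations` above reproduced by hand with plain integers (a standalone sketch, u64 stand-ins for `pallas::Scalar`): `Calculation::Horner(start, parts, factor)` evaluates `start*factor^2 + parts[0]*factor + parts[1]` for two parts.

fn main() {
    // Horner(two, [three, two], three) with two = Fixed(0,0) = 2 and three = Fixed(0,1) = 3.
    let (start, factor) = (2u64, 3u64);
    let parts = [3u64, 2u64];
    let horner = parts.iter().fold(start, |acc, p| acc * factor + p);
    // Matches the "2 + 3 * 3 + 2 * 9" expected value in the test.
    assert_eq!(horner, 2 * 9 + 3 * 3 + 2);
    assert_eq!(horner, 29);
}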
+pub(crate) fn create_domain( + cs: &ConstraintSystemBack, + k: u32, +) -> EvaluationDomain +where + C: CurveAffine, +{ + let degree = cs.degree(); + EvaluationDomain::new(degree as u32, k) +} + +/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`. +pub fn keygen_vk( + params: &P, + circuit: &CompiledCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params, + C::Scalar: FromUniformBytes<64>, +{ + let cs_mid = &circuit.cs; + let cs: ConstraintSystemBack = cs_mid.clone().into(); + let domain = EvaluationDomain::new(cs.degree() as u32, params.k()); + + if (params.n() as usize) < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs_mid.permutation, + &circuit.preprocessing.permutation, + )? + .build_vk(params, &domain, &cs.permutation); + + let fixed_commitments = { + let fixed_commitments_projective: Vec = circuit + .preprocessing + .fixed + .iter() + .map(|poly| { + params.commit_lagrange( + &H2cEngine::new(), + &Polynomial::new_lagrange_from_vec(poly.clone()), + Blind::default(), + ) + }) + .collect(); + let mut fixed_commitments = vec![C::identity(); fixed_commitments_projective.len()]; + C::CurveExt::batch_normalize(&fixed_commitments_projective, &mut fixed_commitments); + fixed_commitments + }; + + Ok(VerifyingKey::from_parts( + domain, + fixed_commitments, + permutation_vk, + cs, + )) +} + +/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`. +pub fn keygen_pk( + params: &P, + vk: VerifyingKey, + circuit: &CompiledCircuit, +) -> Result, Error> +where + C: CurveAffine, + P: Params, +{ + let cs = &circuit.cs; + + if (params.n() as usize) < vk.cs.minimum_rows() { + return Err(Error::not_enough_rows_available(params.k())); + } + + // Compute fixeds + let fixed_polys: Vec<_> = circuit + .preprocessing + .fixed + .par_iter() + .map(|poly| { + vk.domain + .lagrange_to_coeff(Polynomial::new_lagrange_from_vec(poly.clone())) + }) + .collect(); + let fixed_cosets = fixed_polys + .par_iter() + .map(|poly| vk.domain.coeff_to_extended(poly.clone())) + .collect(); + + let fixed_values = circuit + .preprocessing + .fixed + .clone() + .into_iter() + .map(Polynomial::new_lagrange_from_vec) + .collect(); + + // Compute l_0(X) + // TODO: this can be done more efficiently + // https://github.com/privacy-scaling-explorations/halo2/issues/269 + let mut l0 = vk.domain.empty_lagrange(); + l0[0] = C::Scalar::ONE; + let l0 = vk.domain.lagrange_to_coeff(l0); + let l0 = vk.domain.coeff_to_extended(l0); + + // Compute l_blind(X) which evaluates to 1 for each blinding factor row + // and 0 otherwise over the domain. 
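Editor's note, a plain-integer sketch (not part of the diff) of the selector vectors that `keygen_pk` builds around this point: l_0 is 1 only on row 0, l_blind is 1 on the last `blinding_factors` rows, l_last is 1 on the row just before them, and l_active_row = 1 - (l_last + l_blind) marks the usable rows.

// u64 stand-ins for the Lagrange-basis vectors; n = 8 rows, 2 blinding factors.
fn selectors(n: usize, blinding_factors: usize) -> (Vec<u64>, Vec<u64>, Vec<u64>) {
    let mut l0 = vec![0; n];
    l0[0] = 1;
    let mut l_blind = vec![0; n];
    for v in l_blind.iter_mut().rev().take(blinding_factors) {
        *v = 1;
    }
    let mut l_last = vec![0; n];
    l_last[n - blinding_factors - 1] = 1;
    (l0, l_blind, l_last)
}

fn main() {
    let (l0, l_blind, l_last) = selectors(8, 2);
    assert_eq!(l0, [1, 0, 0, 0, 0, 0, 0, 0]);
    assert_eq!(l_blind, [0, 0, 0, 0, 0, 0, 1, 1]);
    assert_eq!(l_last, [0, 0, 0, 0, 0, 1, 0, 0]);
    // l_active_row is 1 exactly on rows 0..=4.
    let active: Vec<u64> = l_last.iter().zip(&l_blind).map(|(a, b)| 1 - (a + b)).collect();
    assert_eq!(active, [1, 1, 1, 1, 1, 0, 0, 0]);
}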
+ let mut l_blind = vk.domain.empty_lagrange(); + for evaluation in l_blind[..].iter_mut().rev().take(vk.cs.blinding_factors()) { + *evaluation = C::Scalar::ONE; + } + let l_blind = vk.domain.lagrange_to_coeff(l_blind); + let l_blind = vk.domain.coeff_to_extended(l_blind); + + // Compute l_last(X) which evaluates to 1 on the first inactive row (just + // before the blinding factors) and 0 otherwise over the domain + let mut l_last = vk.domain.empty_lagrange(); + l_last[params.n() as usize - vk.cs.blinding_factors() - 1] = C::Scalar::ONE; + let l_last = vk.domain.lagrange_to_coeff(l_last); + let l_last = vk.domain.coeff_to_extended(l_last); + + // Compute l_active_row(X) + let one = C::Scalar::ONE; + let mut l_active_row = vk.domain.empty_extended(); + parallelize(&mut l_active_row, |values, start| { + for (i, value) in values.iter_mut().enumerate() { + let idx = i + start; + *value = one - (l_last[idx] + l_blind[idx]); + } + }); + + // Compute the optimized evaluation data structure + let ev = Evaluator::new(&vk.cs); + + // Compute the permutation proving key + let permutation_pk = permutation::keygen::Assembly::new_from_assembly_mid( + params.n() as usize, + &cs.permutation, + &circuit.preprocessing.permutation, + )? + .build_pk(params, &vk.domain, &cs.permutation.clone()); + + Ok(ProvingKey { + vk, + l0, + l_last, + l_active_row, + fixed_values, + fixed_polys, + fixed_cosets, + permutation: permutation_pk, + ev, + }) +} + +struct QueriesMap { + map: HashMap<(ColumnMid, Rotation), usize>, + advice: Vec<(ColumnMid, Rotation)>, + instance: Vec<(ColumnMid, Rotation)>, + fixed: Vec<(ColumnMid, Rotation)>, +} + +impl QueriesMap { + fn add(&mut self, col: ColumnMid, rot: Rotation) -> usize { + *self + .map + .entry((col, rot)) + .or_insert_with(|| match col.column_type { + Any::Advice => { + self.advice.push((col, rot)); + self.advice.len() - 1 + } + Any::Instance => { + self.instance.push((col, rot)); + self.instance.len() - 1 + } + Any::Fixed => { + self.fixed.push((col, rot)); + self.fixed.len() - 1 + } + }) + } +} + +impl QueriesMap { + fn as_expression(&mut self, expr: &ExpressionMid) -> ExpressionBack { + match expr { + ExpressionMid::Constant(c) => ExpressionBack::Constant(*c), + ExpressionMid::Var(VarMid::Query(query)) => { + let column = ColumnMid::new(query.column_type, query.column_index); + let index = self.add(column, query.rotation); + ExpressionBack::Var(VarBack::Query(QueryBack { + index, + column_index: query.column_index, + column_type: query.column_type, + rotation: query.rotation, + })) + } + ExpressionMid::Var(VarMid::Challenge(c)) => ExpressionBack::Var(VarBack::Challenge(*c)), + ExpressionMid::Negated(e) => ExpressionBack::Negated(Box::new(self.as_expression(e))), + ExpressionMid::Sum(lhs, rhs) => ExpressionBack::Sum( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + ExpressionMid::Product(lhs, rhs) => ExpressionBack::Product( + Box::new(self.as_expression(lhs)), + Box::new(self.as_expression(rhs)), + ), + } + } +} + +/// Collect queries used in gates while mapping those gates to equivalent ones with indexed +/// query references in the expressions. +fn cs_mid_collect_queries_gates( + cs_mid: &ConstraintSystemMid, + queries: &mut QueriesMap, +) -> Vec> { + cs_mid + .gates + .iter() + .map(|gate| GateBack { + name: gate.name.clone(), + poly: queries.as_expression(&gate.poly), + }) + .collect() +} + +/// Collect queries used in lookups while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. 
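Editor's note, a toy version (std-only, illustrative names) of the deduplication done by `QueriesMap::add` above: repeated (column, rotation) pairs map to the same index, so each distinct query is stored, and later evaluated, only once.

use std::collections::HashMap;

// Columns are modelled as plain u32 indices and rotations as i32 offsets.
fn add(
    map: &mut HashMap<(u32, i32), usize>,
    list: &mut Vec<(u32, i32)>,
    col: u32,
    rot: i32,
) -> usize {
    *map.entry((col, rot)).or_insert_with(|| {
        list.push((col, rot));
        list.len() - 1
    })
}

fn main() {
    let (mut map, mut advice) = (HashMap::new(), Vec::new());
    assert_eq!(add(&mut map, &mut advice, 0, 0), 0); // advice[0] at the current row
    assert_eq!(add(&mut map, &mut advice, 0, 1), 1); // advice[0] at the next row
    assert_eq!(add(&mut map, &mut advice, 0, 0), 0); // duplicate query reuses index 0
    assert_eq!(advice.len(), 2);
}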
+fn cs_mid_collect_queries_lookups( + cs_mid: &ConstraintSystemMid, + queries: &mut QueriesMap, +) -> Vec> { + cs_mid + .lookups + .iter() + .map(|lookup| lookup::Argument { + name: lookup.name.clone(), + input_expressions: lookup + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + table_expressions: lookup + .table_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} + +/// Collect queries used in shuffles while mapping those lookups to equivalent ones with indexed +/// query references in the expressions. +fn cs_mid_collect_queries_shuffles( + cs_mid: &ConstraintSystemMid, + queries: &mut QueriesMap, +) -> Vec> { + cs_mid + .shuffles + .iter() + .map(|shuffle| shuffle::Argument { + name: shuffle.name.clone(), + input_expressions: shuffle + .input_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + shuffle_expressions: shuffle + .shuffle_expressions + .iter() + .map(|e| queries.as_expression(e)) + .collect(), + }) + .collect() +} + +/// Collect all queries used in the expressions of gates, lookups and shuffles. Map the +/// expressions of gates, lookups and shuffles into equivalent ones with indexed query +/// references. +#[allow(clippy::type_complexity)] +fn collect_queries( + cs_mid: &ConstraintSystemMid, +) -> ( + Queries, + Vec>, + Vec>, + Vec>, +) { + let mut queries = QueriesMap { + map: HashMap::new(), + advice: Vec::new(), + instance: Vec::new(), + fixed: Vec::new(), + }; + + let gates = cs_mid_collect_queries_gates(cs_mid, &mut queries); + let lookups = cs_mid_collect_queries_lookups(cs_mid, &mut queries); + let shuffles = cs_mid_collect_queries_shuffles(cs_mid, &mut queries); + + // Each column used in a copy constraint involves a query at rotation current. + for column in &cs_mid.permutation.columns { + queries.add(*column, Rotation::cur()); + } + + let mut num_advice_queries = vec![0; cs_mid.num_advice_columns]; + for (column, _) in queries.advice.iter() { + num_advice_queries[column.index] += 1; + } + + let queries = Queries { + advice: queries.advice, + instance: queries.instance, + fixed: queries.fixed, + num_advice_queries, + }; + (queries, gates, lookups, shuffles) +} + +impl From> for ConstraintSystemBack { + fn from(cs_mid: ConstraintSystemMid) -> Self { + let (queries, gates, lookups, shuffles) = collect_queries(&cs_mid); + Self { + num_fixed_columns: cs_mid.num_fixed_columns, + num_advice_columns: cs_mid.num_advice_columns, + num_instance_columns: cs_mid.num_instance_columns, + num_challenges: cs_mid.num_challenges, + unblinded_advice_columns: cs_mid.unblinded_advice_columns, + advice_column_phase: cs_mid.advice_column_phase, + challenge_phase: cs_mid.challenge_phase, + gates, + advice_queries: queries.advice, + num_advice_queries: queries.num_advice_queries, + instance_queries: queries.instance, + fixed_queries: queries.fixed, + permutation: cs_mid.permutation, + lookups, + shuffles, + minimum_degree: cs_mid.minimum_degree, + } + } +} + +/// List of queries (columns and rotations) used by a circuit +#[derive(Debug, Clone)] +pub(crate) struct Queries { + /// List of unique advice queries + pub(crate) advice: Vec<(ColumnMid, Rotation)>, + /// List of unique instance queries + pub(crate) instance: Vec<(ColumnMid, Rotation)>, + /// List of unique fixed queries + pub(crate) fixed: Vec<(ColumnMid, Rotation)>, + /// Contains an integer for each advice column + /// identifying how many distinct queries it has + /// so far; should be same length as cs.num_advice_columns. 
+ pub(crate) num_advice_queries: Vec, +} diff --git a/halo2_backend/src/plonk/lookup.rs b/halo2_backend/src/plonk/lookup.rs new file mode 100644 index 0000000000..795f3b744b --- /dev/null +++ b/halo2_backend/src/plonk/lookup.rs @@ -0,0 +1,4 @@ +pub(crate) mod prover; +pub(crate) mod verifier; + +use crate::plonk::circuit::LookupArgumentBack as Argument; diff --git a/halo2_proofs/src/plonk/lookup/prover.rs b/halo2_backend/src/plonk/lookup/prover.rs similarity index 74% rename from halo2_proofs/src/plonk/lookup/prover.rs rename to halo2_backend/src/plonk/lookup/prover.rs index 028b298853..8d34723fea 100644 --- a/halo2_proofs/src/plonk/lookup/prover.rs +++ b/halo2_backend/src/plonk/lookup/prover.rs @@ -1,22 +1,23 @@ -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, - ProvingKey, -}; +use super::super::ProvingKey; use super::Argument; use crate::plonk::evaluation::evaluate; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, + plonk::circuit::ExpressionBack, + plonk::{ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error}, poly::{ commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, }, transcript::{EncodedChallenge, TranscriptWrite}, }; -use ff::WithSmallOrderMulGroup; use group::{ ff::{BatchInvert, Field}, Curve, }; +use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::poly::Rotation; +use halo2_middleware::zal::{impls::PlonkEngine, traits::MsmAccel}; use rand_core::RngCore; use std::{ collections::BTreeMap, @@ -50,128 +51,143 @@ pub(in crate::plonk) struct Evaluated { constructed: Committed, } -impl> Argument { - /// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - /// - permutes A_compressed and S_compressed using permute_expression_pair() helper, - /// obtaining A' and S', and - /// - constructs Permuted struct using permuted_input_value = A', and - /// permuted_table_expression = S'. - /// The Permuted struct is used to update the Lookup, and is then returned. 
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_permuted< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the lookup and compress them - let compressed_input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the lookup and compress them - let compressed_table_expression = compress_expressions(&self.table_expressions); - - // Permute compressed (InputExpression, TableExpression) pair - let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( - pk, - params, - domain, - &mut rng, - &compressed_input_expression, - &compressed_table_expression, - )?; - - // Closure to construct commitment to vector of values - let mut commit_values = |values: &Polynomial| { - let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); - let blind = Blind(C::Scalar::random(&mut rng)); - let commitment = params.commit_lagrange(values, blind).to_affine(); - (poly, blind, commitment) - }; - - // Commit to permuted input expression - let (permuted_input_poly, permuted_input_blind, permuted_input_commitment) = - commit_values(&permuted_input_expression); - - // Commit to permuted table expression - let (permuted_table_poly, permuted_table_blind, permuted_table_commitment) = - commit_values(&permuted_table_expression); - - // Hash permuted input commitment - transcript.write_point(permuted_input_commitment)?; - - // Hash permuted table commitment - transcript.write_point(permuted_table_commitment)?; - - Ok(Permuted { - compressed_input_expression, - permuted_input_expression, - permuted_input_poly, - permuted_input_blind, - compressed_table_expression, - permuted_table_expression, - permuted_table_poly, - permuted_table_blind, - }) - } +/// Given a Lookup with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions +/// [S_0, S_1, ..., S_{m-1}], this method +/// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} +/// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, +/// - permutes A_compressed and S_compressed using permute_expression_pair() helper, +/// obtaining A' and S', and +/// - constructs [`Permuted`] struct using permuted_input_value = A', and +/// permuted_table_expression = S'. +/// +/// The [`Permuted`] struct is used to update the Lookup, and is then returned. 
+#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn lookup_commit_permuted< + 'a, + F: WithSmallOrderMulGroup<3>, + C, + P: Params, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, +>( + engine: &PlonkEngine, + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, +) -> Result, Error> +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[ExpressionBack]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the lookup and compress them + let compressed_input_expression = compress_expressions(&arg.input_expressions); + + // Get values of table expressions involved in the lookup and compress them + let compressed_table_expression = compress_expressions(&arg.table_expressions); + + // Permute compressed (InputExpression, TableExpression) pair + let (permuted_input_expression, permuted_table_expression) = permute_expression_pair( + pk, + params, + domain, + &mut rng, + &compressed_input_expression, + &compressed_table_expression, + )?; + + // Closure to construct commitment to vector of values + let mut commit_values = |values: &Polynomial| { + let poly = pk.vk.domain.lagrange_to_coeff(values.clone()); + let blind = Blind(C::Scalar::random(&mut rng)); + let commitment = params.commit_lagrange(&engine.msm_backend, values, blind); + (poly, blind, commitment) + }; + + // Commit to permuted input expression + let (permuted_input_poly, permuted_input_blind, permuted_input_commitment_projective) = + commit_values(&permuted_input_expression); + + // Commit to permuted table expression + let (permuted_table_poly, permuted_table_blind, permuted_table_commitment_projective) = + commit_values(&permuted_table_expression); + + let [permuted_input_commitment, permuted_table_commitment] = { + let mut affines = [C::identity(); 2]; + C::CurveExt::batch_normalize( + &[ + permuted_input_commitment_projective, + permuted_table_commitment_projective, + ], + &mut affines, + ); + affines + }; + + // Hash permuted input commitment + transcript.write_point(permuted_input_commitment)?; + + // Hash permuted table commitment + transcript.write_point(permuted_table_commitment)?; + + Ok(Permuted { + compressed_input_expression, + permuted_input_expression, + permuted_input_poly, + permuted_input_blind, + compressed_table_expression, + permuted_table_expression, + permuted_table_poly, + permuted_table_blind, + }) } impl Permuted { /// Given a Lookup with input expressions, table expressions, and the permuted /// input expression and permuted table expression, this method constructs the /// grand product polynomial over the lookup. The grand product polynomial - /// is used to populate the Product struct. The Product struct is + /// is used to populate the [`Committed`] struct. The [`Committed`] struct is /// added to the Lookup and finally returned by the method. 
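Editor's note, an integer stand-in (not part of the diff) for the fold inside `compress_expressions` above: starting from zero, each step computes `acc * theta + a_i`, which expands to the compressed combination `theta^{m-1} a_0 + ... + theta a_{m-2} + a_{m-1}`.

fn main() {
    let theta: u64 = 12;
    let a = [5u64, 6, 7];
    let compressed = a.iter().fold(0u64, |acc, v| acc * theta + v);
    assert_eq!(compressed, 5 * theta * theta + 6 * theta + 7);
}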
+ #[allow(clippy::too_many_arguments)] pub(in crate::plonk) fn commit_product< - 'params, - P: Params<'params, C>, + P: Params, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, + M: MsmAccel, >( self, + engine: &PlonkEngine, pk: &ProvingKey, params: &P, beta: ChallengeBeta, @@ -199,7 +215,7 @@ impl Permuted { .zip(self.permuted_input_expression[start..].iter()) .zip(self.permuted_table_expression[start..].iter()) { - *lookup_product = (*beta + permuted_input_value) * &(*gamma + permuted_table_value); + *lookup_product = (*beta + permuted_input_value) * (*gamma + permuted_table_value); } }); @@ -214,8 +230,8 @@ impl Permuted { for (i, product) in product.iter_mut().enumerate() { let i = i + start; - *product *= &(self.compressed_input_expression[i] + &*beta); - *product *= &(self.compressed_table_expression[i] + &*gamma); + *product *= &(self.compressed_input_expression[i] + *beta); + *product *= &(self.compressed_table_expression[i] + *gamma); } }); @@ -276,7 +292,7 @@ impl Permuted { input_term += &(*beta); table_term += &(*gamma); - right *= &(input_term * &table_term); + right *= &(input_term * table_term); assert_eq!(left, right); } @@ -288,7 +304,9 @@ impl Permuted { } let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); + let product_commitment = params + .commit_lagrange(&engine.msm_backend, &z, product_blind) + .to_affine(); let z = pk.vk.domain.lagrange_to_coeff(z); // Hash product commitment @@ -387,8 +405,9 @@ type ExpressionPair = (Polynomial, Polynomial, R: RngCore>( +fn permute_expression_pair, R: RngCore>( pk: &ProvingKey, params: &P, domain: &EvaluationDomain, diff --git a/halo2_proofs/src/plonk/lookup/verifier.rs b/halo2_backend/src/plonk/lookup/verifier.rs similarity index 68% rename from halo2_proofs/src/plonk/lookup/verifier.rs rename to halo2_backend/src/plonk/lookup/verifier.rs index bbc86c8e9d..aadf8cd0e6 100644 --- a/halo2_proofs/src/plonk/lookup/verifier.rs +++ b/halo2_backend/src/plonk/lookup/verifier.rs @@ -1,28 +1,28 @@ use std::iter; -use super::super::{ - circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, -}; use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + plonk::circuit::{ExpressionBack, QueryBack, VarBack}, + plonk::{ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error, VerifyingKey}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; -pub struct PermutationCommitments { +pub(crate) struct PermutationCommitments { permuted_input_commitment: C, permuted_table_commitment: C, } -pub struct Committed { +pub(crate) struct Committed { permuted: PermutationCommitments, product_commitment: C, } -pub struct Evaluated { +pub(crate) struct Evaluated { committed: Committed, product_eval: C::Scalar, product_next_eval: C::Scalar, @@ -31,23 +31,20 @@ pub struct Evaluated { permuted_table_eval: C::Scalar, } -impl Argument { - pub(in crate::plonk) fn read_permuted_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let permuted_input_commitment = transcript.read_point()?; - let permuted_table_commitment = transcript.read_point()?; - - Ok(PermutationCommitments { - permuted_input_commitment, - 
permuted_table_commitment, - }) - } +pub(in crate::plonk) fn lookup_read_permuted_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + transcript: &mut T, +) -> Result, Error> { + let permuted_input_commitment = transcript.read_point()?; + let permuted_table_commitment = transcript.read_point()?; + + Ok(PermutationCommitments { + permuted_input_commitment, + permuted_table_commitment, + }) } impl PermutationCommitments { @@ -111,43 +108,47 @@ impl Evaluated { // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma) // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma) let left = self.product_next_eval - * &(self.permuted_input_eval + &*beta) - * &(self.permuted_table_eval + &*gamma); + * (self.permuted_input_eval + *beta) + * (self.permuted_table_eval + *gamma); - let compress_expressions = |expressions: &[Expression]| { + let compress_expressions = |expressions: &[ExpressionBack]| { expressions .iter() .map(|expression| { expression.evaluate( &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], + &|var| match var { + VarBack::Challenge(challenge) => challenges[challenge.index], + VarBack::Query(QueryBack { + index, column_type, .. + }) => match column_type { + Any::Fixed => fixed_evals[index], + Any::Advice => advice_evals[index], + Any::Instance => instance_evals[index], + }, + }, &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, + &|a, b| a + b, + &|a, b| a * b, ) }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + .fold(C::Scalar::ZERO, |acc, eval| acc * *theta + eval) }; let right = self.product_eval - * &(compress_expressions(&argument.input_expressions) + &*beta) - * &(compress_expressions(&argument.table_expressions) + &*gamma); + * (compress_expressions(&argument.input_expressions) + *beta) + * (compress_expressions(&argument.table_expressions) + *gamma); - (left - &right) * &active_rows + (left - right) * active_rows }; std::iter::empty() .chain( // l_0(X) * (1 - z(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + Some(l_0 * (C::Scalar::ONE - self.product_eval)), ) .chain( // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), + Some(l_last * (self.product_eval.square() - self.product_eval)), ) .chain( // (1 - (l_last(X) + l_blind(X))) * ( @@ -158,13 +159,13 @@ impl Evaluated { ) .chain(Some( // l_0(X) * (a'(X) - s'(X)) = 0 - l_0 * &(self.permuted_input_eval - &self.permuted_table_eval), + l_0 * (self.permuted_input_eval - self.permuted_table_eval), )) .chain(Some( // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0 - (self.permuted_input_eval - &self.permuted_table_eval) - * &(self.permuted_input_eval - &self.permuted_input_inv_eval) - * &active_rows, + (self.permuted_input_eval - self.permuted_table_eval) + * (self.permuted_input_eval - self.permuted_input_inv_eval) + * active_rows, )) } diff --git a/halo2_proofs/src/plonk/permutation.rs b/halo2_backend/src/plonk/permutation.rs similarity index 56% rename from halo2_proofs/src/plonk/permutation.rs rename to halo2_backend/src/plonk/permutation.rs index 22c1fad6c3..26d65b9623 100644 --- a/halo2_proofs/src/plonk/permutation.rs +++ b/halo2_backend/src/plonk/permutation.rs @@ -1,86 +1,20 @@ -//! 
Implementation of permutation argument. +//! Verifying/Proving key of a permutation argument, with its serialization. -use super::circuit::{Any, Column}; +use crate::helpers::{SerdeCurveAffine, SerdeFormat, SerdePrimeField}; use crate::{ arithmetic::CurveAffine, - helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, - SerdeCurveAffine, SerdePrimeField, - }, + helpers::{polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice}, poly::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}, - SerdeFormat, }; +// TODO: Remove the renaming +pub use halo2_middleware::permutation::ArgumentMid as Argument; + +use std::io; pub(crate) mod keygen; pub(crate) mod prover; pub(crate) mod verifier; -pub use keygen::Assembly; - -use std::io; - -/// A permutation argument. -#[derive(Debug, Clone)] -pub struct Argument { - /// A sequence of columns involved in the argument. - pub(super) columns: Vec>, -} - -impl Argument { - pub(crate) fn new() -> Self { - Argument { columns: vec![] } - } - - /// Returns the minimum circuit degree required by the permutation argument. - /// The argument may use larger degree gates depending on the actual - /// circuit's degree and how many columns are involved in the permutation. - pub(crate) fn required_degree(&self) -> usize { - // degree 2: - // l_0(X) * (1 - z(X)) = 0 - // - // We will fit as many polynomials p_i(X) as possible - // into the required degree of the circuit, so the - // following will not affect the required degree of - // this middleware. - // - // (1 - (l_last(X) + l_blind(X))) * ( - // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) - // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) - // ) - // - // On the first sets of columns, except the first - // set, we will do - // - // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 - // - // where z'(X) is the permutation for the previous set - // of columns. - // - // On the final set of columns, we will do - // - // degree 3: - // l_last(X) * (z'(X)^2 - z'(X)) = 0 - // - // which will allow the last value to be zero to - // ensure the argument is perfectly complete. - - // There are constraints of degree 3 regardless of the - // number of columns involved. - 3 - } - - pub(crate) fn add_column(&mut self, column: Column) { - if !self.columns.contains(&column) { - self.columns.push(column); - } - } - - /// Returns columns that participate on the permutation argument. - pub fn get_columns(&self) -> Vec> { - self.columns.clone() - } -} - /// The verifying key for a single permutation argument. #[derive(Clone, Debug)] pub struct VerifyingKey { @@ -88,11 +22,6 @@ pub struct VerifyingKey { } impl VerifyingKey { - /// Returns commitments of sigma polynomials - pub fn commitments(&self) -> &Vec { - &self.commitments - } - pub(crate) fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> where C: SerdeCurveAffine, @@ -123,6 +52,11 @@ impl VerifyingKey { { self.commitments.len() * C::byte_length(format) } + + /// Returns the commitments of the verifying key. + pub fn commitments(&self) -> &Vec { + &self.commitments + } } /// The proving key for a single permutation argument. 
diff --git a/halo2_backend/src/plonk/permutation/keygen.rs b/halo2_backend/src/plonk/permutation/keygen.rs new file mode 100644 index 0000000000..5a0b94837c --- /dev/null +++ b/halo2_backend/src/plonk/permutation/keygen.rs @@ -0,0 +1,288 @@ +use group::Curve; +use halo2_middleware::ff::{Field, PrimeField}; +use halo2_middleware::multicore::IndexedParallelIterator; +use halo2_middleware::multicore::ParallelIterator; +use halo2_middleware::zal::impls::H2cEngine; +use rayon::iter::IntoParallelRefMutIterator; + +use super::{Argument, ProvingKey, VerifyingKey}; +use crate::{ + arithmetic::{parallelize, CurveAffine}, + plonk::Error, + poly::{ + commitment::{Blind, Params}, + EvaluationDomain, + }, +}; +use halo2_middleware::circuit::ColumnMid; +use halo2_middleware::permutation::{ArgumentMid, AssemblyMid}; + +/// Struct that accumulates all the necessary data in order to construct the permutation argument. +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct Assembly { + /// Columns that participate on the copy permutation argument. + columns: Vec, + /// Mapping of the actual copies done. + mapping: Vec>, + /// Some aux data used to swap positions directly when sorting. + aux: Vec>, + /// More aux data + sizes: Vec>, +} + +impl Assembly { + pub(crate) fn new_from_assembly_mid( + n: usize, + p: &ArgumentMid, + a: &AssemblyMid, + ) -> Result { + let mut assembly = Self::new(n, &p.clone()); + for copy in &a.copies { + assembly.copy(copy.0.column, copy.0.row, copy.1.column, copy.1.row)?; + } + Ok(assembly) + } + + pub(crate) fn new(n: usize, p: &Argument) -> Self { + // Initialize the copy vector to keep track of copy constraints in all + // the permutation arguments. + let mut columns = vec![]; + for i in 0..p.columns.len() { + // Computes [(i, 0), (i, 1), ..., (i, n - 1)] + columns.push((0..n).map(|j| (i, j)).collect()); + } + + // Before any equality constraints are applied, every cell in the permutation is + // in a 1-cycle; therefore mapping and aux are identical, because every cell is + // its own distinguished element. + Assembly { + columns: p.columns.clone(), + mapping: columns.clone(), + aux: columns, + sizes: vec![vec![1usize; n]; p.columns.len()], + } + } + + pub(crate) fn copy( + &mut self, + left_column: ColumnMid, + left_row: usize, + right_column: ColumnMid, + right_row: usize, + ) -> Result<(), Error> { + let left_column = self + .columns + .iter() + .position(|c| c == &left_column) + .ok_or(Error::ColumnNotInPermutation(left_column))?; + let right_column = self + .columns + .iter() + .position(|c| c == &right_column) + .ok_or(Error::ColumnNotInPermutation(right_column))?; + + // Check bounds + if left_row >= self.mapping[left_column].len() + || right_row >= self.mapping[right_column].len() + { + return Err(Error::BoundsFailure); + } + + // See book/src/design/permutation.md for a description of this algorithm. + + let mut left_cycle = self.aux[left_column][left_row]; + let mut right_cycle = self.aux[right_column][right_row]; + + // If left and right are in the same cycle, do nothing. + if left_cycle == right_cycle { + return Ok(()); + } + + if self.sizes[left_cycle.0][left_cycle.1] < self.sizes[right_cycle.0][right_cycle.1] { + std::mem::swap(&mut left_cycle, &mut right_cycle); + } + + // Merge the right cycle into the left one. 
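Editor's note, a one-column toy model (indices instead of (column, row) pairs, names illustrative) of the cycle bookkeeping in `Assembly::copy` above; the merge loop it imitates continues just below this note. `mapping[i]` is the next cell in i's cycle, `aux[i]` its cycle representative, and `sizes[rep]` the cycle length; `copy` relabels the smaller cycle and splices the two cycles together.

struct Cycles {
    mapping: Vec<usize>,
    aux: Vec<usize>,
    sizes: Vec<usize>,
}

impl Cycles {
    fn new(n: usize) -> Self {
        // Every cell starts in its own 1-cycle, so mapping and aux are identical.
        Cycles { mapping: (0..n).collect(), aux: (0..n).collect(), sizes: vec![1; n] }
    }

    fn copy(&mut self, left: usize, right: usize) {
        let (mut l, mut r) = (self.aux[left], self.aux[right]);
        if l == r {
            return; // already in the same equality class
        }
        if self.sizes[l] < self.sizes[r] {
            std::mem::swap(&mut l, &mut r);
        }
        self.sizes[l] += self.sizes[r];
        // Relabel every cell of the smaller cycle...
        let mut i = r;
        loop {
            self.aux[i] = l;
            i = self.mapping[i];
            if i == r {
                break;
            }
        }
        // ...and splice the two cycles by swapping the successors of the original cells.
        self.mapping.swap(left, right);
    }
}

fn main() {
    let mut c = Cycles::new(4);
    c.copy(0, 1);
    c.copy(2, 3);
    c.copy(0, 2);
    // All four cells now share one representative, i.e. one equality class.
    assert!(c.aux.iter().all(|&rep| rep == c.aux[0]));
}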
+ self.sizes[left_cycle.0][left_cycle.1] += self.sizes[right_cycle.0][right_cycle.1]; + let mut i = right_cycle; + loop { + self.aux[i.0][i.1] = left_cycle; + i = self.mapping[i.0][i.1]; + if i == right_cycle { + break; + } + } + + let tmp = self.mapping[left_column][left_row]; + self.mapping[left_column][left_row] = self.mapping[right_column][right_row]; + self.mapping[right_column][right_row] = tmp; + + Ok(()) + } + + pub(crate) fn build_vk>( + self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> VerifyingKey { + build_vk(params, domain, p, |i, j| self.mapping[i][j]) + } + + pub(crate) fn build_pk>( + self, + params: &P, + domain: &EvaluationDomain, + p: &Argument, + ) -> ProvingKey { + build_pk(params, domain, p, |i, j| self.mapping[i][j]) + } +} + +pub(crate) fn build_pk>( + params: &P, + domain: &EvaluationDomain, + p: &Argument, + mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, +) -> ProvingKey { + // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] + let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; + { + let omega = domain.get_omega(); + parallelize(&mut omega_powers, |o, start| { + let mut cur = omega.pow_vartime([start as u64]); + for v in o.iter_mut() { + *v = cur; + cur *= ω + } + }) + } + + // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] + let mut deltaomega = vec![omega_powers; p.columns.len()]; + { + parallelize(&mut deltaomega, |o, start| { + let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); + for omega_powers in o.iter_mut() { + for v in omega_powers { + *v *= &cur; + } + cur *= &C::Scalar::DELTA; + } + }); + } + + // Compute permutation polynomials, convert to coset form. + let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; + { + parallelize(&mut permutations, |o, start| { + o.par_iter_mut() + .enumerate() + .for_each(|(x, permutation_poly)| { + let i = start + x; + permutation_poly + .par_iter_mut() + .enumerate() + .for_each(|(j, p)| { + let (permuted_i, permuted_j) = mapping(i, j); + *p = deltaomega[permuted_i][permuted_j]; + }) + }) + }); + } + + let mut polys = vec![domain.empty_coeff(); p.columns.len()]; + { + parallelize(&mut polys, |o, start| { + o.par_iter_mut().enumerate().for_each(|(x, poly)| { + let i = start + x; + let permutation_poly = permutations[i].clone(); + *poly = domain.lagrange_to_coeff(permutation_poly); + }) + }); + } + + let mut cosets = vec![domain.empty_extended(); p.columns.len()]; + { + parallelize(&mut cosets, |o, start| { + o.par_iter_mut().enumerate().for_each(|(x, coset)| { + let i = start + x; + let poly = polys[i].clone(); + *coset = domain.coeff_to_extended(poly); + }) + }); + } + + ProvingKey { + permutations, + polys, + cosets, + } +} + +pub(crate) fn build_vk>( + params: &P, + domain: &EvaluationDomain, + p: &Argument, + mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, +) -> VerifyingKey { + // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] + let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; + { + let omega = domain.get_omega(); + parallelize(&mut omega_powers, |o, start| { + let mut cur = omega.pow_vartime([start as u64]); + for v in o.iter_mut() { + *v = cur; + cur *= ω + } + }) + } + + // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] + let mut deltaomega = vec![omega_powers; p.columns.len()]; + { + parallelize(&mut deltaomega, |o, start| { + let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); + for omega_powers in o.iter_mut() 
{ + for v in omega_powers { + *v *= &cur; + } + cur *= &::DELTA; + } + }); + } + + // Computes the permutation polynomial based on the permutation + // description in the assembly. + let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; + { + parallelize(&mut permutations, |o, start| { + for (x, permutation_poly) in o.iter_mut().enumerate() { + let i = start + x; + for (j, p) in permutation_poly.iter_mut().enumerate() { + let (permuted_i, permuted_j) = mapping(i, j); + *p = deltaomega[permuted_i][permuted_j]; + } + } + }); + } + + // Pre-compute commitments for the URS. + let commitments = { + let mut commitments_projective = Vec::with_capacity(p.columns.len()); + for permutation in &permutations { + // Compute commitment to permutation polynomial + commitments_projective.push(params.commit_lagrange( + &H2cEngine::new(), + permutation, + Blind::default(), + )); + } + let mut commitments = vec![C::identity(); p.columns.len()]; + C::CurveExt::batch_normalize(&commitments_projective, &mut commitments); + commitments + }; + + VerifyingKey { commitments } +} diff --git a/halo2_backend/src/plonk/permutation/prover.rs b/halo2_backend/src/plonk/permutation/prover.rs new file mode 100644 index 0000000000..585fa3ab47 --- /dev/null +++ b/halo2_backend/src/plonk/permutation/prover.rs @@ -0,0 +1,303 @@ +use group::{ + ff::{BatchInvert, Field}, + Curve, +}; +use halo2_middleware::zal::traits::MsmAccel; +use halo2_middleware::{ff::PrimeField, zal::impls::PlonkEngine}; +use rand_core::RngCore; +use std::iter::{self, ExactSizeIterator}; + +use super::Argument; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + plonk::{self, permutation::ProvingKey, ChallengeBeta, ChallengeGamma, ChallengeX, Error}, + poly::{ + commitment::{Blind, Params}, + Coeff, LagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; + +// TODO: Document a bit these types +// https://github.com/privacy-scaling-explorations/halo2/issues/264 + +pub(crate) struct CommittedSet { + pub(crate) permutation_product_poly: Polynomial, + permutation_product_blind: Blind, +} + +pub(crate) struct Committed { + pub(crate) sets: Vec>, +} + +pub(crate) struct Evaluated { + constructed: Committed, +} + +#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn permutation_commit< + C: CurveAffine, + P: Params, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, +>( + engine: &PlonkEngine, + arg: &Argument, + params: &P, + pk: &plonk::ProvingKey, + pkey: &ProvingKey, + advice: &[Polynomial], + fixed: &[Polynomial], + instance: &[Polynomial], + beta: ChallengeBeta, + gamma: ChallengeGamma, + mut rng: R, + transcript: &mut T, +) -> Result, Error> { + let domain = &pk.vk.domain; + + // How many columns can be included in a single permutation polynomial? + // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This + // will never underflow because of the requirement of at least a degree + // 3 circuit for the permutation argument. + assert!(pk.vk.cs_degree >= 3); + let chunk_len = pk.vk.cs_degree - 2; + let blinding_factors = pk.vk.cs.blinding_factors(); + + // Each column gets its own delta power. 
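Editor's note, a small arithmetic check (not part of the diff) of the `chunk_len = cs_degree - 2` choice above: each column in a chunk contributes one degree-1 factor to the grand-product constraint, and multiplying by z(omega X) and by the active-row selector (1 - (l_last + l_blind)) adds two more degrees, so the constraint exactly fills the circuit degree.

fn main() {
    let cs_degree = 5usize;
    let chunk_len = cs_degree - 2;
    let constraint_degree = chunk_len + 2; // chunk_len column factors + z + selector
    assert_eq!(chunk_len, 3);
    assert!(constraint_degree <= cs_degree);
}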
+ let mut deltaomega = C::Scalar::ONE; + + // Track the "last" value from the previous column set + let mut last_z = C::Scalar::ONE; + + let mut sets = vec![]; + + for (columns, permutations) in arg + .columns + .chunks(chunk_len) + .zip(pkey.permutations.chunks(chunk_len)) + { + // Goal is to compute the products of fractions + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where p_j(X) is the jth column in this permutation, + // and i is the ith row of the column. + + let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; + + // Iterate over each column of the permutation + for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { + let values = match column.column_type { + Any::Advice => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + for ((modified_values, value), permuted_value) in modified_values + .iter_mut() + .zip(values[column.index][start..].iter()) + .zip(permuted_column_values[start..].iter()) + { + *modified_values *= *beta * permuted_value + *gamma + value; + } + }); + } + + // Invert to obtain the denominator for the permutation product polynomial + modified_values.batch_invert(); + + // Iterate over each column again, this time finishing the computation + // of the entire fraction by computing the numerators + for &column in columns.iter() { + let omega = domain.get_omega(); + let values = match column.column_type { + Any::Advice => advice, + Any::Fixed => fixed, + Any::Instance => instance, + }; + parallelize(&mut modified_values, |modified_values, start| { + let mut deltaomega = deltaomega * omega.pow_vartime([start as u64, 0, 0, 0]); + for (modified_values, value) in modified_values + .iter_mut() + .zip(values[column.index][start..].iter()) + { + // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta + *modified_values *= deltaomega * *beta + *gamma + value; + deltaomega *= ω + } + }); + deltaomega *= &::DELTA; + } + + // The modified_values vector is a vector of products of fractions + // of the form + // + // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / + // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) + // + // where i is the index into modified_values, for the jth column in + // the permutation + + // Compute the evaluations of the permutation product polynomial + // over our domain, starting with z[0] = 1 + let mut z = vec![last_z]; + for row in 1..(params.n() as usize) { + let mut tmp = z[row - 1]; + + tmp *= &modified_values[row - 1]; + z.push(tmp); + } + let mut z = domain.lagrange_from_vec(z); + // Set blinding factors + for z in &mut z[params.n() as usize - blinding_factors..] 
{ + *z = C::Scalar::random(&mut rng); + } + // Set new last_z + last_z = z[params.n() as usize - (blinding_factors + 1)]; + + let blind = Blind(C::Scalar::random(&mut rng)); + + let permutation_product_commitment = params + .commit_lagrange(&engine.msm_backend, &z, blind) + .to_affine(); + let permutation_product_blind = blind; + let permutation_product_poly = domain.lagrange_to_coeff(z); + + // Hash the permutation product commitment + transcript.write_point(permutation_product_commitment)?; + + sets.push(CommittedSet { + permutation_product_poly, + permutation_product_blind, + }); + } + + Ok(Committed { sets }) +} + +impl super::ProvingKey { + pub(in crate::plonk) fn open( + &self, + x: ChallengeX, + ) -> impl Iterator> + Clone { + self.polys.iter().map(move |poly| ProverQuery { + point: *x, + poly, + blind: Blind::default(), + }) + } + + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + &self, + x: ChallengeX, + transcript: &mut T, + ) -> Result<(), Error> { + // Hash permutation evals + for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { + transcript.write_scalar(eval)?; + } + + Ok(()) + } +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &plonk::ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let blinding_factors = pk.vk.cs.blinding_factors(); + + { + let mut sets = self.sets.iter(); + + while let Some(set) = sets.next() { + let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); + + let permutation_product_next_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation::next()), + ); + + // Hash permutation product evals + for eval in iter::empty() + .chain(Some(&permutation_product_eval)) + .chain(Some(&permutation_product_next_eval)) + { + transcript.write_scalar(*eval)?; + } + + // If we have any remaining sets to process, evaluate this set at omega^u + // so we can constrain the last value of its running product to equal the + // first value of the next set's running product, chaining them together. + if sets.len() > 0 { + let permutation_product_last_eval = eval_polynomial( + &set.permutation_product_poly, + domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), + ); + + transcript.write_scalar(permutation_product_last_eval)?; + } + } + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a plonk::ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let blinding_factors = pk.vk.cs.blinding_factors(); + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + let x_last = pk + .vk + .domain + .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); + + iter::empty() + .chain(self.constructed.sets.iter().flat_map(move |set| { + iter::empty() + // Open permutation product commitments at x and \omega x + .chain(Some(ProverQuery { + point: *x, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + .chain(Some(ProverQuery { + point: x_next, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + })) + })) + // Open it at \omega^{last} x for all but the last set. This rotation is only + // sensical for the first row, but we only use this rotation in a constraint + // that is gated on l_0. 
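+        // Concretely: for every set except the final one, the verifier also needs
+        // z_i evaluated at omega^{-(blinding_factors + 1)} x, the last usable row.
+        // The l_0-gated constraint
+        //     l_0(X) * (z_i(X) - z_{i-1}(omega^last X)) = 0
+        // then forces each set's running product to start from the last usable
+        // value of the previous set, which is exactly how `last_z` was threaded
+        // through `permutation_commit` above.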
+ .chain( + self.constructed + .sets + .iter() + .rev() + .skip(1) + .flat_map(move |set| { + Some(ProverQuery { + point: x_last, + poly: &set.permutation_product_poly, + blind: set.permutation_product_blind, + }) + }), + ) + } +} diff --git a/halo2_proofs/src/plonk/permutation/verifier.rs b/halo2_backend/src/plonk/permutation/verifier.rs similarity index 81% rename from halo2_proofs/src/plonk/permutation/verifier.rs rename to halo2_backend/src/plonk/permutation/verifier.rs index a4637422ae..6fead2ffc7 100644 --- a/halo2_proofs/src/plonk/permutation/verifier.rs +++ b/halo2_backend/src/plonk/permutation/verifier.rs @@ -1,56 +1,55 @@ -use ff::{Field, PrimeField}; +use halo2_middleware::ff::{Field, PrimeField}; use std::iter; -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; use super::{Argument, VerifyingKey}; use crate::{ arithmetic::CurveAffine, - plonk::{self, Error}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + plonk::{self, ChallengeBeta, ChallengeGamma, ChallengeX, Error}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; -pub struct Committed { +pub(crate) struct Committed { permutation_product_commitments: Vec, } -pub struct EvaluatedSet { +pub(crate) struct EvaluatedSet { permutation_product_commitment: C, permutation_product_eval: C::Scalar, permutation_product_next_eval: C::Scalar, permutation_product_last_eval: Option, } -pub struct CommonEvaluated { +pub(crate) struct CommonEvaluated { permutation_evals: Vec, } -pub struct Evaluated { +pub(crate) struct Evaluated { sets: Vec>, } -impl Argument { - pub(crate) fn read_product_commitments< - C: CurveAffine, - E: EncodedChallenge, - T: TranscriptRead, - >( - &self, - vk: &plonk::VerifyingKey, - transcript: &mut T, - ) -> Result, Error> { - let chunk_len = vk.cs_degree - 2; - - let permutation_product_commitments = self - .columns - .chunks(chunk_len) - .map(|_| transcript.read_point()) - .collect::, _>>()?; - - Ok(Committed { - permutation_product_commitments, - }) - } +pub(crate) fn permutation_read_product_commitments< + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + arg: &Argument, + vk: &plonk::VerifyingKey, + transcript: &mut T, +) -> Result, Error> { + let chunk_len = vk.cs_degree - 2; + + let permutation_product_commitments = arg + .columns + .chunks(chunk_len) + .map(|_| transcript.read_point()) + .collect::, _>>()?; + + Ok(Committed { + permutation_product_commitments, + }) } impl VerifyingKey { @@ -122,13 +121,13 @@ impl Evaluated { .chain( self.sets .first() - .map(|first_set| l_0 * &(C::Scalar::ONE - &first_set.permutation_product_eval)), + .map(|first_set| l_0 * (C::Scalar::ONE - first_set.permutation_product_eval)), ) // Enforce only for the last set. // l_last(X) * (z_l(X)^2 - z_l(X)) = 0 .chain(self.sets.last().map(|last_set| { - (last_set.permutation_product_eval.square() - &last_set.permutation_product_eval) - * &l_last + (last_set.permutation_product_eval.square() - last_set.permutation_product_eval) + * l_last })) // Except for the first set, enforce. 
// l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0 @@ -143,7 +142,7 @@ impl Evaluated { last_set.permutation_product_last_eval.unwrap(), ) }) - .map(move |(set, prev_last)| (set - &prev_last) * &l_0), + .map(move |(set, prev_last)| (set - prev_last) * l_0), ) // And for all the sets we enforce: // (1 - (l_last(X) + l_blind(X))) * ( @@ -160,8 +159,8 @@ impl Evaluated { let mut left = set.permutation_product_next_eval; for (eval, permutation_eval) in columns .iter() - .map(|&column| match column.column_type() { - Any::Advice(_) => { + .map(|&column| match column.column_type { + Any::Advice => { advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Fixed => { @@ -174,15 +173,15 @@ impl Evaluated { }) .zip(permutation_evals.iter()) { - left *= &(eval + &(*beta * permutation_eval) + &*gamma); + left *= eval + (*beta * permutation_eval) + *gamma; } let mut right = set.permutation_product_eval; - let mut current_delta = (*beta * &*x) - * &(::DELTA + let mut current_delta = (*beta * *x) + * (::DELTA .pow_vartime([(chunk_index * chunk_len) as u64])); - for eval in columns.iter().map(|&column| match column.column_type() { - Any::Advice(_) => { + for eval in columns.iter().map(|&column| match column.column_type { + Any::Advice => { advice_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } Any::Fixed => { @@ -192,11 +191,11 @@ impl Evaluated { instance_evals[vk.cs.get_any_query_index(column, Rotation::cur())] } }) { - right *= &(eval + ¤t_delta + &*gamma); + right *= eval + current_delta + *gamma; current_delta *= &C::Scalar::DELTA; } - (left - &right) * (C::Scalar::ONE - &(l_last + &l_blind)) + (left - right) * (C::Scalar::ONE - (l_last + l_blind)) }), ) } diff --git a/halo2_backend/src/plonk/prover.rs b/halo2_backend/src/plonk/prover.rs new file mode 100644 index 0000000000..ffdb0413cd --- /dev/null +++ b/halo2_backend/src/plonk/prover.rs @@ -0,0 +1,910 @@ +//! Generate a proof + +use group::prime::PrimeCurveAffine; +use group::Curve; +use rand_core::RngCore; +use std::collections::{BTreeSet, HashSet}; +use std::{collections::HashMap, iter}; + +use crate::arithmetic::{eval_polynomial, CurveAffine}; +use crate::plonk::{ + lookup, lookup::prover::lookup_commit_permuted, permutation, + permutation::prover::permutation_commit, shuffle, shuffle::prover::shuffle_commit_product, + vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, + ProvingKey, +}; +use crate::poly::{ + commitment::{self, Blind, CommitmentScheme, Params}, + Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, +}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_middleware::zal::{ + impls::{H2cEngine, PlonkEngine, PlonkEngineConfig}, + traits::MsmAccel, +}; + +/// Collection of instance data used during proving for a single circuit proof. +#[derive(Debug)] +struct InstanceSingle { + pub instance_values: Vec>, + pub instance_polys: Vec>, +} + +/// Collection of advice data used during proving for a single circuit proof. +#[derive(Debug, Clone)] +struct AdviceSingle { + pub advice_polys: Vec>, + pub advice_blinds: Vec>, +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This works for a single proof. This is a wrapper over Prover. 
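+/// A rough usage sketch (illustrative only; `params`, `pk`, `instance`, `rng`,
+/// `transcript` and the per-column `witness` are assumed to be built elsewhere,
+/// and circuits with several phases call `commit_phase` once per phase, in
+/// order):
+///
+/// ```ignore
+/// let mut prover = ProverSingle::new(&params, &pk, instance, rng, &mut transcript)?;
+/// // `witness` holds one `Option<Vec<Scalar>>` per advice column of the circuit.
+/// let _challenges = prover.commit_phase(0, witness)?;
+/// prover.create_proof()?;
+/// ```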
+#[derive(Debug)] +pub struct ProverSingle< + 'a, + 'params, + Scheme: CommitmentScheme, + P: commitment::Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, +>(Prover<'a, 'params, Scheme, P, E, R, T, M>); + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: commitment::Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, + > ProverSingle<'a, 'params, Scheme, P, E, R, T, M> +{ + /// Create a new prover object + pub fn new_with_engine( + engine: PlonkEngine, + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + instance: Vec>, + rng: R, + transcript: &'a mut T, + ) -> Result + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + Ok(Self(Prover::new_with_engine( + engine, + params, + pk, + &[instance], + rng, + transcript, + )?)) + } + + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + instance: Vec>, + rng: R, + transcript: &'a mut T, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let engine = PlonkEngineConfig::build_default(); + ProverSingle::new_with_engine(engine, params, pk, instance, rng, transcript) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.commit_phase(phase, vec![witness]) + } + + /// Finalizes the proof creation. + pub fn create_proof(self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + self.0.create_proof() + } +} + +/// The prover object used to create proofs interactively by passing the witnesses to commit at +/// each phase. This supports batch proving. 
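+/// A rough batch sketch (illustrative only; the setup values and the
+/// hypothetical `witnesses_for` helper, returning one witness vector per
+/// circuit for the given phase, are assumptions):
+///
+/// ```ignore
+/// let mut prover = Prover::new(&params, &pk, &circuit_instances, rng, &mut transcript)?;
+/// for phase in prover.phases().to_vec() {
+///     let _challenges = prover.commit_phase(phase, witnesses_for(phase))?;
+/// }
+/// prover.create_proof()?;
+/// ```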
+#[derive(Debug)] +pub struct Prover< + 'a, + 'params, + Scheme: CommitmentScheme, + P: commitment::Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, +> { + engine: PlonkEngine, + // Circuit and setup fields + params: &'params Scheme::ParamsProver, + // Plonk proving key + pk: &'a ProvingKey, + // Phases + phases: Vec, + // Polynomials (Lagrange and Coeff) for all circuits instances + instances: Vec>, + // Advice polynomials with its blindings + advices: Vec>, + // The phase challenges by challenge index + challenges: HashMap, + // The next phase to be committed + next_phase_index: usize, + // Transcript to be updated + transcript: &'a mut T, + // Randomness + rng: R, + _marker: std::marker::PhantomData<(P, E)>, +} + +impl< + 'a, + 'params, + Scheme: CommitmentScheme, + P: commitment::Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, + > Prover<'a, 'params, Scheme, P, E, R, T, M> +{ + /// Create a new prover object + pub fn new_with_engine( + engine: PlonkEngine, + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + circuits_instances: &[Vec>], + rng: R, + transcript: &'a mut T, + ) -> Result + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + for instance in circuits_instances.iter() { + if instance.len() != pk.vk.cs.num_instance_columns { + return Err(Error::InvalidInstances); + } + } + + // Hash verification key into transcript [TRANSCRIPT-1] + pk.vk.hash_into(transcript)?; + + let meta = &pk.vk.cs; + let phases = meta.phases().collect(); + + let domain = &pk.vk.domain; + + // commit_instance_fn is a helper function to return the polynomials (and its commitments) of + // instance columns while updating the transcript. + let mut commit_instance_fn = + |instance: &[Vec]| -> Result, Error> { + // Create a lagrange polynomial for each instance column + + let instance_values = instance + .iter() + .map(|values| { + let mut poly = domain.empty_lagrange(); + assert_eq!(poly.len(), params.n() as usize); + if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { + return Err(Error::InstanceTooLarge); + } + for (poly, value) in poly.iter_mut().zip(values.iter()) { + if !P::QUERY_INSTANCE { + // Add to the transcript the instance polynomials lagrange value. + transcript.common_scalar(*value)?; + } + *poly = *value; + } + Ok(poly) + }) + .collect::, _>>()?; + + if P::QUERY_INSTANCE { + // Add to the transcript the commitments of the instance lagrange polynomials + + let instance_commitments_projective: Vec<_> = instance_values + .iter() + .map(|poly| { + params.commit_lagrange(&engine.msm_backend, poly, Blind::default()) + }) + .collect(); + let mut instance_commitments = + vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &instance_commitments_projective, + &mut instance_commitments, + ); + let instance_commitments = instance_commitments; + drop(instance_commitments_projective); + + for commitment in &instance_commitments { + transcript.common_point(*commitment)?; + } + } + + // Convert from evaluation to coefficient form. 
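+            // `lagrange_to_coeff` is (essentially) an inverse FFT over the
+            // evaluation domain: the Lagrange values above are what is hashed or
+            // committed, while the coefficient form computed here is what is later
+            // evaluated at the challenge point x and handed to the opening queries.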
+ + let instance_polys: Vec<_> = instance_values + .iter() + .map(|poly| { + let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); + domain.lagrange_to_coeff(lagrange_vec) + }) + .collect(); + + Ok(InstanceSingle { + instance_values, + instance_polys, + }) + }; + + // Commit the polynomials of all circuits instances + // [TRANSCRIPT-2] + + let instances: Vec> = circuits_instances + .iter() + .map(|instance| commit_instance_fn(instance)) + .collect::, _>>()?; + + // Create an structure to hold the advice polynomials and its blinds, it will be filled later in the + // [`commit_phase`]. + + let advices = vec![ + AdviceSingle:: { + // Create vectors with empty polynomials to free space while they are not being used + advice_polys: vec![ + Polynomial::new_empty(0, Scheme::Scalar::ZERO); + meta.num_advice_columns + ], + advice_blinds: vec![Blind::default(); meta.num_advice_columns], + }; + circuits_instances.len() + ]; + + // Challenges will be also filled later in the [`commit_phase`]. + + let challenges = HashMap::::with_capacity(meta.num_challenges); + + Ok(Prover { + engine, + params, + pk, + phases, + instances, + rng, + transcript, + advices, + challenges, + next_phase_index: 0, + _marker: std::marker::PhantomData {}, + }) + } + + /// Commit the `witness` at `phase` and return the challenges after `phase`. + #[allow(clippy::type_complexity)] + pub fn commit_phase( + &mut self, + phase: u8, + witness: Vec>>>, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + // Check if the phase is valid. + + let current_phase = match self.phases.get(self.next_phase_index) { + Some(phase) => phase, + None => { + return Err(Error::Other("All phases already committed".to_string())); + } + }; + if phase != *current_phase { + return Err(Error::Other(format!( + "Committing invalid phase. Expected {current_phase}, got {phase}", + ))); + } + + let params = self.params; + let meta = &self.pk.vk.cs; + + let advices = &mut self.advices; + let challenges = &mut self.challenges; + + // Get the indices of the advice columns that are in the current phase. + + let column_indices = meta + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + if witness.len() != advices.len() { + return Err(Error::Other("witness.len() != advice.len()".to_string())); + } + + // Check all witness are consistent with the current phase. + + for witness_circuit in &witness { + // Check contains all columns. + if witness_circuit.len() != meta.num_advice_columns { + return Err(Error::Other(format!( + "unexpected length in witness_circuitk. 
Got {}, expected {}", + witness_circuit.len(), + meta.num_advice_columns, + ))); + } + // Check that all current_phase advice columns are Some, and their length is correct + for (column_index, advice_column) in witness_circuit.iter().enumerate() { + if column_indices.contains(&column_index) { + match advice_column { + None => { + return Err(Error::Other(format!( + "expected advice column with index {column_index} at phase {current_phase}", + ))) + } + Some(advice_column) => { + if advice_column.len() != params.n() as usize { + return Err(Error::Other(format!( + "expected advice column with index {} to have length {}", + column_index, + params.n(), + ))); + } + } + } + } else if advice_column.is_some() { + return Err(Error::Other(format!( + "expected no advice column with index {column_index} at phase {current_phase}", + ))); + }; + } + } + + // commit_phase_fn fills advice columns (no defined as unblinded) with binding factors, + // adding to the transcript its blinded affine commitments. + // Also sets advice_polys with the (blinding) updated advice columns and advice_blinds with + // the blinding factor used for each advice column. + + let mut commit_phase_fn = |advice: &mut AdviceSingle, + witness: Vec< + Option>, + >| + -> Result<(), Error> { + let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); + let mut advice_values: Vec<_> = witness.into_iter().flatten().collect(); + let unblinded_advice: HashSet = + HashSet::from_iter(meta.unblinded_advice_columns.clone()); + + // Add blinding factors to advice columns. + for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { + if !unblinded_advice.contains(column_index) { + for cell in &mut advice_values[unusable_rows_start..] { + *cell = Scheme::Scalar::random(&mut self.rng); + } + } else { + #[cfg(feature = "sanity-checks")] + for cell in &advice_values[unusable_rows_start..] { + assert_eq!(*cell, Scheme::Scalar::ZERO); + } + } + } + + // Compute commitments to advice column polynomials + let blinds: Vec<_> = column_indices + .iter() + .map(|i| { + if unblinded_advice.contains(i) { + Blind::default() + } else { + Blind(Scheme::Scalar::random(&mut self.rng)) + } + }) + .collect(); + let advice_commitments_projective: Vec<_> = advice_values + .iter() + .zip(blinds.iter()) + .map(|(poly, blind)| params.commit_lagrange(&self.engine.msm_backend, poly, *blind)) + .collect(); + let mut advice_commitments_affine = + vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; + ::CurveExt::batch_normalize( + &advice_commitments_projective, + &mut advice_commitments_affine, + ); + let advice_commitments_affine = advice_commitments_affine; + drop(advice_commitments_projective); + + // Update transcript. + // [TRANSCRIPT-3] + for commitment in &advice_commitments_affine { + self.transcript.write_point(*commitment)?; + } + + // Set advice_polys & advice_blinds + for ((column_index, advice_values), blind) in + column_indices.iter().zip(advice_values).zip(blinds) + { + advice.advice_polys[*column_index] = advice_values; + advice.advice_blinds[*column_index] = blind; + } + Ok(()) + }; + + // Update blindings for each advice column + // [TRANSCRIPT-3] + + for (witness, advice) in witness.into_iter().zip(advices.iter_mut()) { + commit_phase_fn( + advice, + witness + .into_iter() + .map(|v| v.map(Polynomial::new_lagrange_from_vec)) + .collect(), + )?; + } + + // Squeeze the current transcript and get an new fresh challenge from the current phase. 
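+        // Because the challenge is squeezed from the transcript *after* the advice
+        // commitments of this phase were written ([TRANSCRIPT-3] above), it is a
+        // Fiat-Shamir challenge that later-phase witnesses may safely depend on.
+        // Each challenge index is filled at most once, which the `assert!` below
+        // double-checks.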
+ // [TRANSCRIPT-4] + + for (index, phase) in meta.challenge_phase.iter().enumerate() { + if current_phase == phase { + let existing = + challenges.insert(index, *self.transcript.squeeze_challenge_scalar::<()>()); + assert!(existing.is_none()); + } + } + + // Move on + + self.next_phase_index += 1; + Ok(challenges.clone()) + } + + /// Finalizes the proof creation. + /// The following steps are performed: + /// - 1. Generate commited lookup polys + /// - 2. Generate commited permutation polys + /// - 3. Generate commited lookup polys + /// - 4. Generate commited shuffle polys + /// - 5. Commit to the vanishing argument's random polynomial + /// - 6. Generate the advice polys + /// - 7. Evaluate the h(X) polynomial + /// - 8. Construct the vanishing argument's h(X) commitments + /// - 9. Compute x + /// - 10. Compute and hash instance evals for the circuit instance + /// - 11. Compute and hash fixed evals + /// - 12. Evaluate permutation, lookups and shuffles at x + /// - 13. Generate all queries ([`ProverQuery`]) + /// - 14. Send the queries to the [`Prover`] + pub fn create_proof(mut self) -> Result<(), Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let params = self.params; + let cs = &self.pk.vk.cs; + let pk = self.pk; + let domain = &self.pk.vk.domain; + + let mut rng = self.rng; + + let instances = std::mem::take(&mut self.instances); + let advices = std::mem::take(&mut self.advices); + let mut challenges = self.challenges; + + assert_eq!(challenges.len(), cs.num_challenges); + let challenges = (0..cs.num_challenges) + .map(|index| challenges.remove(&index).unwrap()) + .collect::>(); + + // 1. Generate commited ( added to transcript ) lookup polys --------------------------------------- + + // Sample theta challenge for keeping lookup columns linearly independent + // [TRANSCRIPT-5] + + let theta: ChallengeTheta<_> = self.transcript.squeeze_challenge_scalar(); + + // 2. Get permuted lookup polys + // [TRANSCRIPT-6] + + let mut lookups_fn = + |instance: &InstanceSingle, + advice: &AdviceSingle| + -> Result>, Error> { + cs.lookups + .iter() + .map(|lookup| { + lookup_commit_permuted( + &self.engine, + lookup, + pk, + params, + domain, + theta, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }; + let permuted_lookups: Vec>> = instances + .iter() + .zip(advices.iter()) + .map(|(instance, advice)| -> Result, Error> { + // Construct and commit to permuted values for each lookup + lookups_fn(instance, advice) + }) + .collect::, _>>()?; + + // Sample beta challenge + // [TRANSCRIPT-7] + let beta: ChallengeBeta<_> = self.transcript.squeeze_challenge_scalar(); + + // Sample gamma challenge + // [TRANSCRIPT-8] + let gamma: ChallengeGamma<_> = self.transcript.squeeze_challenge_scalar(); + + // 2. Generate commited permutation polys ----------------------------------------- + // [TRANSCRIPT-9] + let permutations_commited: Vec> = instances + .iter() + .zip(advices.iter()) + .map(|(instance, advice)| { + permutation_commit( + &self.engine, + &cs.permutation, + params, + pk, + &pk.permutation, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + beta, + gamma, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>()?; + + // 3. 
Generate commited lookup polys ---------------------------------------------------------- + + // [TRANSCRIPT-10] + let lookups_commited: Vec>> = permuted_lookups + .into_iter() + .map(|lookups| -> Result, _> { + // Construct and commit to products for each lookup + lookups + .into_iter() + .map(|lookup| { + lookup.commit_product( + &self.engine, + pk, + params, + beta, + gamma, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + // 4. Generate commited shuffle polys ------------------------------------------------------- + + // [TRANSCRIPT-11] + let shuffles_commited: Vec>> = instances + .iter() + .zip(advices.iter()) + .map(|(instance, advice)| -> Result, _> { + // Compress expressions for each shuffle + cs.shuffles + .iter() + .map(|shuffle| { + shuffle_commit_product( + &self.engine, + shuffle, + pk, + params, + domain, + theta, + gamma, + &advice.advice_polys, + &pk.fixed_values, + &instance.instance_values, + &challenges, + &mut rng, + self.transcript, + ) + }) + .collect::, _>>() + }) + .collect::, _>>()?; + + // 5. Commit to the vanishing argument's random polynomial for blinding h(x_3) ------------------- + // [TRANSCRIPT-12] + let vanishing = vanishing::Argument::commit( + &self.engine.msm_backend, + params, + domain, + &mut rng, + self.transcript, + )?; + + // 6. Generate the advice polys ------------------------------------------------------------------ + + let advice: Vec> = advices + .into_iter() + .map( + |AdviceSingle { + advice_polys, + advice_blinds, + }| { + AdviceSingle { + advice_polys: advice_polys + .into_iter() + .map(|poly| domain.lagrange_to_coeff(poly)) + .collect::>(), + advice_blinds, + } + }, + ) + .collect(); + + // 7. Evaluate the h(X) polynomial ----------------------------------------------------------- + + // Obtain challenge for keeping all separate gates linearly independent + // [TRANSCRIPT-13] + let y: ChallengeY<_> = self.transcript.squeeze_challenge_scalar(); + + let h_poly = pk.ev.evaluate_h( + pk, + &advice + .iter() + .map(|a| a.advice_polys.as_slice()) + .collect::>(), + &instances + .iter() + .map(|i| i.instance_polys.as_slice()) + .collect::>(), + &challenges, + *y, + *beta, + *gamma, + *theta, + &lookups_commited, + &shuffles_commited, + &permutations_commited, + ); + + // 8. Construct the vanishing argument's h(X) commitments -------------------------------------- + // [TRANSCRIPT-14] + let vanishing = vanishing.construct( + &self.engine, + params, + domain, + h_poly, + &mut rng, + self.transcript, + )?; + + // 9. Compute x -------------------------------------------------------------------------------- + // [TRANSCRIPT-15] + let x: ChallengeX<_> = self.transcript.squeeze_challenge_scalar(); + + let x_pow_n = x.pow([params.n()]); + + // [TRANSCRIPT-16] + if P::QUERY_INSTANCE { + // Compute and hash instance evals for the circuit instance + for instance in instances.iter() { + // Evaluate polynomials at omega^i x + let instance_evals: Vec<_> = cs + .instance_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &instance.instance_polys[column.index], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each instance column evaluation + for eval in instance_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + } + + // 10. 
Compute and hash advice evals for the circuit instance ------------------------------------ + // [TRANSCRIPT-17] + for advice in advice.iter() { + // Evaluate polynomials at omega^i x + let advice_evals: Vec<_> = cs + .advice_queries + .iter() + .map(|&(column, at)| { + eval_polynomial( + &advice.advice_polys[column.index], + domain.rotate_omega(*x, at), + ) + }) + .collect(); + + // Hash each advice column evaluation + for eval in advice_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + } + + // 11. Compute and hash fixed evals ----------------------------------------------------------- + let fixed_evals: Vec<_> = cs + .fixed_queries + .iter() + .map(|&(column, at)| { + eval_polynomial(&pk.fixed_polys[column.index], domain.rotate_omega(*x, at)) + }) + .collect(); + + // Hash each fixed column evaluation + // [TRANSCRIPT-18] + for eval in fixed_evals.iter() { + self.transcript.write_scalar(*eval)?; + } + + // [TRANSCRIPT-19] + let vanishing = vanishing.evaluate(x, x_pow_n, domain, self.transcript)?; + + // 12. Evaluate permutation, lookups and shuffles at x ----------------------------------- + + // Evaluate common permutation data + // [TRANSCRIPT-20] + pk.permutation.evaluate(x, self.transcript)?; + + // Evaluate the permutations, if any, at omega^i x. + // [TRANSCRIPT-21] + let permutations_evaluated: Vec> = + permutations_commited + .into_iter() + .map(|permutation| -> Result<_, _> { permutation.evaluate(pk, x, self.transcript) }) + .collect::, _>>()?; + + // Evaluate the lookups, if any, at omega^i x. + // [TRANSCRIPT-22] + let lookups_evaluated: Vec>> = + lookups_commited + .into_iter() + .map(|lookups| -> Result, _> { + lookups + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // Evaluate the shuffles, if any, at omega^i x. + // [TRANSCRIPT-23] + let shuffles_evaluated: Vec>> = + shuffles_commited + .into_iter() + .map(|shuffles| -> Result, _> { + shuffles + .into_iter() + .map(|p| p.evaluate(pk, x, self.transcript)) + .collect::, _>>() + }) + .collect::, _>>()?; + + // 13. 
Generate all queries ([`ProverQuery`]) that needs to be sent to prover -------------------- + + let queries = instances + // group the instance, advice, permutation, lookups and shuffles + .iter() + .zip(advice.iter()) + .zip(permutations_evaluated.iter()) + .zip(lookups_evaluated.iter()) + .zip(shuffles_evaluated.iter()) + .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { + // Build a (an iterator) over a set of ProverQueries for each instance, advice, permutatiom, lookup and shuffle + iter::empty() + // Instances + .chain( + P::QUERY_INSTANCE + .then_some(cs.instance_queries.iter().map(move |&(column, at)| { + ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &instance.instance_polys[column.index], + blind: Blind::default(), + } + })) + .into_iter() + .flatten(), + ) + // Advices + .chain( + cs.advice_queries + .iter() + .map(move |&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &advice.advice_polys[column.index], + blind: advice.advice_blinds[column.index], + }), + ) + // Permutations + .chain(permutation.open(pk, x)) + // Lookups + .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) + // Shuffles + .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) + }) + // Queries to fixed columns + .chain(cs.fixed_queries.iter().map(|&(column, at)| ProverQuery { + point: domain.rotate_omega(*x, at), + poly: &pk.fixed_polys[column.index], + blind: Blind::default(), + })) + // Copy constraints + .chain(pk.permutation.open(x)) + // We query the h(X) polynomial at x + .chain(vanishing.open(x)); + + // 14. Send the queries to the [`Prover`] ------------------------------------------------ + + let prover = P::new(params); + prover + .create_proof_with_engine(&self.engine.msm_backend, rng, self.transcript, queries) + .map_err(|_| Error::ConstraintSystemFailure)?; + + Ok(()) + } + + /// Returns the phases of the circuit + pub fn phases(&self) -> &[u8] { + self.phases.as_slice() + } + + /// Create a new prover object + pub fn new( + params: &'params Scheme::ParamsProver, + pk: &'a ProvingKey, + circuits_instances: &[Vec>], + rng: R, + transcript: &'a mut T, + ) -> Result, Error> + where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let engine = PlonkEngineConfig::build_default(); + Prover::new_with_engine(engine, params, pk, circuits_instances, rng, transcript) + } +} diff --git a/halo2_backend/src/plonk/shuffle.rs b/halo2_backend/src/plonk/shuffle.rs new file mode 100644 index 0000000000..16e593775f --- /dev/null +++ b/halo2_backend/src/plonk/shuffle.rs @@ -0,0 +1,4 @@ +pub(crate) mod prover; +pub(crate) mod verifier; + +use crate::plonk::circuit::ShuffleArgumentBack as Argument; diff --git a/halo2_backend/src/plonk/shuffle/prover.rs b/halo2_backend/src/plonk/shuffle/prover.rs new file mode 100644 index 0000000000..cc01a65255 --- /dev/null +++ b/halo2_backend/src/plonk/shuffle/prover.rs @@ -0,0 +1,254 @@ +use super::super::ProvingKey; +use super::Argument; +use crate::plonk::evaluation::evaluate; +use crate::{ + arithmetic::{eval_polynomial, parallelize, CurveAffine}, + plonk::circuit::ExpressionBack, + plonk::{ChallengeGamma, ChallengeTheta, ChallengeX, Error}, + poly::{ + commitment::{Blind, Params}, + Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, + }, + transcript::{EncodedChallenge, TranscriptWrite}, +}; +use group::{ff::BatchInvert, ff::WithSmallOrderMulGroup, Curve}; +use halo2_middleware::poly::Rotation; +use halo2_middleware::zal::{impls::PlonkEngine, traits::MsmAccel}; +use 
rand_core::RngCore; +use std::{ + iter, + ops::{Mul, MulAssign}, +}; + +#[derive(Debug)] +struct Compressed { + input_expression: Polynomial, + shuffle_expression: Polynomial, +} + +#[derive(Debug)] +pub(in crate::plonk) struct Committed { + pub(in crate::plonk) product_poly: Polynomial, + product_blind: Blind, +} + +pub(in crate::plonk) struct Evaluated { + constructed: Committed, +} + +/// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions +/// [S_0, S_1, ..., S_{m-1}], this method +/// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} +/// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, +#[allow(clippy::too_many_arguments)] +fn shuffle_compress<'a, 'params: 'a, F: WithSmallOrderMulGroup<3>, C, P: Params>( + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], +) -> Compressed +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + // Closure to get values of expressions and compress them + let compress_expressions = |expressions: &[ExpressionBack]| { + let compressed_expression = expressions + .iter() + .map(|expression| { + pk.vk.domain.lagrange_from_vec(evaluate( + expression, + params.n() as usize, + 1, + fixed_values, + advice_values, + instance_values, + challenges, + )) + }) + .fold(domain.empty_lagrange(), |acc, expression| { + acc * *theta + &expression + }); + compressed_expression + }; + + // Get values of input expressions involved in the shuffle and compress them + let input_expression = compress_expressions(&arg.input_expressions); + + // Get values of table expressions involved in the shuffle and compress them + let shuffle_expression = compress_expressions(&arg.shuffle_expressions); + + Compressed { + input_expression, + shuffle_expression, + } +} + +/// Given a Shuffle with input expressions and table expressions this method +/// constructs the grand product polynomial over the shuffle. +/// The grand product polynomial is used to populate the [`Committed`] struct. +/// The [`Committed`] struct is added to the Shuffle and finally returned by the method. 
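+/// Concretely, with A' and S' the theta-compressed input and shuffle columns,
+/// the running product satisfies z_0 = 1 and
+/// z_{i+1} = z_i * (A'(omega^i) + gamma) / (S'(omega^i) + gamma),
+/// so if S' is a permutation of A' over the usable rows the product telescopes
+/// back to 1 (this is what the `sanity-checks` block below asserts).
+///
+/// A minimal sketch over plain field elements (illustrative only; `input`,
+/// `shuffle`, `gamma` and `usable_rows` stand for the compressed columns, the
+/// challenge and the number of non-blinded rows):
+///
+/// ```ignore
+/// let mut z = vec![F::ONE];
+/// for i in 0..usable_rows {
+///     let prev = *z.last().unwrap();
+///     z.push(prev * (input[i] + gamma) * (shuffle[i] + gamma).invert().unwrap());
+/// }
+/// assert_eq!(z[usable_rows], F::ONE);
+/// ```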
+#[allow(clippy::too_many_arguments)] +pub(in crate::plonk) fn shuffle_commit_product< + 'a, + F: WithSmallOrderMulGroup<3>, + C, + P: Params, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + M: MsmAccel, +>( + engine: &PlonkEngine, + arg: &Argument, + pk: &ProvingKey, + params: &P, + domain: &EvaluationDomain, + theta: ChallengeTheta, + gamma: ChallengeGamma, + advice_values: &'a [Polynomial], + fixed_values: &'a [Polynomial], + instance_values: &'a [Polynomial], + challenges: &'a [C::Scalar], + mut rng: R, + transcript: &mut T, +) -> Result, Error> +where + C: CurveAffine, + C::Curve: Mul + MulAssign, +{ + let compressed = shuffle_compress( + arg, + pk, + params, + domain, + theta, + advice_values, + fixed_values, + instance_values, + challenges, + ); + + let blinding_factors = pk.vk.cs.blinding_factors(); + + let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; + parallelize(&mut shuffle_product, |shuffle_product, start| { + for (shuffle_product, shuffle_value) in shuffle_product + .iter_mut() + .zip(compressed.shuffle_expression[start..].iter()) + { + *shuffle_product = *gamma + shuffle_value; + } + }); + + shuffle_product.iter_mut().batch_invert(); + + parallelize(&mut shuffle_product, |product, start| { + for (i, product) in product.iter_mut().enumerate() { + let i = i + start; + *product *= &(*gamma + compressed.input_expression[i]); + } + }); + + // Compute the evaluations of the shuffle product polynomial + // over our domain, starting with z[0] = 1 + let z = iter::once(C::Scalar::ONE) + .chain(shuffle_product) + .scan(C::Scalar::ONE, |state, cur| { + *state *= &cur; + Some(*state) + }) + // Take all rows including the "last" row which should + // be a boolean (and ideally 1, else soundness is broken) + .take(params.n() as usize - blinding_factors) + // Chain random blinding factors. 
+ .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) + .collect::>(); + assert_eq!(z.len(), params.n() as usize); + let z = pk.vk.domain.lagrange_from_vec(z); + + #[cfg(feature = "sanity-checks")] + { + // While in Lagrange basis, check that product is correctly constructed + let u = (params.n() as usize) - (blinding_factors + 1); + assert_eq!(z[0], C::Scalar::ONE); + for i in 0..u { + let mut left = z[i + 1]; + let input_value = &compressed.input_expression[i]; + let shuffle_value = &compressed.shuffle_expression[i]; + left *= &(*gamma + shuffle_value); + let mut right = z[i]; + right *= &(*gamma + input_value); + assert_eq!(left, right); + } + assert_eq!(z[u], C::Scalar::ONE); + } + + let product_blind = Blind(C::Scalar::random(rng)); + let product_commitment = params + .commit_lagrange(&engine.msm_backend, &z, product_blind) + .to_affine(); + let z = pk.vk.domain.lagrange_to_coeff(z); + + // Hash product commitment + transcript.write_point(product_commitment)?; + + Ok(Committed:: { + product_poly: z, + product_blind, + }) +} + +impl Committed { + pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( + self, + pk: &ProvingKey, + x: ChallengeX, + transcript: &mut T, + ) -> Result, Error> { + let domain = &pk.vk.domain; + let x_next = domain.rotate_omega(*x, Rotation::next()); + + let product_eval = eval_polynomial(&self.product_poly, *x); + let product_next_eval = eval_polynomial(&self.product_poly, x_next); + + // Hash each advice evaluation + for eval in iter::empty() + .chain(Some(product_eval)) + .chain(Some(product_next_eval)) + { + transcript.write_scalar(eval)?; + } + + Ok(Evaluated { constructed: self }) + } +} + +impl Evaluated { + pub(in crate::plonk) fn open<'a>( + &'a self, + pk: &'a ProvingKey, + x: ChallengeX, + ) -> impl Iterator> + Clone { + let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); + + iter::empty() + // Open shuffle product commitments at x + .chain(Some(ProverQuery { + point: *x, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + // Open shuffle product commitments at x_next + .chain(Some(ProverQuery { + point: x_next, + poly: &self.constructed.product_poly, + blind: self.constructed.product_blind, + })) + } +} diff --git a/halo2_proofs/src/plonk/shuffle/verifier.rs b/halo2_backend/src/plonk/shuffle/verifier.rs similarity index 62% rename from halo2_proofs/src/plonk/shuffle/verifier.rs rename to halo2_backend/src/plonk/shuffle/verifier.rs index 379cc5c8a1..1462e47343 100644 --- a/halo2_proofs/src/plonk/shuffle/verifier.rs +++ b/halo2_backend/src/plonk/shuffle/verifier.rs @@ -1,38 +1,38 @@ use std::iter; -use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX}; use super::Argument; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, - poly::{commitment::MSM, Rotation, VerifierQuery}, + plonk::circuit::{ExpressionBack, QueryBack, VarBack}, + plonk::{ChallengeGamma, ChallengeTheta, ChallengeX, Error, VerifyingKey}, + poly::{commitment::MSM, VerifierQuery}, transcript::{EncodedChallenge, TranscriptRead}, }; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; -pub struct Committed { +pub(crate) struct Committed { product_commitment: C, } -pub struct Evaluated { +pub(crate) struct Evaluated { committed: Committed, product_eval: C::Scalar, product_next_eval: C::Scalar, } -impl Argument { - pub(in crate::plonk) fn read_product_commitment< - C: CurveAffine, - E: EncodedChallenge, - T: 
TranscriptRead, - >( - &self, - transcript: &mut T, - ) -> Result, Error> { - let product_commitment = transcript.read_point()?; +pub(in crate::plonk) fn shuffle_read_product_commitment< + F: Field, + C: CurveAffine, + E: EncodedChallenge, + T: TranscriptRead, +>( + transcript: &mut T, +) -> Result, Error> { + let product_commitment = transcript.read_point()?; - Ok(Committed { product_commitment }) - } + Ok(Committed { product_commitment }) } impl Committed { @@ -70,43 +70,47 @@ impl Evaluated { let product_expression = || { // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma) - let compress_expressions = |expressions: &[Expression]| { + let compress_expressions = |expressions: &[ExpressionBack]| { expressions .iter() .map(|expression| { expression.evaluate( &|scalar| scalar, - &|_| panic!("virtual selectors are removed during optimization"), - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], + &|var| match var { + VarBack::Challenge(challenge) => challenges[challenge.index], + VarBack::Query(QueryBack { + index, column_type, .. + }) => match column_type { + Any::Fixed => fixed_evals[index], + Any::Advice => advice_evals[index], + Any::Instance => instance_evals[index], + }, + }, &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, + &|a, b| a + b, + &|a, b| a * b, ) }) - .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval) + .fold(C::Scalar::ZERO, |acc, eval| acc * *theta + eval) }; // z(\omega X) (s(X) + \gamma) let left = self.product_next_eval - * &(compress_expressions(&argument.shuffle_expressions) + &*gamma); + * (compress_expressions(&argument.shuffle_expressions) + *gamma); // z(X) (a(X) + \gamma) let right = - self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma); + self.product_eval * (compress_expressions(&argument.input_expressions) + *gamma); - (left - &right) * &active_rows + (left - right) * active_rows }; std::iter::empty() .chain( // l_0(X) * (1 - z'(X)) = 0 - Some(l_0 * &(C::Scalar::ONE - &self.product_eval)), + Some(l_0 * (C::Scalar::ONE - self.product_eval)), ) .chain( // l_last(X) * (z(X)^2 - z(X)) = 0 - Some(l_last * &(self.product_eval.square() - &self.product_eval)), + Some(l_last * (self.product_eval.square() - self.product_eval)), ) .chain( // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) diff --git a/halo2_proofs/src/plonk/vanishing.rs b/halo2_backend/src/plonk/vanishing.rs similarity index 100% rename from halo2_proofs/src/plonk/vanishing.rs rename to halo2_backend/src/plonk/vanishing.rs diff --git a/halo2_proofs/src/plonk/vanishing/prover.rs b/halo2_backend/src/plonk/vanishing/prover.rs similarity index 84% rename from halo2_proofs/src/plonk/vanishing/prover.rs rename to halo2_backend/src/plonk/vanishing/prover.rs index 7943086826..96ce797ee4 100644 --- a/halo2_proofs/src/plonk/vanishing/prover.rs +++ b/halo2_backend/src/plonk/vanishing/prover.rs @@ -1,7 +1,9 @@ use std::{collections::HashMap, iter}; -use ff::Field; +use crate::plonk::Error; use group::Curve; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::{impls::PlonkEngine, traits::MsmAccel}; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; @@ -9,7 +11,7 @@ use super::Argument; use crate::{ arithmetic::{eval_polynomial, parallelize, CurveAffine}, multicore::current_num_threads, - plonk::{ChallengeX, Error}, + plonk::ChallengeX, poly::{ 
commitment::{Blind, ParamsProver}, Coeff, EvaluationDomain, ExtendedLagrangeCoeff, Polynomial, ProverQuery, @@ -36,12 +38,12 @@ pub(in crate::plonk) struct Evaluated { impl Argument { pub(in crate::plonk) fn commit< - 'params, - P: ParamsProver<'params, C>, + P: ParamsProver, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, >( + engine: &impl MsmAccel, params: &P, domain: &EvaluationDomain, mut rng: R, @@ -83,7 +85,9 @@ impl Argument { let random_blind = Blind(C::Scalar::random(rng)); // Commit - let c = params.commit(&random_poly, random_blind).to_affine(); + let c = params + .commit(engine, &random_poly, random_blind) + .to_affine(); transcript.write_point(c)?; Ok(Committed { @@ -95,13 +99,14 @@ impl Argument { impl Committed { pub(in crate::plonk) fn construct< - 'params, - P: ParamsProver<'params, C>, + P: ParamsProver, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, + M: MsmAccel, >( self, + engine: &PlonkEngine, params: &P, domain: &EvaluationDomain, h_poly: Polynomial, @@ -126,18 +131,20 @@ impl Committed { .collect(); // Compute commitments to each h(X) piece - let h_commitments_projective: Vec<_> = h_pieces - .iter() - .zip(h_blinds.iter()) - .map(|(h_piece, blind)| params.commit(h_piece, *blind)) - .collect(); - let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; - C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); - let h_commitments = h_commitments; + let h_commitments = { + let h_commitments_projective: Vec<_> = h_pieces + .iter() + .zip(h_blinds.iter()) + .map(|(h_piece, blind)| params.commit(&engine.msm_backend, h_piece, *blind)) + .collect(); + let mut h_commitments = vec![C::identity(); h_commitments_projective.len()]; + C::Curve::batch_normalize(&h_commitments_projective, &mut h_commitments); + h_commitments + }; // Hash each h(X) piece - for c in h_commitments.iter() { - transcript.write_point(*c)?; + for c in h_commitments { + transcript.write_point(c)?; } Ok(Constructed { diff --git a/halo2_proofs/src/plonk/vanishing/verifier.rs b/halo2_backend/src/plonk/vanishing/verifier.rs similarity index 87% rename from halo2_proofs/src/plonk/vanishing/verifier.rs rename to halo2_backend/src/plonk/vanishing/verifier.rs index 0881dfb2c0..2b54035450 100644 --- a/halo2_proofs/src/plonk/vanishing/verifier.rs +++ b/halo2_backend/src/plonk/vanishing/verifier.rs @@ -1,36 +1,35 @@ use std::iter; -use ff::Field; +use halo2_middleware::ff::Field; use crate::{ arithmetic::CurveAffine, - plonk::{Error, VerifyingKey}, + plonk::{ChallengeX, ChallengeY, Error, VerifyingKey}, poly::{ - commitment::{Params, MSM}, + commitment::{ParamsVerifier, MSM}, VerifierQuery, }, transcript::{read_n_points, EncodedChallenge, TranscriptRead}, }; -use super::super::{ChallengeX, ChallengeY}; use super::Argument; -pub struct Committed { +pub(in crate::plonk) struct Committed { random_poly_commitment: C, } -pub struct Constructed { +pub(in crate::plonk) struct Constructed { h_commitments: Vec, random_poly_commitment: C, } -pub struct PartiallyEvaluated { +pub(in crate::plonk) struct PartiallyEvaluated { h_commitments: Vec, random_poly_commitment: C, random_eval: C::Scalar, } -pub struct Evaluated> { +pub(in crate::plonk) struct Evaluated> { h_commitment: M, random_poly_commitment: C, expected_h_eval: C::Scalar, @@ -87,14 +86,14 @@ impl Constructed { } impl PartiallyEvaluated { - pub(in crate::plonk) fn verify<'params, P: Params<'params, C>>( + pub(in crate::plonk) fn verify<'params, P: ParamsVerifier<'params, C>>( self, params: &'params P, expressions: impl 
Iterator, y: ChallengeY, xn: C::Scalar, ) -> Evaluated { - let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * &*y + &v); + let expected_h_eval = expressions.fold(C::Scalar::ZERO, |h_eval, v| h_eval * *y + v); let expected_h_eval = expected_h_eval * ((xn - C::Scalar::ONE).invert().unwrap()); let h_commitment = diff --git a/halo2_proofs/src/plonk/verifier.rs b/halo2_backend/src/plonk/verifier.rs similarity index 69% rename from halo2_proofs/src/plonk/verifier.rs rename to halo2_backend/src/plonk/verifier.rs index 76675bcdfa..c300e5d2e2 100644 --- a/halo2_proofs/src/plonk/verifier.rs +++ b/halo2_backend/src/plonk/verifier.rs @@ -1,17 +1,25 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +//! Verify a plonk proof + +use group::prime::PrimeCurveAffine; use group::Curve; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_middleware::zal::impls::H2cEngine; +use halo2curves::CurveAffine; use std::iter; -use super::{ - vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, ChallengeY, Error, - VerifyingKey, -}; +use super::{vanishing, VerifyingKey}; use crate::arithmetic::compute_inner_product; -use crate::poly::commitment::{CommitmentScheme, Verifier}; -use crate::poly::VerificationStrategy; +use crate::plonk::{ + circuit::VarBack, lookup::verifier::lookup_read_permuted_commitments, + permutation::verifier::permutation_read_product_commitments, + shuffle::verifier::shuffle_read_product_commitment, ChallengeBeta, ChallengeGamma, + ChallengeTheta, ChallengeX, ChallengeY, Error, +}; +use crate::poly::commitment::ParamsVerifier; use crate::poly::{ - commitment::{Blind, Params}, - VerifierQuery, + commitment::{Blind, CommitmentScheme, Params, Verifier}, + VerificationStrategy, VerifierQuery, }; use crate::transcript::{read_n_scalars, EncodedChallenge, TranscriptRead}; @@ -20,6 +28,26 @@ mod batch; #[cfg(feature = "batch")] pub use batch::BatchVerifier; +/// Returns a boolean indicating whether or not the proof is valid. Verifies a single proof (not +/// batched). +pub fn verify_proof_single<'params, Scheme, V, E, T, Strategy>( + params: &'params Scheme::ParamsVerifier, + vk: &VerifyingKey, + strategy: Strategy, + instance: Vec>, + transcript: &mut T, +) -> Result +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + Scheme: CommitmentScheme, + V: Verifier<'params, Scheme>, + E: EncodedChallenge, + T: TranscriptRead, + Strategy: VerificationStrategy<'params, Scheme, V>, +{ + verify_proof(params, vk, strategy, &[instance], transcript) +} + /// Returns a boolean indicating whether or not the proof is valid pub fn verify_proof< 'params, @@ -32,12 +60,15 @@ pub fn verify_proof< params: &'params Scheme::ParamsVerifier, vk: &VerifyingKey, strategy: Strategy, - instances: &[&[&[Scheme::Scalar]]], + instances: &[Vec>], transcript: &mut T, ) -> Result where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { + // ZAL: Verification is (supposedly) cheap, hence we don't use an accelerator engine + let default_engine = H2cEngine::new(); + // Check that instances matches the expected number of instance columns for instances in instances.iter() { if instances.len() != vk.cs.num_instance_columns { @@ -45,8 +76,19 @@ where } } + // Check that the Scheme parameters support commitment to instance + // if it is required by the verifier. + assert!( + !V::QUERY_INSTANCE + || >::COMMIT_INSTANCE + ); + + // 1. Get the commitments of the instance polynomials. 
---------------------------------------- + let instance_commitments = if V::QUERY_INSTANCE { - instances + let mut instance_commitments = Vec::with_capacity(instances.len()); + + let instances_projective = instances .iter() .map(|instance| { instance @@ -59,20 +101,36 @@ where poly.resize(params.n() as usize, Scheme::Scalar::ZERO); let poly = vk.domain.lagrange_from_vec(poly); - Ok(params.commit_lagrange(&poly, Blind::default()).to_affine()) + Ok(params.commit_lagrange(&default_engine, &poly, Blind::default())) }) .collect::, _>>() }) - .collect::, _>>()? + .collect::, _>>()?; + + for instance_projective in instances_projective { + let mut affines = + vec![::Curve::identity(); instance_projective.len()]; + <::Curve as CurveAffine>::CurveExt::batch_normalize( + &instance_projective, + &mut affines, + ); + instance_commitments.push(affines); + } + instance_commitments } else { vec![vec![]; instances.len()] }; let num_proofs = instance_commitments.len(); - // Hash verification key into transcript + // 2. Add hash of verification key and instances into transcript. ----------------------------- + // [TRANSCRIPT-1] + vk.hash_into(transcript)?; + // 3. Add instance commitments into the transcript. -------------------------------------------- + // [TRANSCRIPT-2] + if V::QUERY_INSTANCE { for instance_commitments in instance_commitments.iter() { // Hash the instance (external) commitments into the transcript @@ -90,13 +148,15 @@ where } } - // Hash the prover's advice commitments into the transcript and squeeze challenges + // 3. Hash the prover's advice commitments into the transcript and squeeze challenges --------- + let (advice_commitments, challenges) = { let mut advice_commitments = vec![vec![Scheme::Curve::default(); vk.cs.num_advice_columns]; num_proofs]; let mut challenges = vec![Scheme::Scalar::ZERO; vk.cs.num_challenges]; for current_phase in vk.cs.phases() { + // [TRANSCRIPT-3] for advice_commitments in advice_commitments.iter_mut() { for (phase, commitment) in vk .cs @@ -109,6 +169,8 @@ where } } } + + // [TRANSCRIPT-4] for (phase, challenge) in vk.cs.challenge_phase.iter().zip(challenges.iter_mut()) { if current_phase == *phase { *challenge = *transcript.squeeze_challenge_scalar::<()>(); @@ -119,33 +181,46 @@ where (advice_commitments, challenges) }; - // Sample theta challenge for keeping lookup columns linearly independent + // 4. Sample theta challenge for keeping lookup columns linearly independent ------------------ + // [TRANSCRIPT-5] + let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); + // 5. Read lookup permuted commitments + // [TRANSCRIPT-6] + let lookups_permuted = (0..num_proofs) .map(|_| -> Result, _> { // Hash each lookup permuted commitment vk.cs .lookups .iter() - .map(|argument| argument.read_permuted_commitments(transcript)) + .map(|_argument| lookup_read_permuted_commitments(transcript)) .collect::, _>>() }) .collect::, _>>()?; + // 6. Sample beta and gamma challenges -------------------------------------------------------- + // Sample beta challenge + // [TRANSCRIPT-7] let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); // Sample gamma challenge + // [TRANSCRIPT-8] let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); + // 7. 
Read commitments for permutation, lookups, and shuffles --------------------------------- + + // [TRANSCRIPT-9] let permutations_committed = (0..num_proofs) .map(|_| { // Hash each permutation product commitment - vk.cs.permutation.read_product_commitments(vk, transcript) + permutation_read_product_commitments(&vk.cs.permutation, vk, transcript) }) .collect::, _>>()?; + // [TRANSCRIPT-10] let lookups_committed = lookups_permuted .into_iter() .map(|lookups| { @@ -157,28 +232,38 @@ where }) .collect::, _>>()?; + // [TRANSCRIPT-11] let shuffles_committed = (0..num_proofs) .map(|_| -> Result, _> { // Hash each shuffle product commitment vk.cs .shuffles .iter() - .map(|argument| argument.read_product_commitment(transcript)) + .map(|_argument| shuffle_read_product_commitment(transcript)) .collect::, _>>() }) .collect::, _>>()?; + // 8. Read vanishing argument (before y) ------------------------------------------------------ + // [TRANSCRIPT-12] let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?; - // Sample y challenge, which keeps the gates linearly independent. + // 9. Sample y challenge, which keeps the gates linearly independent. ------------------------- + // [TRANSCRIPT-13] let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); + // 10. Read vanishing argument (after y) ------------------------------------------------------ + // [TRANSCRIPT-14] let vanishing = vanishing.read_commitments_after_y(vk, transcript)?; - // Sample x challenge, which is used to ensure the circuit is - // satisfied with high probability. + // 11. Sample x challenge, which is used to ensure the circuit is + // satisfied with high probability. ----------------------------------------------------------- + // [TRANSCRIPT-15] let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); + + // 12. 
Get the instance evaluations let instance_evals = if V::QUERY_INSTANCE { + // [TRANSCRIPT-16] (0..num_proofs) .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.instance_queries.len()) @@ -216,30 +301,39 @@ where .instance_queries .iter() .map(|(column, rotation)| { - let instances = instances[column.index()]; + let instances = &instances[column.index]; let offset = (max_rotation - rotation.0) as usize; - compute_inner_product(instances, &l_i_s[offset..offset + instances.len()]) + compute_inner_product( + instances.as_slice(), + &l_i_s[offset..offset + instances.len()], + ) }) .collect::>() }) .collect::>() }; + // [TRANSCRIPT-17] let advice_evals = (0..num_proofs) .map(|_| -> Result, _> { read_n_scalars(transcript, vk.cs.advice_queries.len()) }) .collect::, _>>()?; + // [TRANSCRIPT-18] let fixed_evals = read_n_scalars(transcript, vk.cs.fixed_queries.len())?; + // [TRANSCRIPT-19] let vanishing = vanishing.evaluate_after_x(transcript)?; + // [TRANSCRIPT-20] let permutations_common = vk.permutation.evaluate(transcript)?; + // [TRANSCRIPT-21] let permutations_evaluated = permutations_committed .into_iter() .map(|permutation| permutation.evaluate(transcript)) .collect::, _>>()?; + // [TRANSCRIPT-22] let lookups_evaluated = lookups_committed .into_iter() .map(|lookups| -> Result, _> { @@ -250,6 +344,7 @@ where }) .collect::, _>>()?; + // [TRANSCRIPT-23] let shuffles_evaluated = shuffles_committed .into_iter() .map(|shuffles| -> Result, _> { @@ -290,23 +385,21 @@ where let fixed_evals = &fixed_evals; std::iter::empty() // Evaluate the circuit using the custom gates provided - .chain(vk.cs.gates.iter().flat_map(move |gate| { - gate.polynomials().iter().map(move |poly| { - poly.evaluate( - &|scalar| scalar, - &|_| { - panic!("virtual selectors are removed during optimization") + .chain(vk.cs.gates.iter().map(move |gate| { + gate.poly.evaluate( + &|scalar| scalar, + &|var| match var { + VarBack::Query(query) => match query.column_type { + Any::Fixed => fixed_evals[query.index], + Any::Advice => advice_evals[query.index], + Any::Instance => instance_evals[query.index], }, - &|query| fixed_evals[query.index.unwrap()], - &|query| advice_evals[query.index.unwrap()], - &|query| instance_evals[query.index.unwrap()], - &|challenge| challenges[challenge.index()], - &|a| -a, - &|a, b| a + &b, - &|a, b| a * &b, - &|a, scalar| a * &scalar, - ) - }) + VarBack::Challenge(challenge) => challenges[challenge.index], + }, + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + ) })) .chain(permutation.expressions( vk, @@ -361,6 +454,7 @@ where vanishing.verify(params, expressions, y, xn) }; + #[rustfmt::skip] let queries = instance_commitments .iter() .zip(instance_evals.iter()) @@ -369,27 +463,14 @@ where .zip(permutations_evaluated.iter()) .zip(lookups_evaluated.iter()) .zip(shuffles_evaluated.iter()) - .flat_map( - |( - ( - ( - ( - ((instance_commitments, instance_evals), advice_commitments), - advice_evals, - ), - permutation, - ), - lookups, - ), - shuffles, - )| { + .flat_map(|((((((instance_commitments, instance_evals), advice_commitments),advice_evals),permutation),lookups), shuffles)| { iter::empty() .chain( V::QUERY_INSTANCE .then_some(vk.cs.instance_queries.iter().enumerate().map( move |(query_index, &(column, at))| { VerifierQuery::new_commitment( - &instance_commitments[column.index()], + &instance_commitments[column.index], vk.domain.rotate_omega(*x, at), instance_evals[query_index], ) @@ -401,7 +482,7 @@ where .chain(vk.cs.advice_queries.iter().enumerate().map( move |(query_index, &(column, at))| { 
VerifierQuery::new_commitment( - &advice_commitments[column.index()], + &advice_commitments[column.index], vk.domain.rotate_omega(*x, at), advice_evals[query_index], ) @@ -419,7 +500,7 @@ where .enumerate() .map(|(query_index, &(column, at))| { VerifierQuery::new_commitment( - &vk.fixed_commitments[column.index()], + &vk.fixed_commitments[column.index], vk.domain.rotate_omega(*x, at), fixed_evals[query_index], ) @@ -431,7 +512,7 @@ where // We are now convinced the circuit is satisfied so long as the // polynomial commitments open to the correct values. - let verifier = V::new(params); + let verifier = V::new(); strategy.process(|msm| { verifier .verify_proof(transcript, queries, msm) diff --git a/halo2_proofs/src/plonk/verifier/batch.rs b/halo2_backend/src/plonk/verifier/batch.rs similarity index 84% rename from halo2_proofs/src/plonk/verifier/batch.rs rename to halo2_backend/src/plonk/verifier/batch.rs index ba3e2419e6..54b06450d9 100644 --- a/halo2_proofs/src/plonk/verifier/batch.rs +++ b/halo2_backend/src/plonk/verifier/batch.rs @@ -1,5 +1,7 @@ -use ff::FromUniformBytes; +use crate::{plonk::Error, poly::commitment::ParamsVerifier}; use group::ff::Field; +use halo2_middleware::ff::FromUniformBytes; +use halo2_middleware::zal::impls::H2cEngine; use halo2curves::CurveAffine; use rand_core::OsRng; @@ -8,9 +10,9 @@ use crate::{ multicore::{ IndexedParallelIterator, IntoParallelIterator, ParallelIterator, TryFoldAndReduce, }, - plonk::{Error, VerifyingKey}, + plonk::VerifyingKey, poly::{ - commitment::{Params, MSM}, + commitment::MSM, ipa::{ commitment::{IPACommitmentScheme, ParamsVerifierIPA}, msm::MSMIPA, @@ -29,8 +31,7 @@ struct BatchStrategy<'params, C: CurveAffine> { msm: MSMIPA<'params, C>, } -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> +impl<'params, C: CurveAffine> VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA> for BatchStrategy<'params, C> { type Output = MSMIPA<'params, C>; @@ -98,7 +99,6 @@ where // `is_zero() == false` then this argument won't be able to interfere with it // to make it true, with high probability. acc.scale(C::Scalar::random(OsRng)); - acc.add_msm(&msm); acc } @@ -108,27 +108,21 @@ where .into_par_iter() .enumerate() .map(|(i, item)| { - let instances: Vec> = item - .instances - .iter() - .map(|i| i.iter().map(|c| &c[..]).collect()) - .collect(); - let instances: Vec<_> = instances.iter().map(|i| &i[..]).collect(); - let strategy = BatchStrategy::new(params); let mut transcript = Blake2bRead::init(&item.proof[..]); - verify_proof(params, vk, strategy, &instances, &mut transcript).map_err(|e| { + verify_proof(params, vk, strategy, &item.instances, &mut transcript).map_err(|e| { tracing::debug!("Batch item {} failed verification: {}", i, e); e }) }) .try_fold_and_reduce( - || params.empty_msm(), + || ParamsVerifier::<'_, C>::empty_msm(params), |acc, res| res.map(|proof_msm| accumulate_msm(acc, proof_msm)), ); match final_msm { - Ok(msm) => msm.check(), + // ZAL: Verification is (supposedly) cheap, hence we don't use an accelerator engine + Ok(msm) => msm.check(&H2cEngine::new()), Err(_) => false, } } diff --git a/halo2_proofs/src/poly.rs b/halo2_backend/src/poly.rs similarity index 74% rename from halo2_proofs/src/poly.rs rename to halo2_backend/src/poly.rs index 9cb6b149bc..093034ed83 100644 --- a/halo2_proofs/src/poly.rs +++ b/halo2_backend/src/poly.rs @@ -3,11 +3,10 @@ //! the committed polynomials at arbitrary points. 
use crate::arithmetic::parallelize; -use crate::helpers::SerdePrimeField; -use crate::plonk::Assigned; -use crate::SerdeFormat; +use crate::helpers::{SerdeFormat, SerdePrimeField}; -use group::ff::{BatchInvert, Field}; +use group::ff::Field; +use halo2_middleware::poly::Rotation; use std::fmt::Debug; use std::io; use std::marker::PhantomData; @@ -29,11 +28,10 @@ pub mod kzg; mod multiopen_test; pub use domain::*; -pub use query::{ProverQuery, VerifierQuery}; +pub(crate) use query::{ProverQuery, VerifierQuery}; pub use strategy::{Guard, VerificationStrategy}; /// This is an error that could occur during proving or circuit synthesis. -// TODO: these errors need to be cleaned up #[derive(Debug)] pub enum Error { /// OpeningProof is not well-formed @@ -44,6 +42,7 @@ pub enum Error { /// The basis over which a polynomial is described. pub trait Basis: Copy + Debug + Send + Sync {} +pub trait LagrangeBasis: Copy + Debug + Send + Sync {} /// The polynomial is defined as coefficients #[derive(Clone, Copy, Debug)] @@ -54,19 +53,42 @@ impl Basis for Coeff {} #[derive(Clone, Copy, Debug)] pub struct LagrangeCoeff; impl Basis for LagrangeCoeff {} +impl LagrangeBasis for LagrangeCoeff {} /// The polynomial is defined as coefficients of Lagrange basis polynomials in /// an extended size domain which supports multiplication #[derive(Clone, Copy, Debug)] pub struct ExtendedLagrangeCoeff; impl Basis for ExtendedLagrangeCoeff {} +impl LagrangeBasis for ExtendedLagrangeCoeff {} /// Represents a univariate polynomial defined over a field and a particular /// basis. #[derive(Clone, Debug)] pub struct Polynomial { - values: Vec, - _marker: PhantomData, + pub values: Vec, + pub _marker: PhantomData, +} + +impl Polynomial { + pub fn new_empty(size: usize, zero: F) -> Self { + Polynomial { + values: vec![zero; size], + _marker: PhantomData, + } + } +} + +impl Polynomial { + /// Obtains a polynomial in Lagrange form when given a vector of Lagrange + /// coefficients of size `n`; panics if the provided vector is the wrong + /// length. + pub fn new_lagrange_from_vec(values: Vec) -> Polynomial { + Polynomial { + values, + _marker: PhantomData, + } + } } impl Index for Polynomial { @@ -175,53 +197,6 @@ impl Polynomial { } } -pub(crate) fn batch_invert_assigned( - assigned: Vec, LagrangeCoeff>>, -) -> Vec> { - let mut assigned_denominators: Vec<_> = assigned - .iter() - .map(|f| { - f.iter() - .map(|value| value.denominator()) - .collect::>() - }) - .collect(); - - assigned_denominators - .iter_mut() - .flat_map(|f| { - f.iter_mut() - // If the denominator is trivial, we can skip it, reducing the - // size of the batch inversion. - .filter_map(|d| d.as_mut()) - }) - .batch_invert(); - - assigned - .iter() - .zip(assigned_denominators) - .map(|(poly, inv_denoms)| poly.invert(inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE)))) - .collect() -} - -impl Polynomial, LagrangeCoeff> { - pub(crate) fn invert( - &self, - inv_denoms: impl Iterator + ExactSizeIterator, - ) -> Polynomial { - assert_eq!(inv_denoms.len(), self.values.len()); - Polynomial { - values: self - .values - .iter() - .zip(inv_denoms) - .map(|(a, inv_den)| a.numerator() * inv_den) - .collect(), - _marker: self._marker, - } - } -} - impl<'a, F: Field, B: Basis> Add<&'a Polynomial> for Polynomial { type Output = Polynomial; @@ -299,26 +274,3 @@ impl<'a, F: Field, B: Basis> Sub for &'a Polynomial { res } } - -/// Describes the relative rotation of a vector. 
Negative numbers represent -/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) -/// rotations. Zero represents no rotation. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct Rotation(pub i32); - -impl Rotation { - /// The current location in the evaluation domain - pub fn cur() -> Rotation { - Rotation(0) - } - - /// The previous location in the evaluation domain - pub fn prev() -> Rotation { - Rotation(-1) - } - - /// The next location in the evaluation domain - pub fn next() -> Rotation { - Rotation(1) - } -} diff --git a/halo2_proofs/src/poly/commitment.rs b/halo2_backend/src/poly/commitment.rs similarity index 74% rename from halo2_proofs/src/poly/commitment.rs rename to halo2_backend/src/poly/commitment.rs index ebc26fe9c3..fb11099150 100644 --- a/halo2_proofs/src/poly/commitment.rs +++ b/halo2_backend/src/poly/commitment.rs @@ -5,7 +5,8 @@ use super::{ }; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead, TranscriptWrite}; -use ff::Field; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::{impls::PlonkEngineConfig, traits::MsmAccel}; use halo2curves::CurveAffine; use rand_core::RngCore; use std::{ @@ -23,11 +24,7 @@ pub trait CommitmentScheme { type Curve: CurveAffine; /// Constant prover parameters - type ParamsProver: for<'params> ParamsProver< - 'params, - Self::Curve, - ParamsVerifier = Self::ParamsVerifier, - >; + type ParamsProver: ParamsProver; /// Constant verifier parameters type ParamsVerifier: for<'params> ParamsVerifier<'params, Self::Curve>; @@ -39,12 +36,11 @@ pub trait CommitmentScheme { fn read_params(reader: &mut R) -> io::Result; } -/// Parameters for circuit sysnthesis and prover parameters. -pub trait Params<'params, C: CurveAffine>: Sized + Clone { - /// Multi scalar multiplication engine - type MSM: MSM + 'params; - - /// Logaritmic size of the circuit +/// Common for Verifier and Prover. +/// +/// Parameters for circuit synthesis and prover parameters. +pub trait Params: Sized + Clone + Debug { + /// Logarithmic size of the circuit fn k(&self) -> u32; /// Size of the circuit @@ -53,15 +49,12 @@ pub trait Params<'params, C: CurveAffine>: Sized + Clone { /// Downsize `Params` with smaller `k`. fn downsize(&mut self, k: u32); - /// Generates an empty multiscalar multiplication struct using the - /// appropriate params. - fn empty_msm(&'params self) -> Self::MSM; - /// This commits to a polynomial using its evaluations over the $2^k$ size /// evaluation domain. The commitment will be blinded by the blinding factor /// `r`. fn commit_lagrange( &self, + engine: &impl MsmAccel, poly: &Polynomial, r: Blind, ) -> C::CurveExt; @@ -73,31 +66,34 @@ pub trait Params<'params, C: CurveAffine>: Sized + Clone { fn read(reader: &mut R) -> io::Result; } -/// Parameters for circuit sysnthesis and prover parameters. -pub trait ParamsProver<'params, C: CurveAffine>: Params<'params, C> { - /// Constant verifier parameters. - type ParamsVerifier: ParamsVerifier<'params, C>; - +/// Parameters for circuit synthesis and prover parameters. +pub trait ParamsProver: Params { /// Returns new instance of parameters fn new(k: u32) -> Self; /// This computes a commitment to a polynomial described by the provided /// slice of coefficients. The commitment may be blinded by the blinding /// factor `r`. - fn commit(&self, poly: &Polynomial, r: Blind) - -> C::CurveExt; - - /// Getter for g generators - fn get_g(&self) -> &[C]; - - /// Returns verification parameters. 
- fn verifier_params(&'params self) -> &'params Self::ParamsVerifier; + fn commit( + &self, + engine: &impl MsmAccel, + poly: &Polynomial, + r: Blind, + ) -> C::CurveExt; } /// Verifier specific functionality with circuit constraints -pub trait ParamsVerifier<'params, C: CurveAffine>: Params<'params, C> {} +pub trait ParamsVerifier<'params, C: CurveAffine>: Params { + /// Multiscalar multiplication engine + type MSM: MSM + 'params; + /// Can commit to instance or not. + const COMMIT_INSTANCE: bool; + /// Generates an empty multiscalar multiplication struct using the + /// appropriate params. + fn empty_msm(&'params self) -> Self::MSM; +} -/// Multi scalar multiplication engine +/// Multiscalar multiplication engine pub trait MSM: Clone + Debug + Send + Sync { /// Add arbitrary term (the scalar and the point) fn append_term(&mut self, scalar: C::Scalar, point: C::CurveExt); @@ -111,10 +107,10 @@ pub trait MSM: Clone + Debug + Send + Sync { fn scale(&mut self, factor: C::Scalar); /// Perform multiexp and check that it results in zero - fn check(&self) -> bool; + fn check(&self, engine: &impl MsmAccel) -> bool; /// Perform multiexp and return the result - fn eval(&self) -> C::CurveExt; + fn eval(&self, engine: &impl MsmAccel) -> C::CurveExt; /// Return base points fn bases(&self) -> Vec; @@ -132,7 +128,7 @@ pub trait Prover<'params, Scheme: CommitmentScheme> { fn new(params: &'params Scheme::ParamsProver) -> Self; /// Create a multi-opening proof - fn create_proof< + fn create_proof_with_engine< 'com, E: EncodedChallenge, T: TranscriptWrite, @@ -140,6 +136,7 @@ pub trait Prover<'params, Scheme: CommitmentScheme> { I, >( &self, + engine: &impl MsmAccel, rng: R, transcript: &mut T, queries: I, @@ -147,24 +144,45 @@ pub trait Prover<'params, Scheme: CommitmentScheme> { where I: IntoIterator> + Clone, R: RngCore; + + /// Create a multi-opening proof + fn create_proof< + 'com, + E: EncodedChallenge, + T: TranscriptWrite, + R, + I, + >( + &self, + rng: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore, + { + let engine = PlonkEngineConfig::build_default::(); + self.create_proof_with_engine(&engine.msm_backend, rng, transcript, queries) + } } /// Common multi-open verifier interface for various commitment schemes pub trait Verifier<'params, Scheme: CommitmentScheme> { /// Unfinalized verification result. This is returned in verification - /// to allow developer to compress or combined verification results + /// to allow developer to compress or combine verification results type Guard: Guard; - /// Accumulator fot comressed verification + /// Accumulator for compressed verification type MSMAccumulator; /// Query instance or not const QUERY_INSTANCE: bool; /// Creates new verifier instance - fn new(params: &'params Scheme::ParamsVerifier) -> Self; + fn new() -> Self; - /// Process the proof and returns unfinished result named `Guard` + /// Process the proof and return unfinished result named `Guard` fn verify_proof< 'com, E: EncodedChallenge, @@ -182,7 +200,7 @@ pub trait Verifier<'params, Scheme: CommitmentScheme> { Item = VerifierQuery< 'com, Scheme::Curve, - >::MSM, + >::MSM, >, > + Clone; } diff --git a/halo2_proofs/src/poly/domain.rs b/halo2_backend/src/poly/domain.rs similarity index 97% rename from halo2_proofs/src/poly/domain.rs rename to halo2_backend/src/poly/domain.rs index ae9b8bf9ae..dabc797da4 100644 --- a/halo2_proofs/src/poly/domain.rs +++ b/halo2_backend/src/poly/domain.rs @@ -1,14 +1,13 @@ //! 
Contains utilities for performing polynomial arithmetic over an evaluation //! domain that is of a suitable size for the application. -use crate::{ - arithmetic::{best_fft, parallelize}, - plonk::Assigned, -}; +use crate::arithmetic::parallelize; -use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, Rotation}; -use ff::WithSmallOrderMulGroup; +use super::{Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial}; use group::ff::{BatchInvert, Field}; +use halo2_middleware::ff::WithSmallOrderMulGroup; +use halo2_middleware::poly::Rotation; +use halo2curves::fft::best_fft; use std::marker::PhantomData; @@ -184,15 +183,6 @@ impl> EvaluationDomain { } } - /// Returns an empty (zero) polynomial in the Lagrange coefficient basis, with - /// deferred inversions. - pub fn empty_lagrange_assigned(&self) -> Polynomial, LagrangeCoeff> { - Polynomial { - values: vec![F::ZERO.into(); self.n as usize], - _marker: PhantomData, - } - } - /// Returns a constant polynomial in the Lagrange coefficient basis pub fn constant_lagrange(&self, scalar: F) -> Polynomial { Polynomial { @@ -297,7 +287,7 @@ impl> EvaluationDomain { // evaluation domain might be slightly larger than necessary because // it always lies on a power-of-two boundary. a.values - .truncate((&self.n * self.quotient_poly_degree) as usize); + .truncate((self.n * self.quotient_poly_degree) as usize); a.values } diff --git a/halo2_proofs/src/poly/ipa/commitment.rs b/halo2_backend/src/poly/ipa/commitment.rs similarity index 82% rename from halo2_proofs/src/poly/ipa/commitment.rs rename to halo2_backend/src/poly/ipa/commitment.rs index 7be053c49c..e1436e28c5 100644 --- a/halo2_proofs/src/poly/ipa/commitment.rs +++ b/halo2_backend/src/poly/ipa/commitment.rs @@ -3,19 +3,20 @@ //! //! [halo]: https://eprint.iacr.org/2019/1021 -use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize, CurveAffine, CurveExt}; +use crate::arithmetic::{g_to_lagrange, parallelize, CurveAffine, CurveExt}; use crate::helpers::CurveRead; use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; use crate::poly::ipa::msm::MSMIPA; use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; use group::{Curve, Group}; +use halo2_middleware::zal::traits::MsmAccel; use std::marker::PhantomData; mod prover; mod verifier; -pub use prover::create_proof; +pub use prover::create_proof_with_engine; pub use verifier::verify_proof; use std::io; @@ -56,11 +57,18 @@ impl CommitmentScheme for IPACommitmentScheme { /// Verifier parameters pub type ParamsVerifierIPA = ParamsIPA; -impl<'params, C: CurveAffine> ParamsVerifier<'params, C> for ParamsIPA {} - -impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { +impl<'params, C: CurveAffine> ParamsVerifier<'params, C> for ParamsIPA { type MSM = MSMIPA<'params, C>; + // IPA params always support commitment. + const COMMIT_INSTANCE: bool = true; + + fn empty_msm(&self) -> MSMIPA { + MSMIPA::new(self) + } +} + +impl Params for ParamsIPA { fn k(&self) -> u32 { self.k } @@ -78,15 +86,12 @@ impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); } - fn empty_msm(&'params self) -> MSMIPA { - MSMIPA::new(self) - } - /// This commits to a polynomial using its evaluations over the $2^k$ size /// evaluation domain. The commitment will be blinded by the blinding factor /// `r`. 
fn commit_lagrange( &self, + engine: &impl MsmAccel, poly: &Polynomial, r: Blind, ) -> C::Curve { @@ -99,7 +104,7 @@ impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { tmp_bases.extend(self.g_lagrange.iter()); tmp_bases.push(self.w); - best_multiexp::(&tmp_scalars, &tmp_bases) + engine.msm(&tmp_scalars, &tmp_bases) } /// Writes params to a buffer. @@ -142,13 +147,7 @@ impl<'params, C: CurveAffine> Params<'params, C> for ParamsIPA { } } -impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { - type ParamsVerifier = ParamsVerifierIPA; - - fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { - self - } - +impl ParamsProver for ParamsIPA { /// Initializes parameters for the curve, given a random oracle to draw /// points from. fn new(k: u32) -> Self { @@ -193,8 +192,13 @@ impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { let g_lagrange = g_to_lagrange(g_projective, k); let hasher = C::CurveExt::hash_to_curve("Halo2-Parameters"); - let w = hasher(&[1]).to_affine(); - let u = hasher(&[2]).to_affine(); + + let [w, u] = { + let projectives = vec![hasher(&[1]), hasher(&[2])]; + let mut affines = [C::identity(); 2]; + C::CurveExt::batch_normalize(&projectives, &mut affines); + affines + }; ParamsIPA { k, @@ -209,7 +213,12 @@ impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { /// This computes a commitment to a polynomial described by the provided /// slice of coefficients. The commitment will be blinded by the blinding /// factor `r`. - fn commit(&self, poly: &Polynomial, r: Blind) -> C::Curve { + fn commit( + &self, + engine: &impl MsmAccel, + poly: &Polynomial, + r: Blind, + ) -> C::Curve { let mut tmp_scalars = Vec::with_capacity(poly.len() + 1); let mut tmp_bases = Vec::with_capacity(poly.len() + 1); @@ -219,11 +228,7 @@ impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { tmp_bases.extend(self.g.iter()); tmp_bases.push(self.w); - best_multiexp::(&tmp_scalars, &tmp_bases) - } - - fn get_g(&self) -> &[C] { - &self.g + engine.msm(&tmp_scalars, &tmp_bases) } } @@ -231,11 +236,12 @@ impl<'params, C: CurveAffine> ParamsProver<'params, C> for ParamsIPA { mod test { use crate::poly::commitment::ParamsProver; use crate::poly::commitment::{Blind, Params, MSM}; - use crate::poly::ipa::commitment::{create_proof, verify_proof, ParamsIPA}; + use crate::poly::ipa::commitment::{create_proof_with_engine, verify_proof, ParamsIPA}; use crate::poly::ipa::msm::MSMIPA; - use ff::Field; use group::Curve; + use halo2_middleware::ff::Field; + use halo2_middleware::zal::impls::H2cEngine; #[test] fn test_commit_lagrange_epaffine() { @@ -246,6 +252,7 @@ mod test { use crate::poly::EvaluationDomain; use halo2curves::pasta::{EpAffine, Fq}; + let engine = H2cEngine::new(); let params = ParamsIPA::::new(K); let domain = EvaluationDomain::new(1, K); @@ -259,7 +266,10 @@ mod test { let alpha = Blind(Fq::random(OsRng)); - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + assert_eq!( + params.commit(&engine, &b, alpha), + params.commit_lagrange(&engine, &a, alpha) + ); } #[test] @@ -271,6 +281,7 @@ mod test { use crate::poly::EvaluationDomain; use halo2curves::pasta::{EqAffine, Fp}; + let engine = H2cEngine::new(); let params: ParamsIPA = ParamsIPA::::new(K); let domain = EvaluationDomain::new(1, K); @@ -284,29 +295,33 @@ mod test { let alpha = Blind(Fp::random(OsRng)); - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + assert_eq!( + params.commit(&engine, &b, alpha), + 
params.commit_lagrange(&engine, &a, alpha) + ); } #[test] fn test_opening_proof() { const K: u32 = 6; - use ff::Field; + use halo2_middleware::ff::Field; use rand_core::OsRng; use super::super::commitment::{Blind, Params}; use crate::arithmetic::eval_polynomial; - use crate::halo2curves::pasta::{EpAffine, Fq}; use crate::poly::EvaluationDomain; use crate::transcript::{ Blake2bRead, Blake2bWrite, Challenge255, Transcript, TranscriptRead, TranscriptWrite, }; + use halo2curves::pasta::{EpAffine, Fq}; use crate::transcript::TranscriptReadBuffer; use crate::transcript::TranscriptWriterBuffer; let rng = OsRng; + let engine = H2cEngine::new(); let params = ParamsIPA::::new(K); let mut params_buffer = vec![]; as Params<_>>::write(¶ms, &mut params_buffer).unwrap(); @@ -322,7 +337,7 @@ mod test { let blind = Blind(Fq::random(rng)); - let p = params.commit(&px, blind).to_affine(); + let p = params.commit(&engine, &px, blind).to_affine(); let mut transcript = Blake2bWrite::, EpAffine, Challenge255>::init(vec![]); @@ -333,7 +348,8 @@ mod test { transcript.write_scalar(v).unwrap(); let (proof, ch_prover) = { - create_proof(¶ms, rng, &mut transcript, &px, blind, *x).unwrap(); + create_proof_with_engine(&engine, ¶ms, rng, &mut transcript, &px, blind, *x) + .unwrap(); let ch_prover = transcript.squeeze_challenge(); (transcript.finalize(), ch_prover) }; @@ -351,7 +367,7 @@ mod test { let mut commitment_msm = MSMIPA::new(¶ms); commitment_msm.append_term(Fq::one(), p.into()); - let guard = verify_proof(¶ms, commitment_msm, &mut transcript, *x, v).unwrap(); + let guard = verify_proof(commitment_msm, &mut transcript, *x, v).unwrap(); let ch_verifier = transcript.squeeze_challenge(); assert_eq!(*ch_prover, *ch_verifier); @@ -359,12 +375,12 @@ mod test { { // Test use_challenges() let msm_challenges = guard.clone().use_challenges(); - assert!(msm_challenges.check()); + assert!(msm_challenges.check(&engine)); // Test use_g() - let g = guard.compute_g(); + let g = guard.compute_g(&engine); let (msm_g, _accumulator) = guard.clone().use_g(g); - assert!(msm_g.check()); + assert!(msm_g.check(&engine)); } } } diff --git a/halo2_proofs/src/poly/ipa/commitment/prover.rs b/halo2_backend/src/poly/ipa/commitment/prover.rs similarity index 83% rename from halo2_proofs/src/poly/ipa/commitment/prover.rs rename to halo2_backend/src/poly/ipa/commitment/prover.rs index 344dbc0e65..7e04206e07 100644 --- a/halo2_proofs/src/poly/ipa/commitment/prover.rs +++ b/halo2_backend/src/poly/ipa/commitment/prover.rs @@ -1,10 +1,9 @@ -use ff::Field; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::traits::MsmAccel; use rand_core::RngCore; use super::ParamsIPA; -use crate::arithmetic::{ - best_multiexp, compute_inner_product, eval_polynomial, parallelize, CurveAffine, -}; +use crate::arithmetic::{compute_inner_product, eval_polynomial, parallelize, CurveAffine}; use crate::poly::commitment::ParamsProver; use crate::poly::{commitment::Blind, Coeff, Polynomial}; @@ -26,12 +25,13 @@ use std::io::{self}; /// opening v, and the point x. It's probably also nice for the transcript /// to have seen the elliptic curve description and the URS, if you want to /// be rigorous. 
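// Editorial sketch (not part of this patch): the IPA opening-proof prover now threads an explicit
// MSM accelerator engine through every multiexponentiation. With the default CPU engine
// (`H2cEngine`), and assuming `params`, `px`, `blind`, the challenge `x`, `rng` and a write
// transcript are set up as in `test_opening_proof` above, the renamed entry point is called as:
let engine = H2cEngine::new();
create_proof_with_engine(&engine, &params, rng, &mut transcript, &px, blind, *x).unwrap();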
-pub fn create_proof< +pub fn create_proof_with_engine< C: CurveAffine, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, >( + engine: &impl MsmAccel, params: &ParamsIPA, mut rng: R, transcript: &mut T, @@ -56,7 +56,7 @@ pub fn create_proof< let s_poly_blind = Blind(C::Scalar::random(&mut rng)); // Write a commitment to the random polynomial to the transcript - let s_poly_commitment = params.commit(&s_poly, s_poly_blind).to_affine(); + let s_poly_commitment = params.commit(engine, &s_poly, s_poly_blind).to_affine(); transcript.write_point(s_poly_commitment)?; // Challenge that will ensure that the prover cannot change P but can only @@ -106,16 +106,19 @@ pub fn create_proof< // // TODO: If we modify multiexp to take "extra" bases, we could speed // this piece up a bit by combining the multiexps. - let l_j = best_multiexp(&p_prime[half..], &g_prime[0..half]); - let r_j = best_multiexp(&p_prime[0..half], &g_prime[half..]); + let l_j = engine.msm(&p_prime[half..], &g_prime[0..half]); + let r_j = engine.msm(&p_prime[0..half], &g_prime[half..]); let value_l_j = compute_inner_product(&p_prime[half..], &b[0..half]); let value_r_j = compute_inner_product(&p_prime[0..half], &b[half..]); let l_j_randomness = C::Scalar::random(&mut rng); let r_j_randomness = C::Scalar::random(&mut rng); - let l_j = l_j + &best_multiexp(&[value_l_j * &z, l_j_randomness], &[params.u, params.w]); - let r_j = r_j + &best_multiexp(&[value_r_j * &z, r_j_randomness], &[params.u, params.w]); - let l_j = l_j.to_affine(); - let r_j = r_j.to_affine(); + let l_j = l_j + engine.msm(&[value_l_j * z, l_j_randomness], &[params.u, params.w]); + let r_j = r_j + engine.msm(&[value_r_j * z, r_j_randomness], &[params.u, params.w]); + let [l_j, r_j] = { + let mut affines = [C::identity(); 2]; + C::CurveExt::batch_normalize(&[l_j, r_j], &mut affines); + affines + }; // Feed L and R into the real transcript transcript.write_point(l_j)?; @@ -127,8 +130,8 @@ pub fn create_proof< // Collapse `p_prime` and `b`. 
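// Each round folds the top half into the bottom half using the round challenge:
// p'_i += u_j^{-1} * p'_{i+half} and b_i += u_j * b_{i+half}, after which p', b and G' are
// truncated to half their length, until a single element remains.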
// TODO: parallelize for i in 0..half { - p_prime[i] = p_prime[i] + &(p_prime[i + half] * &u_j_inv); - b[i] = b[i] + &(b[i + half] * &u_j); + p_prime[i] = p_prime[i] + (p_prime[i + half] * u_j_inv); + b[i] = b[i] + (b[i + half] * u_j); } p_prime.truncate(half); b.truncate(half); @@ -138,8 +141,8 @@ pub fn create_proof< g_prime.truncate(half); // Update randomness (the synthetic blinding factor at the end) - f += &(l_j_randomness * &u_j_inv); - f += &(r_j_randomness * &u_j); + f += l_j_randomness * u_j_inv; + f += r_j_randomness * u_j; } // We have fully collapsed `p_prime`, `b`, `G'` @@ -160,7 +163,7 @@ fn parallel_generator_collapse(g: &mut [C], challenge: C::Scalar let g_hi = &g_hi[start..]; let mut tmp = Vec::with_capacity(g_lo.len()); for (g_lo, g_hi) in g_lo.iter().zip(g_hi.iter()) { - tmp.push(g_lo.to_curve() + &(*g_hi * challenge)); + tmp.push(g_lo.to_curve() + *g_hi * challenge); } C::Curve::batch_normalize(&tmp, g_lo); }); diff --git a/halo2_proofs/src/poly/ipa/commitment/verifier.rs b/halo2_backend/src/poly/ipa/commitment/verifier.rs similarity index 94% rename from halo2_proofs/src/poly/ipa/commitment/verifier.rs rename to halo2_backend/src/poly/ipa/commitment/verifier.rs index cf258625d5..2e6723c5b6 100644 --- a/halo2_proofs/src/poly/ipa/commitment/verifier.rs +++ b/halo2_backend/src/poly/ipa/commitment/verifier.rs @@ -1,6 +1,5 @@ use group::ff::{BatchInvert, Field}; -use super::ParamsIPA; use crate::{arithmetic::CurveAffine, poly::ipa::strategy::GuardIPA}; use crate::{ poly::{commitment::MSM, ipa::msm::MSMIPA, Error}, @@ -11,13 +10,12 @@ use crate::{ /// point `x` that the polynomial commitment `P` opens purportedly to the value /// `v`. The provided `msm` should evaluate to the commitment `P` being opened. pub fn verify_proof<'params, C: CurveAffine, E: EncodedChallenge, T: TranscriptRead>( - params: &'params ParamsIPA, mut msm: MSMIPA<'params, C>, transcript: &mut T, x: C::Scalar, v: C::Scalar, ) -> Result, Error> { - let k = params.k as usize; + let k = msm.params.k as usize; // P' = P - [v] G_0 + [ξ] S msm.add_constant_term(-v); // add [-v] G_0 @@ -75,7 +73,7 @@ pub fn verify_proof<'params, C: CurveAffine, E: EncodedChallenge, T: Transcri let f = transcript.read_scalar().map_err(|_| Error::SamplingError)?; let b = compute_b(x, &u); - msm.add_to_u_scalar(neg_c * &b * &z); + msm.add_to_u_scalar(neg_c * b * z); msm.add_to_w_scalar(-f); let guard = GuardIPA { @@ -93,7 +91,7 @@ fn compute_b(x: F, u: &[F]) -> F { let mut tmp = F::ONE; let mut cur = x; for u_j in u.iter().rev() { - tmp *= F::ONE + &(*u_j * &cur); + tmp *= F::ONE + (*u_j * cur); cur *= cur; } tmp diff --git a/halo2_proofs/src/poly/ipa/mod.rs b/halo2_backend/src/poly/ipa/mod.rs similarity index 100% rename from halo2_proofs/src/poly/ipa/mod.rs rename to halo2_backend/src/poly/ipa/mod.rs diff --git a/halo2_proofs/src/poly/ipa/msm.rs b/halo2_backend/src/poly/ipa/msm.rs similarity index 89% rename from halo2_proofs/src/poly/ipa/msm.rs rename to halo2_backend/src/poly/ipa/msm.rs index a615ddce49..b2869e9dd0 100644 --- a/halo2_proofs/src/poly/ipa/msm.rs +++ b/halo2_backend/src/poly/ipa/msm.rs @@ -1,7 +1,8 @@ -use crate::arithmetic::{best_multiexp, CurveAffine}; +use crate::arithmetic::CurveAffine; use crate::poly::{commitment::MSM, ipa::commitment::ParamsVerifierIPA}; -use ff::Field; use group::Group; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::traits::MsmAccel; use std::collections::BTreeMap; /// A multiscalar multiplication in the polynomial commitment scheme @@ -126,15 +127,15 @@ impl<'a, C: 
CurveAffine> MSM for MSMIPA<'a, C> { other.0 *= factor; } - self.w_scalar = self.w_scalar.map(|a| a * &factor); - self.u_scalar = self.u_scalar.map(|a| a * &factor); + self.w_scalar = self.w_scalar.map(|a| a * factor); + self.u_scalar = self.u_scalar.map(|a| a * factor); } - fn check(&self) -> bool { - bool::from(self.eval().is_identity()) + fn check(&self, engine: &impl MsmAccel) -> bool { + bool::from(self.eval(engine).is_identity()) } - fn eval(&self) -> C::Curve { + fn eval(&self, engine: &impl MsmAccel) -> C::Curve { let len = self.g_scalars.as_ref().map(|v| v.len()).unwrap_or(0) + self.w_scalar.map(|_| 1).unwrap_or(0) + self.u_scalar.map(|_| 1).unwrap_or(0) @@ -165,8 +166,7 @@ impl<'a, C: CurveAffine> MSM for MSMIPA<'a, C> { } assert_eq!(scalars.len(), len); - - best_multiexp(&scalars, &bases) + engine.msm(&scalars, &bases) } fn bases(&self) -> Vec { @@ -207,12 +207,12 @@ impl<'a, C: CurveAffine> MSMIPA<'a, C> { } /// Add to `w_scalar` pub fn add_to_w_scalar(&mut self, scalar: C::Scalar) { - self.w_scalar = self.w_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); + self.w_scalar = self.w_scalar.map_or(Some(scalar), |a| Some(a + scalar)); } /// Add to `u_scalar` pub fn add_to_u_scalar(&mut self, scalar: C::Scalar) { - self.u_scalar = self.u_scalar.map_or(Some(scalar), |a| Some(a + &scalar)); + self.u_scalar = self.u_scalar.map_or(Some(scalar), |a| Some(a + scalar)); } } @@ -222,6 +222,7 @@ mod tests { commitment::{ParamsProver, MSM}, ipa::{commitment::ParamsIPA, msm::MSMIPA}, }; + use halo2_middleware::zal::impls::H2cEngine; use halo2curves::{ pasta::{Ep, EpAffine, Fp, Fq}, CurveAffine, @@ -232,40 +233,41 @@ mod tests { let base: Ep = EpAffine::from_xy(-Fp::one(), Fp::from(2)).unwrap().into(); let base_viol = base + base; + let engine = H2cEngine::new(); let params = ParamsIPA::new(4); let mut a: MSMIPA = MSMIPA::new(¶ms); a.append_term(Fq::one(), base); // a = [1] P - assert!(!a.clone().check()); + assert!(!a.clone().check(&engine)); a.append_term(Fq::one(), base); // a = [1+1] P - assert!(!a.clone().check()); + assert!(!a.clone().check(&engine)); a.append_term(-Fq::one(), base_viol); // a = [1+1] P + [-1] 2P - assert!(a.clone().check()); + assert!(a.clone().check(&engine)); let b = a.clone(); // Append a point that is the negation of an existing one. a.append_term(Fq::from(4), -base); // a = [1+1-4] P + [-1] 2P - assert!(!a.clone().check()); + assert!(!a.clone().check(&engine)); a.append_term(Fq::from(2), base_viol); // a = [1+1-4] P + [-1+2] 2P - assert!(a.clone().check()); + assert!(a.clone().check(&engine)); // Add two MSMs with common bases. a.scale(Fq::from(3)); a.add_msm(&b); // a = [3*(1+1)+(1+1-4)] P + [3*(-1)+(-1+2)] 2P - assert!(a.clone().check()); + assert!(a.clone().check(&engine)); let mut c: MSMIPA = MSMIPA::new(¶ms); c.append_term(Fq::from(2), base); c.append_term(Fq::one(), -base_viol); // c = [2] P + [1] (-2P) - assert!(c.clone().check()); + assert!(c.clone().check(&engine)); // Add two MSMs with bases that differ only in sign. 
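// Both `a` and `c` already evaluate to the identity, so the combined MSM must still pass the check.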
a.add_msm(&c); - assert!(a.check()); + assert!(a.check(&engine)); } } diff --git a/halo2_proofs/src/poly/ipa/multiopen.rs b/halo2_backend/src/poly/ipa/multiopen.rs similarity index 98% rename from halo2_proofs/src/poly/ipa/multiopen.rs rename to halo2_backend/src/poly/ipa/multiopen.rs index b78acb5934..d4e1a33ba1 100644 --- a/halo2_proofs/src/poly/ipa/multiopen.rs +++ b/halo2_backend/src/poly/ipa/multiopen.rs @@ -5,7 +5,7 @@ use super::*; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; use std::collections::{BTreeMap, BTreeSet}; mod prover; @@ -131,7 +131,7 @@ where let mut point_index_set = BTreeSet::new(); for (commitment, point_idx_set) in commitment_set_map.iter() { if query.get_commitment() == *commitment { - point_index_set = point_idx_set.clone(); + point_index_set.clone_from(point_idx_set); } } assert!(!point_index_set.is_empty()); diff --git a/halo2_proofs/src/poly/ipa/multiopen/prover.rs b/halo2_backend/src/poly/ipa/multiopen/prover.rs similarity index 87% rename from halo2_proofs/src/poly/ipa/multiopen/prover.rs rename to halo2_backend/src/poly/ipa/multiopen/prover.rs index 2ae745d457..e16582b080 100644 --- a/halo2_proofs/src/poly/ipa/multiopen/prover.rs +++ b/halo2_backend/src/poly/ipa/multiopen/prover.rs @@ -7,8 +7,9 @@ use crate::poly::query::ProverQuery; use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::traits::MsmAccel; use rand_core::RngCore; use std::io; use std::marker::PhantomData; @@ -27,8 +28,9 @@ impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for Prover } /// Create a multi-opening proof - fn create_proof<'com, Z: EncodedChallenge, T: TranscriptWrite, R, I>( + fn create_proof_with_engine<'com, Z: EncodedChallenge, T: TranscriptWrite, R, I>( &self, + engine: &impl MsmAccel, mut rng: R, transcript: &mut T, queries: I, @@ -93,7 +95,10 @@ impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for Prover .unwrap(); let q_prime_blind = Blind(C::Scalar::random(&mut rng)); - let q_prime_commitment = self.params.commit(&q_prime_poly, q_prime_blind).to_affine(); + let q_prime_commitment = self + .params + .commit(engine, &q_prime_poly, q_prime_blind) + .to_affine(); transcript.write_point(q_prime_commitment)?; @@ -112,11 +117,19 @@ impl<'params, C: CurveAffine> Prover<'params, IPACommitmentScheme> for Prover |(q_prime_poly, q_prime_blind), (poly, blind)| { ( q_prime_poly * *x_4 + &poly.unwrap(), - Blind((q_prime_blind.0 * &(*x_4)) + &blind.0), + Blind((q_prime_blind.0 * (*x_4)) + blind.0), ) }, ); - commitment::create_proof(self.params, rng, transcript, &p_poly, p_poly_blind, *x_3) + commitment::create_proof_with_engine( + engine, + self.params, + rng, + transcript, + &p_poly, + p_poly_blind, + *x_3, + ) } } diff --git a/halo2_proofs/src/poly/ipa/multiopen/verifier.rs b/halo2_backend/src/poly/ipa/multiopen/verifier.rs similarity index 87% rename from halo2_proofs/src/poly/ipa/multiopen/verifier.rs rename to halo2_backend/src/poly/ipa/multiopen/verifier.rs index d559e33384..79133eeffa 100644 --- a/halo2_proofs/src/poly/ipa/multiopen/verifier.rs +++ b/halo2_backend/src/poly/ipa/multiopen/verifier.rs @@ -1,11 +1,12 @@ use std::fmt::Debug; +use std::marker::PhantomData; -use ff::Field; +use halo2_middleware::ff::Field; use super::{construct_intermediate_sets, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4}; use 
crate::arithmetic::{eval_polynomial, lagrange_interpolate, CurveAffine}; -use crate::poly::commitment::{Params, Verifier, MSM}; -use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA, ParamsVerifierIPA}; +use crate::poly::commitment::{ParamsVerifier, Verifier, MSM}; +use crate::poly::ipa::commitment::IPACommitmentScheme; use crate::poly::ipa::msm::MSMIPA; use crate::poly::ipa::strategy::GuardIPA; use crate::poly::query::{CommitmentReference, VerifierQuery}; @@ -14,20 +15,20 @@ use crate::transcript::{EncodedChallenge, TranscriptRead}; /// IPA multi-open verifier #[derive(Debug)] -pub struct VerifierIPA<'params, C: CurveAffine> { - params: &'params ParamsIPA, +pub struct VerifierIPA { + _marker: PhantomData, } -impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> - for VerifierIPA<'params, C> -{ +impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> for VerifierIPA { type Guard = GuardIPA<'params, C>; type MSMAccumulator = MSMIPA<'params, C>; const QUERY_INSTANCE: bool = true; - fn new(params: &'params ParamsVerifierIPA) -> Self { - Self { params } + fn new() -> Self { + Self { + _marker: PhantomData, + } } fn verify_proof<'com, E: EncodedChallenge, T: TranscriptRead, I>( @@ -51,8 +52,10 @@ impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> // Compress the commitments and expected evaluations at x together. // using the challenge x_1 + let empty_msm = ParamsVerifier::<'params, C>::empty_msm(msm.params); let mut q_commitments: Vec<_> = vec![ - (self.params.empty_msm(), C::Scalar::ONE); // (accumulator, next x_1 power). + + (empty_msm, C::Scalar::ONE); // (accumulator, next x_1 power). point_sets.len()]; // A vec of vecs of evals. The outer vec corresponds to the point set, @@ -120,10 +123,10 @@ impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> |msm_eval, ((points, evals), proof_eval)| { let r_poly = lagrange_interpolate(points, evals); let r_eval = eval_polynomial(&r_poly, *x_3); - let eval = points.iter().fold(*proof_eval - &r_eval, |eval, point| { - eval * &(*x_3 - point).invert().unwrap() + let eval = points.iter().fold(*proof_eval - r_eval, |eval, point| { + eval * (*x_3 - point).invert().unwrap() }); - msm_eval * &(*x_2) + &eval + msm_eval * (*x_2) + eval }, ); @@ -138,11 +141,11 @@ impl<'params, C: CurveAffine> Verifier<'params, IPACommitmentScheme> |(mut msm, msm_eval), ((q_commitment, _), q_eval)| { msm.scale(*x_4); msm.add_msm(&q_commitment); - (msm, msm_eval * &(*x_4) + q_eval) + (msm, msm_eval * (*x_4) + q_eval) }, ); // Verify the opening proof - super::commitment::verify_proof(self.params, msm, transcript, *x_3, v) + super::commitment::verify_proof(msm, transcript, *x_3, v) } } diff --git a/halo2_proofs/src/poly/ipa/strategy.rs b/halo2_backend/src/poly/ipa/strategy.rs similarity index 86% rename from halo2_proofs/src/poly/ipa/strategy.rs rename to halo2_backend/src/poly/ipa/strategy.rs index d2d1b3d364..4fdc47523c 100644 --- a/halo2_proofs/src/poly/ipa/strategy.rs +++ b/halo2_backend/src/poly/ipa/strategy.rs @@ -2,15 +2,15 @@ use super::commitment::{IPACommitmentScheme, ParamsIPA}; use super::msm::MSMIPA; use super::multiopen::VerifierIPA; use crate::{ - arithmetic::best_multiexp, plonk::Error, poly::{ commitment::MSM, strategy::{Guard, VerificationStrategy}, }, }; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::{impls::H2cEngine, traits::MsmAccel}; use halo2curves::CurveAffine; use rand_core::OsRng; @@ -64,10 +64,9 @@ impl<'params, C: CurveAffine> 
GuardIPA<'params, C> { } /// Computes G = ⟨s, params.g⟩ - pub fn compute_g(&self) -> C { + pub fn compute_g(&self, engine: &impl MsmAccel) -> C { let s = compute_s(&self.u, C::Scalar::ONE); - - best_multiexp(&s, &self.msm.params.g).to_affine() + engine.msm(&s, &self.msm.params.g).to_affine() } } @@ -77,8 +76,7 @@ pub struct AccumulatorStrategy<'params, C: CurveAffine> { msm: MSMIPA<'params, C>, } -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> +impl<'params, C: CurveAffine> VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA> for AccumulatorStrategy<'params, C> { type Output = Self; @@ -107,7 +105,8 @@ impl<'params, C: CurveAffine> /// specific failing proofs, it must re-process the proofs separately. #[must_use] fn finalize(self) -> bool { - self.msm.check() + // TODO: Verification is cheap, ZkAccel on verifier is not a priority. + self.msm.check(&H2cEngine::new()) } } @@ -117,8 +116,7 @@ pub struct SingleStrategy<'params, C: CurveAffine> { msm: MSMIPA<'params, C>, } -impl<'params, C: CurveAffine> - VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA<'params, C>> +impl<'params, C: CurveAffine> VerificationStrategy<'params, IPACommitmentScheme, VerifierIPA> for SingleStrategy<'params, C> { type Output = (); @@ -135,7 +133,8 @@ impl<'params, C: CurveAffine> ) -> Result { let guard = f(self.msm)?; let msm = guard.use_challenges(); - if msm.check() { + // ZAL: Verification is (supposedly) cheap, hence we don't use an accelerator engine + if msm.check(&H2cEngine::new()) { Ok(()) } else { Err(Error::ConstraintSystemFailure) diff --git a/halo2_proofs/src/poly/kzg/commitment.rs b/halo2_backend/src/poly/kzg/commitment.rs similarity index 74% rename from halo2_proofs/src/poly/kzg/commitment.rs rename to halo2_backend/src/poly/kzg/commitment.rs index 114b9ac013..f273d7ec3f 100644 --- a/halo2_proofs/src/poly/kzg/commitment.rs +++ b/halo2_backend/src/poly/kzg/commitment.rs @@ -1,13 +1,13 @@ -use crate::arithmetic::{best_multiexp, g_to_lagrange, parallelize}; -use crate::helpers::SerdeCurveAffine; +use crate::arithmetic::{g_to_lagrange, parallelize}; +use crate::helpers::{SerdeCurveAffine, SerdeFormat}; use crate::poly::commitment::{Blind, CommitmentScheme, Params, ParamsProver, ParamsVerifier}; use crate::poly::{Coeff, LagrangeCoeff, Polynomial}; -use crate::SerdeFormat; -use ff::{Field, PrimeField}; use group::{prime::PrimeCurveAffine, Curve, Group}; +use halo2_middleware::ff::{Field, PrimeField}; +use halo2_middleware::zal::traits::MsmAccel; use halo2curves::pairing::Engine; -use halo2curves::CurveExt; +use halo2curves::{CurveAffine, CurveExt}; use rand_core::{OsRng, RngCore}; use std::fmt::Debug; use std::marker::PhantomData; @@ -19,21 +19,115 @@ use super::msm::MSMKZG; /// These are the public parameters for the polynomial commitment scheme. 
#[derive(Debug, Clone)] pub struct ParamsKZG { - pub(crate) k: u32, - pub(crate) n: u64, + pub k: u32, + pub n: u64, pub(crate) g: Vec, pub(crate) g_lagrange: Vec, - pub(crate) g2: E::G2Affine, + pub g2: E::G2Affine, + pub s_g2: E::G2Affine, +} + +/// Parameters KZG-based proof verification: +#[derive(Debug, Clone)] +pub struct ParamsVerifierKZG { + pub(crate) k: u32, pub(crate) s_g2: E::G2Affine, } +impl Params for ParamsVerifierKZG +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + fn k(&self) -> u32 { + self.k + } + + fn n(&self) -> u64 { + 1 << self.k + } + + fn downsize(&mut self, k: u32) { + assert!(k <= self.k); + self.k = k; + } + + fn commit_lagrange( + &self, + _engine: &impl MsmAccel, + _poly: &Polynomial, + _: Blind, + ) -> E::G1 { + panic!("Commitment is not supported for ParamsVerifierKZG, use ParamsKZG instead."); + } + + /// Writes params to a buffer. + fn write(&self, writer: &mut W) -> io::Result<()> { + Self::write_custom(self, writer, SerdeFormat::RawBytes) + } + + /// Reads params from a buffer. + fn read(reader: &mut R) -> io::Result { + Self::read_custom(reader, SerdeFormat::RawBytes) + } +} + +impl<'params, E: Engine> ParamsVerifier<'params, E::G1Affine> for ParamsVerifierKZG +where + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, +{ + type MSM = MSMKZG; + + // Do not support commitment. + const COMMIT_INSTANCE: bool = false; + + fn empty_msm(&'params self) -> MSMKZG { + MSMKZG::new() + } +} + +impl ParamsVerifierKZG +where + E::G1Affine: CurveAffine, + E::G1: CurveExt, +{ + pub fn write_custom(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> + where + E::G1Affine: SerdeCurveAffine, + E::G2Affine: SerdeCurveAffine, + { + writer.write_all(&self.k.to_le_bytes())?; + self.s_g2.write(writer, format)?; + Ok(()) + } + + pub fn read_custom(reader: &mut R, format: SerdeFormat) -> io::Result + where + E::G1Affine: SerdeCurveAffine, + E::G2Affine: SerdeCurveAffine, + { + let mut k = [0u8; 4]; + reader.read_exact(&mut k[..])?; + let k = u32::from_le_bytes(k); + // This is a generous bound on the size of the domain. + debug_assert!(k < 32); + + let s_g2 = E::G2Affine::read(reader, format)?; + + Ok(Self { k, s_g2 }) + } +} + /// Umbrella commitment scheme construction for all KZG variants #[derive(Debug)] pub struct KZGCommitmentScheme { _marker: PhantomData, } -impl CommitmentScheme for KZGCommitmentScheme +impl CommitmentScheme for KZGCommitmentScheme where E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, @@ -54,9 +148,9 @@ where } } -impl ParamsKZG +impl ParamsKZG where - E::G1Affine: SerdeCurveAffine, + E::G1Affine: CurveAffine, E::G1: CurveExt, { /// Initializes parameters for the curve, draws toxic secret from given rng. @@ -153,19 +247,17 @@ where } } - /// Returns gernerator on G2 - pub fn g2(&self) -> E::G2Affine { - self.g2 - } - - /// Returns first power of secret on G2 - pub fn s_g2(&self) -> E::G2Affine { - self.s_g2 + pub fn verifier_params(&self) -> ParamsVerifierKZG { + ParamsVerifierKZG { + k: self.k, + s_g2: self.s_g2, + } } /// Writes parameters to buffer pub fn write_custom(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> where + E::G1Affine: SerdeCurveAffine, E::G2Affine: SerdeCurveAffine, { writer.write_all(&self.k.to_le_bytes())?; @@ -183,6 +275,7 @@ where /// Reads params from a buffer. 
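// Editorial sketch (not part of this patch): verifier-side KZG parameters are now a separate,
// trimmed struct holding only `k` and `s_g2`, derived from the full prover parameters. A
// hypothetical round-trip, written as if it sat next to the tests at the bottom of this file
// (so the `crate::` paths resolve) and assuming a `ParamsKZG<Bn256>` named `params` exists:
use crate::helpers::SerdeFormat;
use crate::poly::kzg::commitment::ParamsVerifierKZG;
use halo2curves::bn256::Bn256;

// Derive the small verifier params from the full prover params.
let vparams = params.verifier_params();
// Serialize them (only `k` and `s_g2` are written) ...
let mut buf = Vec::new();
vparams.write_custom(&mut buf, SerdeFormat::RawBytes).unwrap();
// ... and read them back on the verifier side.
let restored =
    ParamsVerifierKZG::<Bn256>::read_custom(&mut buf.as_slice(), SerdeFormat::RawBytes).unwrap();
assert_eq!(restored.k, vparams.k);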
pub fn read_custom(reader: &mut R, format: SerdeFormat) -> io::Result where + E::G1Affine: SerdeCurveAffine, E::G2Affine: SerdeCurveAffine, { let mut k = [0u8; 4]; @@ -267,19 +360,12 @@ where } } -// TODO: see the issue at https://github.com/appliedzkp/halo2/issues/45 -// So we probably need much smaller verifier key. However for new bases in g1 should be in verifier keys. -/// KZG multi-open verification parameters -pub type ParamsVerifierKZG = ParamsKZG; - -impl<'params, E: Engine + Debug> Params<'params, E::G1Affine> for ParamsKZG +impl Params for ParamsKZG where E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { - type MSM = MSMKZG; - fn k(&self) -> u32 { self.k } @@ -288,6 +374,20 @@ where self.n } + fn commit_lagrange( + &self, + engine: &impl MsmAccel, + poly: &Polynomial, + _: Blind, + ) -> E::G1 { + let mut scalars = Vec::with_capacity(poly.len()); + scalars.extend(poly.iter()); + let bases = &self.g_lagrange; + let size = scalars.len(); + assert!(bases.len() >= size); + engine.msm(&scalars, &bases[0..size]) + } + fn downsize(&mut self, k: u32) { assert!(k <= self.k); @@ -298,19 +398,6 @@ where self.g_lagrange = g_to_lagrange(self.g.iter().map(|g| g.to_curve()).collect(), k); } - fn empty_msm(&'params self) -> MSMKZG { - MSMKZG::new() - } - - fn commit_lagrange(&self, poly: &Polynomial, _: Blind) -> E::G1 { - let mut scalars = Vec::with_capacity(poly.len()); - scalars.extend(poly.iter()); - let bases = &self.g_lagrange; - let size = scalars.len(); - assert!(bases.len() >= size); - best_multiexp(&scalars, &bases[0..size]) - } - /// Writes params to a buffer. fn write(&self, writer: &mut W) -> io::Result<()> { self.write_custom(writer, SerdeFormat::RawBytes) @@ -322,41 +409,42 @@ where } } -impl<'params, E: Engine + Debug> ParamsVerifier<'params, E::G1Affine> for ParamsKZG +impl<'params, E: Engine> ParamsVerifier<'params, E::G1Affine> for ParamsKZG where E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { + type MSM = MSMKZG; + // KZG params with support for commitment. 
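// (By contrast, the trimmed `ParamsVerifierKZG` above sets `COMMIT_INSTANCE = false`: it keeps
// only `k` and `s_g2`, so it cannot commit to instance columns, and `verify_proof` asserts that a
// verifier with `QUERY_INSTANCE = true` is only used with params whose `COMMIT_INSTANCE` is true.)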
+ const COMMIT_INSTANCE: bool = true; + fn empty_msm(&self) -> MSMKZG { + MSMKZG::new() + } } -impl<'params, E: Engine + Debug> ParamsProver<'params, E::G1Affine> for ParamsKZG +impl ParamsProver for ParamsKZG where E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { - type ParamsVerifier = ParamsVerifierKZG; - - fn verifier_params(&'params self) -> &'params Self::ParamsVerifier { - self - } - fn new(k: u32) -> Self { Self::setup(k, OsRng) } - fn commit(&self, poly: &Polynomial, _: Blind) -> E::G1 { + fn commit( + &self, + engine: &impl MsmAccel, + poly: &Polynomial, + _: Blind, + ) -> E::G1 { let mut scalars = Vec::with_capacity(poly.len()); scalars.extend(poly.iter()); let bases = &self.g; let size = scalars.len(); assert!(bases.len() >= size); - best_multiexp(&scalars, &bases[0..size]) - } - - fn get_g(&self) -> &[E::G1Affine] { - &self.g + engine.msm(&scalars, &bases[0..size]) } } @@ -365,7 +453,8 @@ mod test { use crate::poly::commitment::ParamsProver; use crate::poly::commitment::{Blind, Params}; use crate::poly::kzg::commitment::ParamsKZG; - use ff::Field; + use halo2_middleware::ff::Field; + use halo2_middleware::zal::impls::H2cEngine; #[test] fn test_commit_lagrange() { @@ -376,6 +465,7 @@ mod test { use crate::poly::EvaluationDomain; use halo2curves::bn256::{Bn256, Fr}; + let engine = H2cEngine::new(); let params = ParamsKZG::::new(K); let domain = EvaluationDomain::new(1, K); @@ -389,7 +479,10 @@ mod test { let alpha = Blind(Fr::random(OsRng)); - assert_eq!(params.commit(&b, alpha), params.commit_lagrange(&a, alpha)); + assert_eq!( + params.commit(&engine, &b, alpha), + params.commit_lagrange(&engine, &a, alpha) + ); } #[test] @@ -397,7 +490,7 @@ mod test { const K: u32 = 4; use super::super::commitment::Params; - use crate::halo2curves::bn256::Bn256; + use halo2curves::bn256::Bn256; let params0 = ParamsKZG::::new(K); let mut data = vec![]; diff --git a/halo2_proofs/src/poly/kzg/mod.rs b/halo2_backend/src/poly/kzg/mod.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/mod.rs rename to halo2_backend/src/poly/kzg/mod.rs diff --git a/halo2_proofs/src/poly/kzg/msm.rs b/halo2_backend/src/poly/kzg/msm.rs similarity index 81% rename from halo2_proofs/src/poly/kzg/msm.rs rename to halo2_backend/src/poly/kzg/msm.rs index f9b8c284bd..b5a36d031a 100644 --- a/halo2_proofs/src/poly/kzg/msm.rs +++ b/halo2_backend/src/poly/kzg/msm.rs @@ -1,11 +1,9 @@ use std::fmt::Debug; -use super::commitment::ParamsKZG; -use crate::{ - arithmetic::{best_multiexp, parallelize}, - poly::commitment::MSM, -}; +use super::commitment::ParamsVerifierKZG; +use crate::{arithmetic::parallelize, poly::commitment::MSM}; use group::{Curve, Group}; +use halo2_middleware::zal::traits::MsmAccel; use halo2curves::{ pairing::{Engine, MillerLoopResult, MultiMillerLoop}, CurveAffine, CurveExt, @@ -37,7 +35,7 @@ where /// Prepares all scalars in the MSM to linear combination pub fn combine_with_base(&mut self, base: E::Fr) { - use ff::Field; + use halo2_middleware::ff::Field; let mut acc = E::Fr::ONE; if !self.scalars.is_empty() { for scalar in self.scalars.iter_mut().rev() { @@ -48,7 +46,7 @@ where } } -impl MSM for MSMKZG +impl MSM for MSMKZG where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, @@ -73,15 +71,15 @@ where } } - fn check(&self) -> bool { - bool::from(self.eval().is_identity()) + fn check(&self, engine: &impl MsmAccel) -> bool { + bool::from(self.eval(engine).is_identity()) } - fn eval(&self) -> E::G1 { + fn eval(&self, engine: &impl 
MsmAccel) -> E::G1 { use group::prime::PrimeCurveAffine; let mut bases = vec![E::G1Affine::identity(); self.scalars.len()]; E::G1::batch_normalize(&self.bases, &mut bases); - best_multiexp(&self.scalars, &bases) + engine.msm(&self.scalars, &bases) } fn bases(&self) -> Vec { @@ -103,17 +101,23 @@ where projectives_msms: Vec>, } -impl PreMSM +impl Default for PreMSM where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) fn new() -> Self { + fn default() -> Self { PreMSM { projectives_msms: vec![], } } +} +impl PreMSM +where + E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ pub(crate) fn normalize(self) -> MSMKZG { let (scalars, bases) = self .projectives_msms @@ -132,37 +136,25 @@ where } } -impl<'params, E: MultiMillerLoop + Debug> From<&'params ParamsKZG> for DualMSM<'params, E> -where - E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, -{ - fn from(params: &'params ParamsKZG) -> Self { - DualMSM::new(params) - } -} - /// Two channel MSM accumulator -#[derive(Debug, Clone)] -pub struct DualMSM<'a, E: Engine> +#[derive(Debug, Default, Clone)] +pub struct DualMSM where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) params: &'a ParamsKZG, pub(crate) left: MSMKZG, pub(crate) right: MSMKZG, } -impl<'a, E: MultiMillerLoop + Debug> DualMSM<'a, E> +impl DualMSM where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { /// Create a new two channel MSM accumulator instance - pub fn new(params: &'a ParamsKZG) -> Self { + pub fn new() -> Self { Self { - params, left: MSMKZG::new(), right: MSMKZG::new(), } @@ -181,12 +173,12 @@ where } /// Performs final pairing check with given verifier params and two channel linear combination - pub fn check(self) -> bool { - let s_g2_prepared = E::G2Prepared::from(self.params.s_g2); - let n_g2_prepared = E::G2Prepared::from(-self.params.g2); + pub fn check(self, engine: &impl MsmAccel, params: &ParamsVerifierKZG) -> bool { + let s_g2_prepared = E::G2Prepared::from(params.s_g2); + let n_g2_prepared = E::G2Prepared::from((-E::G2::generator()).into()); - let left = self.left.eval(); - let right = self.right.eval(); + let left = self.left.eval(engine); + let right = self.right.eval(engine); let (term_1, term_2) = ( (&left.into(), &s_g2_prepared), diff --git a/halo2_proofs/src/poly/kzg/multiopen.rs b/halo2_backend/src/poly/kzg/multiopen.rs similarity index 100% rename from halo2_proofs/src/poly/kzg/multiopen.rs rename to halo2_backend/src/poly/kzg/multiopen.rs diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc.rs b/halo2_backend/src/poly/kzg/multiopen/gwc.rs new file mode 100644 index 0000000000..b1cdbda7c4 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc.rs @@ -0,0 +1,170 @@ +mod prover; +mod verifier; + +pub use prover::ProverGWC; +use std::collections::{BTreeMap, BTreeSet}; +pub use verifier::VerifierGWC; + +use crate::{poly::query::Query, transcript::ChallengeScalar}; +use ff::Field; + +// ======================= ZCASH intermediate sets ======================== // + +#[derive(Clone, Copy, Debug)] +struct X1 {} +/// Challenge for compressing openings at the same point sets together. +type ChallengeX1 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X2 {} +/// Challenge for keeping the multi-point quotient polynomial terms linearly independent. +type ChallengeX2 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X3 {} +/// Challenge point at which the commitments are opened. 
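
The hunks above thread the ZAL `MsmAccel` engine through the KZG machinery: `ParamsKZG::commit` and `MSMKZG::eval`/`check` now take an `engine` instead of calling `best_multiexp` directly, and `DualMSM::check(engine, &verifier_params)` runs the final pairing check against explicitly passed verifier params rather than a stored reference. A minimal sketch of driving the new interface with the default CPU engine (module paths and the bn256 instantiation are assumptions, not part of this diff):

```rust
// Sketch under assumptions (exported paths, bn256 curve): evaluate an accumulated
// MSM through the default H2cEngine instead of calling best_multiexp directly.
use group::Group;
use halo2_backend::poly::commitment::MSM;
use halo2_backend::poly::kzg::msm::MSMKZG;
use halo2_middleware::zal::impls::H2cEngine;
use halo2curves::bn256::{Bn256, Fr, G1};

fn eval_with_engine() -> G1 {
    let engine = H2cEngine::new(); // plain CPU reference engine
    let mut msm = MSMKZG::<Bn256>::new();
    msm.append_term(Fr::from(2u64), G1::generator());
    msm.append_term(Fr::from(3u64), G1::generator());
    // The multiexp is dispatched through whatever MsmAccel the caller provides.
    msm.eval(&engine) // = 5 * G1::generator()
}
```
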
+type ChallengeX3 = ChallengeScalar; + +#[derive(Clone, Copy, Debug)] +struct X4 {} +/// Challenge for collapsing the openings of the various remaining polynomials at x_3 +/// together. +type ChallengeX4 = ChallengeScalar; + +#[derive(Clone, Debug)] +struct CommitmentDataZCash { + commitment: T, + set_index: usize, + point_indices: Vec, + evals: Vec, +} + +impl CommitmentDataZCash { + fn new(commitment: T) -> Self { + CommitmentDataZCash { + commitment, + set_index: 0, + point_indices: vec![], + evals: vec![], + } + } +} + +type IntermediateSets = ( + Vec>::Eval, >::Commitment>>, + Vec>, +); + +fn construct_intermediate_sets_zcash>( + queries: I, +) -> IntermediateSets +where + I: IntoIterator + Clone, +{ + // Construct sets of unique commitments and corresponding information about + // their queries. + let mut commitment_map: Vec> = vec![]; + + // Also construct mapping from a unique point to a point_index. This defines + // an ordering on the points. + let mut point_index_map = BTreeMap::new(); + + // Iterate over all of the queries, computing the ordering of the points + // while also creating new commitment data. + for query in queries.clone() { + let num_points = point_index_map.len(); + let point_idx = point_index_map + .entry(query.get_point()) + .or_insert(num_points); + + if let Some(pos) = commitment_map + .iter() + .position(|comm| comm.commitment == query.get_commitment()) + { + commitment_map[pos].point_indices.push(*point_idx); + } else { + let mut tmp = CommitmentDataZCash::new(query.get_commitment()); + tmp.point_indices.push(*point_idx); + commitment_map.push(tmp); + } + } + + // Also construct inverse mapping from point_index to the point + let mut inverse_point_index_map = BTreeMap::new(); + for (&point, &point_index) in point_index_map.iter() { + inverse_point_index_map.insert(point_index, point); + } + + // Construct map of unique ordered point_idx_sets to their set_idx + let mut point_idx_sets = BTreeMap::new(); + // Also construct mapping from commitment to point_idx_set + let mut commitment_set_map = Vec::new(); + + for commitment_data in commitment_map.iter() { + let mut point_index_set = BTreeSet::new(); + // Note that point_index_set is ordered, unlike point_indices + for &point_index in commitment_data.point_indices.iter() { + point_index_set.insert(point_index); + } + + // Push point_index_set to CommitmentData for the relevant commitment + commitment_set_map.push((commitment_data.commitment, point_index_set.clone())); + + let num_sets = point_idx_sets.len(); + point_idx_sets.entry(point_index_set).or_insert(num_sets); + } + + // Initialise empty evals vec for each unique commitment + for commitment_data in commitment_map.iter_mut() { + let len = commitment_data.point_indices.len(); + commitment_data.evals = vec![Q::Eval::default(); len]; + } + + // Populate set_index, evals and points for each commitment using point_idx_sets + for query in queries { + // The index of the point at which the commitment is queried + let point_index = point_index_map.get(&query.get_point()).unwrap(); + + // The point_index_set at which the commitment was queried + let mut point_index_set = BTreeSet::new(); + for (commitment, point_idx_set) in commitment_set_map.iter() { + if query.get_commitment() == *commitment { + point_index_set = point_idx_set.clone(); + } + } + assert!(!point_index_set.is_empty()); + + // The set_index of the point_index_set + let set_index = point_idx_sets.get(&point_index_set).unwrap(); + for commitment_data in commitment_map.iter_mut() { + if 
query.get_commitment() == commitment_data.commitment { + commitment_data.set_index = *set_index; + } + } + let point_index_set: Vec = point_index_set.iter().cloned().collect(); + + // The offset of the point_index in the point_index_set + let point_index_in_set = point_index_set + .iter() + .position(|i| i == point_index) + .unwrap(); + + for commitment_data in commitment_map.iter_mut() { + if query.get_commitment() == commitment_data.commitment { + // Insert the eval using the ordering of the point_index_set + commitment_data.evals[point_index_in_set] = query.get_eval(); + } + } + } + + // Get actual points in each point set + let mut point_sets: Vec> = vec![Vec::new(); point_idx_sets.len()]; + for (point_idx_set, &set_idx) in point_idx_sets.iter() { + for &point_idx in point_idx_set.iter() { + let point = inverse_point_index_map.get(&point_idx).unwrap(); + point_sets[set_idx].push(*point); + } + } + + (commitment_map, point_sets) +} diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs b/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs new file mode 100644 index 0000000000..eda98e6bf6 --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc/prover.rs @@ -0,0 +1,156 @@ +use super::{ + construct_intermediate_sets_zcash, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4, +}; +use crate::arithmetic::{eval_polynomial, kate_division, powers}; +use crate::arithmetic::{truncate, truncated_powers}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::ParamsProver; +use crate::poly::commitment::Prover; +use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::query::ProverQuery; +use crate::poly::Coeff; +use crate::poly::{commitment::Blind, Polynomial}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; +use ff::Field; + +use ff::PrimeField; +use group::Curve; +use halo2_middleware::zal::traits::MsmAccel; +use halo2curves::pairing::{Engine, MultiMillerLoop}; +use halo2curves::CurveExt; +use rand_core::RngCore; +use std::fmt::Debug; +use std::io::{self}; +use std::marker::PhantomData; + +/// Concrete KZG prover with GWC variant +#[derive(Debug)] +pub struct ProverGWC<'params, E: Engine> { + params: &'params ParamsKZG, +} + +impl<'params, E: Engine + Debug> ProverGWC<'params, E> +where + E::Fr: PrimeField, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, +{ + fn inner_product( + polys: &[Polynomial], + scalars: impl Iterator, + ) -> Polynomial { + polys + .iter() + .zip(scalars) + .map(|(p, s)| p.clone() * s) + .reduce(|acc, p| acc + &p) + .unwrap() + } +} + +/// Create a multi-opening proof +impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> for ProverGWC<'params, E> +where + E: MultiMillerLoop, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, + E::Fr: Ord, +{ + const QUERY_INSTANCE: bool = false; + + fn new(params: &'params ParamsKZG) -> Self { + Self { params } + } + + /// Create a multi-opening proof + fn create_proof_with_engine< + 'com, + Ch: EncodedChallenge, + T: TranscriptWrite, + R, + I, + >( + &self, + engine: &impl MsmAccel, + _: R, + transcript: &mut T, + queries: I, + ) -> io::Result<()> + where + I: IntoIterator> + Clone, + R: RngCore, + { + // Refer to the halo2 book for docs: + // https://zcash.github.io/halo2/design/proving-system/multipoint-opening.html + + let x1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); + let x2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); + + let (poly_map, 
point_sets) = construct_intermediate_sets_zcash(queries); + + let mut q_polys = vec![vec![]; point_sets.len()]; + + for com_data in poly_map.iter() { + q_polys[com_data.set_index].push(com_data.commitment.poly.clone()); + } + + let q_polys = q_polys + .iter() + .map(|polys| Self::inner_product(polys, truncated_powers(*x1))) + .collect::>(); + + let f_poly = { + let f_polys = point_sets + .iter() + .zip(q_polys.clone()) + .map(|(points, q_poly)| { + let mut poly = points.iter().fold(q_poly.clone().values, |poly, point| { + kate_division(&poly, *point) + }); + poly.resize(self.params.n as usize, E::Fr::ZERO); + Polynomial { + values: poly, + _marker: PhantomData, + } + }) + .collect::>(); + Self::inner_product(&f_polys, powers(*x2)) + }; + let f_com = self + .params + .commit(engine, &f_poly, Blind::default()) + .to_affine(); + + transcript.write_point(f_com)?; + + let x3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); + let x3 = truncate(*x3); + + for q_poly in q_polys.iter() { + transcript.write_scalar(eval_polynomial(q_poly.as_ref(), x3))?; + } + + let x4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); + + let final_poly = { + let mut polys = q_polys; + polys.push(f_poly); + Self::inner_product(&polys, truncated_powers(*x4)) + }; + + let v = eval_polynomial(&final_poly, x3); + + let pi = { + let pi_poly = Polynomial { + values: kate_division(&(&final_poly - v).values, x3), + _marker: PhantomData, + }; + self.params + .commit(engine, &pi_poly, Blind::default()) + .to_affine() + }; + + transcript.write_point(pi) + } +} diff --git a/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs new file mode 100644 index 0000000000..10115392ad --- /dev/null +++ b/halo2_backend/src/poly/kzg/multiopen/gwc/verifier.rs @@ -0,0 +1,199 @@ +use std::fmt::Debug; +use std::marker::PhantomData; + +use super::{ + construct_intermediate_sets_zcash, ChallengeX1, ChallengeX2, ChallengeX3, ChallengeX4, +}; +use crate::arithmetic::{eval_polynomial, lagrange_interpolate}; +use crate::arithmetic::{truncate, truncated_powers}; +use crate::helpers::SerdeCurveAffine; +use crate::poly::commitment::Verifier; +use crate::poly::commitment::MSM; +use crate::poly::kzg::commitment::KZGCommitmentScheme; +use crate::poly::kzg::msm::{DualMSM, MSMKZG}; +use crate::poly::kzg::strategy::GuardKZG; +use crate::poly::query::{CommitmentReference, VerifierQuery}; +use crate::poly::Error; +use crate::transcript::{EncodedChallenge, TranscriptRead}; + +use ff::{Field, PrimeField}; +use group::prime::PrimeCurveAffine; +use halo2curves::pairing::{Engine, MultiMillerLoop}; +use halo2curves::CurveExt; + +#[derive(Debug)] +/// Concrete KZG verifier with GWC variant +pub struct VerifierGWC { + _marker: PhantomData, +} + +fn msm_inner_product(msms: &[MSMKZG], scalars: impl Iterator) -> MSMKZG +where + E: MultiMillerLoop + Debug, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, + E::G1: CurveExt, + E::Fr: Ord, +{ + let mut res = MSMKZG::::new(); + let mut msms = msms.to_vec(); + for (msm, s) in msms.iter_mut().zip(scalars) { + msm.scale(s); + res.add_msm(msm); + } + res +} + +fn scalars_inner_product(v1: &[F], scalars: impl Iterator) -> F { + v1.iter() + .zip(scalars) + .map(|(s1, s2)| *s1 * s2) + .reduce(|acc, s| acc + s) + .unwrap() +} + +/// Inter produc with truncated powers of the given x. 
+fn evals_inner_product( + evals_set: &[Vec], + scalars: impl Iterator, +) -> Vec { + let mut res = vec![F::ZERO; evals_set[0].len()]; + for (poly_evals, s) in evals_set.iter().zip(scalars) { + for i in 0..res.len() { + res[i] += poly_evals[i] * s; + } + } + res +} + +impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierGWC +where + E: MultiMillerLoop + Debug, + E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, + E::G1: CurveExt, + E::G2Affine: SerdeCurveAffine, + E::G1: CurveExt, + E::Fr: Ord, +{ + type Guard = GuardKZG; + type MSMAccumulator = DualMSM; + + const QUERY_INSTANCE: bool = false; + + fn new() -> Self { + Self { + _marker: PhantomData, + } + } + + fn verify_proof< + 'com, + Ch: EncodedChallenge, + T: TranscriptRead, + I, + >( + &self, + transcript: &mut T, + queries: I, + mut msm_accumulator: DualMSM, + ) -> Result + where + I: IntoIterator>> + Clone, + { + // Refer to the halo2 book for docs: + // https://zcash.github.io/halo2/design/proving-system/multipoint-opening.html + + let x1: ChallengeX1<_> = transcript.squeeze_challenge_scalar(); + let x2: ChallengeX2<_> = transcript.squeeze_challenge_scalar(); + + let (commitment_map, point_sets) = construct_intermediate_sets_zcash(queries); + + let mut q_coms: Vec<_> = vec![vec![]; point_sets.len()]; + let mut q_eval_sets = vec![vec![]; point_sets.len()]; + + for com_data in commitment_map.into_iter() { + let com_data_msm = match com_data.commitment { + CommitmentReference::Commitment(c) => { + let mut msm = MSMKZG::new(); + msm.append_term(E::Fr::ONE, (*c).into()); + msm + } + CommitmentReference::MSM(msm) => msm.clone(), + }; + q_coms[com_data.set_index].push(com_data_msm); + q_eval_sets[com_data.set_index].push(com_data.evals); + } + + let q_coms = q_coms + .iter() + .map(|msms| msm_inner_product(msms, truncated_powers(*x1))) + .collect::>(); + let q_eval_sets = q_eval_sets + .iter() + .map(|evals| evals_inner_product(evals, truncated_powers(*x1))) + .collect::>(); + + let f_com = transcript.read_point().map_err(|_| Error::SamplingError)?; + // Sample a challenge x_3 for checking that f(X) was committed to + // correctly. 
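
For reference, the fold that computes `f_eval` in the verifier code below reconstructs the expected evaluation of the prover's `f` polynomial at the challenge point, following the multipoint opening argument linked in the comments:

$$f(x_3) \;=\; \sum_i x_2^{\,i}\,\frac{q_i(x_3) - r_i(x_3)}{\prod_{z \in S_i}(x_3 - z)}$$

where $S_i$ is the $i$-th point set, $q_i$ is the $x_1$-compressed combination of the commitments queried at $S_i$ (its claimed evaluation $q_i(x_3)$ is read from the transcript), and $r_i$ interpolates the claimed evaluations over $S_i$. This value is then treated as the claimed opening of `f_com` at $x_3$ inside the batched KZG opening check.
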
+ let x3: ChallengeX3<_> = transcript.squeeze_challenge_scalar(); + let x3 = truncate(*x3); + + let mut q_evals_on_x3 = Vec::with_capacity(q_eval_sets.len()); + for _ in 0..q_eval_sets.len() { + q_evals_on_x3.push(transcript.read_scalar().map_err(|_| Error::SamplingError)?); + } + + // We can compute the expected msm_eval at x_3 using the u provided + // by the prover and from x_2 + let f_eval = point_sets + .iter() + .zip(q_eval_sets.iter()) + .zip(q_evals_on_x3.iter()) + .rev() + .fold(E::Fr::ZERO, |acc_eval, ((points, evals), proof_eval)| { + let r_poly = lagrange_interpolate(points, evals); + let r_eval = eval_polynomial(&r_poly, x3); + let eval = points.iter().fold(*proof_eval - r_eval, |eval, point| { + eval * (x3 - point).invert().unwrap() + }); + acc_eval * *x2 + eval + }); + + let x4: ChallengeX4<_> = transcript.squeeze_challenge_scalar(); + + let final_com = { + let mut polys = q_coms; + let mut f_com_as_msm = MSMKZG::new(); + f_com_as_msm.append_term(E::Fr::ONE, f_com.into()); + polys.push(f_com_as_msm); + msm_inner_product(&polys, truncated_powers(*x4)) + }; + + let v = { + let mut evals = q_evals_on_x3; + evals.push(f_eval); + scalars_inner_product(&evals, truncated_powers(*x4)) + }; + + let pi = transcript.read_point().map_err(|_| Error::SamplingError)?; + + let mut pi_msm = MSMKZG::::new(); + pi_msm.append_term(E::Fr::ONE, pi.into()); + + // Scale zπ + let mut scaled_pi = MSMKZG::::new(); + scaled_pi.append_term(x3, pi.into()); + + // (π, C − vG + zπ) + msm_accumulator.left.add_msm(&pi_msm); // π + + msm_accumulator.right.add_msm(&scaled_pi); // zπ + msm_accumulator.right.add_msm(&final_com); // C + let g0: E::G1 = E::G1Affine::generator().into(); + msm_accumulator.right.append_term(v, -g0); // -vG + + Ok(Self::Guard::new(msm_accumulator)) + } +} diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk.rs similarity index 98% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk.rs rename to halo2_backend/src/poly/kzg/multiopen/shplonk.rs index d0814e83e3..5f963f4049 100644 --- a/halo2_proofs/src/poly/kzg/multiopen/shplonk.rs +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk.rs @@ -3,7 +3,7 @@ mod verifier; use crate::multicore::{IntoParallelIterator, ParallelIterator}; use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; +use halo2_middleware::ff::Field; pub use prover::ProverSHPLONK; use std::collections::BTreeSet; pub use verifier::VerifierSHPLONK; @@ -142,7 +142,7 @@ where #[cfg(test)] mod proptests { use super::{construct_intermediate_sets, Commitment, IntermediateSets}; - use ff::FromUniformBytes; + use halo2_middleware::ff::FromUniformBytes; use halo2curves::pasta::Fp; use proptest::{collection::vec, prelude::*, sample::select}; use std::convert::TryFrom; diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk/prover.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs similarity index 96% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk/prover.rs rename to halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs index 5001d69094..194215e6da 100644 --- a/halo2_proofs/src/poly/kzg/multiopen/shplonk/prover.rs +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk/prover.rs @@ -13,8 +13,9 @@ use crate::poly::{Coeff, Polynomial}; use crate::transcript::{EncodedChallenge, TranscriptWrite}; use crate::multicore::{IntoParallelIterator, ParallelIterator}; -use ff::Field; use group::Curve; +use halo2_middleware::ff::Field; +use halo2_middleware::zal::traits::MsmAccel; use 
halo2curves::pairing::Engine; use halo2curves::CurveExt; use rand_core::RngCore; @@ -117,7 +118,7 @@ where } /// Create a multi-opening proof - fn create_proof< + fn create_proof_with_engine< 'com, Ch: EncodedChallenge, T: TranscriptWrite, @@ -125,6 +126,7 @@ where I, >( &self, + engine: &impl MsmAccel, _: R, transcript: &mut T, queries: I, @@ -208,7 +210,10 @@ where .reduce(|acc, poly| acc + &poly) .unwrap(); - let h = self.params.commit(&h_x, Blind::default()).to_affine(); + let h = self + .params + .commit(engine, &h_x, Blind::default()) + .to_affine(); transcript.write_point(h)?; let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); @@ -290,7 +295,10 @@ where _marker: PhantomData, }; - let h = self.params.commit(&h_x, Blind::default()).to_affine(); + let h = self + .params + .commit(engine, &h_x, Blind::default()) + .to_affine(); transcript.write_point(h)?; Ok(()) diff --git a/halo2_proofs/src/poly/kzg/multiopen/shplonk/verifier.rs b/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs similarity index 88% rename from halo2_proofs/src/poly/kzg/multiopen/shplonk/verifier.rs rename to halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs index 5d03940177..a67a555733 100644 --- a/halo2_proofs/src/poly/kzg/multiopen/shplonk/verifier.rs +++ b/halo2_backend/src/poly/kzg/multiopen/shplonk/verifier.rs @@ -1,4 +1,5 @@ use std::fmt::Debug; +use std::marker::PhantomData; use super::ChallengeY; use super::{construct_intermediate_sets, ChallengeU, ChallengeV}; @@ -8,25 +9,26 @@ use crate::arithmetic::{ use crate::helpers::SerdeCurveAffine; use crate::poly::commitment::Verifier; use crate::poly::commitment::MSM; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use crate::poly::kzg::commitment::KZGCommitmentScheme; use crate::poly::kzg::msm::DualMSM; use crate::poly::kzg::msm::{PreMSM, MSMKZG}; use crate::poly::kzg::strategy::GuardKZG; use crate::poly::query::{CommitmentReference, VerifierQuery}; use crate::poly::Error; use crate::transcript::{EncodedChallenge, TranscriptRead}; -use ff::Field; +use group::prime::PrimeCurveAffine; +use halo2_middleware::ff::Field; use halo2curves::pairing::{Engine, MultiMillerLoop}; use halo2curves::CurveExt; use std::ops::MulAssign; /// Concrete KZG multiopen verifier with SHPLONK variant #[derive(Debug)] -pub struct VerifierSHPLONK<'params, E: Engine> { - params: &'params ParamsKZG, +pub struct VerifierSHPLONK { + _marker: PhantomData, } -impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierSHPLONK<'params, E> +impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierSHPLONK where E: MultiMillerLoop + Debug, E::Fr: Ord, @@ -34,13 +36,15 @@ where E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { - type Guard = GuardKZG<'params, E>; - type MSMAccumulator = DualMSM<'params, E>; + type Guard = GuardKZG; + type MSMAccumulator = DualMSM; const QUERY_INSTANCE: bool = false; - fn new(params: &'params ParamsKZG) -> Self { - Self { params } + fn new() -> Self { + Self { + _marker: PhantomData, + } } /// Verify a multi-opening proof @@ -53,7 +57,7 @@ where &self, transcript: &mut T, queries: I, - mut msm_accumulator: DualMSM<'params, E>, + mut msm_accumulator: DualMSM, ) -> Result where I: IntoIterator>> + Clone, @@ -72,7 +76,7 @@ where let h2 = transcript.read_point().map_err(|_| Error::SamplingError)?; let (mut z_0_diff_inverse, mut z_0) = (E::Fr::ZERO, E::Fr::ZERO); - let (mut outer_msm, mut r_outer_acc) = (PreMSM::::new(), E::Fr::ZERO); + let (mut outer_msm, mut r_outer_acc) = (PreMSM::::default(), E::Fr::ZERO); 
for (i, (rotation_set, power_of_v)) in rotation_sets.iter().zip(powers(*v)).enumerate() { let diffs: Vec = super_point_set .iter() @@ -126,7 +130,7 @@ where r_outer_acc += power_of_v * r_inner_acc * z_diff_i; } let mut outer_msm = outer_msm.normalize(); - let g1: E::G1 = self.params.g[0].into(); + let g1: E::G1 = ::generator().into(); outer_msm.append_term(-r_outer_acc, g1); outer_msm.append_term(-z_0, h1.into()); outer_msm.append_term(*u, h2.into()); diff --git a/halo2_proofs/src/poly/kzg/strategy.rs b/halo2_backend/src/poly/kzg/strategy.rs similarity index 53% rename from halo2_proofs/src/poly/kzg/strategy.rs rename to halo2_backend/src/poly/kzg/strategy.rs index ee80d800ac..e9d50f7a4f 100644 --- a/halo2_proofs/src/poly/kzg/strategy.rs +++ b/halo2_backend/src/poly/kzg/strategy.rs @@ -1,5 +1,7 @@ +use std::fmt::Debug; + use super::{ - commitment::{KZGCommitmentScheme, ParamsKZG}, + commitment::{KZGCommitmentScheme, ParamsVerifierKZG}, msm::DualMSM, }; use crate::{ @@ -11,114 +13,137 @@ use crate::{ }, }; use ff::Field; +use halo2_middleware::zal::impls::H2cEngine; use halo2curves::{ pairing::{Engine, MultiMillerLoop}, CurveAffine, CurveExt, }; use rand_core::OsRng; -use std::fmt::Debug; /// Wrapper for linear verification accumulator #[derive(Debug, Clone)] -pub struct GuardKZG<'params, E: MultiMillerLoop + Debug> +pub struct GuardKZG where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm_accumulator: DualMSM<'params, E>, + pub(crate) msm_accumulator: DualMSM, } /// Define accumulator type as `DualMSM` -impl<'params, E> Guard> for GuardKZG<'params, E> +impl Guard> for GuardKZG where E: MultiMillerLoop + Debug, E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, + E::G1: CurveExt, { - type MSMAccumulator = DualMSM<'params, E>; + type MSMAccumulator = DualMSM; } /// KZG specific operations -impl<'params, E: MultiMillerLoop + Debug> GuardKZG<'params, E> +impl GuardKZG where + E: MultiMillerLoop, E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) fn new(msm_accumulator: DualMSM<'params, E>) -> Self { + pub(crate) fn new(msm_accumulator: DualMSM) -> Self { Self { msm_accumulator } } } /// A verifier that checks multiple proofs in a batch #[derive(Clone, Debug)] -pub struct AccumulatorStrategy<'params, E: Engine> +pub struct AccumulatorStrategy where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm_accumulator: DualMSM<'params, E>, + pub(crate) msm_accumulator: DualMSM, + params: ParamsVerifierKZG, } -impl<'params, E: MultiMillerLoop + Debug> AccumulatorStrategy<'params, E> +impl AccumulatorStrategy where + E: MultiMillerLoop, E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { /// Constructs an empty batch verifier - pub fn new(params: &'params ParamsKZG) -> Self { + pub fn new(params: &ParamsVerifierKZG) -> Self { AccumulatorStrategy { - msm_accumulator: DualMSM::new(params), + msm_accumulator: DualMSM::new(), + params: params.clone(), } } /// Constructs and initialized new batch verifier - pub fn with(msm_accumulator: DualMSM<'params, E>) -> Self { - AccumulatorStrategy { msm_accumulator } + pub fn with(msm_accumulator: DualMSM, params: &ParamsVerifierKZG) -> Self { + AccumulatorStrategy { + msm_accumulator, + params: params.clone(), + } + } + + #[allow(clippy::type_complexity)] + /// Extracts both sides of the dual MSM accumulator without evaluating them. + /// Each resulting side is given by a vector of base-scalar pairs. 
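
`extract_acc_without_evaluation`, defined just below, exposes the raw (base, scalar) pairs of both accumulator sides so the final reduction can happen outside the library (for example in a hardware accelerator or an aggregation step). A hedged sketch of what a caller could do with the extracted sides, reproducing the pairing identity that `DualMSM::check` enforces; the bn256 instantiation and paths are assumptions:

```rust
// Sketch under assumptions (bn256 instantiation, halo2curves APIs): reduce the two
// extracted sides by hand and run the pairing check e(L, [s]_2) * e(R, -[1]_2) == 1.
use group::{prime::PrimeCurveAffine, Curve, Group};
use halo2curves::bn256::{Bn256, Fr, G1, G2Affine, G2Prepared};
use halo2curves::pairing::{MillerLoopResult, MultiMillerLoop};

fn check_extracted(left: &[(G1, Fr)], right: &[(G1, Fr)], s_g2: G2Affine) -> bool {
    // Plain (unaccelerated) reduction of each side to a single group element.
    let reduce = |side: &[(G1, Fr)]| {
        side.iter()
            .fold(G1::identity(), |acc, (base, scalar)| acc + *base * *scalar)
    };
    let (l, r) = (reduce(left).to_affine(), reduce(right).to_affine());
    let s_g2_prepared = G2Prepared::from(s_g2);
    let neg_g2_prepared = G2Prepared::from(-G2Affine::generator());
    // Accept iff e(L, [s]_2) * e(R, -[1]_2) == 1, i.e. both channels open consistently.
    let res = Bn256::multi_miller_loop(&[(&l, &s_g2_prepared), (&r, &neg_g2_prepared)]);
    bool::from(res.final_exponentiation().is_identity())
}
```
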
+ pub fn extract_acc_without_evaluation( + &self, + ) -> ( + Vec<(::G1, ::Fr)>, + Vec<(::G1, ::Fr)>, + ) { + let acc = self.msm_accumulator.clone(); + let left: Vec<_> = (acc.left.bases.clone().into_iter()) + .zip(acc.left.scalars.clone()) + .collect(); + let right: Vec<_> = (acc.right.bases.clone().into_iter()) + .zip(acc.right.scalars.clone()) + .collect(); + (left, right) } } /// A verifier that checks a single proof #[derive(Clone, Debug)] -pub struct SingleStrategy<'params, E: Engine> +pub struct SingleStrategy where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { - pub(crate) msm: DualMSM<'params, E>, + pub(crate) msm: DualMSM, + params: ParamsVerifierKZG, } -impl<'params, E: MultiMillerLoop + Debug> SingleStrategy<'params, E> +impl<'params, E: MultiMillerLoop + Debug> SingleStrategy where E::G1Affine: CurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, { /// Constructs an empty batch verifier - pub fn new(params: &'params ParamsKZG) -> Self { + pub fn new(params: &'params ParamsVerifierKZG) -> Self { SingleStrategy { - msm: DualMSM::new(params), + msm: DualMSM::new(), + params: params.clone(), } } } -impl< - 'params, - E: MultiMillerLoop + Debug, - V: Verifier< - 'params, - KZGCommitmentScheme, - MSMAccumulator = DualMSM<'params, E>, - Guard = GuardKZG<'params, E>, - >, - > VerificationStrategy<'params, KZGCommitmentScheme, V> for AccumulatorStrategy<'params, E> +impl<'params, E, V> VerificationStrategy<'params, KZGCommitmentScheme, V> + for AccumulatorStrategy where + E: MultiMillerLoop + Debug, + V: Verifier<'params, KZGCommitmentScheme, MSMAccumulator = DualMSM, Guard = GuardKZG>, E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { type Output = Self; - fn new(params: &'params ParamsKZG) -> Self { + fn new(params: &'params ParamsVerifierKZG) -> Self { AccumulatorStrategy::new(params) } @@ -132,32 +157,28 @@ where let guard = f(self.msm_accumulator)?; Ok(Self { msm_accumulator: guard.msm_accumulator, + params: self.params, }) } fn finalize(self) -> bool { - self.msm_accumulator.check() + // ZAL: Verification is (supposedly) cheap, hence we don't use an accelerator engine + let default_engine = H2cEngine::new(); + self.msm_accumulator.check(&default_engine, &self.params) } } -impl< - 'params, - E: MultiMillerLoop + Debug, - V: Verifier< - 'params, - KZGCommitmentScheme, - MSMAccumulator = DualMSM<'params, E>, - Guard = GuardKZG<'params, E>, - >, - > VerificationStrategy<'params, KZGCommitmentScheme, V> for SingleStrategy<'params, E> +impl<'params, E, V> VerificationStrategy<'params, KZGCommitmentScheme, V> for SingleStrategy where + E: MultiMillerLoop + Debug, + V: Verifier<'params, KZGCommitmentScheme, MSMAccumulator = DualMSM, Guard = GuardKZG>, E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, E::G1: CurveExt, E::G2Affine: SerdeCurveAffine, { type Output = (); - fn new(params: &'params ParamsKZG) -> Self { + fn new(params: &'params ParamsVerifierKZG) -> Self { Self::new(params) } @@ -168,7 +189,9 @@ where // Guard is updated with new msm contributions let guard = f(self.msm)?; let msm = guard.msm_accumulator; - if msm.check() { + // Verification is (supposedly) cheap, hence we don't use an accelerator engine + let default_engine = H2cEngine::new(); + if msm.check(&default_engine, &self.params) { Ok(()) } else { Err(Error::ConstraintSystemFailure) diff --git a/halo2_proofs/src/poly/multiopen_test.rs b/halo2_backend/src/poly/multiopen_test.rs similarity index 87% rename from 
halo2_proofs/src/poly/multiopen_test.rs rename to halo2_backend/src/poly/multiopen_test.rs index 47c6731167..e907d0134c 100644 --- a/halo2_proofs/src/poly/multiopen_test.rs +++ b/halo2_backend/src/poly/multiopen_test.rs @@ -14,8 +14,9 @@ mod test { Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read, Keccak256Write, TranscriptReadBuffer, TranscriptWriterBuffer, }; - use ff::WithSmallOrderMulGroup; use group::Curve; + use halo2_middleware::ff::WithSmallOrderMulGroup; + use halo2_middleware::zal::{impls::H2cEngine, traits::MsmAccel}; use rand_core::OsRng; #[test] @@ -27,6 +28,7 @@ mod test { const K: u32 = 4; + let engine = H2cEngine::new(); let params = ParamsIPA::::new(K); let proof = create_proof::< @@ -34,9 +36,9 @@ mod test { ProverIPA<_>, _, Blake2bWrite<_, _, Challenge255<_>>, - >(¶ms); + >(&engine, ¶ms); - let verifier_params = params.verifier_params(); + let verifier_params = params; verify::< IPACommitmentScheme, @@ -44,7 +46,7 @@ mod test { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); + >(&verifier_params, &proof[..], false); verify::< IPACommitmentScheme, @@ -52,7 +54,7 @@ mod test { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); + >(&verifier_params, &proof[..], true); } #[test] @@ -64,6 +66,7 @@ mod test { const K: u32 = 4; + let engine = H2cEngine::new(); let params = ParamsIPA::::new(K); let proof = create_proof::< @@ -71,9 +74,9 @@ mod test { ProverIPA<_>, _, Keccak256Write<_, _, Challenge255<_>>, - >(¶ms); + >(&engine, ¶ms); - let verifier_params = params.verifier_params(); + let verifier_params = params; verify::< IPACommitmentScheme, @@ -81,7 +84,7 @@ mod test { _, Keccak256Read<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); + >(&verifier_params, &proof[..], false); verify::< IPACommitmentScheme, @@ -89,7 +92,7 @@ mod test { _, Keccak256Read<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); + >(&verifier_params, &proof[..], true); } #[test] @@ -101,15 +104,17 @@ mod test { const K: u32 = 4; + let engine = H2cEngine::new(); let params = ParamsKZG::::new(K); - let proof = - create_proof::<_, ProverGWC<_>, _, Blake2bWrite<_, _, Challenge255<_>>>(¶ms); + let proof = create_proof::<_, ProverGWC<_>, _, Blake2bWrite<_, _, Challenge255<_>>>( + &engine, ¶ms, + ); let verifier_params = params.verifier_params(); verify::<_, VerifierGWC<_>, _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>>( - verifier_params, + &verifier_params, &proof[..], false, ); @@ -120,7 +125,7 @@ mod test { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); + >(&verifier_params, &proof[..], true); } #[test] @@ -132,6 +137,7 @@ mod test { const K: u32 = 4; + let engine = H2cEngine::new(); let params = ParamsKZG::::new(K); let proof = create_proof::< @@ -139,7 +145,7 @@ mod test { ProverSHPLONK<_>, _, Blake2bWrite<_, _, Challenge255<_>>, - >(¶ms); + >(&engine, ¶ms); let verifier_params = params.verifier_params(); @@ -149,7 +155,7 @@ mod test { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], false); + >(&verifier_params, &proof[..], false); verify::< KZGCommitmentScheme, @@ -157,7 +163,7 @@ mod test { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, &proof[..], true); + >(&verifier_params, &proof[..], true); } fn verify< @@ -173,7 +179,7 @@ mod test { 
proof: &'a [u8], should_fail: bool, ) { - let verifier = V::new(params); + let verifier = V::new(); let mut transcript = T::init(proof); @@ -225,6 +231,7 @@ mod test { E: EncodedChallenge, T: TranscriptWriterBuffer, Scheme::Curve, E>, >( + engine: &impl MsmAccel, params: &'params Scheme::ParamsProver, ) -> Vec where @@ -250,9 +257,9 @@ mod test { let mut transcript = T::init(vec![]); let blind = Blind::new(&mut OsRng); - let a = params.commit(&ax, blind).to_affine(); - let b = params.commit(&bx, blind).to_affine(); - let c = params.commit(&cx, blind).to_affine(); + let a = params.commit(engine, &ax, blind).to_affine(); + let b = params.commit(engine, &bx, blind).to_affine(); + let c = params.commit(engine, &cx, blind).to_affine(); transcript.write_point(a).unwrap(); transcript.write_point(b).unwrap(); diff --git a/halo2_proofs/src/poly/query.rs b/halo2_backend/src/poly/query.rs similarity index 98% rename from halo2_proofs/src/poly/query.rs rename to halo2_backend/src/poly/query.rs index bc7a20c240..30be4fbec7 100644 --- a/halo2_proofs/src/poly/query.rs +++ b/halo2_backend/src/poly/query.rs @@ -132,7 +132,7 @@ pub enum CommitmentReference<'r, C: CurveAffine, M: MSM> { impl<'r, C: CurveAffine, M: MSM> Copy for CommitmentReference<'r, C, M> {} impl<'r, C: CurveAffine, M: MSM> PartialEq for CommitmentReference<'r, C, M> { - #![allow(clippy::vtable_address_comparisons)] + #![allow(ambiguous_wide_pointer_comparisons)] fn eq(&self, other: &Self) -> bool { match (self, other) { (&CommitmentReference::Commitment(a), &CommitmentReference::Commitment(b)) => { diff --git a/halo2_proofs/src/poly/strategy.rs b/halo2_backend/src/poly/strategy.rs similarity index 100% rename from halo2_proofs/src/poly/strategy.rs rename to halo2_backend/src/poly/strategy.rs diff --git a/halo2_proofs/src/transcript.rs b/halo2_backend/src/transcript.rs similarity index 98% rename from halo2_proofs/src/transcript.rs rename to halo2_backend/src/transcript.rs index 6e4f812bdf..79678f5c97 100644 --- a/halo2_proofs/src/transcript.rs +++ b/halo2_backend/src/transcript.rs @@ -539,14 +539,14 @@ where } } -pub(crate) fn read_n_points, T: TranscriptRead>( +pub fn read_n_points, T: TranscriptRead>( transcript: &mut T, n: usize, ) -> io::Result> { (0..n).map(|_| transcript.read_point()).collect() } -pub(crate) fn read_n_scalars, T: TranscriptRead>( +pub fn read_n_scalars, T: TranscriptRead>( transcript: &mut T, n: usize, ) -> io::Result> { diff --git a/halo2_debug/Cargo.toml b/halo2_debug/Cargo.toml new file mode 100644 index 0000000000..33c08cdb6e --- /dev/null +++ b/halo2_debug/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "halo2_debug" +version = "0.4.0" +authors = [ + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.73.0" +description = """ +Halo2 Debug. This package contains utilities for debugging and testing within +the halo2 ecosystem. 
+""" +license = "MIT OR Apache-2.0" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +ff = "0.13" +halo2curves = { version = "0.7.0", default-features = false } +num-bigint = "0.4.5" +halo2_middleware = { path = "../halo2_middleware" } +tiny-keccak = { version = "2.0.2", features=["keccak"] } +hex = "0.4.3" +rand_core = "0.6.4" +rand_chacha = "0.3" +rayon = "1.8" + +[features] +vector-tests = [] diff --git a/halo2_debug/src/display.rs b/halo2_debug/src/display.rs new file mode 100644 index 0000000000..c2e2faed4b --- /dev/null +++ b/halo2_debug/src/display.rs @@ -0,0 +1,351 @@ +use ff::PrimeField; +use halo2_middleware::circuit::{ColumnMid, VarMid}; +use halo2_middleware::expression::{Expression, Variable}; +use halo2_middleware::{lookup, shuffle}; +use num_bigint::BigUint; +use std::collections::HashMap; +use std::fmt; + +/// Wrapper type over `PrimeField` that implements Display with nice output. +/// - If the value is a power of two, format it as `2^k` +/// - If the value is smaller than 2^16, format it in decimal +/// - If the value is bigger than congruent -2^16, format it in decimal as the negative congruent +/// (between -2^16 and 0). +/// - Else format it in hex without leading zeros. +pub struct FDisp<'a, F: PrimeField>(pub &'a F); + +impl fmt::Display for FDisp<'_, F> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let v = (*self.0).to_repr(); + let v = v.as_ref(); + let v = BigUint::from_bytes_le(v); + let v_bits = v.bits(); + if v_bits >= 8 && v.count_ones() == 1 { + write!(f, "2^{}", v.trailing_zeros().unwrap_or_default()) + } else if v_bits < 16 { + write!(f, "{}", v) + } else { + let v_neg = (F::ZERO - self.0).to_repr(); + let v_neg = v_neg.as_ref(); + let v_neg = BigUint::from_bytes_le(v_neg); + let v_neg_bits = v_neg.bits(); + if v_neg_bits < 16 { + write!(f, "-{}", v_neg) + } else { + write!(f, "0x{:x}", v) + } + } + } +} + +/// Wrapper type over `Expression` that implements Display with nice output. +/// The formatting of the `Expression::Variable` case is parametrized with the second field, which +/// take as auxiliary value the third field. +/// Use the constructor `expr_disp` to format variables using their `Display` implementation. +/// Use the constructor `expr_disp_names` for an `Expression` with `V: VarMid` to format column +/// queries according to their string names. +pub struct ExprDisp<'a, F: PrimeField, V: Variable, A>( + /// Expression to display + pub &'a Expression, + /// `V: Variable` formatter method that uses auxiliary value + pub fn(&V, &mut fmt::Formatter<'_>, a: &A) -> fmt::Result, + /// Auxiliary value to be passed to the `V: Variable` formatter + pub &'a A, +); + +fn var_fmt_default(v: &V, f: &mut fmt::Formatter<'_>, _: &()) -> fmt::Result { + write!(f, "{}", v) +} + +fn var_fmt_names( + v: &VarMid, + f: &mut fmt::Formatter<'_>, + names: &HashMap, +) -> fmt::Result { + if let VarMid::Query(q) = v { + if let Some(name) = names.get(&ColumnMid::new(q.column_type, q.column_index)) { + return write!(f, "{}", name); + } + } + write!(f, "{}", v) +} + +/// ExprDisp constructor that formats viariables using their `Display` implementation. 
+pub fn expr_disp(e: &Expression) -> ExprDisp { + ExprDisp(e, var_fmt_default, &()) +} + +/// ExprDisp constructor for an `Expression` with `V: VarMid` that formats column queries according +/// to their string names. +pub fn expr_disp_names<'a, F: PrimeField>( + e: &'a Expression, + names: &'a HashMap, +) -> ExprDisp<'a, F, VarMid, HashMap> { + ExprDisp(e, var_fmt_names, names) +} + +impl fmt::Display for ExprDisp<'_, F, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let is_sum = |e: &Expression| -> bool { matches!(e, Expression::Sum(_, _)) }; + let fmt_expr = + |e: &Expression, f: &mut fmt::Formatter<'_>, parens: bool| -> fmt::Result { + if parens { + write!(f, "(")?; + } + write!(f, "{}", ExprDisp(e, self.1, self.2))?; + if parens { + write!(f, ")")?; + } + Ok(()) + }; + + match self.0 { + Expression::Constant(c) => write!(f, "{}", FDisp(c)), + Expression::Var(v) => self.1(v, f, self.2), + Expression::Negated(a) => { + write!(f, "-")?; + fmt_expr(a, f, is_sum(a)) + } + Expression::Sum(a, b) => { + fmt_expr(a, f, false)?; + if let Expression::Negated(neg) = &**b { + write!(f, " - ")?; + fmt_expr(neg, f, is_sum(neg)) + } else { + write!(f, " + ")?; + fmt_expr(b, f, false) + } + } + Expression::Product(a, b) => { + fmt_expr(a, f, is_sum(a))?; + write!(f, " * ")?; + fmt_expr(b, f, is_sum(b)) + } + } + } +} + +/// Wrapper type over `lookup::Argument` that implements Display with nice output. +/// The formatting of the `Expression::Variable` case is parametrized with the second field, which +/// take as auxiliary value the third field. +/// Use the constructor `lookup_arg_disp` to format variables using their `Display` implementation. +/// Use the constructor `lookup_arg_disp_names` for a lookup of `Expression` with `V: VarMid` that +/// formats column queries according to their string names. +pub struct LookupArgDisp<'a, F: PrimeField, V: Variable, A>( + /// Lookup argument to display + pub &'a lookup::Argument, + /// `V: Variable` formatter method that uses auxiliary value + pub fn(&V, &mut fmt::Formatter<'_>, a: &A) -> fmt::Result, + /// Auxiliary value to be passed to the `V: Variable` formatter + pub &'a A, +); + +/// LookupArgDisp constructor that formats viariables using their `Display` implementation. +pub fn lookup_arg_disp( + a: &lookup::Argument, +) -> LookupArgDisp { + LookupArgDisp(a, var_fmt_default, &()) +} + +/// LookupArgDisp constructor for a lookup of `Expression` with `V: VarMid` that formats column +/// queries according to their string names. +pub fn lookup_arg_disp_names<'a, F: PrimeField>( + a: &'a lookup::Argument, + names: &'a HashMap, +) -> LookupArgDisp<'a, F, VarMid, HashMap> { + LookupArgDisp(a, var_fmt_names, names) +} + +impl fmt::Display for LookupArgDisp<'_, F, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[")?; + for (i, expr) in self.0.input_expressions.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{}", ExprDisp(expr, self.1, self.2))?; + } + write!(f, "] in [")?; + for (i, expr) in self.0.table_expressions.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{}", ExprDisp(expr, self.1, self.2))?; + } + write!(f, "]")?; + Ok(()) + } +} + +/// Wrapper type over `shuffle::Argument` that implements Display with nice output. +/// The formatting of the `Expression::Variable` case is parametrized with the second field, which +/// take as auxiliary value the third field. 
+/// Use the constructor `shuffle_arg_disp` to format variables using their `Display` +/// implementation. +/// Use the constructor `shuffle_arg_disp_names` for a shuffle of `Expression` with `V: VarMid` +/// that formats column queries according to their string names. +pub struct ShuffleArgDisp<'a, F: PrimeField, V: Variable, A>( + /// Shuffle argument to display + pub &'a shuffle::Argument, + /// `V: Variable` formatter method that uses auxiliary value + pub fn(&V, &mut fmt::Formatter<'_>, a: &A) -> fmt::Result, + /// Auxiliary value to be passed to the `V: Variable` formatter + pub &'a A, +); + +/// ShuffleArgDisp constructor that formats viariables using their `Display` implementation. +pub fn shuffle_arg_disp( + a: &shuffle::Argument, +) -> ShuffleArgDisp { + ShuffleArgDisp(a, var_fmt_default, &()) +} + +/// ShuffleArgDisp constructor for a shuffle of `Expression` with `V: VarMid` that formats column +/// queries according to their string names. +pub fn shuffle_arg_disp_names<'a, F: PrimeField>( + a: &'a shuffle::Argument, + names: &'a HashMap, +) -> ShuffleArgDisp<'a, F, VarMid, HashMap> { + ShuffleArgDisp(a, var_fmt_names, names) +} + +impl fmt::Display for ShuffleArgDisp<'_, F, V, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[")?; + for (i, expr) in self.0.input_expressions.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{}", ExprDisp(expr, self.1, self.2))?; + } + write!(f, "] shuff [")?; + for (i, expr) in self.0.shuffle_expressions.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{}", ExprDisp(expr, self.1, self.2))?; + } + write!(f, "]")?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use ff::Field; + use halo2_middleware::circuit::{Any, QueryMid, VarMid}; + use halo2_middleware::poly::Rotation; + use halo2curves::bn256::Fr; + + #[test] + fn test_lookup_shuffle_arg_disp() { + type E = Expression; + let a0 = VarMid::Query(QueryMid::new(Any::Advice, 0, Rotation(0))); + let a1 = VarMid::Query(QueryMid::new(Any::Advice, 1, Rotation(0))); + let f0 = VarMid::Query(QueryMid::new(Any::Fixed, 0, Rotation(0))); + let a0: E = Expression::Var(a0); + let a1: E = Expression::Var(a1); + let f0: E = Expression::Var(f0); + + let names = [ + (ColumnMid::new(Any::Advice, 0), "a".to_string()), + (ColumnMid::new(Any::Advice, 1), "b".to_string()), + (ColumnMid::new(Any::Fixed, 0), "s".to_string()), + ] + .into_iter() + .collect(); + + let arg = lookup::Argument { + name: "lookup".to_string(), + input_expressions: vec![f0.clone() * a0.clone(), f0.clone() * a1.clone()], + table_expressions: vec![f0.clone(), f0.clone() * (a0.clone() + a1.clone())], + }; + assert_eq!( + "[f0 * a0, f0 * a1] in [f0, f0 * (a0 + a1)]", + format!("{}", lookup_arg_disp(&arg)) + ); + assert_eq!( + "[s * a, s * b] in [s, s * (a + b)]", + format!("{}", lookup_arg_disp_names(&arg, &names)) + ); + + let arg = shuffle::Argument { + name: "shuffle".to_string(), + input_expressions: vec![f0.clone() * a0.clone(), f0.clone() * a1.clone()], + shuffle_expressions: vec![f0.clone(), f0.clone() * (a0.clone() + a1.clone())], + }; + assert_eq!( + "[f0 * a0, f0 * a1] shuff [f0, f0 * (a0 + a1)]", + format!("{}", shuffle_arg_disp(&arg)) + ); + assert_eq!( + "[s * a, s * b] shuff [s, s * (a + b)]", + format!("{}", shuffle_arg_disp_names(&arg, &names)) + ); + } + + #[test] + fn test_expr_disp() { + type E = Expression; + let a0 = VarMid::Query(QueryMid::new(Any::Advice, 0, Rotation(0))); + let a1 = VarMid::Query(QueryMid::new(Any::Advice, 1, 
Rotation(0))); + let a0: E = Expression::Var(a0); + let a1: E = Expression::Var(a1); + + let e = a0.clone() + a1.clone(); + assert_eq!("a0 + a1", format!("{}", expr_disp(&e))); + let e = a0.clone() + a1.clone() + a0.clone(); + assert_eq!("a0 + a1 + a0", format!("{}", expr_disp(&e))); + + let e = a0.clone() * a1.clone(); + assert_eq!("a0 * a1", format!("{}", expr_disp(&e))); + let e = a0.clone() * a1.clone() * a0.clone(); + assert_eq!("a0 * a1 * a0", format!("{}", expr_disp(&e))); + + let e = a0.clone() - a1.clone(); + assert_eq!("a0 - a1", format!("{}", expr_disp(&e))); + let e = (a0.clone() - a1.clone()) - a0.clone(); + assert_eq!("a0 - a1 - a0", format!("{}", expr_disp(&e))); + let e = a0.clone() - (a1.clone() - a0.clone()); + assert_eq!("a0 - (a1 - a0)", format!("{}", expr_disp(&e))); + + let e = a0.clone() * a1.clone() + a0.clone(); + assert_eq!("a0 * a1 + a0", format!("{}", expr_disp(&e))); + let e = a0.clone() * (a1.clone() + a0.clone()); + assert_eq!("a0 * (a1 + a0)", format!("{}", expr_disp(&e))); + + let e = a0.clone() + a1.clone(); + let names = [ + (ColumnMid::new(Any::Advice, 0), "a".to_string()), + (ColumnMid::new(Any::Advice, 1), "b".to_string()), + ] + .into_iter() + .collect(); + assert_eq!("a + b", format!("{}", expr_disp_names(&e, &names))); + } + + #[test] + fn test_f_disp() { + let v = Fr::ZERO; + assert_eq!("0", format!("{}", FDisp(&v))); + + let v = Fr::ONE; + assert_eq!("1", format!("{}", FDisp(&v))); + + let v = Fr::from(12345u64); + assert_eq!("12345", format!("{}", FDisp(&v))); + + let v = Fr::from(0x10000); + assert_eq!("2^16", format!("{}", FDisp(&v))); + + let v = Fr::from(0x12345); + assert_eq!("0x12345", format!("{}", FDisp(&v))); + + let v = -Fr::ONE; + assert_eq!("-1", format!("{}", FDisp(&v))); + + let v = -Fr::from(12345u64); + assert_eq!("-12345", format!("{}", FDisp(&v))); + } +} diff --git a/halo2_debug/src/lib.rs b/halo2_debug/src/lib.rs new file mode 100644 index 0000000000..911e90e6df --- /dev/null +++ b/halo2_debug/src/lib.rs @@ -0,0 +1,38 @@ +use rand_chacha::ChaCha20Rng; +use rand_core::SeedableRng; +use tiny_keccak::Hasher; + +pub fn test_rng() -> ChaCha20Rng { + ChaCha20Rng::seed_from_u64(0xdeadbeef) +} + +/// Gets the hex representation of the keccak hash of the input data +pub fn keccak_hex>(data: D) -> String { + let mut hash = [0u8; 32]; + let mut hasher = tiny_keccak::Keccak::v256(); + hasher.update(data.as_ref()); + hasher.finalize(&mut hash); + hex::encode(hash) +} + +/// When the feature `vector-tests` is enabled, executes the test in a single thread and checks the result against the expected value. +/// When the feature `vector-tests` is disabled, just executes the test. +pub fn test_result Vec + Send>(test: F, _expected: &str) -> Vec { + #[cfg(feature = "vector-tests")] + let result = rayon::ThreadPoolBuilder::new() + .num_threads(1) + .build() + .unwrap() + .install(|| { + let result = test(); + assert_eq!(_expected, keccak_hex(result.clone()),); + result + }); + + #[cfg(not(feature = "vector-tests"))] + let result = test(); + + result +} + +pub mod display; diff --git a/halo2_frontend/Cargo.toml b/halo2_frontend/Cargo.toml new file mode 100644 index 0000000000..e6f9a966f9 --- /dev/null +++ b/halo2_frontend/Cargo.toml @@ -0,0 +1,68 @@ +[package] +name = "halo2_frontend" +version = "0.4.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.73.0" +description = """ +Halo2 frontend implementation. 
This package implements an API to write circuits, handles witness generation and contains the MockProver. +""" +license = "MIT OR Apache-2.0" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +backtrace = { version = "0.3", optional = true } +ff = "0.13" +group = "0.13" +halo2curves = { version = "0.7.0", default-features = false } +tracing = "0.1" +blake2b_simd = "1" +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +halo2_middleware = { path = "../halo2_middleware" } + +# Developer tooling dependencies +plotters = { version = "0.3.0", default-features = false, optional = true } +tabbycat = { version = "0.1", features = ["attributes"], optional = true } + +[dev-dependencies] +proptest = "1" +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +serde_json = "1" + +[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] +getrandom = { version = "0.2", features = ["js"] } + +[features] +default = ["bits", "lookup-any-sanity-checks"] +dev-graph = ["plotters", "tabbycat"] +test-dev-graph = [ + "dev-graph", + "plotters/bitmap_backend", + "plotters/bitmap_encoder", + "plotters/ttf", +] +bits = ["halo2curves/bits"] +gadget-traces = ["backtrace"] +thread-safe-region = [] +circuit-params = [] +cost-estimator = ["serde", "serde_derive"] +derive_serde = ["halo2curves/derive_serde"] +lookup-any-sanity-checks = [] + +[lib] +bench = false diff --git a/halo2_proofs/src/circuit.rs b/halo2_frontend/src/circuit.rs similarity index 59% rename from halo2_proofs/src/circuit.rs rename to halo2_frontend/src/circuit.rs index 56a6be0e5c..8bdefe3a20 100644 --- a/halo2_proofs/src/circuit.rs +++ b/halo2_frontend/src/circuit.rs @@ -1,24 +1,408 @@ //! Traits and structs for implementing circuit components. -use std::{fmt, marker::PhantomData}; - -use ff::Field; - +use crate::plonk; use crate::plonk::{ - Advice, Any, Assigned, Challenge, Column, Error, Fixed, Instance, Selector, TableColumn, + permutation, + sealed::{self, SealedPhase}, + Advice, Assignment, Circuit, ConstraintSystem, FirstPhase, Fixed, FloorPlanner, Instance, + SecondPhase, ThirdPhase, }; +use halo2_middleware::circuit::{Any, CompiledCircuit, Preprocessing}; +use halo2_middleware::ff::{BatchInvert, Field}; +use std::collections::BTreeSet; +use std::collections::HashMap; +use std::fmt::Debug; +use std::ops::RangeTo; + +pub mod floor_planner; +mod table_layouter; + +use std::{fmt, marker::PhantomData}; + +use crate::plonk::Assigned; +use crate::plonk::{Challenge, Column, Error, Selector, TableColumn}; mod value; pub use value::Value; -pub mod floor_planner; pub use floor_planner::single_pass::SimpleFloorPlanner; pub mod layouter; -mod table_layouter; pub use table_layouter::{SimpleTableLayouter, TableLayouter}; +/// Compile a circuit. Runs configure and synthesize on the circuit in order to materialize the +/// circuit into its columns and the column configuration; as well as doing the fixed column and +/// copy constraints assignments. The output of this function can then be used for the key +/// generation, and proof generation. 
+/// If `compress_selectors` is true, multiple selector columns may be multiplexed. +#[allow(clippy::type_complexity)] +pub fn compile_circuit>( + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result< + ( + CompiledCircuit, + ConcreteCircuit::Config, + ConstraintSystem, + ), + Error, +> { + let n = 2usize.pow(k); + + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); + #[cfg(not(feature = "circuit-params"))] + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; + + if n < cs.minimum_rows() { + return Err(Error::not_enough_rows_available(k)); + } + + let mut assembly = plonk::keygen::Assembly { + k, + fixed: vec![vec![F::ZERO.into(); n]; cs.num_fixed_columns], + permutation: permutation::Assembly::new(n, &cs.permutation), + selectors: vec![vec![false; n]; cs.num_selectors], + usable_rows: 0..n - (cs.blinding_factors() + 1), + _marker: std::marker::PhantomData, + }; + + // Synthesize the circuit to obtain URS + ConcreteCircuit::FloorPlanner::synthesize( + &mut assembly, + circuit, + config.clone(), + cs.constants.clone(), + )?; + + let mut fixed = batch_invert_assigned(assembly.fixed); + let (cs, selector_polys) = if compress_selectors { + cs.compress_selectors(assembly.selectors) + } else { + // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. + let selectors = std::mem::take(&mut assembly.selectors); + cs.directly_convert_selectors_to_fixed(selectors) + }; + + fixed.extend(selector_polys); + + // sort the "copies" for deterministic ordering + #[cfg(feature = "thread-safe-region")] + assembly.permutation.copies.sort(); + + let preprocessing = Preprocessing { + permutation: halo2_middleware::permutation::AssemblyMid { + copies: assembly.permutation.copies, + }, + fixed, + }; + + Ok(( + CompiledCircuit { + cs: cs.clone().into(), + preprocessing, + }, + config, + cs, + )) +} + +struct WitnessCollection<'a, F: Field> { + k: u32, + current_phase: sealed::Phase, + advice_column_phase: &'a Vec, + advice: Vec>>, + challenges: &'a HashMap, + instances: &'a [Vec], + usable_rows: RangeTo, +} + +impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn query_instance(&self, column: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.instances + .get(column.index()) + .and_then(|column| column.get(row)) + .map(|v| Value::known(*v)) + .ok_or(Error::BoundsFailure) + } + + fn assign_advice( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // Ignore assignment of advice column in different phase than current one. 
+ let phase = self.advice_column_phase[column.index]; + if self.current_phase != phase { + return Ok(()); + } + + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .advice + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? = to().into_field().assign()?; + + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about advice columns here + + Ok(()) + } + + fn copy(&mut self, _: Column, _: usize, _: Column, _: usize) -> Result<(), Error> { + // We only care about advice columns here + + Ok(()) + } + + fn fill_from_row( + &mut self, + _: Column, + _: usize, + _: Value>, + ) -> Result<(), Error> { + Ok(()) + } + + fn get_challenge(&self, challenge: Challenge) -> Value { + self.challenges + .get(&challenge.index()) + .cloned() + .map(Value::known) + .unwrap_or_else(Value::unknown) + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} + +/// Witness calculator. Frontend function +#[derive(Debug)] +pub struct WitnessCalculator<'a, F: Field, ConcreteCircuit: Circuit> { + k: u32, + n: usize, + unusable_rows_start: usize, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [Vec], + next_phase: u8, +} + +impl<'a, F: Field, ConcreteCircuit: Circuit> WitnessCalculator<'a, F, ConcreteCircuit> { + /// Create a new WitnessCalculator + pub fn new( + k: u32, + circuit: &'a ConcreteCircuit, + config: &'a ConcreteCircuit::Config, + cs: &'a ConstraintSystem, + instances: &'a [Vec], + ) -> Self { + let n = 2usize.pow(k); + let unusable_rows_start = n - (cs.blinding_factors() + 1); + Self { + k, + n, + unusable_rows_start, + circuit, + config, + cs, + instances, + next_phase: 0, + } + } + + /// Calculate witness at phase + pub fn calc( + &mut self, + phase: u8, + challenges: &HashMap, + ) -> Result>>, Error> { + if phase != self.next_phase { + return Err(Error::Other(format!( + "Expected phase {}, got {}", + self.next_phase, phase + ))); + } + let current_phase = match phase { + 0 => FirstPhase.to_sealed(), + 1 => SecondPhase.to_sealed(), + 2 => ThirdPhase.to_sealed(), + _ => unreachable!("only phase [0,2] supported"), + }; + + let mut witness = WitnessCollection { + k: self.k, + current_phase, + advice_column_phase: &self.cs.advice_column_phase, + advice: vec![vec![Assigned::Zero; self.n]; self.cs.num_advice_columns], + instances: self.instances, + challenges, + // The prover will not be allowed to assign values to advice + // cells that exist within inactive rows, which include some + // number of blinding factors and an extra row for use in the + // permutation argument. + usable_rows: ..self.unusable_rows_start, + }; + + // Synthesize the circuit to obtain the witness and other information. 
+ ConcreteCircuit::FloorPlanner::synthesize( + &mut witness, + self.circuit, + self.config.clone(), + self.cs.constants.clone(), + ) + .expect("todo"); + + let column_indices = self + .cs + .advice_column_phase + .iter() + .enumerate() + .filter_map(|(column_index, phase)| { + if current_phase == *phase { + Some(column_index) + } else { + None + } + }) + .collect::>(); + + self.next_phase += 1; + let advice_values = batch_invert_assigned(witness.advice); + Ok(advice_values + .into_iter() + .enumerate() + .map(|(column_index, advice)| { + if column_indices.contains(&column_index) { + Some(advice) + } else { + None + } + }) + .collect()) + } +} + +// Turn vectors of `Assigned` into vectors of `F` by evaluation the divisions in `Assigned` +// using batched inversions. +fn batch_invert_assigned(assigned: Vec>>) -> Vec> { + let mut assigned_denominators: Vec<_> = assigned + .iter() + .map(|f| { + f.iter() + .map(|value| value.denominator()) + .collect::>() + }) + .collect(); + + assigned_denominators + .iter_mut() + .flat_map(|f| { + f.iter_mut() + // If the denominator is trivial, we can skip it, reducing the + // size of the batch inversion. + .filter_map(|d| d.as_mut()) + }) + .batch_invert(); + + assigned + .iter() + .zip(assigned_denominators) + .map(|(poly, inv_denoms)| { + poly_invert(poly, inv_denoms.into_iter().map(|d| d.unwrap_or(F::ONE))) + }) + .collect() +} + +// Turn a slice of `Assigned` into a vector of F by multiplying each numerator with the elements +// from `inv_denoms`, assuming that `inv_denoms` are the inverted denominators of the +// `Assigned`. +fn poly_invert( + poly: &[Assigned], + inv_denoms: impl ExactSizeIterator, +) -> Vec { + assert_eq!(inv_denoms.len(), poly.len()); + poly.iter() + .zip(inv_denoms) + .map(|(a, inv_den)| a.numerator() * inv_den) + .collect() +} + /// A chip implements a set of instructions that can be used by gadgets. /// /// The chip stores state that is required at circuit synthesis time in @@ -51,7 +435,7 @@ pub trait Chip: Sized { } /// Index of a region in a layouter -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct RegionIndex(usize); impl From for RegionIndex { @@ -87,7 +471,7 @@ impl std::ops::Deref for RegionStart { } /// A pointer to a cell within a circuit. -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Cell { /// Identifies the region in which this cell resides. pub region_index: RegionIndex, @@ -105,6 +489,21 @@ pub struct AssignedCell { _marker: PhantomData, } +impl PartialEq for AssignedCell { + fn eq(&self, other: &Self) -> bool { + self.cell == other.cell + } +} + +impl Eq for AssignedCell {} + +use std::hash::{Hash, Hasher}; +impl Hash for AssignedCell { + fn hash(&self, state: &mut H) { + self.cell.hash(state) + } +} + impl AssignedCell { /// Returns the value of the [`AssignedCell`]. pub fn value(&self) -> Value<&V> { @@ -192,7 +591,7 @@ impl<'r, F: Field> From<&'r mut dyn layouter::RegionLayouter> for Region<'r, impl<'r, F: Field> Region<'r, F> { /// Enables a selector at the given offset. 
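Taken together, `compile_circuit`, `WitnessCalculator` and `batch_invert_assigned` form the frontend pipeline: compile the circuit once, then compute advice values phase by phase (returned already evaluated via batched inversion). A minimal sketch of the intended call sequence, assuming an example circuit type `MyCircuit: Circuit<Fr>` and the items of this module in scope; downstream key and proof generation are out of scope for this diff:

```rust
use std::collections::HashMap;
use halo2curves::bn256::Fr;

fn frontend_flow(circuit: &MyCircuit, instances: &[Vec<Fr>]) -> Result<(), Error> {
    let k: u32 = 5;
    // Materialize the circuit: fixed columns, copy constraints, config and constraint system.
    let (compiled, config, cs) = compile_circuit(k, circuit, /* compress_selectors */ true)?;
    // `compiled` is what key generation consumes; `config`/`cs` drive witness calculation.
    let mut witness_calc = WitnessCalculator::new(k, circuit, &config, &cs, instances);
    // Phases must be requested in order; no challenges exist yet for the first phase.
    let challenges: HashMap<usize, Fr> = HashMap::new();
    let phase0_advice = witness_calc.calc(0, &challenges)?;
    let _ = (compiled, phase0_advice);
    Ok(())
}
```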
- pub(crate) fn enable_selector( + pub fn enable_selector( &mut self, annotation: A, selector: &Selector, diff --git a/halo2_proofs/src/circuit/floor_planner.rs b/halo2_frontend/src/circuit/floor_planner.rs similarity index 68% rename from halo2_proofs/src/circuit/floor_planner.rs rename to halo2_frontend/src/circuit/floor_planner.rs index 1b629034e6..e07adad9a4 100644 --- a/halo2_proofs/src/circuit/floor_planner.rs +++ b/halo2_frontend/src/circuit/floor_planner.rs @@ -1,6 +1,5 @@ //! Implementations of common circuit floor planners. +pub mod single_pass; +pub mod v1; -pub(super) mod single_pass; - -mod v1; pub use v1::{V1Pass, V1}; diff --git a/halo2_proofs/src/circuit/floor_planner/single_pass.rs b/halo2_frontend/src/circuit/floor_planner/single_pass.rs similarity index 96% rename from halo2_proofs/src/circuit/floor_planner/single_pass.rs rename to halo2_frontend/src/circuit/floor_planner/single_pass.rs index 33c09e4c57..9a37c56e1c 100644 --- a/halo2_proofs/src/circuit/floor_planner/single_pass.rs +++ b/halo2_frontend/src/circuit/floor_planner/single_pass.rs @@ -3,17 +3,19 @@ use std::collections::HashMap; use std::fmt; use std::marker::PhantomData; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; +use crate::plonk::Assigned; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, Error, Fixed, FloorPlanner, - Instance, Selector, TableColumn, + Advice, Assignment, Challenge, Circuit, Error, Fixed, FloorPlanner, Instance, Selector, + TableColumn, }, }; @@ -381,10 +383,8 @@ mod tests { use halo2curves::pasta::vesta; use super::SimpleFloorPlanner; - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; + use crate::dev::MockProver; + use crate::plonk::{Advice, Circuit, Column, ConstraintSystem, Error}; #[test] fn not_enough_columns_for_constants() { @@ -400,7 +400,7 @@ mod tests { MyCircuit {} } - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { + fn configure(meta: &mut ConstraintSystem) -> Self::Config { meta.advice_column() } @@ -408,7 +408,7 @@ mod tests { &self, config: Self::Config, mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { + ) -> Result<(), Error> { layouter.assign_region( || "assign constant", |mut region| { diff --git a/halo2_proofs/src/circuit/floor_planner/v1.rs b/halo2_frontend/src/circuit/floor_planner/v1.rs similarity index 96% rename from halo2_proofs/src/circuit/floor_planner/v1.rs rename to halo2_frontend/src/circuit/floor_planner/v1.rs index fd26e681df..7b62e78ca0 100644 --- a/halo2_proofs/src/circuit/floor_planner/v1.rs +++ b/halo2_frontend/src/circuit/floor_planner/v1.rs @@ -1,20 +1,22 @@ use std::fmt; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; +use crate::plonk::Assigned; use crate::{ circuit::{ layouter::{RegionColumn, RegionLayouter, RegionShape, SyncDeps, TableLayouter}, table_layouter::{compute_table_lengths, SimpleTableLayouter}, - Cell, Layouter, Region, RegionIndex, RegionStart, Table, Value, + Cell, Column, Layouter, Region, RegionIndex, RegionStart, Table, Value, }, plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, Error, Fixed, FloorPlanner, 
- Instance, Selector, TableColumn, + Advice, Assignment, Challenge, Circuit, Error, Fixed, FloorPlanner, Instance, Selector, + TableColumn, }, }; -mod strategy; +pub mod strategy; /// The version 1 [`FloorPlanner`] provided by `halo2`. /// @@ -496,10 +498,8 @@ impl<'r, 'a, F: Field, CS: Assignment + SyncDeps> RegionLayouter for V1Reg mod tests { use halo2curves::pasta::vesta; - use crate::{ - dev::MockProver, - plonk::{Advice, Circuit, Column, Error}, - }; + use crate::dev::MockProver; + use crate::plonk::{Advice, Circuit, Column, ConstraintSystem, Error}; #[test] fn not_enough_columns_for_constants() { @@ -515,7 +515,7 @@ mod tests { MyCircuit {} } - fn configure(meta: &mut crate::plonk::ConstraintSystem) -> Self::Config { + fn configure(meta: &mut ConstraintSystem) -> Self::Config { meta.advice_column() } @@ -523,7 +523,7 @@ mod tests { &self, config: Self::Config, mut layouter: impl crate::circuit::Layouter, - ) -> Result<(), crate::plonk::Error> { + ) -> Result<(), Error> { layouter.assign_region( || "assign constant", |mut region| { diff --git a/halo2_proofs/src/circuit/floor_planner/v1/strategy.rs b/halo2_frontend/src/circuit/floor_planner/v1/strategy.rs similarity index 85% rename from halo2_proofs/src/circuit/floor_planner/v1/strategy.rs rename to halo2_frontend/src/circuit/floor_planner/v1/strategy.rs index 71745de245..8e32d2c33b 100644 --- a/halo2_proofs/src/circuit/floor_planner/v1/strategy.rs +++ b/halo2_frontend/src/circuit/floor_planner/v1/strategy.rs @@ -5,7 +5,8 @@ use std::{ }; use super::{RegionColumn, RegionShape}; -use crate::{circuit::RegionStart, plonk::Any}; +use crate::circuit::RegionStart; +use halo2_middleware::circuit::Any; /// A region allocated within a column. #[derive(Clone, Default, Debug, PartialEq, Eq)] @@ -46,7 +47,7 @@ impl EmptySpace { /// /// This is a set of [a_start, a_end) pairs representing disjoint allocated intervals. #[derive(Clone, Default, Debug)] -pub struct Allocations(BTreeSet); +pub(crate) struct Allocations(BTreeSet); impl Allocations { /// Returns the row that forms the unbounded unallocated interval [row, None). @@ -99,7 +100,7 @@ impl Allocations { } /// Allocated rows within a circuit. -pub type CircuitAllocations = HashMap; +pub(crate) type CircuitAllocations = HashMap; /// - `start` is the current start row of the region (not of this column). /// - `slack` is the maximum number of rows the start could be moved down, taking into @@ -195,7 +196,7 @@ fn slot_in( } /// Sorts the regions by advice area and then lays them out with the [`slot_in`] strategy. -pub fn slot_in_biggest_advice_first( +pub(crate) fn slot_in_biggest_advice_first( region_shapes: Vec, ) -> (Vec, CircuitAllocations) { let mut sorted_regions: Vec<_> = region_shapes.into_iter().collect(); @@ -205,7 +206,7 @@ pub fn slot_in_biggest_advice_first( .columns() .iter() .filter(|c| match c { - RegionColumn::Column(c) => matches!(c.column_type(), Any::Advice(_)), + RegionColumn::Column(c) => matches!(c.column_type(), Any::Advice), _ => false, }) .count(); @@ -213,22 +214,8 @@ pub fn slot_in_biggest_advice_first( advice_cols * shape.row_count() }; - // This used to incorrectly use `sort_unstable_by_key` with non-unique keys, which gave - // output that differed between 32-bit and 64-bit platforms, and potentially between Rust - // versions. - // We now use `sort_by_cached_key` with non-unique keys, and rely on `region_shapes` - // being sorted by region index (which we also rely on below to return `RegionStart`s - // in the correct order). 
- #[cfg(not(feature = "floor-planner-v1-legacy-pdqsort"))] sorted_regions.sort_by_cached_key(sort_key); - // To preserve compatibility, when the "floor-planner-v1-legacy-pdqsort" feature is enabled, - // we use a copy of the pdqsort implementation from the Rust 1.56.1 standard library, fixed - // to its behaviour on 64-bit platforms. - // https://github.com/rust-lang/rust/blob/1.56.1/library/core/src/slice/mod.rs#L2365-L2402 - #[cfg(feature = "floor-planner-v1-legacy-pdqsort")] - halo2_legacy_pdqsort::sort::quicksort(&mut sorted_regions, |a, b| sort_key(a).lt(&sort_key(b))); - sorted_regions.reverse(); // Lay out the sorted regions. @@ -243,12 +230,15 @@ pub fn slot_in_biggest_advice_first( #[test] fn test_slot_in() { + use crate::circuit::floor_planner::v1::strategy::slot_in; + use crate::circuit::layouter::RegionShape; use crate::plonk::Column; + use halo2_middleware::circuit::Any; let regions = vec![ RegionShape { region_index: 0.into(), - columns: vec![Column::new(0, Any::advice()), Column::new(1, Any::advice())] + columns: vec![Column::new(0, Any::Advice), Column::new(1, Any::Advice)] .into_iter() .map(|a| a.into()) .collect(), @@ -256,7 +246,7 @@ fn test_slot_in() { }, RegionShape { region_index: 1.into(), - columns: vec![Column::new(2, Any::advice())] + columns: vec![Column::new(2, Any::Advice)] .into_iter() .map(|a| a.into()) .collect(), @@ -264,7 +254,7 @@ fn test_slot_in() { }, RegionShape { region_index: 2.into(), - columns: vec![Column::new(2, Any::advice()), Column::new(0, Any::advice())] + columns: vec![Column::new(2, Any::Advice), Column::new(0, Any::Advice)] .into_iter() .map(|a| a.into()) .collect(), diff --git a/halo2_proofs/src/circuit/layouter.rs b/halo2_frontend/src/circuit/layouter.rs similarity index 98% rename from halo2_proofs/src/circuit/layouter.rs rename to halo2_frontend/src/circuit/layouter.rs index f939c3fca5..743492daa4 100644 --- a/halo2_proofs/src/circuit/layouter.rs +++ b/halo2_frontend/src/circuit/layouter.rs @@ -4,11 +4,13 @@ use std::cmp; use std::collections::HashSet; use std::fmt; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; pub use super::table_layouter::TableLayouter; use super::{Cell, RegionIndex, Value}; -use crate::plonk::{Advice, Any, Assigned, Column, Error, Fixed, Instance, Selector}; +use crate::plonk::Assigned; +use crate::plonk::{Advice, Column, Error, Fixed, Instance, Selector}; /// Intermediate trait requirements for [`RegionLayouter`] when thread-safe regions are enabled. #[cfg(feature = "thread-safe-region")] diff --git a/halo2_proofs/src/circuit/table_layouter.rs b/halo2_frontend/src/circuit/table_layouter.rs similarity index 98% rename from halo2_proofs/src/circuit/table_layouter.rs rename to halo2_frontend/src/circuit/table_layouter.rs index 06338bb896..5f6b9560b6 100644 --- a/halo2_proofs/src/circuit/table_layouter.rs +++ b/halo2_frontend/src/circuit/table_layouter.rs @@ -5,11 +5,9 @@ use std::{ fmt::{self, Debug}, }; -use ff::Field; - -use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; - use super::Value; +use crate::plonk::{Assigned, Assignment, Error, TableColumn, TableError}; +use halo2_middleware::ff::Field; /// Helper trait for implementing a custom [`Layouter`]. 
/// @@ -155,14 +153,13 @@ pub(crate) fn compute_table_lengths( mod tests { use halo2curves::pasta::Fp; + use crate::circuit::Value; + use crate::plonk::{Circuit, ConstraintSystem, Error, TableColumn}; use crate::{ circuit::{Layouter, SimpleFloorPlanner}, dev::MockProver, - plonk::{Circuit, ConstraintSystem}, - poly::Rotation, }; - - use super::*; + use halo2_middleware::poly::Rotation; #[test] fn table_no_default() { diff --git a/halo2_proofs/src/circuit/value.rs b/halo2_frontend/src/circuit/value.rs similarity index 67% rename from halo2_proofs/src/circuit/value.rs rename to halo2_frontend/src/circuit/value.rs index f3ea6a39ea..a169d60e41 100644 --- a/halo2_proofs/src/circuit/value.rs +++ b/halo2_frontend/src/circuit/value.rs @@ -25,6 +25,7 @@ impl Default for Value { impl Value { /// Constructs an unwitnessed value. + #[must_use] pub const fn unknown() -> Self { Self { inner: None } } @@ -34,10 +35,11 @@ impl Value { /// # Examples /// /// ``` - /// use halo2_proofs::circuit::Value; + /// use halo2_frontend::circuit::Value; /// /// let v = Value::known(37); /// ``` + #[must_use] pub const fn known(value: V) -> Self { Self { inner: Some(value) } } @@ -45,7 +47,7 @@ impl Value { /// Obtains the inner value for assigning into the circuit. /// /// Returns `Error::Synthesis` if this is [`Value::unknown()`]. - pub(crate) fn assign(self) -> Result { + pub fn assign(self) -> Result { self.inner.ok_or(Error::Synthesis) } @@ -63,11 +65,6 @@ impl Value { } } - /// ONLY FOR INTERNAL CRATE USAGE; DO NOT EXPOSE! - pub(crate) fn into_option(self) -> Option { - self.inner - } - /// Enforces an assertion on the contained value, if known. /// /// The assertion is ignored if `self` is [`Value::unknown()`]. Do not try to enforce @@ -102,6 +99,7 @@ impl Value { /// Returns [`Value::unknown()`] if the value is [`Value::unknown()`], otherwise calls /// `f` with the wrapped value and returns the result. + #[must_use] pub fn and_then Value>(self, f: F) -> Value { match self.inner { Some(v) => f(v), @@ -113,6 +111,7 @@ impl Value { /// /// If `self` is `Value::known(s)` and `other` is `Value::known(o)`, this method /// returns `Value::known((s, o))`. Otherwise, [`Value::unknown()`] is returned. + #[must_use] pub fn zip(self, other: Value) -> Value<(V, W)> { Value { inner: self.inner.zip(other.inner), @@ -126,6 +125,7 @@ impl Value<(V, W)> { /// If `self` is `Value::known((a, b)), this method returns /// `(Value::known(a), Value::known(b))`. Otherwise, /// `(Value::unknown(), Value::unknown())` is returned. + #[must_use] pub fn unzip(self) -> (Value, Value) { match self.inner { Some((a, b)) => (Value::known(a), Value::known(b)), @@ -186,6 +186,7 @@ impl Value<[V; LEN]> { /// Transposes a `Value<[V; LEN]>` into a `[Value; LEN]`. /// /// [`Value::unknown()`] will be mapped to `[Value::unknown(); LEN]`. + #[must_use] pub fn transpose_array(self) -> [Value; LEN] { let mut ret = [Value::unknown(); LEN]; if let Some(arr) = self.inner { @@ -209,6 +210,7 @@ where /// # Panics /// /// Panics if `self` is `Value::known(values)` and `values.len() != length`. + #[must_use] pub fn transpose_vec(self, length: usize) -> Vec> { match self.inner { Some(values) => { @@ -230,6 +232,7 @@ impl> FromIterator> for Value { /// elements are taken, and the [`Value::unknown()`] is returned. Should no /// [`Value::unknown()`] occur, a container of type `V` containing the values of each /// [`Value`] is returned. 
+ #[must_use] fn from_iter>>(iter: I) -> Self { Self { inner: iter.into_iter().map(|v| v.inner).collect(), @@ -244,6 +247,7 @@ impl> FromIterator> for Value { impl Neg for Value { type Output = Value; + #[must_use] fn neg(self) -> Self::Output { Value { inner: self.inner.map(|v| -v), @@ -261,6 +265,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: Self) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), @@ -274,6 +279,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: Self) -> Self::Output { Value { inner: self @@ -291,6 +297,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: Value<&V>) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), @@ -304,6 +311,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), @@ -317,6 +325,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: &Self) -> Self::Output { self + rhs.as_ref() } @@ -328,6 +337,7 @@ where { type Output = Value; + #[must_use] fn add(self, rhs: Value) -> Self::Output { self.as_ref() + rhs } @@ -343,6 +353,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: Self) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), @@ -356,6 +367,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: Self) -> Self::Output { Value { inner: self @@ -373,6 +385,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: Value<&V>) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), @@ -386,6 +399,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), @@ -399,6 +413,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: &Self) -> Self::Output { self - rhs.as_ref() } @@ -410,6 +425,7 @@ where { type Output = Value; + #[must_use] fn sub(self, rhs: Value) -> Self::Output { self.as_ref() - rhs } @@ -425,6 +441,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: Self) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), @@ -438,6 +455,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: Self) -> Self::Output { Value { inner: self @@ -455,6 +473,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: Value<&V>) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), @@ -468,6 +487,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), @@ -481,6 +501,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: &Self) -> Self::Output { self * rhs.as_ref() } @@ -492,6 +513,7 @@ where { type Output = Value; + #[must_use] fn mul(self, rhs: Value) -> Self::Output { self.as_ref() * rhs } @@ -502,6 +524,7 @@ where // impl From> for Value> { + #[must_use] fn from(value: Value) -> Self { Self { inner: value.inner.map(Assigned::from), @@ -512,6 +535,7 @@ impl From> for Value> { impl Add> for Value> { type Output = Value>; + #[must_use] fn add(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), @@ -522,6 +546,7 @@ impl Add> for Value> { impl Add for Value> { type Output = Value>; + #[must_use] fn add(self, rhs: F) -> Self::Output { self + Value::known(rhs) } @@ -530,6 +555,7 @@ impl Add for Value> { impl Add> for Value<&Assigned> { type 
Output = Value>; + #[must_use] fn add(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a + b), @@ -540,6 +566,7 @@ impl Add> for Value<&Assigned> { impl Add for Value<&Assigned> { type Output = Value>; + #[must_use] fn add(self, rhs: F) -> Self::Output { self + Value::known(rhs) } @@ -548,6 +575,7 @@ impl Add for Value<&Assigned> { impl Sub> for Value> { type Output = Value>; + #[must_use] fn sub(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), @@ -558,6 +586,7 @@ impl Sub> for Value> { impl Sub for Value> { type Output = Value>; + #[must_use] fn sub(self, rhs: F) -> Self::Output { self - Value::known(rhs) } @@ -566,6 +595,7 @@ impl Sub for Value> { impl Sub> for Value<&Assigned> { type Output = Value>; + #[must_use] fn sub(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a - b), @@ -576,6 +606,7 @@ impl Sub> for Value<&Assigned> { impl Sub for Value<&Assigned> { type Output = Value>; + #[must_use] fn sub(self, rhs: F) -> Self::Output { self - Value::known(rhs) } @@ -584,6 +615,7 @@ impl Sub for Value<&Assigned> { impl Mul> for Value> { type Output = Value>; + #[must_use] fn mul(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), @@ -594,6 +626,7 @@ impl Mul> for Value> { impl Mul for Value> { type Output = Value>; + #[must_use] fn mul(self, rhs: F) -> Self::Output { self * Value::known(rhs) } @@ -602,6 +635,7 @@ impl Mul for Value> { impl Mul> for Value<&Assigned> { type Output = Value>; + #[must_use] fn mul(self, rhs: Value) -> Self::Output { Value { inner: self.inner.zip(rhs.inner).map(|(a, b)| a * b), @@ -612,6 +646,7 @@ impl Mul> for Value<&Assigned> { impl Mul for Value<&Assigned> { type Output = Value>; + #[must_use] fn mul(self, rhs: F) -> Self::Output { self * Value::known(rhs) } @@ -619,6 +654,7 @@ impl Mul for Value<&Assigned> { impl Value { /// Returns the field element corresponding to this value. + #[must_use] pub fn to_field(&self) -> Value> where for<'v> Assigned: From<&'v V>, @@ -629,6 +665,7 @@ impl Value { } /// Returns the field element corresponding to this value. + #[must_use] pub fn into_field(self) -> Value> where V: Into>, @@ -645,12 +682,14 @@ impl Value { /// If you have a `Value`, convert it to `Value>` first: /// ``` /// # use halo2curves::pasta::pallas::Base as F; - /// use halo2_proofs::{circuit::Value, plonk::Assigned}; + /// use halo2_frontend::circuit::Value; + /// use halo2_frontend::plonk::Assigned; /// /// let v = Value::known(F::from(2)); /// let v: Value> = v.into(); - /// v.double(); + /// let _ = v.double(); /// ``` + #[must_use] pub fn double(&self) -> Value> where V: Borrow>, @@ -661,6 +700,7 @@ impl Value { } /// Squares this field element. + #[must_use] pub fn square(&self) -> Value> where V: Borrow>, @@ -671,6 +711,7 @@ impl Value { } /// Cubes this field element. + #[must_use] pub fn cube(&self) -> Value> where V: Borrow>, @@ -681,6 +722,7 @@ impl Value { } /// Inverts this assigned value (taking the inverse of zero to be zero). + #[must_use] pub fn invert(&self) -> Value> where V: Borrow>, @@ -695,9 +737,246 @@ impl Value> { /// Evaluates this value directly, performing an unbatched inversion if necessary. /// /// If the denominator is zero, the returned value is zero. 
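Since these arithmetic impls only gain `#[must_use]`, their semantics are unchanged: an unknown operand yields an unknown result rather than a panic. A small sketch, assuming `Fr` from `halo2curves::bn256`:

```rust
use halo2curves::bn256::Fr;

let known = Value::known(Fr::from(5));
let unknown: Value<Fr> = Value::unknown();
// Unknown-ness propagates through the operator impls.
let sum = known + unknown;
// `assert_if_known` only runs its predicate on known values, so this is a no-op here.
sum.assert_if_known(|_| false);
```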
+ #[must_use] pub fn evaluate(self) -> Value { Value { inner: self.inner.map(|v| v.evaluate()), } } } + +#[cfg(test)] +mod test { + #![allow(clippy::op_ref)] + + use super::*; + use halo2curves::bn256::Fr; + + type V = Value; + + impl PartialEq for V { + fn eq(&self, other: &Self) -> bool { + self.inner == other.inner + } + } + impl PartialEq for Value> { + fn eq(&self, other: &Self) -> bool { + self.inner == other.inner + } + } + + #[test] + fn test_value_as_mut() { + let mut v_some = V::known(1); + let mut v_none = V::default(); + v_some.as_mut().map(|v| *v = 3); + v_none.as_mut().map(|v| *v = 3); + assert_eq!(v_some, V::known(3)); + assert_eq!(v_none, V::unknown()); + } + + #[test] + fn test_value_assert_if_known_ok() { + V::known(1).assert_if_known(|v| *v == 1); + V::unknown().assert_if_known(|v| *v == 1); + } + + #[test] + #[should_panic] + fn test_value_assert_if_known_ko() { + V::known(1).assert_if_known(|v| *v == 2); + } + + #[test] + fn test_value_error_if_known() { + assert!(V::known(1).error_if_known_and(|v| *v == 1).is_err()); + assert!(V::known(1).error_if_known_and(|v| *v == 2).is_ok()); + assert!(V::unknown().error_if_known_and(|_| true).is_ok()); + } + + #[test] + fn test_map() { + assert_eq!(V::known(1).map(|v| v + 1), V::known(2)); + assert_eq!(V::unknown().map(|v| v + 1), V::unknown()); + } + + #[test] + fn test_value_and_then() { + let v = V::known(1); + assert_eq!(v.and_then(|v| V::known(v + 1)), V::known(2)); + assert_eq!(v.and_then(|_| V::unknown()), V::unknown()); + assert_eq!(V::unknown().and_then(|v| V::known(v + 1)), V::unknown()); + } + + #[test] + fn test_value_zip() { + assert_eq!( + V::known(1).zip(V::known(2)).unzip(), + (V::known(1), V::known(2)) + ); + assert_eq!( + V::known(1).zip(V::unknown()).unzip(), + (V::unknown(), V::unknown()) + ); + assert_eq!( + V::unknown().zip(V::known(2)).unzip(), + (Value::unknown(), V::unknown()) + ); + assert_eq!( + V::unknown().zip(V::unknown()).unzip(), + (Value::unknown(), V::unknown()) + ); + } + + #[test] + fn test_value_copies() { + let copy = Value::<&mut i64>::known(&mut 1).copied(); + let clon = Value::<&mut i64>::known(&mut 1).cloned(); + assert_eq!(copy, Value::known(1)); + assert_eq!(clon, Value::known(1)); + } + + #[test] + fn test_value_transpose_array() { + assert_eq!( + Value::<[_; 2]>::known([1, 2]).transpose_array(), + [V::known(1), V::known(2)] + ); + } + + #[test] + fn test_value_transpose_vec_ok() { + assert_eq!( + Value::<[_; 2]>::known([1, 2]).transpose_vec(2), + vec![V::known(1), V::known(2)] + ); + assert_eq!( + Value::<[_; 2]>::unknown().transpose_vec(2), + vec![V::unknown(), V::unknown()] + ); + + // TODO: check if should be this allowed or not + assert_eq!( + Value::<[_; 6]>::unknown().transpose_vec(2), + vec![V::unknown(), V::unknown()] + ); + } + + #[test] + #[should_panic] + fn test_value_transpose_vec_ko_1() { + assert_eq!( + Value::<[_; 2]>::known([1, 2]).transpose_vec(1), + vec![V::known(1), V::known(2)] + ); + } + + #[test] + #[should_panic] + fn test_value_transpose_vec_ko_2() { + assert_eq!( + Value::<[_; 2]>::known([1, 2]).transpose_vec(3), + vec![V::known(1), V::known(2)] + ); + } + + #[test] + fn test_value_from_iter() { + assert_eq!( + Value::>::from_iter([V::known(1), V::known(2)]).inner, + Some(vec![1, 2]) + ); + assert_eq!( + Value::>::from_iter([V::known(1), V::unknown()]).inner, + None + ); + } + + #[test] + fn test_value_ops() { + assert_eq!(-V::known(5), Value::known(-5)); + + assert_eq!(V::known(5) + V::known(2), V::known(7)); + assert_eq!(&V::known(5) + V::known(2), 
V::known(7)); + assert_eq!(V::known(5) + &V::known(2), V::known(7)); + assert_eq!(&V::known(5) + &V::known(2), V::known(7)); + + assert_eq!(V::known(5) - V::known(2), V::known(3)); + assert_eq!(&V::known(5) - V::known(2), V::known(3)); + assert_eq!(V::known(5) - &V::known(2), V::known(3)); + assert_eq!(&V::known(5) - &V::known(2), V::known(3)); + + assert_eq!(V::known(5) * V::known(2), V::known(10)); + assert_eq!(&V::known(5) * V::known(2), V::known(10)); + assert_eq!(V::known(5) * &V::known(2), V::known(10)); + assert_eq!(&V::known(5) * &V::known(2), V::known(10)); + } + + #[test] + fn test_value_assigned() { + let fr_two = || Fr::from(2); + let fr_three = || Fr::from(3); + + let one = Value::known(Assigned::Trivial(Fr::one())); + let two = Value::known(Assigned::Trivial(Fr::from(2))); + let six = Value::known(Assigned::Trivial(Fr::from(6))); + + let v: Value> = Value::known(Fr::one()).into(); + assert_eq!(v, Value::known(Assigned::Trivial(Fr::one()))); + + assert_eq!(one + Fr::one(), two); + assert_eq!(one + Value::known(Fr::one()), two); + assert_eq!( + Value::known(&Assigned::Trivial(Fr::one())) + Value::known(Fr::one()), + two + ); + assert_eq!(Value::known(&Assigned::Trivial(Fr::one())) + Fr::one(), two); + + assert_eq!(two - Value::known(Fr::one()), one); + assert_eq!(two - Fr::one(), one); + assert_eq!( + Value::known(&Assigned::Trivial(fr_two())) - Value::known(Fr::one()), + one + ); + assert_eq!(Value::known(&Assigned::Trivial(fr_two())) - Fr::one(), one); + + assert_eq!(two * Value::known(fr_three()), six); + assert_eq!(two * fr_three(), six); + assert_eq!( + Value::known(&Assigned::Trivial(fr_two())) * Value::known(fr_three()), + six + ); + assert_eq!(Value::known(&Assigned::Trivial(fr_two())) * fr_three(), six); + } + + #[test] + fn test_value_impl() { + assert_eq!( + Value::known(Fr::one()).to_field(), + Value::known(Assigned::Trivial(Fr::one())) + ); + assert_eq!( + Value::known(Fr::one()).into_field(), + Value::known(Assigned::Trivial(Fr::one())) + ); + + assert_eq!( + Value::known(Assigned::Trivial(Fr::from(3))).double(), + Value::known(Assigned::Trivial(Fr::from(6))) + ); + assert_eq!( + Value::known(Assigned::Trivial(Fr::from(3))).square(), + Value::known(Assigned::Trivial(Fr::from(9))) + ); + assert_eq!( + Value::known(Assigned::Trivial(Fr::from(3))).cube(), + Value::known(Assigned::Trivial(Fr::from(27))) + ); + assert_eq!( + Value::known(Assigned::Trivial(Fr::from(3))) + .invert() + .invert(), + Value::known(Assigned::Trivial(Fr::from(3))) + ); + } +} diff --git a/halo2_proofs/src/dev.rs b/halo2_frontend/src/dev.rs similarity index 71% rename from halo2_proofs/src/dev.rs rename to halo2_frontend/src/dev.rs index 7a3aca10cc..e017e664b4 100644 --- a/halo2_proofs/src/dev.rs +++ b/halo2_frontend/src/dev.rs @@ -6,23 +6,20 @@ use std::iter; use std::ops::{Add, Mul, Neg, Range}; use blake2b_simd::blake2b; -use ff::Field; -use ff::FromUniformBytes; -use crate::plonk::permutation::keygen::Assembly; use crate::{ circuit, plonk::{ permutation, sealed::{self, SealedPhase}, - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, + Advice, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, Expression, FirstPhase, Fixed, FloorPlanner, Instance, Phase, Selector, }, }; - -use crate::multicore::{ - IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, - ParallelSliceMut, +use halo2_middleware::circuit::{Any, ColumnMid}; +use halo2_middleware::ff::{Field, FromUniformBytes}; +use 
halo2_middleware::multicore::{ + IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, ParallelSliceMut, }; pub mod metadata; @@ -47,10 +44,13 @@ pub use tfp::TracingFloorPlanner; #[cfg(feature = "dev-graph")] mod graph; +use crate::plonk::circuit::constraint_system::VirtualCell; #[cfg(feature = "dev-graph")] #[cfg_attr(docsrs, doc(cfg(feature = "dev-graph")))] pub use graph::{circuit_dot_graph, layout::CircuitLayout}; +use halo2_middleware::poly::Rotation; +/// Region of assignments that are done during synthesis. #[derive(Debug)] struct Region { /// The name of the region. Not required to be unique. @@ -181,13 +181,14 @@ impl Mul for Value { /// # Examples /// /// ``` -/// use halo2_proofs::{ +/// use halo2_frontend::{ /// circuit::{Layouter, SimpleFloorPlanner, Value}, /// dev::{FailureLocation, MockProver, VerifyFailure}, -/// plonk::{Advice, Any, Circuit, Column, ConstraintSystem, Error, Selector}, -/// poly::Rotation, +/// plonk::{circuit::Column, Circuit, ConstraintSystem, Error, Advice, Selector}, /// }; -/// use ff::PrimeField; +/// use halo2_middleware::circuit::{Any, ColumnMid}; +/// use halo2_middleware::poly::Rotation; +/// use halo2_middleware::ff::PrimeField; /// use halo2curves::pasta::Fp; /// const K: u32 = 5; /// @@ -270,9 +271,9 @@ impl Mul for Value { /// offset: 0, /// }, /// cell_values: vec![ -/// (((Any::advice(), 0).into(), 0).into(), "0x2".to_string()), -/// (((Any::advice(), 1).into(), 0).into(), "0x4".to_string()), -/// (((Any::advice(), 2).into(), 0).into(), "0x8".to_string()), +/// ((ColumnMid::new(Any::Advice, 0), 0).into(), "0x2".to_string()), +/// ((ColumnMid::new(Any::Advice, 1), 0).into(), "0x4".to_string()), +/// ((ColumnMid::new(Any::Advice, 2), 0).into(), "0x8".to_string()), /// ], /// }]) /// ); @@ -310,7 +311,7 @@ pub struct MockProver { challenges: Vec, - permutation: permutation::keygen::Assembly, + permutation: permutation::Assembly, // A range of available rows for assignment and copies. usable_rows: Range, @@ -328,7 +329,8 @@ pub enum InstanceValue { } impl InstanceValue { - fn value(&self) -> F { + /// Field value on the instance cell + pub fn value(&self) -> F { match self { InstanceValue::Assigned(v) => *v, InstanceValue::Padding => F::ZERO, @@ -342,6 +344,18 @@ impl MockProver { } } +impl MockProver { + /// Return the content of an advice column as mutable + pub fn advice_mut(&mut self, column_index: usize) -> &mut [CellValue] { + self.advice[column_index].as_mut_slice() + } + + /// Return the content of an instance column as mutable + pub fn instance_mut(&mut self, column_index: usize) -> &mut [InstanceValue] { + self.instance[column_index].as_mut_slice() + } +} + impl Assignment for MockProver { fn enter_region(&mut self, name: N) where @@ -383,7 +397,7 @@ impl Assignment for MockProver { if let Some(region) = self.current_region.as_mut() { region .annotations - .insert(ColumnMetadata::from(column), annotation().into()); + .insert(column.into(), annotation().into()); } } @@ -483,7 +497,8 @@ impl Assignment for MockProver { } Err(err) => { // Propagate `assign` error if the column is in current phase. 
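The new `advice_mut` / `instance_mut` accessors exist so tests can perturb an already synthesized witness and check that verification notices (the `modify_proof` test added further down does exactly this). A minimal sketch, assuming a `mut prover: MockProver<Fp>` over a circuit that actually constrains the tampered cell:

```rust
// Overwrite one advice cell and expect some verification failure; which failure is
// reported depends on the circuit's gates, so only `is_err()` is asserted here.
let advice0 = prover.advice_mut(0);
advice0[0] = CellValue::Assigned(Fp::from(42));
assert!(prover.verify().is_err());
```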
- if self.in_phase(column.column_type().phase) { + let phase = self.cs.advice_column_phase[column.index]; + if self.in_phase(phase) { return Err(err); } } @@ -585,7 +600,7 @@ impl Assignment for MockProver { } fn get_challenge(&self, challenge: Challenge) -> circuit::Value { - if self.current_phase <= challenge.phase { + if self.current_phase.0 <= challenge.phase() { return circuit::Value::unknown(); } @@ -669,7 +684,7 @@ impl + Ord> MockProver { }; cs.num_advice_columns ]; - let permutation = permutation::keygen::Assembly::new(n, &cs.permutation); + let permutation = permutation::Assembly::new(n, &cs.permutation); let constants = cs.constants.clone(); // Use hash chain to derive deterministic challenges for testing @@ -720,7 +735,7 @@ impl + Ord> MockProver { })); #[cfg(feature = "thread-safe-region")] - prover.permutation.build_ordered_mapping(); + prover.permutation.copies.sort(); Ok(prover) } @@ -817,7 +832,15 @@ impl + Ord> MockProver { } _ => { // Check that it was assigned! - if r.cells.contains_key(&(cell.column, cell_row)) { + if r.cells.contains_key(&(cell.column, cell_row)) + || gate.polynomials().par_iter().all(|expr| { + self.cell_is_irrelevant( + cell, + expr, + gate_row as usize, + ) + }) + { None } else { Some(VerifyFailure::CellNotAssigned { @@ -898,9 +921,9 @@ impl + Ord> MockProver { cell_values: util::cell_values( gate, poly, - &util::load(n, row, &self.cs.fixed_queries, &self.fixed), - &util::load(n, row, &self.cs.advice_queries, &self.advice), - &util::load_instance( + util::load(n, row, &self.cs.fixed_queries, &self.fixed), + util::load(n, row, &self.cs.advice_queries, &self.advice), + util::load_instance( n, row, &self.cs.instance_queries, @@ -1110,54 +1133,36 @@ impl + Ord> MockProver { .collect::>() }); - let mapping = self.permutation.mapping(); // Check that permutations preserve the original values of the cells. - let perm_errors = { - // Original values of columns involved in the permutation. - let original = |column, row| { - self.cs - .permutation - .get_columns() - .get(column) - .map(|c: &Column| match c.column_type() { - Any::Advice(_) => self.advice[c.index()][row], - Any::Fixed => self.fixed[c.index()][row], - Any::Instance => { - let cell: &InstanceValue = &self.instance[c.index()][row]; - CellValue::Assigned(cell.value()) - } - }) - .unwrap() - }; - - // Iterate over each column of the permutation - mapping.enumerate().flat_map(move |(column, values)| { - // Iterate over each row of the column to check that the cell's - // value is preserved by the mapping. - values - .enumerate() - .filter_map(move |(row, cell)| { - let original_cell = original(column, row); - let permuted_cell = original(cell.0, cell.1); - if original_cell == permuted_cell { - None - } else { - let columns = self.cs.permutation.get_columns(); - let column = columns.get(column).unwrap(); - Some(VerifyFailure::Permutation { - column: (*column).into(), - location: FailureLocation::find( - &self.regions, - row, - Some(column).into_iter().cloned().collect(), - ), - }) - } - }) - .collect::>() - }) + // Original values of columns involved in the permutation. 
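The hunk below drops the mapping-based permutation check in favour of walking the recorded copy constraints directly: both endpoints of every copy must hold the same value. A schematic, self-contained restatement of that invariant (cells are simplified to `(column, row)` pairs for illustration; the real code uses `ColumnMid` and `CellValue`):

```rust
// Returns true iff every recorded copy constraint relates two cells with equal values.
fn copies_hold<F: PartialEq>(
    copies: &[((usize, usize), (usize, usize))], // ((column, row), (column, row)) pairs
    value_at: impl Fn(usize, usize) -> F,        // lookup into advice/fixed/instance values
) -> bool {
    copies
        .iter()
        .all(|&((col_a, row_a), (col_b, row_b))| value_at(col_a, row_a) == value_at(col_b, row_b))
}
```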
+ let original = |column: ColumnMid, row: usize| match column.column_type { + halo2_middleware::circuit::Any::Advice => self.advice[column.index][row], + halo2_middleware::circuit::Any::Fixed => self.fixed[column.index][row], + halo2_middleware::circuit::Any::Instance => { + let cell: &InstanceValue = &self.instance[column.index][row]; + CellValue::Assigned(cell.value()) + } }; + // Iterate over each pair of copied cells to check that the cell's value is preserved + // by the copy. + let perm_errors = self.permutation.copies.iter().flat_map(|(cell_a, cell_b)| { + let original_cell = original(cell_a.column, cell_a.row); + let permuted_cell = original(cell_b.column, cell_b.row); + if original_cell == permuted_cell { + None + } else { + Some(VerifyFailure::Permutation { + column: cell_a.column, + location: FailureLocation::find( + &self.regions, + cell_a.row, + Some(&cell_a.column.into()).into_iter().cloned().collect(), + ), + }) + } + }); + let mut errors: Vec<_> = iter::empty() .chain(selector_errors) .chain(gate_errors) @@ -1182,6 +1187,66 @@ impl + Ord> MockProver { } } + // Checks if the given expression is guaranteed to be constantly zero at the given offset. + fn expr_is_constantly_zero(&self, expr: &Expression, offset: usize) -> bool { + match expr { + Expression::Constant(constant) => constant.is_zero().into(), + Expression::Selector(selector) => !self.selectors[selector.0][offset], + Expression::Fixed(query) => match self.fixed[query.column_index][offset] { + CellValue::Assigned(value) => value.is_zero().into(), + _ => false, + }, + Expression::Scaled(e, factor) => { + factor.is_zero().into() || self.expr_is_constantly_zero(e, offset) + } + Expression::Sum(e1, e2) => { + self.expr_is_constantly_zero(e1, offset) && self.expr_is_constantly_zero(e2, offset) + } + Expression::Product(e1, e2) => { + self.expr_is_constantly_zero(e1, offset) || self.expr_is_constantly_zero(e2, offset) + } + _ => false, + } + } + // Verify that the value of the given cell within the given expression is + // irrelevant to the evaluation of the expression. This may be because + // the cell is always multiplied by an expression that evaluates to 0, or + // because the cell is not being queried in the expression at all. + fn cell_is_irrelevant(&self, cell: &VirtualCell, expr: &Expression, offset: usize) -> bool { + // Check if a given query (defined by its columnd and rotation, since we + // want this function to support different query types) is equal to `cell`. 
+ let eq_query = |query_column: usize, query_rotation: Rotation, col_type: Any| { + cell.column.index == query_column + && cell.column.column_type == col_type + && query_rotation == cell.rotation + }; + match expr { + Expression::Constant(_) | Expression::Selector(_) => true, + Expression::Fixed(query) => !eq_query(query.column_index, query.rotation(), Any::Fixed), + Expression::Advice(query) => { + !eq_query(query.column_index, query.rotation(), Any::Advice) + } + Expression::Instance(query) => { + !eq_query(query.column_index, query.rotation(), Any::Instance) + } + Expression::Challenge(_) => true, + Expression::Negated(e) => self.cell_is_irrelevant(cell, e, offset), + Expression::Sum(e1, e2) => { + self.cell_is_irrelevant(cell, e1, offset) + && self.cell_is_irrelevant(cell, e2, offset) + } + Expression::Product(e1, e2) => { + (self.expr_is_constantly_zero(e1, offset) + || self.expr_is_constantly_zero(e2, offset)) + || (self.cell_is_irrelevant(cell, e1, offset) + && self.cell_is_irrelevant(cell, e2, offset)) + } + Expression::Scaled(e, factor) => { + factor.is_zero().into() || self.cell_is_irrelevant(cell, e, offset) + } + } + } + /// Panics if the circuit being checked by this `MockProver` is not satisfied. /// /// Any verification failures will be pretty-printed to stderr before the function @@ -1255,11 +1320,6 @@ impl + Ord> MockProver { pub fn instance(&self) -> &Vec>> { &self.instance } - - /// Returns the permutation argument (`Assembly`) used within a MockProver instance. - pub fn permutation(&self) -> &Assembly { - &self.permutation - } } #[cfg(test)] @@ -1267,14 +1327,14 @@ mod tests { use halo2curves::pasta::Fp; use super::{FailureLocation, MockProver, VerifyFailure}; - use crate::{ - circuit::{Layouter, SimpleFloorPlanner, Value}, - plonk::{ - sealed::SealedPhase, Advice, Any, Circuit, Column, ConstraintSystem, Error, Expression, - FirstPhase, Fixed, Instance, Selector, TableColumn, - }, - poly::Rotation, + use crate::circuit::{Layouter, SimpleFloorPlanner, Value}; + use crate::dev::{CellValue, InstanceValue}; + use crate::plonk::{ + Advice, Circuit, Column, ConstraintSystem, Error, Expression, Fixed, Instance, Selector, + TableColumn, }; + use halo2_middleware::circuit::{Any, ColumnMid}; + use halo2_middleware::poly::Rotation; #[test] fn unassigned_cell() { @@ -1352,19 +1412,14 @@ mod tests { gate: (0, "Equality check").into(), region: (0, "Faulty synthesis".to_owned()).into(), gate_offset: 1, - column: Column::new( - 1, - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }) - ), + column: Column::new(1, Any::Advice), offset: 1, }]) ); } #[test] - fn bad_lookup_any() { + fn bad_lookup_any_faulty_synthesis() { const K: u32 = 4; #[derive(Clone)] @@ -1373,6 +1428,7 @@ mod tests { table: Column, advice_table: Column, q: Selector, + s_ltable: Selector, } struct FaultyCircuit {} @@ -1388,6 +1444,7 @@ mod tests { let q = meta.complex_selector(); let table = meta.instance_column(); let advice_table = meta.advice_column(); + let s_ltable = meta.complex_selector(); meta.annotate_lookup_any_column(table, || "Inst-Table"); meta.enable_equality(table); @@ -1399,17 +1456,14 @@ mod tests { let q = cells.query_selector(q); let advice_table = cells.query_advice(advice_table, Rotation::cur()); let table = cells.query_instance(table, Rotation::cur()); + let s_ltable = cells.query_selector(s_ltable); // If q is enabled, a must be in the table. - // When q is not enabled, lookup the default value instead. 
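The `expr_is_constantly_zero` / `cell_is_irrelevant` helpers above let the unassigned-cell check skip cells that cannot influence a gate, e.g. a cell that is only ever multiplied by a selector that is off on that row. A scenario sketch (not the helper itself), assuming the usual `ConstraintSystem` imports used elsewhere in this file:

```rust
// In this gate `a` is multiplied by the selector `q`, so on rows where `q` is disabled
// the value of `a` cannot affect the constraint; MockProver no longer reports such
// unassigned cells as CellNotAssigned failures.
fn configure_gated(meta: &mut ConstraintSystem<Fp>) -> (Selector, Column<Advice>) {
    let q = meta.selector();
    let a = meta.advice_column();
    meta.create_gate("gated", |cells| {
        let q = cells.query_selector(q);
        let a = cells.query_advice(a, Rotation::cur());
        vec![q * a]
    });
    (q, a)
}
```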
- let not_q = Expression::Constant(Fp::one()) - q.clone(); - let default = Expression::Constant(Fp::from(2)); + // If `s_ltable` is enabled, the value of `advice_table` & `table` is used as lookup table. vec![ - ( - q.clone() * a.clone() + not_q.clone() * default.clone(), - table, - ), - (q * a + not_q * default, advice_table), + (q.clone() * a.clone(), table * s_ltable.clone()), + (q.clone() * a, advice_table * s_ltable.clone()), + (q, s_ltable), ] }); @@ -1418,6 +1472,7 @@ mod tests { q, table, advice_table, + s_ltable, } } @@ -1430,15 +1485,9 @@ mod tests { config: Self::Config, mut layouter: impl Layouter, ) -> Result<(), Error> { - // No assignment needed for the table as is an Instance Column. - layouter.assign_region( - || "Good synthesis", + || "Assign dynamic table", |mut region| { - // Enable the lookup on rows 0 and 1. - config.q.enable(&mut region, 0)?; - config.q.enable(&mut region, 1)?; - for i in 0..4 { // Load Advice lookup table with Instance lookup table values. region.assign_advice_from_instance( @@ -1448,22 +1497,14 @@ mod tests { config.advice_table, i, )?; - } - - // Assign a = 2 and a = 6. - region.assign_advice( - || "a = 2", - config.a, - 0, - || Value::known(Fp::from(2)), - )?; - region.assign_advice( - || "a = 6", - config.a, - 1, - || Value::known(Fp::from(6)), - )?; + // Enable the rows, which are used for lookup table values. + region.enable_selector( + || format!("enabling table row {}", i), + &config.s_ltable, + i, + )?; + } Ok(()) }, )?; @@ -1475,17 +1516,6 @@ mod tests { config.q.enable(&mut region, 0)?; config.q.enable(&mut region, 1)?; - for i in 0..4 { - // Load Advice lookup table with Instance lookup table values. - region.assign_advice_from_instance( - || "Advice from instance tables", - config.table, - i, - config.advice_table, - i, - )?; - } - // Assign a = 4. region.assign_advice( || "a = 4", @@ -1535,6 +1565,367 @@ mod tests { ); } + #[cfg(feature = "lookup-any-sanity-checks")] + #[test] + #[should_panic( + expected = "pair of tagging expressions(query of the tag columns or mutiple query combinations) should be included" + )] + fn bad_lookup_any_not_add_tagging_pairs() { + const K: u32 = 4; + + #[derive(Clone)] + #[allow(dead_code)] + struct FaultyCircuitConfig { + a: Column, + table: Column, + advice_table: Column, + q: Selector, + s_ltable: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let table = meta.instance_column(); + let advice_table = meta.advice_column(); + let s_ltable = meta.complex_selector(); + + meta.annotate_lookup_any_column(table, || "Inst-Table"); + meta.enable_equality(table); + meta.annotate_lookup_any_column(advice_table, || "Adv-Table"); + meta.enable_equality(advice_table); + + meta.lookup_any("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + let advice_table = cells.query_advice(advice_table, Rotation::cur()); + let table = cells.query_instance(table, Rotation::cur()); + let s_ltable = cells.query_selector(s_ltable); + + // If q is enabled, a must be in the table. 
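The rewritten lookup tests, together with the new `lookup-any-sanity-checks` feature, encode a convention for `lookup_any`: every table expression must be gated by a selector or fixed "tag" query, and the tag pair itself must appear in the argument list. A reduced sketch of a well-formed call, using column handles named as in the tests (`a`, `q`, `table`, `s_ltable`):

```rust
meta.lookup_any("tagged lookup", |cells| {
    let a = cells.query_advice(a, Rotation::cur());
    let q = cells.query_selector(q);
    let table = cells.query_instance(table, Rotation::cur());
    let s_ltable = cells.query_selector(s_ltable);
    vec![
        // input expression against the table column gated by the tag selector
        (q.clone() * a, table * s_ltable.clone()),
        // explicit tagging pair, required by the sanity checks
        (q, s_ltable),
    ]
});
```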
+ vec![ + (q.clone() * a.clone(), s_ltable.clone() * table), + (q * a, s_ltable * advice_table), + ] + }); + + FaultyCircuitConfig { + a, + q, + table, + advice_table, + s_ltable, + } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize(&self, _: Self::Config, _: impl Layouter) -> Result<(), Error> { + unreachable!("Should not be called because of configuration error"); + } + } + + let _ = MockProver::run( + K, + &FaultyCircuit {}, + // This is our "lookup table". + vec![vec![ + Fp::from(1u64), + Fp::from(2u64), + Fp::from(4u64), + Fp::from(6u64), + ]], + ) + .unwrap(); + } + + #[cfg(feature = "lookup-any-sanity-checks")] + #[test] + #[should_panic(expected = "all table expressions need selector/fixed query for tagging")] + fn bad_lookup_any_no_fixed_col_or_selector() { + const K: u32 = 4; + + #[derive(Clone)] + #[allow(dead_code)] + struct FaultyCircuitConfig { + a: Column, + table: Column, + advice_table: Column, + q: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let table = meta.instance_column(); + let advice_table = meta.advice_column(); + + meta.annotate_lookup_any_column(table, || "Inst-Table"); + meta.enable_equality(table); + meta.annotate_lookup_any_column(advice_table, || "Adv-Table"); + meta.enable_equality(advice_table); + + meta.lookup_any("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + let advice_table = cells.query_advice(advice_table, Rotation::cur()); + let table = cells.query_instance(table, Rotation::cur()); + + // If q is enabled, a must be in the table. + vec![(q.clone() * a.clone(), table), (q * a, advice_table)] + }); + + FaultyCircuitConfig { + a, + q, + table, + advice_table, + } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize(&self, _: Self::Config, _: impl Layouter) -> Result<(), Error> { + unreachable!("Should not be called because of configuration error"); + } + } + + let _ = MockProver::run( + K, + &FaultyCircuit {}, + // This is our "lookup table". + vec![vec![ + Fp::from(1u64), + Fp::from(2u64), + Fp::from(4u64), + Fp::from(6u64), + ]], + ) + .unwrap(); + } + + #[cfg(feature = "lookup-any-sanity-checks")] + #[test] + #[should_panic( + expected = "all table expressions contain only fixed query, should use `lookup` api instead of `lookup_any`" + )] + fn bad_lookup_any_use_only_fixed_col() { + const K: u32 = 4; + + #[derive(Clone)] + #[allow(dead_code)] + struct FaultyCircuitConfig { + a: Column, + fixed_table: Column, + q: Selector, + } + + struct FaultyCircuit {} + + impl Circuit for FaultyCircuit { + type Config = FaultyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let fixed_table = meta.fixed_column(); + + meta.annotate_lookup_any_column(fixed_table, || "Fixed-Table"); + meta.enable_equality(fixed_table); + + meta.lookup_any("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + let fixed_table = cells.query_fixed(fixed_table, Rotation::cur()); + + // If q is enabled, a must be in the table. 
+ vec![(q * a, fixed_table)] + }); + + FaultyCircuitConfig { a, q, fixed_table } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize(&self, _: Self::Config, _: impl Layouter) -> Result<(), Error> { + unreachable!("Should not be called because of configuration error"); + } + } + + let _ = MockProver::run( + K, + &FaultyCircuit {}, + // This is our "lookup table". + vec![vec![ + Fp::from(1u64), + Fp::from(2u64), + Fp::from(4u64), + Fp::from(6u64), + ]], + ) + .unwrap(); + } + + #[test] + fn good_lookup_any() { + const K: u32 = 4; + + #[derive(Clone)] + struct GoodLookupAnyCircuitConfig { + a: Column, + table: Column, + advice_table: Column, + q: Selector, + s_ltable: Selector, + } + + struct GoodLookupAnyCircuit {} + + impl Circuit for GoodLookupAnyCircuit { + type Config = GoodLookupAnyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let q = meta.complex_selector(); + let table = meta.instance_column(); + let advice_table = meta.advice_column(); + let s_ltable = meta.complex_selector(); + + meta.annotate_lookup_any_column(table, || "Inst-Table"); + meta.enable_equality(table); + meta.annotate_lookup_any_column(advice_table, || "Adv-Table"); + meta.enable_equality(advice_table); + + meta.lookup_any("lookup", |cells| { + let a = cells.query_advice(a, Rotation::cur()); + let q = cells.query_selector(q); + let advice_table = cells.query_advice(advice_table, Rotation::cur()); + let table = cells.query_instance(table, Rotation::cur()); + let s_ltable = cells.query_selector(s_ltable); + + // If q is enabled, a must be in the table. + // If `s_ltable` is enabled, the value of `advice_table` & `table` is used as lookup table. + vec![ + (q.clone() * a.clone(), table * s_ltable.clone()), + (q.clone() * a, advice_table * s_ltable.clone()), + (q, s_ltable), + ] + }); + + GoodLookupAnyCircuitConfig { + a, + q, + table, + advice_table, + s_ltable, + } + } + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "Assign dynamic table", + |mut region| { + for i in 0..4 { + // Load Advice lookup table with Instance lookup table values. + region.assign_advice_from_instance( + || "Advice from instance tables", + config.table, + i, + config.advice_table, + i, + )?; + + // Enable the rows, which are used for lookup table values. + region.enable_selector( + || format!("enabling table row {}", i), + &config.s_ltable, + i, + )?; + } + Ok(()) + }, + )?; + + layouter.assign_region( + || "Good synthesis", + |mut region| { + // Enable the lookup on rows 0 and 1. + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + // Assign a = 2 and a = 6. + region.assign_advice( + || "a = 2", + config.a, + 0, + || Value::known(Fp::from(2)), + )?; + region.assign_advice( + || "a = 6", + config.a, + 1, + || Value::known(Fp::from(6)), + )?; + + Ok(()) + }, + )?; + + Ok(()) + } + } + + let prover = MockProver::run( + K, + &GoodLookupAnyCircuit {}, + // This is our "lookup table". 
+ vec![vec![ + Fp::from(1u64), + Fp::from(2u64), + Fp::from(4u64), + Fp::from(6u64), + ]], + ) + .unwrap(); + assert_eq!(prover.verify(), Ok(())); + } + #[test] fn bad_fixed_lookup() { const K: u32 = 4; @@ -1804,51 +2195,135 @@ mod tests { offset: 0, }, cell_values: vec![ + ((ColumnMid::new(Any::Advice, 0), 0).into(), "1".to_string()), + ((ColumnMid::new(Any::Advice, 1), 0).into(), "0".to_string()), ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 0 - ) - .into(), - 0 - ) - .into(), - "1".to_string() - ), - ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 1 - ) - .into(), - 0 - ) - .into(), - "0".to_string() - ), - ( - ( - ( - Any::Advice(Advice { - phase: FirstPhase.to_sealed() - }), - 2 - ) - .into(), - 0 - ) - .into(), + (ColumnMid::new(Any::Advice, 2), 0).into(), "0x5".to_string() ), - (((Any::Fixed, 0).into(), 0).into(), "0x7".to_string()), + ((ColumnMid::new(Any::Fixed, 0), 0).into(), "0x7".to_string()), ], },]) ) } + + #[test] + fn modify_proof() { + const K: u32 = 4; + + struct EasyCircuit {} + + #[derive(Clone)] + struct EasyCircuitConfig { + a: Column, + b: Column, + q: Selector, + } + + impl Circuit for EasyCircuit { + type Config = EasyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self {} + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + let a = meta.advice_column(); + let b = meta.instance_column(); + let q = meta.selector(); + + meta.enable_equality(a); + meta.enable_equality(b); + + meta.create_gate("squared", |cells| { + let cur = cells.query_advice(a, Rotation::cur()); + let next = cells.query_advice(a, Rotation::next()); + let q = cells.query_selector(q); + + vec![q * (next - cur.clone() * cur)] + }); + + EasyCircuitConfig { a, b, q } + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + let out = layouter.assign_region( + || "main region", + |mut region| { + config.q.enable(&mut region, 0)?; + config.q.enable(&mut region, 1)?; + + region.assign_advice(|| "a0", config.a, 0, || Value::known(Fp::from(3)))?; + region.assign_advice(|| "a1", config.a, 1, || Value::known(Fp::from(9)))?; + region.assign_advice(|| "a2", config.a, 2, || Value::known(Fp::from(81))) + }, + )?; + + layouter.constrain_instance(out.cell(), config.b, 0) + } + } + + let mut prover = MockProver::run(K, &EasyCircuit {}, vec![vec![Fp::from(81)]]).unwrap(); + assert_eq!(prover.verify(), Ok(())); + + let err1 = VerifyFailure::ConstraintNotSatisfied { + constraint: ((0, "squared").into(), 0, "").into(), + location: FailureLocation::InRegion { + region: (0, "main region").into(), + offset: 1, + }, + cell_values: vec![ + ( + (ColumnMid::new(Any::Advice, 0), 0).into(), + "0x9".to_string(), + ), + ( + (ColumnMid::new(Any::Advice, 0), 1).into(), + "0xa".to_string(), + ), + ], + }; + + let err2 = VerifyFailure::Permutation { + column: ColumnMid::new(Any::Advice, 0), + location: FailureLocation::InRegion { + region: (0, "main region").into(), + offset: 2, + }, + }; + + // first we modify the instance -> this results in the permutation being unsatisfied + let instance = prover.instance_mut(0); + instance[0] = InstanceValue::Assigned(Fp::from(11)); + assert_eq!(prover.verify(), Err(vec![err2.clone()])); + + // then we modify the witness -> the contraint `squared` will fail + let advice0 = prover.advice_mut(0); + advice0[2] = CellValue::Assigned(Fp::from(10)); + 
assert_eq!(prover.verify(), Err(vec![err1, err2])); + + // we reset back to original values + let instance = prover.instance_mut(0); + instance[0] = InstanceValue::Assigned(Fp::from(81)); + let advice0 = prover.advice_mut(0); + advice0[2] = CellValue::Assigned(Fp::from(81)); + assert_eq!(prover.verify(), Ok(())); + + // and now we try to trick the verifier + // we assign on row 0 `Fp - 3`, which is also a square root of 9 + // but will overflow the prime field + let sqrt_9 = Fp::zero() - Fp::from(3); + let advice0 = prover.advice_mut(0); + advice0[0] = CellValue::Assigned(sqrt_9); + + // if this verifies correctly -> we have an issue and we are missing a range check + assert_eq!(prover.verify(), Ok(())); + } } diff --git a/halo2_proofs/src/dev/cost.rs b/halo2_frontend/src/dev/cost.rs similarity index 97% rename from halo2_proofs/src/dev/cost.rs rename to halo2_frontend/src/dev/cost.rs index 735f1f0dc7..870a7e6008 100644 --- a/halo2_proofs/src/dev/cost.rs +++ b/halo2_frontend/src/dev/cost.rs @@ -8,16 +8,17 @@ use std::{ ops::{Add, Mul}, }; -use ff::{Field, PrimeField}; use group::prime::PrimeGroup; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::{Field, PrimeField}; +use halo2_middleware::poly::Rotation; use crate::{ circuit::{layouter::RegionColumn, Value}, plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, + Advice, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, Fixed, + FloorPlanner, Instance, Selector, }, - poly::Rotation, }; /// Measures a circuit to determine its costs, and explain what contributes to them. @@ -94,7 +95,7 @@ pub(crate) struct Layout { impl Layout { /// Creates a empty layout - pub fn new(k: u32, n: usize, num_selectors: usize) -> Self { + pub(crate) fn new(k: u32, n: usize, num_selectors: usize) -> Self { Layout { k, regions: vec![], @@ -112,14 +113,12 @@ impl Layout { } /// Update layout metadata - pub fn update(&mut self, column: RegionColumn, row: usize) { + pub(crate) fn update(&mut self, column: RegionColumn, row: usize) { self.total_rows = cmp::max(self.total_rows, row + 1); if let RegionColumn::Column(col) = column { match col.column_type() { - Any::Advice(_) => { - self.total_advice_rows = cmp::max(self.total_advice_rows, row + 1) - } + Any::Advice => self.total_advice_rows = cmp::max(self.total_advice_rows, row + 1), Any::Fixed => self.total_fixed_rows = cmp::max(self.total_fixed_rows, row + 1), _ => {} } diff --git a/halo2_proofs/src/dev/cost_model.rs b/halo2_frontend/src/dev/cost_model.rs similarity index 70% rename from halo2_proofs/src/dev/cost_model.rs rename to halo2_frontend/src/dev/cost_model.rs index 51b3a1ad76..a430eb41fa 100644 --- a/halo2_proofs/src/dev/cost_model.rs +++ b/halo2_frontend/src/dev/cost_model.rs @@ -2,10 +2,11 @@ //! verification cost, as well as resulting proof size. use std::collections::HashSet; -use std::{iter, num::ParseIntError, str::FromStr}; +use std::panic::AssertUnwindSafe; +use std::{iter, num::ParseIntError, panic, str::FromStr}; use crate::plonk::Circuit; -use ff::{Field, FromUniformBytes}; +use halo2_middleware::ff::{Field, FromUniformBytes}; use serde::Deserialize; use serde_derive::Serialize; @@ -49,8 +50,21 @@ pub struct CostOptions { /// A shuffle over N columns with max input degree I and max shuffle degree T. May be repeated. pub shuffle: Vec, - /// 2^K bound on the number of rows. - pub k: usize, + /// 2^K bound on the number of rows, accounting for ZK, PIs and Lookup tables. 
+ pub min_k: usize, + + /// Rows count, not including table rows and not accounting for compression + /// (where multiple regions can use the same rows). + pub rows_count: usize, + + /// Table rows count, not accounting for compression (where multiple regions + /// can use the same rows), but not much if any compression can happen with + /// table rows anyway. + pub table_rows_count: usize, + + /// Compressed rows count, accounting for compression (where multiple + /// regions can use the same rows). + pub compressed_rows_count: usize, } /// Structure holding polynomial related data for benchmarks @@ -76,7 +90,7 @@ impl FromStr for Poly { pub struct Lookup; impl Lookup { - fn queries(&self) -> impl Iterator { + pub fn queries(&self) -> impl Iterator { // - product commitments at x and \omega x // - input commitments at x and x_inv // - table commitments at x @@ -98,7 +112,7 @@ pub struct Permutation { } impl Permutation { - fn queries(&self) -> impl Iterator { + pub fn queries(&self) -> impl Iterator { // - product commitments at x and x_inv // - polynomial commitments at x let product = "0,-1".parse().unwrap(); @@ -108,6 +122,10 @@ impl Permutation { .chain(Some(product)) .chain(iter::repeat(poly).take(self.columns)) } + + pub fn nr_columns(&self) -> usize { + self.columns + } } /// Structure holding the [Shuffle] related data for circuit benchmarks. @@ -128,6 +146,10 @@ impl Shuffle { pub struct ModelCircuit { /// Power-of-2 bound on the number of rows in the circuit. pub k: usize, + /// Number of rows in the circuit (not including table rows). + pub rows: usize, + /// Number of table rows in the circuit. + pub table_rows: usize, /// Maximum degree of the circuit. pub max_deg: usize, /// Number of advice columns. @@ -199,7 +221,7 @@ impl CostOptions { // - inner product argument (k rounds * 2 * COMM bytes) // - a (SCALAR bytes) // - xi (SCALAR bytes) - comp_bytes(1 + 2 * self.k, 2) + comp_bytes(1 + 2 * self.min_k, 2) } CommitmentScheme::KZGGWC => { let mut nr_rotations = HashSet::new(); @@ -227,7 +249,9 @@ impl CostOptions { let size = plonk + vanishing + multiopen + polycomm; ModelCircuit { - k: self.k, + k: self.min_k, + rows: self.rows_count, + table_rows: self.table_rows_count, max_deg: self.max_degree, advice_columns: self.advice.len(), lookups: self.lookup.len(), @@ -247,7 +271,7 @@ pub fn from_circuit_to_model_circuit< const COMM: usize, const SCALAR: usize, >( - k: u32, + k: Option, circuit: &C, instances: Vec>, comm_scheme: CommitmentScheme, @@ -256,13 +280,35 @@ pub fn from_circuit_to_model_circuit< options.into_model_circuit::(comm_scheme) } -/// Given a Plonk circuit, this function returns [CostOptions] +fn run_mock_prover_with_fallback, C: Circuit>( + circuit: &C, + instances: Vec>, +) -> MockProver { + (5..25) + .find_map(|k| { + panic::catch_unwind(AssertUnwindSafe(|| { + MockProver::run(k, circuit, instances.clone()).unwrap() + })) + .ok() + }) + .expect("A circuit which can be implemented with at most 2^24 rows.") +} + +/// Given a circuit, this function returns [CostOptions]. If no upper bound for `k` is +/// provided, we iterate until a valid `k` is found (this might delay the computation). 
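As a rough usage sketch of the optional `k` described above (the circuit and instance values are placeholders, and the generic parameters are elided here just as in the signatures shown in this diff):

// Let the cost model search for a workable `k` by itself; this may take a while
// because it retries `MockProver::run` with increasing `k` until one succeeds.
let options = from_circuit_to_cost_model_options(None, &my_circuit, vec![vec![Fp::from(1)]]);
// Or pass a concrete `k` up front to skip the search entirely.
let options = from_circuit_to_cost_model_options(Some(12), &my_circuit, vec![vec![Fp::from(1)]]);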
pub fn from_circuit_to_cost_model_options, C: Circuit>( - k: u32, + k_upper_bound: Option, circuit: &C, instances: Vec>, ) -> CostOptions { - let prover = MockProver::run(k, circuit, instances).unwrap(); + let instance_len = instances.iter().map(Vec::len).max().unwrap_or(0); + + let prover = if let Some(k) = k_upper_bound { + MockProver::run(k, circuit, instances).unwrap() + } else { + run_mock_prover_with_fallback(circuit, instances.clone()) + }; + let cs = prover.cs; let fixed = { @@ -307,7 +353,50 @@ pub fn from_circuit_to_cost_model_options, .max() .unwrap_or(0); - let k = prover.k.try_into().unwrap(); + // Note that this computation doesn't assume that `regions` is already in + // order of increasing row indices. + let (rows_count, table_rows_count, compressed_rows_count) = { + let mut rows_count = 0; + let mut table_rows_count = 0; + let mut compressed_rows_count = 0; + for region in prover.regions { + // If `region.rows == None`, then that region has no rows. + if let Some((start, end)) = region.rows { + // Note that `end` is the index of the last row, so when + // counting rows this last row needs to be counted via `end + + // 1`. + + // A region is a _table region_ if all of its columns are `Fixed` + // columns (see that [`plonk::circuit::TableColumn` is a wrapper + // around `Column`]). All of a table region's rows are + // counted towards `table_rows_count`. + if region + .columns + .iter() + .all(|c| *c.column_type() == halo2_middleware::circuit::Any::Fixed) + { + table_rows_count += (end + 1) - start; + } else { + rows_count += (end + 1) - start; + } + compressed_rows_count = std::cmp::max(compressed_rows_count, end + 1); + } + } + (rows_count, table_rows_count, compressed_rows_count) + }; + + let min_k = [ + rows_count + cs.blinding_factors(), + table_rows_count + cs.blinding_factors(), + instance_len, + ] + .into_iter() + .max() + .unwrap(); + + if min_k == instance_len { + println!("WARNING: The dominant factor in your circuit's size is the number of public inputs, which causes the verifier to perform linear work."); + } CostOptions { advice, @@ -318,6 +407,9 @@ pub fn from_circuit_to_cost_model_options, lookup, permutation, shuffle, - k, + min_k, + rows_count, + table_rows_count, + compressed_rows_count, } } diff --git a/halo2_proofs/src/dev/failure.rs b/halo2_frontend/src/dev/failure.rs similarity index 96% rename from halo2_proofs/src/dev/failure.rs rename to halo2_frontend/src/dev/failure.rs index f9f5c27ded..a11d382afa 100644 --- a/halo2_proofs/src/dev/failure.rs +++ b/halo2_frontend/src/dev/failure.rs @@ -11,10 +11,12 @@ use super::{ Region, }; use crate::dev::metadata::Constraint; -use crate::{ - dev::{Instance, Value}, - plonk::{Any, Column, ConstraintSystem, Expression, Gate}, +use crate::dev::{Instance, Value}; +use crate::plonk::{ + circuit::expression::{Column, Expression}, + ConstraintSystem, Gate, }; +use halo2_middleware::circuit::{Any, ColumnMid}; mod emitter; @@ -49,7 +51,10 @@ impl fmt::Display for FailureLocation { impl FailureLocation { /// Returns a `DebugColumn` from Column metadata and `&self`. - pub(super) fn get_debug_column(&self, metadata: metadata::Column) -> DebugColumn { + pub(super) fn get_debug_column( + &self, + metadata: halo2_middleware::circuit::ColumnMid, + ) -> DebugColumn { match self { Self::InRegion { region, .. } => { DebugColumn::from((metadata, region.column_annotations.as_ref())) @@ -121,7 +126,7 @@ impl FailureLocation { } /// The reasons why a particular circuit is not satisfied.
-#[derive(PartialEq, Eq)] +#[derive(PartialEq, Eq, Clone)] pub enum VerifyFailure { /// A cell used in an active gate was not assigned to. CellNotAssigned { @@ -530,7 +535,7 @@ fn render_lookup( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::Fixed, query.column_index))) + .get(&ColumnMid::new(Any::Fixed, query.column_index)) .cloned() .unwrap_or_else(|| format!("F{}", query.column_index())) ) @@ -541,7 +546,7 @@ fn render_lookup( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::advice(), query.column_index))) + .get(&ColumnMid::new(Any::Advice, query.column_index)) .cloned() .unwrap_or_else(|| format!("A{}", query.column_index())) ) @@ -552,7 +557,7 @@ fn render_lookup( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::Instance, query.column_index))) + .get(&ColumnMid::new(Any::Instance, query.column_index)) .cloned() .unwrap_or_else(|| format!("I{}", query.column_index())) ) @@ -576,7 +581,7 @@ fn render_lookup( .. } = query.into(); Some(( - ((column_type, column_index).into(), rotation.0).into(), + (ColumnMid::new(column_type, column_index), rotation.0).into(), match load(query) { Value::Real(v) => util::format_value(v), Value::Poison => unreachable!(), @@ -696,7 +701,7 @@ fn render_shuffle( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::Fixed, query.column_index))) + .get(&ColumnMid::new(Any::Fixed, query.column_index)) .cloned() .unwrap_or_else(|| format!("F{}", query.column_index())) ) @@ -707,7 +712,7 @@ fn render_shuffle( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::advice(), query.column_index))) + .get(&ColumnMid::new(Any::Advice, query.column_index)) .cloned() .unwrap_or_else(|| format!("A{}", query.column_index())) ) @@ -718,7 +723,7 @@ fn render_shuffle( prover .cs .general_column_annotations - .get(&metadata::Column::from((Any::Instance, query.column_index))) + .get(&ColumnMid::new(Any::Instance, query.column_index)) .cloned() .unwrap_or_else(|| format!("I{}", query.column_index())) ) @@ -742,7 +747,7 @@ fn render_shuffle( .. } = query.into(); Some(( - ((column_type, column_index).into(), rotation.0).into(), + (ColumnMid::new(column_type, column_index), rotation.0).into(), match load(query) { Value::Real(v) => util::format_value(v), Value::Poison => unreachable!(), @@ -834,7 +839,7 @@ fn render_shuffle( impl VerifyFailure { /// Emits this failure in pretty-printed format to stderr. 
- pub(super) fn emit(&self, prover: &MockProver) { + pub fn emit(&self, prover: &MockProver) { match self { Self::CellNotAssigned { gate, diff --git a/halo2_proofs/src/dev/failure/emitter.rs b/halo2_frontend/src/dev/failure/emitter.rs similarity index 91% rename from halo2_proofs/src/dev/failure/emitter.rs rename to halo2_frontend/src/dev/failure/emitter.rs index 24109d599b..fe63c475d1 100644 --- a/halo2_proofs/src/dev/failure/emitter.rs +++ b/halo2_frontend/src/dev/failure/emitter.rs @@ -4,10 +4,9 @@ use std::iter; use group::ff::Field; use super::FailureLocation; -use crate::{ - dev::{metadata, util}, - plonk::{Advice, Any, Expression}, -}; +use crate::dev::{metadata, util}; +use crate::plonk::circuit::expression::Expression; +use halo2_middleware::circuit::{Any, ColumnMid}; fn padded(p: char, width: usize, text: &str) -> String { let pad = width - text.len(); @@ -24,7 +23,7 @@ fn column_type_and_idx(column: &metadata::Column) -> String { format!( "{}{}", match column.column_type { - Any::Advice(_) => "A", + Any::Advice => "A", Any::Fixed => "F", Any::Instance => "I", }, @@ -148,7 +147,7 @@ pub(super) fn expression_to_string( &|query| { if let Some(label) = layout .get(&query.rotation.0) - .and_then(|row| row.get(&(Any::Fixed, query.column_index).into())) + .and_then(|row| row.get(&ColumnMid::new(Any::Fixed, query.column_index))) { label.clone() } else if query.rotation.0 == 0 { @@ -162,15 +161,7 @@ pub(super) fn expression_to_string( &|query| { layout .get(&query.rotation.0) - .and_then(|map| { - map.get( - &( - Any::Advice(Advice { phase: query.phase }), - query.column_index, - ) - .into(), - ) - }) + .and_then(|map| map.get(&ColumnMid::new(Any::Advice, query.column_index))) .cloned() .unwrap_or_default() }, @@ -178,7 +169,7 @@ pub(super) fn expression_to_string( layout .get(&query.rotation.0) .unwrap() - .get(&(Any::Instance, query.column_index).into()) + .get(&ColumnMid::new(Any::Instance, query.column_index)) .unwrap() .clone() }, diff --git a/halo2_proofs/src/dev/gates.rs b/halo2_frontend/src/dev/gates.rs similarity index 93% rename from halo2_proofs/src/dev/gates.rs rename to halo2_frontend/src/dev/gates.rs index 4421c0967f..5c48bbed4c 100644 --- a/halo2_proofs/src/dev/gates.rs +++ b/halo2_frontend/src/dev/gates.rs @@ -3,12 +3,10 @@ use std::{ fmt::{self, Write}, }; -use ff::PrimeField; +use halo2_middleware::ff::PrimeField; -use crate::{ - dev::util, - plonk::{sealed::SealedPhase, Circuit, ConstraintSystem, FirstPhase}, -}; +use crate::dev::util; +use crate::plonk::{sealed::SealedPhase, Circuit, ConstraintSystem, FirstPhase}; #[derive(Debug)] struct Constraint { @@ -28,12 +26,12 @@ struct Gate { /// # Examples /// /// ``` -/// use ff::Field; -/// use halo2_proofs::{ +/// use halo2_middleware::ff::Field; +/// use halo2_middleware::poly::Rotation; +/// use halo2_frontend::{ /// circuit::{Layouter, SimpleFloorPlanner}, /// dev::CircuitGates, /// plonk::{Circuit, ConstraintSystem, Error}, -/// poly::Rotation, /// }; /// use halo2curves::pasta::pallas; /// @@ -131,14 +129,13 @@ impl CircuitGates { &|selector| format!("S{}", selector.0), &|query| format!("F{}@{}", query.column_index, query.rotation.0), &|query| { - if query.phase == FirstPhase.to_sealed() { + let phase = cs.advice_column_phase[query.column_index]; + if phase == FirstPhase.to_sealed() { format!("A{}@{}", query.column_index, query.rotation.0) } else { format!( "A{}({})@{}", - query.column_index, - query.phase(), - query.rotation.0 + query.column_index, phase.0, query.rotation.0 ) } }, @@ -181,14 +178,13 @@ impl 
CircuitGates { .collect() }, &|query| { - let query = if query.phase == FirstPhase.to_sealed() { + let phase = cs.advice_column_phase[query.column_index]; + let query = if phase == FirstPhase.to_sealed() { format!("A{}@{}", query.column_index, query.rotation.0) } else { format!( "A{}({})@{}", - query.column_index, - query.phase(), - query.rotation.0 + query.column_index, phase.0, query.rotation.0 ) }; vec![query].into_iter().collect() diff --git a/halo2_proofs/src/dev/graph.rs b/halo2_frontend/src/dev/graph.rs similarity index 95% rename from halo2_proofs/src/dev/graph.rs rename to halo2_frontend/src/dev/graph.rs index 11654fe415..13baca9bbd 100644 --- a/halo2_proofs/src/dev/graph.rs +++ b/halo2_frontend/src/dev/graph.rs @@ -1,13 +1,12 @@ -use ff::Field; +use crate::plonk::{ + Advice, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, Fixed, + FloorPlanner, Instance, Selector, +}; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; use tabbycat::{AttrList, Edge, GraphBuilder, GraphType, Identity, StmtList}; -use crate::{ - circuit::Value, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, -}; +use crate::circuit::Value; pub mod layout; diff --git a/halo2_proofs/src/dev/graph/layout.rs b/halo2_frontend/src/dev/graph/layout.rs similarity index 97% rename from halo2_proofs/src/dev/graph/layout.rs rename to halo2_frontend/src/dev/graph/layout.rs index 94bd7eea14..cb04e4d9e6 100644 --- a/halo2_proofs/src/dev/graph/layout.rs +++ b/halo2_frontend/src/dev/graph/layout.rs @@ -1,4 +1,4 @@ -use ff::Field; +use halo2_middleware::ff::Field; use plotters::{ coord::Shift, prelude::{DrawingArea, DrawingAreaErrorKind, DrawingBackend}, @@ -6,11 +6,9 @@ use plotters::{ use std::collections::HashSet; use std::ops::Range; -use crate::{ - circuit::layouter::RegionColumn, - dev::cost::Layout, - plonk::{Any, Circuit, Column, ConstraintSystem, FloorPlanner}, -}; +use crate::plonk::{Circuit, Column, ConstraintSystem, FloorPlanner}; +use crate::{circuit::layouter::RegionColumn, dev::cost::Layout}; +use halo2_middleware::circuit::Any; /// Graphical renderer for circuit layouts. /// @@ -120,7 +118,7 @@ impl CircuitLayout { column.index() + match column.column_type() { Any::Instance => 0, - Any::Advice(_) => cs.num_instance_columns, + Any::Advice => cs.num_instance_columns, Any::Fixed => cs.num_instance_columns + cs.num_advice_columns, } }; diff --git a/halo2_proofs/src/dev/metadata.rs b/halo2_frontend/src/dev/metadata.rs similarity index 88% rename from halo2_proofs/src/dev/metadata.rs rename to halo2_frontend/src/dev/metadata.rs index f81bfa67a7..ca9152534a 100644 --- a/halo2_proofs/src/dev/metadata.rs +++ b/halo2_frontend/src/dev/metadata.rs @@ -1,51 +1,13 @@ //! Metadata about circuits. use super::metadata::Column as ColumnMetadata; -use crate::plonk::{self, Any}; +use crate::plonk; +use halo2_middleware::circuit::Any; +pub use halo2_middleware::circuit::ColumnMid as Column; use std::{ collections::HashMap, fmt::{self, Debug}, }; -/// Metadata about a column within a circuit. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Column { - /// The type of the column. - pub(super) column_type: Any, - /// The index of the column. - pub(super) index: usize, -} - -impl Column { - /// Return the column type. - pub fn column_type(&self) -> Any { - self.column_type - } - /// Return the column index. 
- pub fn index(&self) -> usize { - self.index - } -} - -impl fmt::Display for Column { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Column('{:?}', {})", self.column_type, self.index) - } -} - -impl From<(Any, usize)> for Column { - fn from((column_type, index): (Any, usize)) -> Self { - Column { column_type, index } - } -} - -impl From> for Column { - fn from(column: plonk::Column) -> Self { - Column { - column_type: *column.column_type(), - index: column.index(), - } - } -} /// A helper structure that allows to print a Column with it's annotation as a single structure. #[derive(Debug, Clone)] @@ -84,7 +46,7 @@ impl fmt::Display for DebugColumn { /// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset /// within a custom gate. -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct VirtualCell { name: String, pub(super) column: Column, diff --git a/halo2_proofs/src/dev/tfp.rs b/halo2_frontend/src/dev/tfp.rs similarity index 95% rename from halo2_proofs/src/dev/tfp.rs rename to halo2_frontend/src/dev/tfp.rs index 011ba3cac0..0d02d1d2bd 100644 --- a/halo2_proofs/src/dev/tfp.rs +++ b/halo2_frontend/src/dev/tfp.rs @@ -1,17 +1,17 @@ use std::{fmt, marker::PhantomData}; -use ff::Field; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; use tracing::{debug, debug_span, span::EnteredSpan}; -use crate::{ - circuit::{ - layouter::{RegionLayouter, SyncDeps}, - AssignedCell, Cell, Layouter, Region, Table, Value, - }, - plonk::{ - Advice, Any, Assigned, Assignment, Challenge, Circuit, Column, ConstraintSystem, Error, - Fixed, FloorPlanner, Instance, Selector, - }, +use crate::circuit::{ + layouter::{RegionLayouter, SyncDeps}, + AssignedCell, Cell, Layouter, Region, Table, Value, +}; +use crate::plonk::{ + circuit::expression::{Challenge, Column}, + Advice, Assigned, Assignment, Circuit, ConstraintSystem, Error, Fixed, FloorPlanner, Instance, + Selector, }; /// A helper type that augments a [`FloorPlanner`] with [`tracing`] spans and events. 
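For orientation, a hedged sketch of how this tracing wrapper is usually wired up; the subscriber setup assumes the `tracing_subscriber` crate and is not part of this diff:

// In the circuit definition, wrap the usual planner:
//     type FloorPlanner = TracingFloorPlanner<SimpleFloorPlanner>;
// Then install any `tracing` subscriber before synthesis so the spans and
// events emitted by the planner become visible:
fn init_tracing() {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .init();
}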
@@ -29,8 +29,8 @@ use crate::{ /// # Examples /// /// ``` -/// use ff::Field; -/// use halo2_proofs::{ +/// use halo2_middleware::ff::Field; +/// use halo2_frontend::{ /// circuit::{floor_planner, Layouter, Value}, /// dev::TracingFloorPlanner, /// plonk::{Circuit, ConstraintSystem, Error}, @@ -223,10 +223,15 @@ impl> Layouter for TracingLayouter { } } -fn debug_value_and_return_cell(value: AssignedCell) -> Cell { - if let Some(v) = value.value().into_option() { +fn debug_value(value: &AssignedCell) { + value.value().assert_if_known(|v| { debug!(target: "assigned", value = ?v); - } + true + }); +} + +fn debug_value_and_return_cell(value: AssignedCell) -> Cell { + debug_value(&value); value.cell() } @@ -309,9 +314,7 @@ impl<'r, F: Field> RegionLayouter for TracingRegion<'r, F> { self.0 .assign_advice_from_instance(annotation, instance, row, advice, offset) .map(|value| { - if let Some(v) = value.value().into_option() { - debug!(target: "assigned", value = ?v); - } + debug_value(&value); (value.cell(), value.value().cloned()) }) } diff --git a/halo2_proofs/src/dev/util.rs b/halo2_frontend/src/dev/util.rs similarity index 93% rename from halo2_proofs/src/dev/util.rs rename to halo2_frontend/src/dev/util.rs index a663f9b80b..5445ffb627 100644 --- a/halo2_proofs/src/dev/util.rs +++ b/halo2_frontend/src/dev/util.rs @@ -2,13 +2,11 @@ use group::ff::Field; use std::collections::BTreeMap; use super::{metadata, CellValue, InstanceValue, Value}; -use crate::{ - plonk::{ - Advice, AdviceQuery, Any, Column, ColumnType, Expression, FixedQuery, Gate, InstanceQuery, - VirtualCell, - }, - poly::Rotation, +use crate::plonk::{ + AdviceQuery, Column, ColumnType, Expression, FixedQuery, Gate, InstanceQuery, VirtualCell, }; +use halo2_middleware::circuit::Any; +use halo2_middleware::poly::Rotation; pub(crate) struct AnyQuery { /// Query index @@ -36,7 +34,7 @@ impl From for AnyQuery { fn from(query: AdviceQuery) -> Self { Self { index: query.index, - column_type: Any::Advice(Advice { phase: query.phase }), + column_type: Any::Advice, column_index: query.column_index, rotation: query.rotation, } @@ -65,7 +63,7 @@ pub(super) fn format_value(v: F) -> String { // Format value as hex. let s = format!("{v:?}"); // Remove leading zeroes. 
- let s = s.strip_prefix("0x").unwrap(); + let s = s.split_once("0x").unwrap().1.split(")").next().unwrap(); let s = s.trim_start_matches('0'); format!("0x{s}") } diff --git a/halo2_frontend/src/lib.rs b/halo2_frontend/src/lib.rs new file mode 100644 index 0000000000..fb45137388 --- /dev/null +++ b/halo2_frontend/src/lib.rs @@ -0,0 +1,5 @@ +#![cfg_attr(docsrs, feature(doc_cfg))] + +pub mod circuit; +pub mod dev; +pub mod plonk; diff --git a/halo2_frontend/src/plonk.rs b/halo2_frontend/src/plonk.rs new file mode 100644 index 0000000000..36701a97be --- /dev/null +++ b/halo2_frontend/src/plonk.rs @@ -0,0 +1,11 @@ +pub mod assigned; +pub mod circuit; +pub mod error; +pub mod keygen; +pub mod lookup; +pub mod permutation; +pub mod shuffle; + +pub use assigned::*; +pub use circuit::*; +pub use error::*; diff --git a/halo2_proofs/src/plonk/assigned.rs b/halo2_frontend/src/plonk/assigned.rs similarity index 99% rename from halo2_proofs/src/plonk/assigned.rs rename to halo2_frontend/src/plonk/assigned.rs index 07de325678..44ec9887bd 100644 --- a/halo2_proofs/src/plonk/assigned.rs +++ b/halo2_frontend/src/plonk/assigned.rs @@ -1,7 +1,6 @@ +use halo2_middleware::ff::Field; use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; -use group::ff::Field; - /// A value assigned to a cell within a circuit. /// /// Stored as a fraction, so the backend can use batch inversion. @@ -365,83 +364,6 @@ impl Assigned { } } -#[cfg(test)] -mod tests { - use halo2curves::pasta::Fp; - - use super::Assigned; - // We use (numerator, denominator) in the comments below to denote a rational. - #[test] - fn add_trivial_to_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // 2 + (1,0) = 2 + 0 = 2 - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn add_rational_to_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) + (1,0) = (1,2) + 0 = (1,2) - // This fails if addition is implemented using normal rules for rationals. - assert_eq!((a + b).evaluate(), a.evaluate()); - assert_eq!((b + a).evaluate(), a.evaluate()); - } - - #[test] - fn sub_trivial_from_inv0_rational() { - // a = 2 - // b = (1,0) - let a = Assigned::Trivial(Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - 2 = 0 - 2 = -2 - // This fails if subtraction is implemented using normal rules for rationals. - assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // 2 - (1,0) = 2 - 0 = 2 - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn sub_rational_from_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,0) - (1,2) = 0 - (1,2) = -(1,2) - // This fails if subtraction is implemented using normal rules for rationals. 
- assert_eq!((b - a).evaluate(), (-a).evaluate()); - - // (1,2) - (1,0) = (1,2) - 0 = (1,2) - assert_eq!((a - b).evaluate(), a.evaluate()); - } - - #[test] - fn mul_rational_by_inv0_rational() { - // a = (1,2) - // b = (1,0) - let a = Assigned::Rational(Fp::one(), Fp::from(2)); - let b = Assigned::Rational(Fp::one(), Fp::zero()); - - // (1,2) * (1,0) = (1,2) * 0 = 0 - assert_eq!((a * b).evaluate(), Fp::zero()); - - // (1,0) * (1,2) = 0 * (1,2) = 0 - assert_eq!((b * a).evaluate(), Fp::zero()); - } -} - #[cfg(test)] mod proptests { use std::{ @@ -663,3 +585,80 @@ mod proptests { } } } + +#[cfg(test)] +mod tests { + use halo2curves::pasta::Fp; + + use super::Assigned; + // We use (numerator, denominator) in the comments below to denote a rational. + #[test] + fn add_trivial_to_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // 2 + (1,0) = 2 + 0 = 2 + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn add_rational_to_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) + (1,0) = (1,2) + 0 = (1,2) + // This fails if addition is implemented using normal rules for rationals. + assert_eq!((a + b).evaluate(), a.evaluate()); + assert_eq!((b + a).evaluate(), a.evaluate()); + } + + #[test] + fn sub_trivial_from_inv0_rational() { + // a = 2 + // b = (1,0) + let a = Assigned::Trivial(Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - 2 = 0 - 2 = -2 + // This fails if subtraction is implemented using normal rules for rationals. + assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // 2 - (1,0) = 2 - 0 = 2 + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn sub_rational_from_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,0) - (1,2) = 0 - (1,2) = -(1,2) + // This fails if subtraction is implemented using normal rules for rationals. 
+ assert_eq!((b - a).evaluate(), (-a).evaluate()); + + // (1,2) - (1,0) = (1,2) - 0 = (1,2) + assert_eq!((a - b).evaluate(), a.evaluate()); + } + + #[test] + fn mul_rational_by_inv0_rational() { + // a = (1,2) + // b = (1,0) + let a = Assigned::Rational(Fp::one(), Fp::from(2)); + let b = Assigned::Rational(Fp::one(), Fp::zero()); + + // (1,2) * (1,0) = (1,2) * 0 = 0 + assert_eq!((a * b).evaluate(), Fp::zero()); + + // (1,0) * (1,2) = 0 * (1,2) = 0 + assert_eq!((b * a).evaluate(), Fp::zero()); + } +} diff --git a/halo2_frontend/src/plonk/circuit.rs b/halo2_frontend/src/plonk/circuit.rs new file mode 100644 index 0000000000..e81f455bc5 --- /dev/null +++ b/halo2_frontend/src/plonk/circuit.rs @@ -0,0 +1,284 @@ +use crate::circuit::{layouter::SyncDeps, Layouter, Value}; +use crate::plonk::{Assigned, Error}; +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; + +pub mod compress_selectors; +pub mod constraint_system; +pub mod expression; + +pub use constraint_system::*; +pub use expression::*; + +/// A column type +pub trait ColumnType: + 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into +{ + /// Return expression from cell + fn query_cell(&self, index: usize, at: Rotation) -> Expression; +} + +/// An advice column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Advice; + +/// A fixed column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Fixed; + +/// An instance column +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Instance; + +impl ColumnType for Advice { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Fixed { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Instance { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }) + } +} +impl ColumnType for Any { + fn query_cell(&self, index: usize, at: Rotation) -> Expression { + match self { + Any::Advice => Expression::Advice(AdviceQuery { + index: None, + column_index: index, + rotation: at, + }), + Any::Fixed => Expression::Fixed(FixedQuery { + index: None, + column_index: index, + rotation: at, + }), + Any::Instance => Expression::Instance(InstanceQuery { + index: None, + column_index: index, + rotation: at, + }), + } + } +} + +impl From for Any { + fn from(_: Advice) -> Any { + Any::Advice + } +} + +impl From for Any { + fn from(_: Fixed) -> Any { + Any::Fixed + } +} + +impl From for Any { + fn from(_: Instance) -> Any { + Any::Instance + } +} + +/// This trait allows a [`Circuit`] to direct some backend to assign a witness +/// for a constraint system. +pub trait Assignment { + /// Creates a new region and enters into it. + /// + /// Panics if we are currently in a region (if `exit_region` was not called). + /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. + /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn enter_region(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Allows the developer to include an annotation for an specific column within a `Region`. + /// + /// This is usually useful for debugging circuit failures. 
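As a hedged illustration of where this call typically originates on the circuit side (assuming the `Region::name_column` helper, which forwards to this trait method; the region and column names are placeholders):

layouter.assign_region(
    || "decomposition",
    |mut region| {
        // Attach a human-readable label to the advice column used in this region;
        // failure reports can then print "byte limb" instead of a bare column index.
        region.name_column(|| "byte limb", config.advice);
        Ok(())
    },
)?;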
+ fn annotate_column(&mut self, annotation: A, column: Column) + where + A: FnOnce() -> AR, + AR: Into; + + /// Exits the current region. + /// + /// Panics if we are not currently in a region (if `enter_region` was not called). + /// + /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. + /// + /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region + fn exit_region(&mut self); + + /// Enables a selector at the given row. + fn enable_selector( + &mut self, + annotation: A, + selector: &Selector, + row: usize, + ) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into; + + /// Queries the cell of an instance column at a particular absolute row. + /// + /// Returns the cell's value, if known. + fn query_instance(&self, column: Column, row: usize) -> Result, Error>; + + /// Assign an advice column value (witness) + fn assign_advice( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign a fixed value + fn assign_fixed( + &mut self, + annotation: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into; + + /// Assign two cells to have the same value + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error>; + + /// Fills a fixed `column` starting from the given `row` with value `to`. + fn fill_from_row( + &mut self, + column: Column, + row: usize, + to: Value>, + ) -> Result<(), Error>; + + /// Queries the value of the given challenge. + /// + /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. + fn get_challenge(&self, challenge: Challenge) -> Value; + + /// Creates a new (sub)namespace and enters into it. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn push_namespace(&mut self, name_fn: N) + where + NR: Into, + N: FnOnce() -> NR; + + /// Exits out of the existing namespace. + /// + /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. + /// + /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace + fn pop_namespace(&mut self, gadget_name: Option); +} + +/// A floor planning strategy for a circuit. +/// +/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used +/// within. +pub trait FloorPlanner { + /// Given the provided `cs`, synthesize the given circuit. + /// + /// `constants` is the list of fixed columns that the layouter may use to assign + /// global constant values. These columns will all have been equality-enabled. + /// + /// Internally, a floor planner will perform the following operations: + /// - Instantiate a [`Layouter`] for this floor planner. + /// - Perform any necessary setup or measurement tasks, which may involve one or more + /// calls to `Circuit::default().synthesize(config, &mut layouter)`. + /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. 
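Most circuits simply select the built-in single-pass strategy through the associated type on `Circuit`, as the tests elsewhere in this diff do; a minimal sketch with placeholder types:

impl<F: Field> Circuit<F> for MyCircuit {
    type Config = MyConfig;
    // Single-pass layout; substitute another `FloorPlanner` to change the strategy.
    type FloorPlanner = SimpleFloorPlanner;
    // ... remaining items as usual ...
}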
+ fn synthesize + SyncDeps, C: Circuit>( + cs: &mut CS, + circuit: &C, + config: C::Config, + constants: Vec>, + ) -> Result<(), Error>; +} + +/// This is a trait that circuits provide implementations for so that the +/// backend prover can ask the circuit to synthesize using some given +/// [`ConstraintSystem`] implementation. +pub trait Circuit { + /// This is a configuration object that stores things like columns. + type Config: Clone; + /// The floor planner used for this circuit. This is an associated type of the + /// `Circuit` trait because its behaviour is circuit-critical. + type FloorPlanner: FloorPlanner; + /// Optional circuit configuration parameters. Requires the `circuit-params` feature. + #[cfg(feature = "circuit-params")] + type Params: Default; + + /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to + /// `None`). For most circuits, this will be equal to `Self::default()`. + fn without_witnesses(&self) -> Self; + + /// Returns a reference to the parameters that should be used to configure the circuit. + /// Requires the `circuit-params` feature. + #[cfg(feature = "circuit-params")] + fn params(&self) -> Self::Params { + Self::Params::default() + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. Takes a runtime parameter. The default + /// implementation calls `configure` ignoring the `_params` argument in order to easily support + /// circuits that don't use configuration parameters. + #[cfg(feature = "circuit-params")] + fn configure_with_params( + meta: &mut ConstraintSystem, + _params: Self::Params, + ) -> Self::Config { + Self::configure(meta) + } + + /// The circuit is given an opportunity to describe the exact gate + /// arrangement, column arrangement, etc. + fn configure(meta: &mut ConstraintSystem) -> Self::Config; + + /// Given the provided `cs`, synthesize the circuit. The concrete type of + /// the caller will be different depending on the context, and they may or + /// may not expect to have a witness present. + fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; +} diff --git a/halo2_proofs/src/plonk/circuit/compress_selectors.rs b/halo2_frontend/src/plonk/circuit/compress_selectors.rs similarity index 98% rename from halo2_proofs/src/plonk/circuit/compress_selectors.rs rename to halo2_frontend/src/plonk/circuit/compress_selectors.rs index 053ebe3178..46af4a0e35 100644 --- a/halo2_proofs/src/plonk/circuit/compress_selectors.rs +++ b/halo2_frontend/src/plonk/circuit/compress_selectors.rs @@ -1,9 +1,9 @@ use super::Expression; -use ff::Field; +use halo2_middleware::ff::Field; /// This describes a selector and where it is activated. #[derive(Debug, Clone)] -pub struct SelectorDescription { +pub(crate) struct SelectorDescription { /// The selector that this description references, by index. pub selector: usize, @@ -20,7 +20,7 @@ pub struct SelectorDescription { /// This describes the assigned combination of a particular selector as well as /// the expression it should be substituted with. #[derive(Debug, Clone)] -pub struct SelectorAssignment { +pub(crate) struct SelectorAssignment { /// The selector that this structure references, by index. pub selector: usize, @@ -48,7 +48,7 @@ pub struct SelectorAssignment { /// substitutions to the constraint system. /// /// This function is completely deterministic. 
-pub fn process( +pub(crate) fn process( mut selectors: Vec, max_degree: usize, mut allocate_fixed_column: E, @@ -229,7 +229,8 @@ where #[cfg(test)] mod tests { use super::*; - use crate::{plonk::FixedQuery, poly::Rotation}; + use crate::plonk::FixedQuery; + use halo2_middleware::poly::Rotation; use halo2curves::pasta::Fp; use proptest::collection::{vec, SizeRange}; use proptest::prelude::*; diff --git a/halo2_frontend/src/plonk/circuit/constraint_system.rs b/halo2_frontend/src/plonk/circuit/constraint_system.rs new file mode 100644 index 0000000000..bd95af59e1 --- /dev/null +++ b/halo2_frontend/src/plonk/circuit/constraint_system.rs @@ -0,0 +1,1211 @@ +use super::compress_selectors; +use super::expression::sealed; +use crate::plonk::{ + lookup, permutation, shuffle, Advice, AdviceQuery, Challenge, Column, Expression, FirstPhase, + Fixed, FixedQuery, Instance, InstanceQuery, Phase, Selector, TableColumn, +}; +use core::cmp::max; +use halo2_middleware::circuit::{Any, ColumnMid, ConstraintSystemMid, GateMid}; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::fmt::Debug; + +/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset +/// within a custom gate. +#[derive(Clone, Debug)] +pub struct VirtualCell { + pub column: Column, + pub rotation: Rotation, +} + +impl>> From<(Col, Rotation)> for VirtualCell { + fn from((column, rotation): (Col, Rotation)) -> Self { + VirtualCell { + column: column.into(), + rotation, + } + } +} + +/// An individual polynomial constraint. +/// +/// These are returned by the closures passed to `ConstraintSystem::create_gate`. +#[derive(Debug)] +pub struct Constraint { + name: String, + poly: Expression, +} + +impl From> for Constraint { + fn from(poly: Expression) -> Self { + Constraint { + name: "".to_string(), + poly, + } + } +} + +impl> From<(S, Expression)> for Constraint { + fn from((name, poly): (S, Expression)) -> Self { + Constraint { + name: name.as_ref().to_string(), + poly, + } + } +} + +impl From> for Vec> { + fn from(poly: Expression) -> Self { + vec![Constraint { + name: "".to_string(), + poly, + }] + } +} + +/// A set of polynomial constraints with a common selector. +/// +/// ``` +/// use halo2_middleware::poly::Rotation; +/// use halo2curves::pasta::Fp; +/// # use halo2_frontend::plonk::{Constraints, Expression, ConstraintSystem}; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let c = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let next = meta.query_advice(a, Rotation::next()); +/// let a = meta.query_advice(a, Rotation::cur()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let c = meta.query_advice(c, Rotation::cur()); +/// let s_ternary = meta.query_selector(s); +/// +/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); +/// +/// Constraints::with_selector( +/// s_ternary, +/// std::array::IntoIter::new([ +/// ("a is boolean", a.clone() * one_minus_a.clone()), +/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), +/// ]), +/// ) +/// }); +/// ``` +/// +/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to +/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, +/// you can pass an array directly. 
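On toolchains where arrays are `IntoIterator` by value (Rust 1.53 and later), the gate body from the doc example above can pass the array directly, as the note says:

Constraints::with_selector(
    s_ternary,
    [
        ("a is boolean", a.clone() * one_minus_a.clone()),
        ("next == a ? b : c", next - (a * b + one_minus_a * c)),
    ],
)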
+#[derive(Debug)] +pub struct Constraints>, Iter: IntoIterator> { + selector: Expression, + constraints: Iter, +} + +impl>, Iter: IntoIterator> Constraints { + /// Constructs a set of constraints that are controlled by the given selector. + /// + /// Each constraint `c` in `iterator` will be converted into the constraint + /// `selector * c`. + pub fn with_selector(selector: Expression, constraints: Iter) -> Self { + Constraints { + selector, + constraints, + } + } +} + +fn apply_selector_to_constraint>>( + (selector, c): (Expression, C), +) -> Constraint { + let constraint: Constraint = c.into(); + Constraint { + name: constraint.name, + poly: selector * constraint.poly, + } +} + +type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; +type ConstraintsIterator = std::iter::Map< + std::iter::Zip>, I>, + ApplySelectorToConstraint, +>; + +impl>, Iter: IntoIterator> IntoIterator + for Constraints +{ + type Item = Constraint; + type IntoIter = ConstraintsIterator; + + fn into_iter(self) -> Self::IntoIter { + std::iter::repeat(self.selector) + .zip(self.constraints) + .map(apply_selector_to_constraint) + } +} + +/// Gate +#[derive(Clone, Debug)] +pub struct Gate { + pub(crate) name: String, + pub(crate) constraint_names: Vec, + pub(crate) polys: Vec>, + /// We track queried selectors separately from other cells, so that we can use them to + /// trigger debug checks on gates. + pub(crate) queried_selectors: Vec, + pub(crate) queried_cells: Vec, +} + +impl Gate { + /// Returns the gate name. + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the name of the constraint at index `constraint_index`. + pub fn constraint_name(&self, constraint_index: usize) -> &str { + self.constraint_names[constraint_index].as_str() + } + + /// Returns constraints of this gate + pub fn polynomials(&self) -> &[Expression] { + &self.polys + } + + pub fn queried_selectors(&self) -> &[Selector] { + &self.queried_selectors + } + + pub fn queried_cells(&self) -> &[VirtualCell] { + &self.queried_cells + } +} + +impl From> for ConstraintSystemMid { + fn from(cs: ConstraintSystem) -> Self { + ConstraintSystemMid { + num_fixed_columns: cs.num_fixed_columns, + num_advice_columns: cs.num_advice_columns, + num_instance_columns: cs.num_instance_columns, + num_challenges: cs.num_challenges, + unblinded_advice_columns: cs.unblinded_advice_columns, + advice_column_phase: cs.advice_column_phase.iter().map(|p| p.0).collect(), + challenge_phase: cs.challenge_phase.iter().map(|p| p.0).collect(), + gates: cs + .gates + .into_iter() + .flat_map(|mut g| { + let constraint_names = std::mem::take(&mut g.constraint_names); + let gate_name = g.name.clone(); + g.polys.into_iter().enumerate().map(move |(i, e)| { + let name = match constraint_names[i].as_str() { + "" => gate_name.clone(), + constraint_name => format!("{gate_name}:{constraint_name}"), + }; + GateMid { + name, + poly: e.into(), + } + }) + }) + .collect(), + permutation: halo2_middleware::permutation::ArgumentMid { + columns: cs + .permutation + .columns + .into_iter() + .map(|c| c.into()) + .collect(), + }, + lookups: cs + .lookups + .into_iter() + .map(|l| halo2_middleware::lookup::ArgumentMid { + name: l.name, + input_expressions: l.input_expressions.into_iter().map(|e| e.into()).collect(), + table_expressions: l.table_expressions.into_iter().map(|e| e.into()).collect(), + }) + .collect(), + shuffles: cs + .shuffles + .into_iter() + .map(|s| halo2_middleware::shuffle::ArgumentMid { + name: s.name.clone(), + input_expressions: 
s.input_expressions.into_iter().map(|e| e.into()).collect(), + shuffle_expressions: s + .shuffle_expressions + .into_iter() + .map(|e| e.into()) + .collect(), + }) + .collect(), + general_column_annotations: cs.general_column_annotations, + minimum_degree: cs.minimum_degree, + } + } +} + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystem { + pub(crate) num_fixed_columns: usize, + pub(crate) num_advice_columns: usize, + pub(crate) num_instance_columns: usize, + pub(crate) num_selectors: usize, + pub(crate) num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub(crate) unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub(crate) advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub(crate) challenge_phase: Vec, + + /// This is a cached vector that maps virtual selectors to the concrete + /// fixed column that they were compressed into. This is just used by dev + /// tooling right now. + pub(crate) selector_map: Vec>, + + pub(crate) gates: Vec>, + pub(crate) advice_queries: Vec<(Column, Rotation)>, + // Contains an integer for each advice column + // identifying how many distinct queries it has + // so far; should be same length as num_advice_columns. + pub(crate) num_advice_queries: Vec, + pub(crate) instance_queries: Vec<(Column, Rotation)>, + pub(crate) fixed_queries: Vec<(Column, Rotation)>, + + // Permutation argument for performing equality constraints + pub(crate) permutation: permutation::Argument, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub(crate) lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub(crate) shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub(crate) general_column_annotations: HashMap, + + // Vector of fixed columns, which can be used to store constant values + // that are copied into advice columns. + pub(crate) constants: Vec>, + + pub(crate) minimum_degree: Option, +} + +impl Default for ConstraintSystem { + fn default() -> ConstraintSystem { + ConstraintSystem { + num_fixed_columns: 0, + num_advice_columns: 0, + num_instance_columns: 0, + num_selectors: 0, + num_challenges: 0, + unblinded_advice_columns: Vec::new(), + advice_column_phase: Vec::new(), + challenge_phase: Vec::new(), + selector_map: vec![], + gates: vec![], + fixed_queries: Vec::new(), + advice_queries: Vec::new(), + num_advice_queries: Vec::new(), + instance_queries: Vec::new(), + permutation: permutation::Argument::default(), + lookups: Vec::new(), + shuffles: Vec::new(), + general_column_annotations: HashMap::new(), + constants: vec![], + minimum_degree: None, + } + } +} + +impl ConstraintSystem { + /// Enables this fixed column to be used for global constant assignments. + /// + /// # Side-effects + /// + /// The column will be equality-enabled. 
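A brief, hypothetical sketch of the intended flow for this API: register a fixed column for constants in `configure`, then copy constants into advice cells during synthesis (the column and value names are placeholders):

// In `configure`:
let constants = meta.fixed_column();
meta.enable_constant(constants);

// In `synthesize`, inside a region closure:
let five = region.assign_advice_from_constant(|| "five", config.advice, 0, F::from(5))?;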
+ pub fn enable_constant(&mut self, column: Column) { + if !self.constants.contains(&column) { + self.constants.push(column); + self.enable_equality(column); + } + } + + /// Enable the ability to enforce equality over cells in this column + pub fn enable_equality>>(&mut self, column: C) { + let column = column.into(); + self.query_any_index(column, Rotation::cur()); + self.permutation.add_column(column); + } + + /// Add a lookup argument for some input expressions and table columns. + /// + /// `table_map` returns a map between input expressions and the table columns + /// they need to match. + pub fn lookup>( + &mut self, + name: S, + table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, TableColumn)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + let table_map = table_map(&mut cells) + .into_iter() + .map(|(mut input, table)| { + if input.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + let mut table = cells.query_fixed(table.inner(), Rotation::cur()); + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + let index = self.lookups.len(); + + self.lookups + .push(lookup::Argument::new(name.as_ref(), table_map)); + + index + } + + /// Add a lookup argument for some input expressions and table expressions. + /// + /// `table_map` returns a map between input expressions and the table expressions + /// they need to match. + /// + /// **NOTE:** + /// We should use an extra `Fixed` column or a `Selector` for tagging the table rows. + /// Also, a pair of tagging expressions (`[lookup_activator, table_activator]`) needs to be included in the `table_map`. + /// Otherwise, there is a soundness error. (See [here](https://github.com/privacy-scaling-explorations/halo2/issues/335)) + /// For correct usage, please refer to + /// [here](https://github.com/privacy-scaling-explorations/halo2/blob/main/halo2_proofs/tests/frontend_backend_split.rs) + /// and [here](https://github.com/privacy-scaling-explorations/halo2/blob/main/halo2_frontend/src/dev.rs). + pub fn lookup_any>( + &mut self, + name: S, + table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + + let mut is_all_table_expr_single_fixed = true; + let mut is_all_table_expr_contain_fixed_or_selector = true; + let mut is_tagging_exprs_pair_exists = false; + + let table_map = table_map(&mut cells) + .into_iter() + .map(|(mut input, mut table)| { + if input.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + if table.contains_simple_selector() { + panic!("expression containing simple selector supplied to lookup argument"); + } + + is_all_table_expr_single_fixed &= table.degree() == 1 && table.contains_fixed_col(); + is_all_table_expr_contain_fixed_or_selector &= + table.contains_fixed_col_or_selector(); + is_tagging_exprs_pair_exists |= + table.contains_fixed_col_or_selector() && table.degree() == 1; + + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + + #[cfg(feature = "lookup-any-sanity-checks")] + { + // NOTE: These checks try to detect unsound cases of lookups and are only active + // with the `lookup-any-sanity-checks` feature. False positives may exist: in some + // particular scenarios the lookup can be sound but these checks will not pass, + // leading to panics.
For those cases you can disable the + // `lookup-any-sanity-checks` feature. We will appreciate it if you report false + // positives by opening issues on the repository. + if is_all_table_expr_single_fixed { + panic!("all table expressions contain only fixed query, should use `lookup` api instead of `lookup_any`"); + } + if !is_all_table_expr_contain_fixed_or_selector { + panic!("all table expressions need selector/fixed query for tagging"); + } + if !is_tagging_exprs_pair_exists { + panic!("pair of tagging expressions(query of the tag columns or mutiple query combinations) should be included"); + } + } + + let index = self.lookups.len(); + + self.lookups + .push(lookup::Argument::new(name.as_ref(), table_map)); + + index + } + + /// Add a shuffle argument for some input expressions and table expressions. + pub fn shuffle>( + &mut self, + name: S, + shuffle_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, + ) -> usize { + let mut cells = VirtualCells::new(self); + let shuffle_map = shuffle_map(&mut cells) + .into_iter() + .map(|(mut input, mut table)| { + input.query_cells(&mut cells); + table.query_cells(&mut cells); + (input, table) + }) + .collect(); + let index = self.shuffles.len(); + + self.shuffles + .push(shuffle::Argument::new(name.as_ref(), shuffle_map)); + + index + } + + pub(super) fn query_fixed_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.fixed_queries.len(); + self.fixed_queries.push((column, at)); + + index + } + + pub(crate) fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.advice_queries.len(); + self.advice_queries.push((column, at)); + self.num_advice_queries[column.index] += 1; + + index + } + + pub(super) fn query_instance_index(&mut self, column: Column, at: Rotation) -> usize { + // Return existing query, if it exists + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + // Make a new query + let index = self.instance_queries.len(); + self.instance_queries.push((column, at)); + + index + } + + fn query_any_index(&mut self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice => self.query_advice_index(Column::::try_from(column).unwrap(), at), + Any::Fixed => self.query_fixed_index(Column::::try_from(column).unwrap(), at), + Any::Instance => { + self.query_instance_index(Column::::try_from(column).unwrap(), at) + } + } + } + + pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, advice_query) in self.advice_queries.iter().enumerate() { + if advice_query == &(column, at) { + return index; + } + } + + panic!("get_advice_query_index called for non-existent query"); + } + + pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { + for (index, fixed_query) in self.fixed_queries.iter().enumerate() { + if fixed_query == &(column, at) { + return index; + } + } + + panic!("get_fixed_query_index called for non-existent query"); + } + + pub(crate) fn get_instance_query_index(&self, column: Column, 
at: Rotation) -> usize { + for (index, instance_query) in self.instance_queries.iter().enumerate() { + if instance_query == &(column, at) { + return index; + } + } + + panic!("get_instance_query_index called for non-existent query"); + } + + pub fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { + match column.column_type() { + Any::Advice => { + self.get_advice_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Fixed => { + self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) + } + Any::Instance => { + self.get_instance_query_index(Column::::try_from(column).unwrap(), at) + } + } + } + + /// Sets the minimum degree required by the circuit, which can be set to a + /// larger amount than actually needed. This can be used, for example, to + /// force the permutation argument to involve more columns in the same set. + pub fn set_minimum_degree(&mut self, degree: usize) { + self.minimum_degree = Some(degree); + } + + /// Creates a new gate. + /// + /// # Panics + /// + /// A gate is required to contain polynomial constraints. This method will panic if + /// `constraints` returns an empty iterator. + pub fn create_gate>, Iter: IntoIterator, S: AsRef>( + &mut self, + name: S, + constraints: impl FnOnce(&mut VirtualCells<'_, F>) -> Iter, + ) { + let mut cells = VirtualCells::new(self); + let constraints = constraints(&mut cells); + let (constraint_names, polys): (_, Vec<_>) = constraints + .into_iter() + .map(|c| c.into()) + .map(|mut c: Constraint| { + c.poly.query_cells(&mut cells); + (c.name, c.poly) + }) + .unzip(); + + let queried_selectors = cells.queried_selectors; + let queried_cells = cells.queried_cells; + + assert!( + !polys.is_empty(), + "Gates must contain at least one constraint." + ); + + self.gates.push(Gate { + name: name.as_ref().to_string(), + constraint_names, + polys, + queried_selectors, + queried_cells, + }); + } + + /// This will compress selectors together depending on their provided + /// assignments. This `ConstraintSystem` will then be modified to add new + /// fixed columns (representing the actual selectors) and will return the + /// polynomials for those columns. Finally, an internal map is updated to + /// find which fixed column corresponds with a given `Selector`. + /// + /// Do not call this twice. Yes, this should be a builder pattern instead. + pub fn compress_selectors(mut self, selectors: Vec>) -> (Self, Vec>) { + // The number of provided selector assignments must be the number we + // counted for this constraint system. + assert_eq!(selectors.len(), self.num_selectors); + + // Compute the maximal degree of every selector. We only consider the + // expressions in gates, as lookup arguments cannot support simple + // selectors. Selectors that are complex or do not appear in any gates + // will have degree zero. + let mut degrees = vec![0; selectors.len()]; + for expr in self.gates.iter().flat_map(|gate| gate.polys.iter()) { + if let Some(selector) = expr.extract_simple_selector() { + degrees[selector.0] = max(degrees[selector.0], expr.degree()); + } + } + + // We will not increase the degree of the constraint system, so we limit + // ourselves to the largest existing degree constraint. 
+ let max_degree = self.degree(); + + let mut new_columns = vec![]; + let (polys, selector_assignment) = compress_selectors::process( + selectors + .into_iter() + .zip(degrees) + .enumerate() + .map( + |(i, (activations, max_degree))| compress_selectors::SelectorDescription { + selector: i, + activations, + max_degree, + }, + ) + .collect(), + max_degree, + || { + let column = self.fixed_column(); + new_columns.push(column); + Expression::Fixed(FixedQuery { + index: Some(self.query_fixed_index(column, Rotation::cur())), + column_index: column.index, + rotation: Rotation::cur(), + }) + }, + ); + + let mut selector_map = vec![None; selector_assignment.len()]; + let mut selector_replacements = vec![None; selector_assignment.len()]; + for assignment in selector_assignment { + selector_replacements[assignment.selector] = Some(assignment.expression); + selector_map[assignment.selector] = Some(new_columns[assignment.combination_index]); + } + + self.selector_map = selector_map + .into_iter() + .map(|a| a.unwrap()) + .collect::>(); + let selector_replacements = selector_replacements + .into_iter() + .map(|a| a.unwrap()) + .collect::>(); + self.replace_selectors_with_fixed(&selector_replacements); + + (self, polys) + } + + /// Does not combine selectors and directly replaces them everywhere with fixed columns. + pub fn directly_convert_selectors_to_fixed( + mut self, + selectors: Vec>, + ) -> (Self, Vec>) { + // The number of provided selector assignments must be the number we + // counted for this constraint system. + assert_eq!(selectors.len(), self.num_selectors); + + let (polys, selector_replacements): (Vec<_>, Vec<_>) = selectors + .into_iter() + .map(|selector| { + let poly = selector + .iter() + .map(|b| if *b { F::ONE } else { F::ZERO }) + .collect::>(); + let column = self.fixed_column(); + let rotation = Rotation::cur(); + let expr = Expression::Fixed(FixedQuery { + index: Some(self.query_fixed_index(column, rotation)), + column_index: column.index, + rotation, + }); + (poly, expr) + }) + .unzip(); + + self.replace_selectors_with_fixed(&selector_replacements); + self.num_selectors = 0; + + (self, polys) + } + + fn replace_selectors_with_fixed(&mut self, selector_replacements: &[Expression]) { + fn replace_selectors( + expr: &mut Expression, + selector_replacements: &[Expression], + must_be_nonsimple: bool, + ) { + *expr = expr.evaluate( + &|constant| Expression::Constant(constant), + &|selector| { + if must_be_nonsimple { + // Simple selectors are prohibited from appearing in + // expressions in the lookup argument by + // `ConstraintSystem`. 
+ assert!(!selector.is_simple()); + } + + selector_replacements[selector.0].clone() + }, + &|query| Expression::Fixed(query), + &|query| Expression::Advice(query), + &|query| Expression::Instance(query), + &|challenge| Expression::Challenge(challenge), + &|a| -a, + &|a, b| a + b, + &|a, b| a * b, + &|a, f| a * f, + ); + } + + // Substitute selectors for the real fixed columns in all gates + for expr in self.gates.iter_mut().flat_map(|gate| gate.polys.iter_mut()) { + replace_selectors(expr, selector_replacements, false); + } + + // Substitute non-simple selectors for the real fixed columns in all + // lookup expressions + for expr in self.lookups.iter_mut().flat_map(|lookup| { + lookup + .input_expressions + .iter_mut() + .chain(lookup.table_expressions.iter_mut()) + }) { + replace_selectors(expr, selector_replacements, true); + } + + for expr in self.shuffles.iter_mut().flat_map(|shuffle| { + shuffle + .input_expressions + .iter_mut() + .chain(shuffle.shuffle_expressions.iter_mut()) + }) { + replace_selectors(expr, selector_replacements, true); + } + } + + /// Allocate a new (simple) selector. Simple selectors cannot be added to + /// expressions nor multiplied by other expressions containing simple + /// selectors. Also, simple selectors may not appear in lookup argument + /// inputs. + pub fn selector(&mut self) -> Selector { + let index = self.num_selectors; + self.num_selectors += 1; + Selector(index, true) + } + + /// Allocate a new complex selector that can appear anywhere + /// within expressions. + pub fn complex_selector(&mut self) -> Selector { + let index = self.num_selectors; + self.num_selectors += 1; + Selector(index, false) + } + + /// Allocates a new fixed column that can be used in a lookup table. + pub fn lookup_table_column(&mut self) -> TableColumn { + TableColumn { + inner: self.fixed_column(), + } + } + + /// Annotate a Lookup column. + pub fn annotate_lookup_column(&mut self, column: TableColumn, annotation: A) + where + A: Fn() -> AR, + AR: Into, + { + // We don't care if the table has already an annotation. If it's the case we keep the new one. + self.general_column_annotations.insert( + ColumnMid { + index: column.inner().index, + column_type: halo2_middleware::circuit::Any::Fixed, + }, + annotation().into(), + ); + } + + /// Annotate an Instance column. + pub fn annotate_lookup_any_column(&mut self, column: T, annotation: A) + where + A: Fn() -> AR, + AR: Into, + T: Into>, + { + self.annotate_column(column, annotation) + } + + /// Annotate a column. + pub fn annotate_column(&mut self, column: T, annotation: A) + where + A: Fn() -> AR, + AR: Into, + T: Into>, + { + let col_any = column.into(); + // We don't care if the table has already an annotation. If it's the case we keep the new one. + self.general_column_annotations.insert( + ColumnMid { + column_type: col_any.column_type, + index: col_any.index, + }, + annotation().into(), + ); + } + + /// Allocate a new fixed column + pub fn fixed_column(&mut self) -> Column { + let tmp = Column { + index: self.num_fixed_columns, + column_type: Fixed, + }; + self.num_fixed_columns += 1; + tmp + } + + /// Allocate a new unblinded advice column at `FirstPhase` + pub fn unblinded_advice_column(&mut self) -> Column { + self.unblinded_advice_column_in(FirstPhase) + } + + /// Allocate a new advice column at `FirstPhase` + pub fn advice_column(&mut self) -> Column { + self.advice_column_in(FirstPhase) + } + + /// Allocate a new unblinded advice column in given phase. 
This allows for the generation of deterministic commitments to advice columns + /// which can be used to split large circuits into smaller ones, whose proofs can then be "joined" together by their common witness commitments. + pub fn unblinded_advice_column_in(&mut self, phase: P) -> Column { + let phase = phase.to_sealed(); + if let Some(previous_phase) = phase.prev() { + self.assert_phase_exists( + previous_phase, + format!("Column in later phase {phase:?}").as_str(), + ); + } + + let tmp = Column { + index: self.num_advice_columns, + column_type: Advice, + }; + self.unblinded_advice_columns.push(tmp.index); + self.num_advice_columns += 1; + self.num_advice_queries.push(0); + self.advice_column_phase.push(phase); + tmp + } + + /// Allocate a new advice column in given phase + /// + /// # Panics + /// + /// It panics if previous phase before the given one doesn't have advice column allocated. + pub fn advice_column_in(&mut self, phase: P) -> Column { + let phase = phase.to_sealed(); + if let Some(previous_phase) = phase.prev() { + self.assert_phase_exists( + previous_phase, + format!("Column in later phase {phase:?}").as_str(), + ); + } + + let tmp = Column { + index: self.num_advice_columns, + column_type: Advice, + }; + self.num_advice_columns += 1; + self.num_advice_queries.push(0); + self.advice_column_phase.push(phase); + tmp + } + + /// Allocate a new instance column + pub fn instance_column(&mut self) -> Column { + let tmp = Column { + index: self.num_instance_columns, + column_type: Instance, + }; + self.num_instance_columns += 1; + tmp + } + + /// Requests a challenge that is usable after the given phase. + /// + /// # Panics + /// + /// It panics if the given phase doesn't have advice column allocated. + pub fn challenge_usable_after(&mut self, phase: P) -> Challenge { + let phase = phase.to_sealed(); + self.assert_phase_exists( + phase, + format!("Challenge usable after phase {phase:?}").as_str(), + ); + + let tmp = Challenge { + index: self.num_challenges, + phase: phase.0, + }; + self.num_challenges += 1; + self.challenge_phase.push(phase); + tmp + } + + /// Helper funciotn to assert phase exists, to make sure phase-aware resources + /// are allocated in order, and to avoid any phase to be skipped accidentally + /// to cause unexpected issue in the future. + fn assert_phase_exists(&self, phase: sealed::Phase, resource: &str) { + self.advice_column_phase + .iter() + .find(|advice_column_phase| **advice_column_phase == phase) + .unwrap_or_else(|| { + panic!( + "No Column is used in phase {phase:?} while allocating a new {resource:?}" + ) + }); + } + + /// Returns the list of phases + pub fn phases(&self) -> impl Iterator { + let max_phase = self + .advice_column_phase + .iter() + .max() + .map(|phase| phase.0) + .unwrap_or_default(); + (0..=max_phase).map(sealed::Phase) + } + + /// Compute the degree of the constraint system (the maximum degree of all + /// constraints). + pub fn degree(&self) -> usize { + // The permutation argument will serve alongside the gates, so must be + // accounted for. + let mut degree = self.permutation.required_degree(); + + // The lookup argument also serves alongside the gates and must be accounted + // for. + degree = std::cmp::max( + degree, + self.lookups + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // The lookup argument also serves alongside the gates and must be accounted + // for. 
+ degree = std::cmp::max( + degree, + self.shuffles + .iter() + .map(|l| l.required_degree()) + .max() + .unwrap_or(1), + ); + + // Account for each gate to ensure our quotient polynomial is the + // correct degree and that our extended domain is the right size. + degree = std::cmp::max( + degree, + self.gates + .iter() + .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) + .max() + .unwrap_or(0), + ); + + std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) + } + + /// Compute the number of blinding factors necessary to perfectly blind + /// each of the prover's witness polynomials. + pub fn blinding_factors(&self) -> usize { + // All of the prover's advice columns are evaluated at no more than + let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); + // distinct points during gate checks. + + // - The permutation argument witness polynomials are evaluated at most 3 times. + // - Each lookup argument has independent witness polynomials, and they are + // evaluated at most 2 times. + let factors = std::cmp::max(3, factors); + + // Each polynomial is evaluated at most an additional time during + // multiopen (at x_3 to produce q_evals): + let factors = factors + 1; + + // h(x) is derived by the other evaluations so it does not reveal + // anything; in fact it does not even appear in the proof. + + // h(x_3) is also not revealed; the verifier only learns a single + // evaluation of a polynomial in x_1 which has h(x_3) and another random + // polynomial evaluated at x_3 as coefficients -- this random polynomial + // is "random_poly" in the vanishing argument. + + // Add an additional blinding factor as a slight defense against + // off-by-one errors. + factors + 1 + } + + /// Returns the minimum necessary rows that need to exist in order to + /// account for e.g. blinding factors. + pub fn minimum_rows(&self) -> usize { + self.blinding_factors() // m blinding factors + + 1 // for l_{-(m + 1)} (l_last) + + 1 // for l_0 (just for extra breathing room for the permutation + // argument, to essentially force a separation in the + // permutation polynomial between the roles of l_last, l_0 + // and the interstitial values.) 
+ + 1 // for at least one row + } + + /// Returns number of fixed columns + pub fn num_fixed_columns(&self) -> usize { + self.num_fixed_columns + } + + /// Returns number of advice columns + pub fn num_advice_columns(&self) -> usize { + self.num_advice_columns + } + + /// Returns number of instance columns + pub fn num_instance_columns(&self) -> usize { + self.num_instance_columns + } + + /// Returns number of selectors + pub fn num_selectors(&self) -> usize { + self.num_selectors + } + + /// Returns number of challenges + pub fn num_challenges(&self) -> usize { + self.num_challenges + } + + /// Returns phase of advice columns + pub fn advice_column_phase(&self) -> Vec { + self.advice_column_phase + .iter() + .map(|phase| phase.0) + .collect() + } + + /// Returns phase of challenges + pub fn challenge_phase(&self) -> Vec { + self.challenge_phase.iter().map(|phase| phase.0).collect() + } + + /// Returns gates + pub fn gates(&self) -> &Vec> { + &self.gates + } + + /// Returns general column annotations + pub fn general_column_annotations(&self) -> &HashMap { + &self.general_column_annotations + } + + /// Returns advice queries + pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { + &self.advice_queries + } + + /// Returns instance queries + pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { + &self.instance_queries + } + + /// Returns fixed queries + pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { + &self.fixed_queries + } + + /// Returns permutation argument + pub fn permutation(&self) -> &permutation::Argument { + &self.permutation + } + + /// Returns lookup arguments + pub fn lookups(&self) -> &Vec> { + &self.lookups + } + + /// Returns shuffle arguments + pub fn shuffles(&self) -> &Vec> { + &self.shuffles + } + + /// Returns constants + pub fn constants(&self) -> &Vec> { + &self.constants + } +} + +/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup +/// table. +#[derive(Debug)] +pub struct VirtualCells<'a, F: Field> { + pub(super) meta: &'a mut ConstraintSystem, + pub(super) queried_selectors: Vec, + pub(super) queried_cells: Vec, +} + +impl<'a, F: Field> VirtualCells<'a, F> { + fn new(meta: &'a mut ConstraintSystem) -> Self { + VirtualCells { + meta, + queried_selectors: vec![], + queried_cells: vec![], + } + } + + /// Query a selector at the current position. 
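+ ///
+ /// A usage sketch (illustrative; `s` and `a` stand for a selector and an
+ /// advice column assumed to have been allocated on the `ConstraintSystem`):
+ /// ```ignore
+ /// meta.create_gate("copy", |cells| {
+ ///     let s = cells.query_selector(s);
+ ///     let cur = cells.query_advice(a, Rotation::cur());
+ ///     let next = cells.query_advice(a, Rotation::next());
+ ///     // On rows where `s` is enabled, the advice value must repeat.
+ ///     vec![s * (next - cur)]
+ /// });
+ /// ```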
+ pub fn query_selector(&mut self, selector: Selector) -> Expression { + self.queried_selectors.push(selector); + Expression::Selector(selector) + } + + /// Query a fixed column at a relative position + pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Fixed(FixedQuery { + index: Some(self.meta.query_fixed_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an advice column at a relative position + pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Advice(AdviceQuery { + index: Some(self.meta.query_advice_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an instance column at a relative position + pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { + self.queried_cells.push((column, at).into()); + Expression::Instance(InstanceQuery { + index: Some(self.meta.query_instance_index(column, at)), + column_index: column.index, + rotation: at, + }) + } + + /// Query an Any column at a relative position + pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { + let column = column.into(); + match column.column_type() { + Any::Advice => self.query_advice(Column::::try_from(column).unwrap(), at), + Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), + Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), + } + } + + /// Query a challenge + pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { + Expression::Challenge(challenge) + } +} diff --git a/halo2_frontend/src/plonk/circuit/expression.rs b/halo2_frontend/src/plonk/circuit/expression.rs new file mode 100644 index 0000000000..3a991d1980 --- /dev/null +++ b/halo2_frontend/src/plonk/circuit/expression.rs @@ -0,0 +1,1140 @@ +use crate::circuit::Region; +use crate::plonk::circuit::{Advice, ColumnType, Fixed, Instance, VirtualCells}; +use crate::plonk::Error; +use core::cmp::max; +use core::ops::{Add, Mul}; +use halo2_middleware::circuit::{Any, ChallengeMid, ColumnMid, ExpressionMid, QueryMid, VarMid}; +use halo2_middleware::ff::Field; +use halo2_middleware::poly::Rotation; +use sealed::SealedPhase; +use std::fmt::Debug; +use std::iter::{Product, Sum}; +use std::{ + convert::TryFrom, + ops::{Neg, Sub}, +}; + +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Column { + pub index: usize, + pub column_type: C, +} + +impl From> for ColumnMid { + fn from(val: Column) -> Self { + ColumnMid { + index: val.index(), + column_type: (*val.column_type()), + } + } +} + +impl Column { + pub fn new(index: usize, column_type: C) -> Self { + Column { index, column_type } + } + + /// Index of this column. + pub fn index(&self) -> usize { + self.index + } + + /// Type of this column. 
+ pub fn column_type(&self) -> &C { + &self.column_type + } + + /// Return expression from column at a relative position + pub fn query_cell(&self, at: Rotation) -> Expression { + self.column_type.query_cell(self.index, at) + } + + /// Return expression from column at the current row + pub fn cur(&self) -> Expression { + self.query_cell(Rotation::cur()) + } + + /// Return expression from column at the next row + pub fn next(&self) -> Expression { + self.query_cell(Rotation::next()) + } + + /// Return expression from column at the previous row + pub fn prev(&self) -> Expression { + self.query_cell(Rotation::prev()) + } + + /// Return expression from column at the specified rotation + pub fn rot(&self, rotation: i32) -> Expression { + self.query_cell(Rotation(rotation)) + } +} + +impl Ord for Column { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match self.column_type.into().cmp(&other.column_type.into()) { + // Indices are assigned within column types. + std::cmp::Ordering::Equal => self.index.cmp(&other.index), + order => order, + } + } +} + +impl PartialOrd for Column { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl From for Column { + fn from(column: ColumnMid) -> Column { + Column { + index: column.index, + column_type: column.column_type, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Advice, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Fixed, + } + } +} + +impl From> for Column { + fn from(advice: Column) -> Column { + Column { + index: advice.index(), + column_type: Any::Instance, + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Advice => Ok(Column { + index: any.index(), + column_type: Advice, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Fixed => Ok(Column { + index: any.index(), + column_type: Fixed, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +impl TryFrom> for Column { + type Error = &'static str; + + fn try_from(any: Column) -> Result { + match any.column_type() { + Any::Instance => Ok(Column { + index: any.index(), + column_type: Instance, + }), + _ => Err("Cannot convert into Column"), + } + } +} + +pub mod sealed { + /// Phase of advice column + #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] + pub struct Phase(pub u8); + + impl Phase { + pub fn prev(&self) -> Option { + self.0.checked_sub(1).map(Phase) + } + } + + impl SealedPhase for Phase { + fn to_sealed(self) -> Phase { + self + } + } + + /// Sealed trait to help keep `Phase` private. 
+ pub trait SealedPhase { + fn to_sealed(self) -> Phase; + } +} + +/// Phase of advice column +pub trait Phase: SealedPhase {} + +impl Phase for P {} + +/// First phase +#[derive(Debug)] +pub struct FirstPhase; + +impl SealedPhase for FirstPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(0) + } +} + +/// Second phase +#[derive(Debug)] +pub struct SecondPhase; + +impl SealedPhase for SecondPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(1) + } +} + +/// Third phase +#[derive(Debug)] +pub struct ThirdPhase; + +impl SealedPhase for ThirdPhase { + fn to_sealed(self) -> sealed::Phase { + sealed::Phase(2) + } +} + +/// A selector, representing a fixed boolean value per row of the circuit. +/// +/// Selectors can be used to conditionally enable (portions of) gates: +/// ``` +/// use halo2_middleware::poly::Rotation; +/// # use halo2curves::pasta::Fp; +/// # use halo2_frontend::plonk::ConstraintSystem; +/// +/// # let mut meta = ConstraintSystem::::default(); +/// let a = meta.advice_column(); +/// let b = meta.advice_column(); +/// let s = meta.selector(); +/// +/// meta.create_gate("foo", |meta| { +/// let a = meta.query_advice(a, Rotation::prev()); +/// let b = meta.query_advice(b, Rotation::cur()); +/// let s = meta.query_selector(s); +/// +/// // On rows where the selector is enabled, a is constrained to equal b. +/// // On rows where the selector is disabled, a and b can take any value. +/// vec![s * (a - b)] +/// }); +/// ``` +/// +/// Selectors are disabled on all rows by default, and must be explicitly enabled on each +/// row when required: +/// ``` +/// use halo2_frontend::circuit::{Chip, Layouter, Value}; +/// use halo2_frontend::plonk::{Advice, Fixed, Error, Column, Selector}; +/// use halo2_middleware::ff::Field; +/// +/// struct Config { +/// a: Column, +/// b: Column, +/// s: Selector, +/// } +/// +/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { +/// let config = chip.config(); +/// # let config: Config = todo!(); +/// layouter.assign_region(|| "bar", |mut region| { +/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; +/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; +/// config.s.enable(&mut region, 1) +/// })?; +/// Ok(()) +/// } +/// ``` +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Selector(pub usize, pub(crate) bool); + +impl Selector { + /// Enable this selector at the given offset within the given region. + pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { + region.enable_selector(|| "", self, offset) + } + + /// Is this selector "simple"? Simple selectors can only be multiplied + /// by expressions that contain no other simple selectors. 
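+ ///
+ /// (For context: `ConstraintSystem::selector` allocates a simple selector,
+ /// while `ConstraintSystem::complex_selector` allocates a non-simple one that
+ /// may appear anywhere within expressions.)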
+ pub fn is_simple(&self) -> bool { + self.1 + } + + /// Returns index of this selector + pub fn index(&self) -> usize { + self.0 + } + + /// Return expression from selector + pub fn expr(&self) -> Expression { + Expression::Selector(*self) + } +} + +/// Query of fixed column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FixedQuery { + /// Query index + pub index: Option, + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +impl FixedQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of advice column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct AdviceQuery { + /// Query index + pub index: Option, + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +impl AdviceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// Query of instance column at a certain relative location +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct InstanceQuery { + /// Query index + pub index: Option, + /// Column index + pub column_index: usize, + /// Rotation of this query + pub rotation: Rotation, +} + +impl InstanceQuery { + /// Column index + pub fn column_index(&self) -> usize { + self.column_index + } + + /// Rotation of this query + pub fn rotation(&self) -> Rotation { + self.rotation + } +} + +/// A fixed column of a lookup table. +/// +/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns +/// can currently only contain a single table, but they may be used in multiple lookup +/// arguments via [`super::constraint_system::ConstraintSystem::lookup`]. +/// +/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; +/// they cannot simultaneously be used as general fixed columns. +/// +/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct TableColumn { + /// The fixed column that this table column is stored in. + /// + /// # Security + /// + /// This inner column MUST NOT be exposed in the public API, or else chip developers + /// can load lookup tables into their circuits without default-value-filling the + /// columns, which can cause soundness bugs. + pub(super) inner: Column, +} + +impl TableColumn { + /// Returns inner column + pub fn inner(&self) -> Column { + self.inner + } +} + +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Challenge { + pub index: usize, + pub(crate) phase: u8, +} + +impl Challenge { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. 
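+ ///
+ /// (Sketch: a challenge is typically requested with
+ /// `ConstraintSystem::challenge_usable_after(FirstPhase)` and then used in
+ /// expressions through `Challenge::expr()`.)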
+ pub fn phase(&self) -> u8 { + self.phase + } + + /// Return Expression + pub fn expr(&self) -> Expression { + Expression::Challenge(*self) + } +} + +impl From for ChallengeMid { + fn from(val: Challenge) -> Self { + ChallengeMid { + index: val.index, + phase: val.phase, + } + } +} + +impl From for Challenge { + fn from(c: ChallengeMid) -> Self { + Self { + index: c.index, + phase: c.phase, + } + } +} + +/// Low-degree expression representing an identity that must hold over the committed columns. +#[derive(Clone, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a virtual selector + Selector(Selector), + /// This is a fixed column queried at a certain relative location + Fixed(FixedQuery), + /// This is an advice (witness) column queried at a certain relative location + Advice(AdviceQuery), + /// This is an instance (external) column queried at a certain relative location + Instance(InstanceQuery), + /// This is a challenge + Challenge(Challenge), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), + /// This is a scaled polynomial + Scaled(Box>, F), +} + +impl From> for ExpressionMid { + fn from(val: Expression) -> Self { + match val { + Expression::Constant(c) => ExpressionMid::Constant(c), + Expression::Selector(_) => unreachable!(), + Expression::Fixed(FixedQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Var(VarMid::Query(QueryMid { + column_index, + column_type: Any::Fixed, + rotation, + })), + Expression::Advice(AdviceQuery { + column_index, + rotation, + .. + }) => ExpressionMid::Var(VarMid::Query(QueryMid { + column_index, + column_type: Any::Advice, + rotation, + })), + Expression::Instance(InstanceQuery { + column_index, + rotation, + .. 
+ }) => ExpressionMid::Var(VarMid::Query(QueryMid { + column_index, + column_type: Any::Instance, + rotation, + })), + Expression::Challenge(c) => ExpressionMid::Var(VarMid::Challenge(c.into())), + Expression::Negated(e) => ExpressionMid::Negated(Box::new((*e).into())), + Expression::Sum(lhs, rhs) => { + ExpressionMid::Sum(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Product(lhs, rhs) => { + ExpressionMid::Product(Box::new((*lhs).into()), Box::new((*rhs).into())) + } + Expression::Scaled(e, c) => { + ExpressionMid::Product(Box::new((*e).into()), Box::new(ExpressionMid::Constant(c))) + } + } + } +} + +impl Expression { + /// Make side effects + pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { + match self { + Expression::Constant(_) => (), + Expression::Selector(selector) => { + if !cells.queried_selectors.contains(selector) { + cells.queried_selectors.push(*selector); + } + } + Expression::Fixed(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Fixed, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); + } + } + Expression::Advice(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Advice, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_advice_index(col, query.rotation)); + } + } + Expression::Instance(query) => { + if query.index.is_none() { + let col = Column { + index: query.column_index, + column_type: Instance, + }; + cells.queried_cells.push((col, query.rotation).into()); + query.index = Some(cells.meta.query_instance_index(col, query.rotation)); + } + } + Expression::Challenge(_) => (), + Expression::Negated(a) => a.query_cells(cells), + Expression::Sum(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Product(a, b) => { + a.query_cells(cells); + b.query_cells(cells); + } + Expression::Scaled(a, _) => a.query_cells(cells), + }; + } + + /// Evaluate the polynomial using the provided closures to perform the + /// operations. 
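+ ///
+ /// A folding sketch (illustrative; `expr` is assumed to be an `Expression`
+ /// built elsewhere), here counting column-query leaves:
+ /// ```ignore
+ /// let query_leaves: usize = expr.evaluate(
+ ///     &|_| 0,        // constant
+ ///     &|_| 0,        // selector
+ ///     &|_| 1,        // fixed query
+ ///     &|_| 1,        // advice query
+ ///     &|_| 1,        // instance query
+ ///     &|_| 0,        // challenge
+ ///     &|a| a,        // negation keeps the count
+ ///     &|a, b| a + b, // sum combines both sides
+ ///     &|a, b| a + b, // product combines both sides
+ ///     &|a, _| a,     // scaling keeps the count
+ /// );
+ /// ```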
+ #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + let b = b.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + product(a, b) + } + Expression::Scaled(a, f) => { + let a = a.evaluate( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + ); + scaled(a, *f) + } + } + } + + /// Evaluate the polynomial lazily using the provided closures to perform the + /// operations. 
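+ ///
+ /// (Sketch of the difference from `evaluate`: for a product, the operand with
+ /// the lower `complexity()` is evaluated first, and if it equals `zero` the
+ /// other operand is not evaluated at all.)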
+ #[allow(clippy::too_many_arguments)] + pub fn evaluate_lazy( + &self, + constant: &impl Fn(F) -> T, + selector_column: &impl Fn(Selector) -> T, + fixed_column: &impl Fn(FixedQuery) -> T, + advice_column: &impl Fn(AdviceQuery) -> T, + instance_column: &impl Fn(InstanceQuery) -> T, + challenge: &impl Fn(Challenge) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + scaled: &impl Fn(T, F) -> T, + zero: &T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Selector(selector) => selector_column(*selector), + Expression::Fixed(query) => fixed_column(*query), + Expression::Advice(query) => advice_column(*query), + Expression::Instance(query) => instance_column(*query), + Expression::Challenge(value) => challenge(*value), + Expression::Negated(a) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + let b = b.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + sum(a, b) + } + Expression::Product(a, b) => { + let (a, b) = if a.complexity() <= b.complexity() { + (a, b) + } else { + (b, a) + }; + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + + if a == *zero { + a + } else { + let b = b.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + product(a, b) + } + } + Expression::Scaled(a, f) => { + let a = a.evaluate_lazy( + constant, + selector_column, + fixed_column, + advice_column, + instance_column, + challenge, + negated, + sum, + product, + scaled, + zero, + ); + scaled(a, *f) + } + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), + Expression::Fixed(query) => { + write!( + writer, + "fixed[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Advice(query) => { + write!( + writer, + "advice[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Instance(query) => { + write!( + writer, + "instance[{}][{}]", + query.column_index, query.rotation.0 + ) + } + Expression::Challenge(challenge) => { + write!(writer, "challenge[{}]", challenge.index()) + } + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Scaled(a, f) => { + a.write_identifier(writer)?; + write!(writer, "*{f:?}") + } + } + } + + /// Identifier for this expression. 
Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). + pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.degree(), + Expression::Sum(a, b) => max(a.degree(), b.degree()), + Expression::Product(a, b) => a.degree() + b.degree(), + Expression::Scaled(poly, _) => poly.degree(), + } + } + + /// Approximate the computational complexity of this expression. + pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Selector(_) => 1, + Expression::Fixed(_) => 1, + Expression::Advice(_) => 1, + Expression::Instance(_) => 1, + Expression::Challenge(_) => 0, + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + Expression::Scaled(poly, _) => poly.complexity() + 30, + } + } + + /// Square this expression. + pub fn square(self) -> Self { + self.clone() * self + } + + /// Returns whether or not this expression contains a simple `Selector`. + pub(super) fn contains_simple_selector(&self) -> bool { + self.evaluate( + &|_| false, + &|selector| selector.is_simple(), + &|_| false, + &|_| false, + &|_| false, + &|_| false, + &|a| a, + &|a, b| a || b, + &|a, b| a || b, + &|a, _| a, + ) + } + + // TODO: Where is this used? + /// Extracts a simple selector from this gate, if present + pub(super) fn extract_simple_selector(&self) -> Option { + let op = |a, b| match (a, b) { + (Some(a), None) | (None, Some(a)) => Some(a), + (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), + _ => None, + }; + + self.evaluate( + &|_| None, + &|selector| { + if selector.is_simple() { + Some(selector) + } else { + None + } + }, + &|_| None, + &|_| None, + &|_| None, + &|_| None, + &|a| a, + &op, + &op, + &|a, _| a, + ) + } + + /// Returns whether or not this expression contains a `Fixed` column. + pub(super) fn contains_fixed_col(&self) -> bool { + self.evaluate( + &|_| false, + &|_| false, + &|_| true, + &|_| false, + &|_| false, + &|_| false, + &|a| a, + &|a, b| a || b, + &|a, b| a || b, + &|a, _| a, + ) + } + + /// Returns whether or not this expression contains a `Selector`. + pub(super) fn contains_selector(&self) -> bool { + self.evaluate( + &|_| false, + &|_| true, + &|_| false, + &|_| false, + &|_| false, + &|_| false, + &|a| a, + &|a, b| a || b, + &|a, b| a || b, + &|a, _| a, + ) + } + + /// Returns whether or not this expression contains a `Selector` or `Fixed` column. 
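+ ///
+ /// (Helpers like this back the `lookup-any-sanity-checks` in
+ /// `ConstraintSystem::lookup_any`, which require every table expression to
+ /// carry a fixed or selector query for tagging.)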
+ pub(super) fn contains_fixed_col_or_selector(&self) -> bool { + self.contains_fixed_col() || self.contains_selector() + } +} + +impl std::fmt::Debug for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), + Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), + // Skip enum variant and print query struct directly to maintain backwards compatibility. + Expression::Fixed(query) => { + let mut debug_struct = f.debug_struct("Fixed"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Advice(query) => { + let mut debug_struct = f.debug_struct("Advice"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation); + debug_struct.finish() + } + Expression::Instance(query) => { + let mut debug_struct = f.debug_struct("Instance"); + match query.index { + None => debug_struct.field("query_index", &query.index), + Some(idx) => debug_struct.field("query_index", &idx), + }; + debug_struct + .field("column_index", &query.column_index) + .field("rotation", &query.rotation) + .finish() + } + Expression::Challenge(challenge) => { + f.debug_tuple("Challenge").field(challenge).finish() + } + Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), + Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), + Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), + Expression::Scaled(poly, scalar) => { + f.debug_tuple("Scaled").field(poly).field(scalar).finish() + } + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in an addition"); + } + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + fn sub(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() || rhs.contains_simple_selector() { + panic!("attempted to use a simple selector in a subtraction"); + } + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + if self.contains_simple_selector() && rhs.contains_simple_selector() { + panic!("attempted to multiply two expressions containing simple selectors"); + } + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Scaled(Box::new(self), rhs) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} + +#[cfg(test)] +mod tests { + use 
super::Expression; + use halo2curves::bn256::Fr; + + #[test] + fn iter_sum() { + let exprs: Vec> = vec![ + Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().sum(); + let expected: Expression = Expression::Sum( + Box::new(Expression::Sum( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } + + #[test] + fn iter_product() { + let exprs: Vec> = vec![ + Expression::Constant(1.into()), + Expression::Constant(2.into()), + Expression::Constant(3.into()), + ]; + let happened: Expression = exprs.into_iter().product(); + let expected: Expression = Expression::Product( + Box::new(Expression::Product( + Box::new(Expression::Constant(1.into())), + Box::new(Expression::Constant(2.into())), + )), + Box::new(Expression::Constant(3.into())), + ); + + assert_eq!(happened, expected); + } +} diff --git a/halo2_frontend/src/plonk/error.rs b/halo2_frontend/src/plonk/error.rs new file mode 100644 index 0000000000..18133f2683 --- /dev/null +++ b/halo2_frontend/src/plonk/error.rs @@ -0,0 +1,103 @@ +use std::fmt; + +use super::TableColumn; +use crate::plonk::Column; +use halo2_middleware::circuit::Any; + +/// This is an error that could occur during circuit synthesis. +#[derive(Debug)] +pub enum Error { + /// This is an error that can occur during synthesis of the circuit, for + /// example, when the witness is not present. + Synthesis, + /// Out of bounds index passed to a backend + BoundsFailure, + /// `k` is too small for the given circuit. + NotEnoughRowsAvailable { + /// The current value of `k` being used. + current_k: u32, + }, + /// Circuit synthesis requires global constants, but circuit configuration did not + /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. + /// + /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant + NotEnoughColumnsForConstants, + /// The instance sets up a copy constraint involving a column that has not been + /// included in the permutation. + ColumnNotInPermutation(Column), + /// An error relating to a lookup table. + TableError(TableError), + /// Generic error not covered by previous cases + Other(String), +} + +impl Error { + /// Constructs an `Error::NotEnoughRowsAvailable`. + pub fn not_enough_rows_available(current_k: u32) -> Self { + Error::NotEnoughRowsAvailable { current_k } + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::Synthesis => write!(f, "General synthesis error"), + Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), + Error::NotEnoughRowsAvailable { current_k } => write!( + f, + "k = {current_k} is too small for the given circuit. Try using a larger value of k", + ), + Error::NotEnoughColumnsForConstants => { + write!( + f, + "Too few fixed columns are enabled for global constants usage" + ) + } + Error::ColumnNotInPermutation(column) => write!( + f, + "Column {column:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", + ), + Error::TableError(error) => write!(f, "{error}"), + Error::Other(error) => write!(f, "Other: {error}"), + } + } +} + +/// This is an error that could occur during table synthesis. +#[derive(Debug)] +pub enum TableError { + /// A `TableColumn` has not been assigned. 
+ ColumnNotAssigned(TableColumn), + /// A Table has columns of uneven lengths. + UnevenColumnLengths((TableColumn, usize), (TableColumn, usize)), + /// Attempt to assign a used `TableColumn` + UsedColumn(TableColumn), + /// Attempt to overwrite a default value + OverwriteDefault(TableColumn, String, String), +} + +impl fmt::Display for TableError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableError::ColumnNotAssigned(col) => { + write!( + f, + "{col:?} not fully assigned. Help: assign a value at offset 0.", + ) + } + TableError::UnevenColumnLengths((col, col_len), (table, table_len)) => write!( + f, + "{col:?} has length {col_len} while {table:?} has length {table_len}", + ), + TableError::UsedColumn(col) => { + write!(f, "{col:?} has already been used") + } + TableError::OverwriteDefault(col, default, val) => { + write!( + f, + "Attempted to overwrite default value {default} with {val} in {col:?}", + ) + } + } + } +} diff --git a/halo2_frontend/src/plonk/keygen.rs b/halo2_frontend/src/plonk/keygen.rs new file mode 100644 index 0000000000..8e9b829033 --- /dev/null +++ b/halo2_frontend/src/plonk/keygen.rs @@ -0,0 +1,163 @@ +use std::ops::Range; + +use halo2_middleware::circuit::Any; +use halo2_middleware::ff::Field; + +use crate::circuit::Value; +use crate::plonk::{ + permutation, Advice, Assigned, Assignment, Challenge, Column, Error, Fixed, Instance, Selector, +}; + +/// Assembly to be used in circuit synthesis. +#[derive(Debug)] +pub(crate) struct Assembly { + pub(crate) k: u32, + pub(crate) fixed: Vec>>, + pub(crate) permutation: permutation::Assembly, + pub(crate) selectors: Vec>, + // A range of available rows for assignment and copies. + pub(crate) usable_rows: Range, + pub(crate) _marker: std::marker::PhantomData, +} + +impl Assignment for Assembly { + fn enter_region(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about regions in this context. + } + + fn exit_region(&mut self) { + // Do nothing; we don't care about regions in this context. + } + + fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> + where + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.selectors[selector.0][row] = true; + + Ok(()) + } + + fn query_instance(&self, _: Column, row: usize) -> Result, Error> { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + // There is no instance in this context. + Ok(Value::unknown()) + } + + fn assign_advice( + &mut self, + _: A, + _: Column, + _: usize, + _: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + // We only care about fixed columns here + Ok(()) + } + + fn assign_fixed( + &mut self, + _: A, + column: Column, + row: usize, + to: V, + ) -> Result<(), Error> + where + V: FnOnce() -> Value, + VR: Into>, + A: FnOnce() -> AR, + AR: Into, + { + if !self.usable_rows.contains(&row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + *self + .fixed + .get_mut(column.index()) + .and_then(|v| v.get_mut(row)) + .ok_or(Error::BoundsFailure)? 
= to().into_field().assign()?; + + Ok(()) + } + + fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + self.permutation + .copy(left_column, left_row, right_column, right_row) + } + + fn fill_from_row( + &mut self, + column: Column, + from_row: usize, + to: Value>, + ) -> Result<(), Error> { + if !self.usable_rows.contains(&from_row) { + return Err(Error::not_enough_rows_available(self.k)); + } + + let col = self + .fixed + .get_mut(column.index()) + .ok_or(Error::BoundsFailure)?; + + let filler = to.assign()?; + for row in self.usable_rows.clone().skip(from_row) { + col[row] = filler; + } + + Ok(()) + } + + fn get_challenge(&self, _: Challenge) -> Value { + Value::unknown() + } + + fn annotate_column(&mut self, _annotation: A, _column: Column) + where + A: FnOnce() -> AR, + AR: Into, + { + // Do nothing + } + + fn push_namespace(&mut self, _: N) + where + NR: Into, + N: FnOnce() -> NR, + { + // Do nothing; we don't care about namespaces in this context. + } + + fn pop_namespace(&mut self, _: Option) { + // Do nothing; we don't care about namespaces in this context. + } +} diff --git a/halo2_proofs/src/plonk/lookup.rs b/halo2_frontend/src/plonk/lookup.rs similarity index 96% rename from halo2_proofs/src/plonk/lookup.rs rename to halo2_frontend/src/plonk/lookup.rs index a7c4f68af2..85098d7fb0 100644 --- a/halo2_proofs/src/plonk/lookup.rs +++ b/halo2_frontend/src/plonk/lookup.rs @@ -1,10 +1,8 @@ -use super::circuit::Expression; -use ff::Field; +use crate::plonk::Expression; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; -pub(crate) mod prover; -pub(crate) mod verifier; - +/// Expressions involved in a lookup argument, with a name as metadata. #[derive(Clone)] pub struct Argument { pub(crate) name: String, diff --git a/halo2_frontend/src/plonk/permutation.rs b/halo2_frontend/src/plonk/permutation.rs new file mode 100644 index 0000000000..594832d849 --- /dev/null +++ b/halo2_frontend/src/plonk/permutation.rs @@ -0,0 +1,109 @@ +//! Implementation of permutation argument. + +use crate::plonk::{Column, Error}; +use halo2_middleware::circuit::{Any, Cell}; + +/// A permutation argument. +#[derive(Default, Debug, Clone)] +pub struct Argument { + /// A sequence of columns involved in the argument. + pub columns: Vec>, +} + +impl Argument { + /// Returns the minimum circuit degree required by the permutation argument. + /// The argument may use larger degree gates depending on the actual + /// circuit's degree and how many columns are involved in the permutation. + pub(crate) fn required_degree(&self) -> usize { + // degree 2: + // l_0(X) * (1 - z(X)) = 0 + // + // We will fit as many polynomials p_i(X) as possible + // into the required degree of the circuit, so the + // following will not affect the required degree of + // this middleware. + // + // (1 - (l_last(X) + l_blind(X))) * ( + // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma) + // - z(X) \prod (p(X) + \delta^i \beta X + \gamma) + // ) + // + // On the first sets of columns, except the first + // set, we will do + // + // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0 + // + // where z'(X) is the permutation for the previous set + // of columns. 
+ // + // On the final set of columns, we will do + // + // degree 3: + // l_last(X) * (z'(X)^2 - z'(X)) = 0 + // + // which will allow the last value to be zero to + // ensure the argument is perfectly complete. + + // There are constraints of degree 3 regardless of the + // number of columns involved. + 3 + } + + pub(crate) fn add_column(&mut self, column: Column) { + if !self.columns.contains(&column) { + self.columns.push(column); + } + } + + /// Returns columns that participate on the permutation argument. + pub fn get_columns(&self) -> Vec> { + self.columns.clone() + } +} + +#[derive(Clone, Debug)] +pub(crate) struct Assembly { + pub(crate) n: usize, + pub(crate) columns: Vec>, + pub(crate) copies: Vec<(Cell, Cell)>, +} + +impl Assembly { + pub(crate) fn new(n: usize, p: &Argument) -> Self { + Self { + n, + columns: p.columns.clone(), + copies: Vec::new(), + } + } + + pub(crate) fn copy( + &mut self, + left_column: Column, + left_row: usize, + right_column: Column, + right_row: usize, + ) -> Result<(), Error> { + if !self.columns.contains(&left_column) { + return Err(Error::ColumnNotInPermutation(left_column)); + } + if !self.columns.contains(&right_column) { + return Err(Error::ColumnNotInPermutation(right_column)); + } + // Check bounds + if left_row >= self.n || right_row >= self.n { + return Err(Error::BoundsFailure); + } + self.copies.push(( + Cell { + column: left_column.into(), + row: left_row, + }, + Cell { + column: right_column.into(), + row: right_row, + }, + )); + Ok(()) + } +} diff --git a/halo2_proofs/src/plonk/shuffle.rs b/halo2_frontend/src/plonk/shuffle.rs similarity index 93% rename from halo2_proofs/src/plonk/shuffle.rs rename to halo2_frontend/src/plonk/shuffle.rs index e32353c710..5d379fb659 100644 --- a/halo2_proofs/src/plonk/shuffle.rs +++ b/halo2_frontend/src/plonk/shuffle.rs @@ -1,10 +1,8 @@ -use super::circuit::Expression; -use ff::Field; +use crate::plonk::Expression; +use halo2_middleware::ff::Field; use std::fmt::{self, Debug}; -pub(crate) mod prover; -pub(crate) mod verifier; - +/// Expressions involved in a shuffle argument, with a name as metadata. #[derive(Clone)] pub struct Argument { pub(crate) name: String, diff --git a/halo2_middleware/Cargo.toml b/halo2_middleware/Cargo.toml new file mode 100644 index 0000000000..7a57aa3e9b --- /dev/null +++ b/halo2_middleware/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "halo2_middleware" +version = "0.4.0" +authors = [ + "Sean Bowe ", + "Ying Tong Lai ", + "Daira Hopwood ", + "Jack Grigg ", + "Privacy Scaling Explorations team", +] +edition = "2021" +rust-version = "1.73.0" +description = """ +Halo2 middleware. This package contains the types and traits required for the frontend-backend interaction. 
+""" +license = "MIT OR Apache-2.0" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" +readme = "README.md" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +ff = "0.13" +halo2curves = { version = "0.7.0", default-features = false } +serde = { version = "1", optional = true, features = ["derive"] } +serde_derive = { version = "1", optional = true} +rayon = "1.8" + +[dev-dependencies] +ark-std = { version = "0.3" } +proptest = "1" +group = "0.13" +rand_xorshift = "0.3.0" +rand_core = "0.6.4" + +[lib] +bench = false diff --git a/halo2_middleware/src/circuit.rs b/halo2_middleware/src/circuit.rs new file mode 100644 index 0000000000..4c8d4ee0d9 --- /dev/null +++ b/halo2_middleware/src/circuit.rs @@ -0,0 +1,264 @@ +use crate::expression::{Expression, Variable}; +use crate::poly::Rotation; +use crate::{lookup, permutation, shuffle}; +use ff::Field; +use std::collections::HashMap; +use std::fmt; + +/// A challenge squeezed from transcript after advice columns at the phase have been committed. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct ChallengeMid { + pub index: usize, + pub phase: u8, +} + +impl ChallengeMid { + /// Index of this challenge. + pub fn index(&self) -> usize { + self.index + } + + /// Phase of this challenge. + pub fn phase(&self) -> u8 { + self.phase + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct QueryMid { + /// Column index + pub column_index: usize, + /// The type of the column. + pub column_type: Any, + /// Rotation of this query + pub rotation: Rotation, +} + +impl QueryMid { + pub fn new(column_type: Any, column_index: usize, rotation: Rotation) -> Self { + Self { + column_index, + column_type, + rotation, + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum VarMid { + /// This is a generic column query + Query(QueryMid), + /// This is a challenge + Challenge(ChallengeMid), +} + +impl fmt::Display for VarMid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + VarMid::Query(query) => { + match query.column_type { + Any::Fixed => write!(f, "f")?, + Any::Advice => write!(f, "a")?, + Any::Instance => write!(f, "i")?, + }; + write!(f, "{}", query.column_index)?; + if query.rotation.0 != 0 { + write!(f, "[{}]", query.rotation.0)?; + } + Ok(()) + } + VarMid::Challenge(challenge) => { + write!(f, "ch{}", challenge.index()) + } + } + } +} + +impl Variable for VarMid { + fn degree(&self) -> usize { + match self { + VarMid::Query(_) => 1, + VarMid::Challenge(_) => 0, + } + } + + fn complexity(&self) -> usize { + match self { + VarMid::Query(_) => 1, + VarMid::Challenge(_) => 0, + } + } + + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + write!(writer, "{}", self) + } +} + +pub type ExpressionMid = Expression; + +/// A Gate contains a single polynomial identity with a name as metadata. +#[derive(Clone, Debug)] +pub struct Gate { + pub name: String, + pub poly: Expression, +} + +impl Gate { + /// Returns the gate name. 
+ pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Returns the polynomial identity of this gate + pub fn polynomial(&self) -> &Expression { + &self.poly + } +} + +pub type GateMid = Gate; + +/// This is a description of the circuit environment, such as the gate, column and +/// permutation arrangements. +#[derive(Debug, Clone)] +pub struct ConstraintSystemMid { + pub num_fixed_columns: usize, + pub num_advice_columns: usize, + pub num_instance_columns: usize, + pub num_challenges: usize, + + /// Contains the index of each advice column that is left unblinded. + pub unblinded_advice_columns: Vec, + + /// Contains the phase for each advice column. Should have same length as num_advice_columns. + pub advice_column_phase: Vec, + /// Contains the phase for each challenge. Should have same length as num_challenges. + pub challenge_phase: Vec, + + pub gates: Vec>, + + // Permutation argument for performing equality constraints + pub permutation: permutation::ArgumentMid, + + // Vector of lookup arguments, where each corresponds to a sequence of + // input expressions and a sequence of table expressions involved in the lookup. + pub lookups: Vec>, + + // Vector of shuffle arguments, where each corresponds to a sequence of + // input expressions and a sequence of shuffle expressions involved in the shuffle. + pub shuffles: Vec>, + + // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. + pub general_column_annotations: HashMap, + + // The minimum degree required by the circuit, which can be set to a + // larger amount than actually needed. This can be used, for example, to + // force the permutation argument to involve more columns in the same set. + pub minimum_degree: Option, +} + +impl ConstraintSystemMid { + /// Returns the number of phases + pub fn phases(&self) -> usize { + let max_phase = self + .advice_column_phase + .iter() + .copied() + .max() + .unwrap_or_default(); + max_phase as usize + 1 + } +} + +/// Data that needs to be preprocessed from a circuit +#[derive(Debug, Clone)] +pub struct Preprocessing { + pub permutation: permutation::AssemblyMid, + pub fixed: Vec>, +} + +/// This is a description of a low level Plonkish compiled circuit. Contains the Constraint System +/// as well as the fixed columns and copy constraints information. +#[derive(Debug, Clone)] +pub struct CompiledCircuit { + pub preprocessing: Preprocessing, + pub cs: ConstraintSystemMid, +} + +/// An enum over the Advice, Fixed, Instance structs +#[derive(Clone, Copy, Eq, PartialEq, Hash)] +pub enum Any { + /// An Advice variant + Advice, + /// A Fixed variant + Fixed, + /// An Instance variant + Instance, +} + +impl std::fmt::Debug for Any { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Any::Advice => f.debug_struct("Advice").finish(), + Any::Fixed => f.debug_struct("Fixed").finish(), + Any::Instance => f.debug_struct("Instance").finish(), + } + } +} + +impl Ord for Any { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // This ordering is consensus-critical! The layouters rely on deterministic column + // orderings. + match (self, other) { + (Any::Instance, Any::Instance) + | (Any::Fixed, Any::Fixed) + | (Any::Advice, Any::Advice) => std::cmp::Ordering::Equal, + // Across column types, sort Instance < Advice < Fixed. 
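    // For example, sorting [Any::Fixed, Any::Instance, Any::Advice] under this
    // ordering yields [Any::Instance, Any::Advice, Any::Fixed].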
+ (Any::Instance, Any::Advice) + | (Any::Advice, Any::Fixed) + | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, + (Any::Fixed, Any::Instance) + | (Any::Fixed, Any::Advice) + | (Any::Advice, Any::Instance) => std::cmp::Ordering::Greater, + } + } +} + +impl PartialOrd for Any { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// A column with an index and type +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct ColumnMid { + /// The type of the column. + pub column_type: Any, + /// The index of the column. + pub index: usize, +} + +impl ColumnMid { + pub fn new(column_type: Any, index: usize) -> Self { + ColumnMid { column_type, index } + } +} + +impl fmt::Display for ColumnMid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let column_type = match self.column_type { + Any::Advice => "a", + Any::Fixed => "f", + Any::Instance => "i", + }; + write!(f, "{}{}", column_type, self.index) + } +} + +/// A cell identifies a position in the plonkish matrix identified by a column and a row offset. +#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Debug)] +pub struct Cell { + pub column: ColumnMid, + pub row: usize, +} diff --git a/halo2_middleware/src/expression.rs b/halo2_middleware/src/expression.rs new file mode 100644 index 0000000000..1cbb557ed4 --- /dev/null +++ b/halo2_middleware/src/expression.rs @@ -0,0 +1,174 @@ +use core::cmp::max; +use core::ops::{Add, Mul, Neg, Sub}; +use ff::Field; +use std::iter::{Product, Sum}; + +pub trait Variable: Clone + Copy + std::fmt::Debug + std::fmt::Display + Eq + PartialEq { + /// Degree that an expression would have if it was only this variable. + fn degree(&self) -> usize; + + /// Approximate the computational complexity an expression would have if it was only this + /// variable. + fn complexity(&self) -> usize { + 0 + } + + /// Write an identifier of the variable. If two variables have the same identifier, they must + /// be the same variable. + fn write_identifier(&self, writer: &mut W) -> std::io::Result<()>; +} + +/// Low-degree expression representing an identity that must hold over the committed columns. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Expression { + /// This is a constant polynomial + Constant(F), + /// This is a variable + Var(V), + /// This is a negated polynomial + Negated(Box>), + /// This is the sum of two polynomials + Sum(Box>, Box>), + /// This is the product of two polynomials + Product(Box>, Box>), +} + +impl Expression { + /// Evaluate the polynomial using the provided closures to perform the + /// operations. 
+ #[allow(clippy::too_many_arguments)] + pub fn evaluate( + &self, + constant: &impl Fn(F) -> T, + var: &impl Fn(V) -> T, + negated: &impl Fn(T) -> T, + sum: &impl Fn(T, T) -> T, + product: &impl Fn(T, T) -> T, + ) -> T { + match self { + Expression::Constant(scalar) => constant(*scalar), + Expression::Var(v) => var(*v), + Expression::Negated(a) => { + let a = a.evaluate(constant, var, negated, sum, product); + negated(a) + } + Expression::Sum(a, b) => { + let a = a.evaluate(constant, var, negated, sum, product); + let b = b.evaluate(constant, var, negated, sum, product); + sum(a, b) + } + Expression::Product(a, b) => { + let a = a.evaluate(constant, var, negated, sum, product); + let b = b.evaluate(constant, var, negated, sum, product); + product(a, b) + } + } + } + + pub fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { + match self { + Expression::Constant(scalar) => write!(writer, "{scalar:?}"), + Expression::Var(v) => v.write_identifier(writer), + Expression::Negated(a) => { + writer.write_all(b"(-")?; + a.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Sum(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"+")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + Expression::Product(a, b) => { + writer.write_all(b"(")?; + a.write_identifier(writer)?; + writer.write_all(b"*")?; + b.write_identifier(writer)?; + writer.write_all(b")") + } + } + } + + /// Identifier for this expression. Expressions with identical identifiers + /// do the same calculation (but the expressions don't need to be exactly equal + /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). + pub fn identifier(&self) -> String { + let mut cursor = std::io::Cursor::new(Vec::new()); + self.write_identifier(&mut cursor).unwrap(); + String::from_utf8(cursor.into_inner()).unwrap() + } + + /// Compute the degree of this polynomial + pub fn degree(&self) -> usize { + use Expression::*; + match self { + Constant(_) => 0, + Var(v) => v.degree(), + Negated(poly) => poly.degree(), + Sum(a, b) => max(a.degree(), b.degree()), + Product(a, b) => a.degree() + b.degree(), + } + } + + /// Approximate the computational complexity of this expression. 
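A minimal sketch of the API above and of the operator impls that follow (assuming bn256::Fr as the concrete field; the assigned values 2 and 3 are arbitrary): build a0 * a1 - a0 over VarMid, check its degree, and fold it into a field element with evaluate.

    use halo2_middleware::circuit::{Any, QueryMid, VarMid};
    use halo2_middleware::expression::Expression;
    use halo2_middleware::ff::Field;
    use halo2_middleware::halo2curves::bn256::Fr;
    use halo2_middleware::poly::Rotation;

    fn main() {
        // a0 and a1 are the advice columns 0 and 1 queried at the current row.
        let a0 = Expression::Var(VarMid::Query(QueryMid::new(Any::Advice, 0, Rotation::cur())));
        let a1 = Expression::Var(VarMid::Query(QueryMid::new(Any::Advice, 1, Rotation::cur())));

        // The identity a0 * a1 - a0.
        let expr = a0.clone() * a1 - a0;
        assert_eq!(expr.degree(), 2);

        // Fold the tree into a field element; the `var` closure plays the role
        // of an assignment (a0 = 2, a1 = 3).
        let value: Fr = expr.evaluate(
            &|c| c,
            &|v| match v {
                VarMid::Query(q) => Fr::from(q.column_index as u64 + 2),
                VarMid::Challenge(_) => Fr::ZERO,
            },
            &|x| -x,
            &|x, y| x + y,
            &|x, y| x * y,
        );
        assert_eq!(value, Fr::from(4)); // 2 * 3 - 2
    }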
+ pub fn complexity(&self) -> usize { + match self { + Expression::Constant(_) => 0, + Expression::Var(v) => v.complexity(), + Expression::Negated(poly) => poly.complexity() + 5, + Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, + Expression::Product(a, b) => a.complexity() + b.complexity() + 30, + } + } +} + +impl Neg for Expression { + type Output = Expression; + fn neg(self) -> Self::Output { + Expression::Negated(Box::new(self)) + } +} + +impl Add for Expression { + type Output = Expression; + fn add(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(rhs)) + } +} + +impl Sub for Expression { + type Output = Expression; + fn sub(self, rhs: Expression) -> Expression { + Expression::Sum(Box::new(self), Box::new(-rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: Expression) -> Expression { + Expression::Product(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for Expression { + type Output = Expression; + fn mul(self, rhs: F) -> Expression { + Expression::Product(Box::new(self), Box::new(Expression::Constant(rhs))) + } +} + +impl Sum for Expression { + fn sum>(iter: I) -> Self { + iter.reduce(|acc, x| acc + x) + .unwrap_or(Expression::Constant(F::ZERO)) + } +} + +impl Product for Expression { + fn product>(iter: I) -> Self { + iter.reduce(|acc, x| acc * x) + .unwrap_or(Expression::Constant(F::ONE)) + } +} diff --git a/halo2_middleware/src/lib.rs b/halo2_middleware/src/lib.rs new file mode 100644 index 0000000000..b5156509d5 --- /dev/null +++ b/halo2_middleware/src/lib.rs @@ -0,0 +1,11 @@ +pub mod circuit; +pub mod expression; +pub mod lookup; +pub mod multicore; +pub mod permutation; +pub mod poly; +pub mod shuffle; +pub mod zal; + +pub use ff; +pub use halo2curves; diff --git a/halo2_middleware/src/lookup.rs b/halo2_middleware/src/lookup.rs new file mode 100644 index 0000000000..d2084010bf --- /dev/null +++ b/halo2_middleware/src/lookup.rs @@ -0,0 +1,13 @@ +use super::circuit::VarMid; +use super::expression::{Expression, Variable}; +use ff::Field; + +/// Expressions involved in a lookup argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct Argument { + pub name: String, + pub input_expressions: Vec>, + pub table_expressions: Vec>, +} + +pub type ArgumentMid = Argument; diff --git a/halo2_proofs/src/multicore.rs b/halo2_middleware/src/multicore.rs similarity index 100% rename from halo2_proofs/src/multicore.rs rename to halo2_middleware/src/multicore.rs diff --git a/halo2_middleware/src/permutation.rs b/halo2_middleware/src/permutation.rs new file mode 100644 index 0000000000..02d71a7b03 --- /dev/null +++ b/halo2_middleware/src/permutation.rs @@ -0,0 +1,13 @@ +use crate::circuit::{Cell, ColumnMid}; + +#[derive(Clone, Debug)] +pub struct AssemblyMid { + pub copies: Vec<(Cell, Cell)>, +} + +/// A permutation argument. +#[derive(Debug, Clone)] +pub struct ArgumentMid { + /// A sequence of columns involved in the argument. + pub columns: Vec, +} diff --git a/halo2_middleware/src/poly.rs b/halo2_middleware/src/poly.rs new file mode 100644 index 0000000000..7f5f58eb8d --- /dev/null +++ b/halo2_middleware/src/poly.rs @@ -0,0 +1,22 @@ +/// Describes the relative rotation of a vector. Negative numbers represent +/// reverse (leftmost) rotations and positive numbers represent forward (rightmost) +/// rotations. Zero represents no rotation. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Rotation(pub i32); + +impl Rotation { + /// The current location in the evaluation domain + pub fn cur() -> Rotation { + Rotation(0) + } + + /// The previous location in the evaluation domain + pub fn prev() -> Rotation { + Rotation(-1) + } + + /// The next location in the evaluation domain + pub fn next() -> Rotation { + Rotation(1) + } +} diff --git a/halo2_middleware/src/shuffle.rs b/halo2_middleware/src/shuffle.rs new file mode 100644 index 0000000000..a56678b576 --- /dev/null +++ b/halo2_middleware/src/shuffle.rs @@ -0,0 +1,13 @@ +use super::circuit::VarMid; +use super::expression::{Expression, Variable}; +use ff::Field; + +/// Expressions involved in a shuffle argument, with a name as metadata. +#[derive(Clone, Debug)] +pub struct Argument { + pub name: String, + pub input_expressions: Vec>, + pub shuffle_expressions: Vec>, +} + +pub type ArgumentMid = Argument; diff --git a/halo2_middleware/src/zal.rs b/halo2_middleware/src/zal.rs new file mode 100644 index 0000000000..b6cf37229a --- /dev/null +++ b/halo2_middleware/src/zal.rs @@ -0,0 +1,373 @@ +//! This module provides "ZK Acceleration Layer" traits +//! to abstract away the execution engine for performance-critical primitives. +//! +//! Terminology +//! ----------- +//! +//! We use the name Backend+Engine for concrete implementations of ZalEngine. +//! For example H2cEngine for pure Halo2curves implementation. +//! +//! Alternative names considered were Executor or Driver however +//! - executor is already used in Rust (and the name is long) +//! - driver will be confusing as we work quite low-level with GPUs and FPGAs. +//! +//! Unfortunately the "Engine" name is used in bn256 for pairings. +//! Fortunately a ZalEngine is only used in the prover (at least for now) +//! while "pairing engine" is only used in the verifier +//! +//! Initialization design space +//! --------------------------- +//! +//! It is recommended that ZAL backends provide: +//! - an initialization function: +//! - either "fn new() -> ZalEngine" for simple libraries +//! - or a builder pattern for complex initializations +//! - a shutdown function or document when it is not needed (when it's a global threadpool like Rayon for example). +//! +//! Backends might want to add as an option: +//! - The number of threads (CPU) +//! - The device(s) to run on (multi-sockets machines, multi-GPUs machines, ...) +//! - The curve (JIT-compiled backend) +//! +//! Descriptors +//! --------------------------- +//! +//! Descriptors enable providers to configure opaque details on data +//! when doing repeated computations with the same input(s). +//! For example: +//! - Pointer(s) caching to limit data movement between CPU and GPU, FPGAs +//! - Length of data +//! - data in layout: +//! - canonical or Montgomery fields, unsaturated representation, endianness +//! - jacobian or projective coordinates or maybe even Twisted Edwards for faster elliptic curve additions, +//! - FFT: canonical or bit-reversed permuted +//! - data out layout +//! - Device(s) ID +//! +//! For resources that need special cleanup like GPU memory, a custom `Drop` is required. +//! +//! Note that resources can also be stored in the engine in a hashmap +//! and an integer ID or a pointer can be opaquely given as a descriptor. 
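A minimal usage sketch of the engine API defined below (the helper msm_twice, the bn256 curve and the equal slice lengths are illustrative assumptions): initialize the engine through the builder, then reuse a cached base descriptor across repeated MSMs over the same points.

    use halo2_middleware::halo2curves::bn256::{Fr, G1Affine};
    use halo2_middleware::zal::impls::{H2cEngine, PlonkEngineConfig};
    use halo2_middleware::zal::traits::MsmAccel;

    // `scalars_a`, `scalars_b` and `bases` must all have the same length.
    fn msm_twice(scalars_a: &[Fr], scalars_b: &[Fr], bases: &[G1Affine]) {
        // Builder-style initialization, as recommended above.
        let engine = PlonkEngineConfig::new()
            .set_curve::<G1Affine>()
            .set_msm(H2cEngine::new())
            .build();

        // Preprocess the bases once and reuse the descriptor for every MSM
        // that shares them; for H2cEngine this is a cheap wrapper, while a
        // GPU backend could move the points to device memory here.
        let bases_desc = engine.msm_backend.get_base_descriptor(bases);
        let _p1 = engine.msm_backend.msm_with_cached_base(scalars_a, &bases_desc);
        let _p2 = engine.msm_backend.msm_with_cached_base(scalars_b, &bases_desc);
    }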
+ +// The ZK Accel Layer API +// --------------------------------------------------- +pub mod traits { + use halo2curves::CurveAffine; + + pub trait MsmAccel { + fn msm(&self, coeffs: &[C::Scalar], base: &[C]) -> C::Curve; + + // Caching API + // ------------------------------------------------- + // From here we propose an extended API + // that allows reusing coeffs and/or the base points + // + // This is inspired by CuDNN API (Nvidia GPU) + // and oneDNN API (CPU, OpenCL) https://docs.nvidia.com/deeplearning/cudnn/api/index.html#cudnn-ops-infer-so-opaque + // usage of descriptors + // + // https://github.com/oneapi-src/oneDNN/blob/master/doc/programming_model/basic_concepts.md + // + // Descriptors are opaque pointers that hold the input in a format suitable for the accelerator engine. + // They may be: + // - Input moved on accelerator device (only once for repeated calls) + // - Endianess conversion + // - Converting from Montgomery to Canonical form + // - Input changed from Projective to Jacobian coordinates or even to a Twisted Edwards curve. + // - other form of expensive preprocessing + type CoeffsDescriptor<'c>; + type BaseDescriptor<'b>; + + fn get_coeffs_descriptor<'c>(&self, coeffs: &'c [C::Scalar]) -> Self::CoeffsDescriptor<'c>; + fn get_base_descriptor<'b>(&self, base: &'b [C]) -> Self::BaseDescriptor<'b>; + + fn msm_with_cached_scalars( + &self, + coeffs: &Self::CoeffsDescriptor<'_>, + base: &[C], + ) -> C::Curve; + + fn msm_with_cached_base( + &self, + coeffs: &[C::Scalar], + base: &Self::BaseDescriptor<'_>, + ) -> C::Curve; + + fn msm_with_cached_inputs( + &self, + coeffs: &Self::CoeffsDescriptor<'_>, + base: &Self::BaseDescriptor<'_>, + ) -> C::Curve; + // Execute MSM according to descriptors + // Unsure of naming, msm_with_cached_inputs, msm_apply, msm_cached, msm_with_descriptors, ... 
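        // A sketch of how a hardware backend could use these hooks (GpuBases
        // is a hypothetical handle, not defined in this patch): make
        // `type BaseDescriptor<'b> = GpuBases;` own a device allocation,
        // upload the points once in `get_base_descriptor`, and release the
        // device memory with a custom `Drop`, as suggested in the module docs.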
+ } +} + +// ZAL using Halo2curves as a backend +// --------------------------------------------------- + +pub mod impls { + use std::marker::PhantomData; + + use crate::zal::traits::MsmAccel; + use halo2curves::msm::msm_best; + use halo2curves::CurveAffine; + + // Halo2curve Backend + // --------------------------------------------------- + #[derive(Default)] + pub struct H2cEngine; + + pub struct H2cMsmCoeffsDesc<'c, C: CurveAffine> { + raw: &'c [C::Scalar], + } + + pub struct H2cMsmBaseDesc<'b, C: CurveAffine> { + raw: &'b [C], + } + + impl H2cEngine { + pub fn new() -> Self { + Self {} + } + } + + impl MsmAccel for H2cEngine { + fn msm(&self, coeffs: &[C::Scalar], bases: &[C]) -> C::Curve { + msm_best(coeffs, bases) + } + + // Caching API + // ------------------------------------------------- + + type CoeffsDescriptor<'c> = H2cMsmCoeffsDesc<'c, C>; + type BaseDescriptor<'b> = H2cMsmBaseDesc<'b, C>; + + fn get_coeffs_descriptor<'c>(&self, coeffs: &'c [C::Scalar]) -> Self::CoeffsDescriptor<'c> { + // Do expensive device/library specific preprocessing here + Self::CoeffsDescriptor { raw: coeffs } + } + fn get_base_descriptor<'b>(&self, base: &'b [C]) -> Self::BaseDescriptor<'b> { + Self::BaseDescriptor { raw: base } + } + + fn msm_with_cached_scalars( + &self, + coeffs: &Self::CoeffsDescriptor<'_>, + base: &[C], + ) -> C::Curve { + msm_best(coeffs.raw, base) + } + + fn msm_with_cached_base( + &self, + coeffs: &[C::Scalar], + base: &Self::BaseDescriptor<'_>, + ) -> C::Curve { + msm_best(coeffs, base.raw) + } + + fn msm_with_cached_inputs( + &self, + coeffs: &Self::CoeffsDescriptor<'_>, + base: &Self::BaseDescriptor<'_>, + ) -> C::Curve { + msm_best(coeffs.raw, base.raw) + } + } + + // Backend-agnostic engine objects + // --------------------------------------------------- + #[derive(Debug)] + pub struct PlonkEngine> { + pub msm_backend: MsmEngine, + _marker: PhantomData, // compiler complains about unused C otherwise + } + + #[derive(Default)] + pub struct PlonkEngineConfig { + curve: PhantomData, + msm_backend: M, + } + + #[derive(Default)] + pub struct NoCurve; + + #[derive(Default)] + pub struct HasCurve(PhantomData); + + #[derive(Default)] + pub struct NoMsmEngine; + + pub struct HasMsmEngine>(M, PhantomData); + + impl PlonkEngineConfig { + pub fn new() -> PlonkEngineConfig { + Default::default() + } + + pub fn set_curve(self) -> PlonkEngineConfig, NoMsmEngine> { + Default::default() + } + + pub fn build_default() -> PlonkEngine { + PlonkEngine { + msm_backend: H2cEngine::new(), + _marker: Default::default(), + } + } + } + + impl PlonkEngineConfig, M> { + pub fn set_msm>( + self, + engine: MsmEngine, + ) -> PlonkEngineConfig, HasMsmEngine> { + // Copy all other parameters + let Self { curve, .. 
} = self; + // Return with modified MSM engine + PlonkEngineConfig { + curve, + msm_backend: HasMsmEngine(engine, Default::default()), + } + } + } + + impl> PlonkEngineConfig, HasMsmEngine> { + pub fn build(self) -> PlonkEngine { + PlonkEngine { + msm_backend: self.msm_backend.0, + _marker: Default::default(), + } + } + } +} + +// Testing +// --------------------------------------------------- + +#[cfg(test)] +mod test { + use crate::zal::impls::{H2cEngine, PlonkEngineConfig}; + use crate::zal::traits::MsmAccel; + use halo2curves::bn256::G1Affine; + use halo2curves::msm::msm_best; + use halo2curves::CurveAffine; + + use ark_std::{end_timer, start_timer}; + use ff::Field; + use group::{Curve, Group}; + use rand_core::SeedableRng; + use rand_xorshift::XorShiftRng; + + fn gen_points_scalars(k: usize) -> (Vec, Vec) { + let mut rng = XorShiftRng::seed_from_u64(3141592u64); + + let points = (0..1 << k) + .map(|_| C::Curve::random(&mut rng)) + .collect::>(); + let mut affine_points = vec![C::identity(); 1 << k]; + C::Curve::batch_normalize(&points[..], &mut affine_points[..]); + let points = affine_points; + + let scalars = (0..1 << k) + .map(|_| C::Scalar::random(&mut rng)) + .collect::>(); + + (points, scalars) + } + + fn run_msm_zal_default(points: &[C], scalars: &[C::Scalar], k: usize) { + let points = &points[..1 << k]; + let scalars = &scalars[..1 << k]; + + let t0 = start_timer!(|| format!("freestanding msm k={}", k)); + let e0 = msm_best(scalars, points); + end_timer!(t0); + + let engine = PlonkEngineConfig::build_default::(); + let t1 = start_timer!(|| format!("H2cEngine msm k={}", k)); + let e1 = engine.msm_backend.msm(scalars, points); + end_timer!(t1); + + assert_eq!(e0, e1); + + // Caching API + // ----------- + let t2 = start_timer!(|| format!("H2cEngine msm cached base k={}", k)); + let base_descriptor = engine.msm_backend.get_base_descriptor(points); + let e2 = engine + .msm_backend + .msm_with_cached_base(scalars, &base_descriptor); + end_timer!(t2); + assert_eq!(e0, e2); + + let t3 = start_timer!(|| format!("H2cEngine msm cached coeffs k={}", k)); + let coeffs_descriptor = engine.msm_backend.get_coeffs_descriptor(scalars); + let e3 = engine + .msm_backend + .msm_with_cached_scalars(&coeffs_descriptor, points); + end_timer!(t3); + assert_eq!(e0, e3); + + let t4 = start_timer!(|| format!("H2cEngine msm cached inputs k={}", k)); + let e4 = engine + .msm_backend + .msm_with_cached_inputs(&coeffs_descriptor, &base_descriptor); + end_timer!(t4); + assert_eq!(e0, e4); + } + + fn run_msm_zal_custom(points: &[C], scalars: &[C::Scalar], k: usize) { + let points = &points[..1 << k]; + let scalars = &scalars[..1 << k]; + + let t0 = start_timer!(|| format!("freestanding msm k={}", k)); + let e0 = msm_best(scalars, points); + end_timer!(t0); + + let engine = PlonkEngineConfig::new() + .set_curve::() + .set_msm(H2cEngine::new()) + .build(); + let t1 = start_timer!(|| format!("H2cEngine msm k={}", k)); + let e1 = engine.msm_backend.msm(scalars, points); + end_timer!(t1); + + assert_eq!(e0, e1); + + // Caching API + // ----------- + let t2 = start_timer!(|| format!("H2cEngine msm cached base k={}", k)); + let base_descriptor = engine.msm_backend.get_base_descriptor(points); + let e2 = engine + .msm_backend + .msm_with_cached_base(scalars, &base_descriptor); + end_timer!(t2); + + assert_eq!(e0, e2) + } + + #[test] + #[ignore] + fn test_performance_h2c_msm_zal() { + let (min_k, max_k) = (3, 14); + let (points, scalars) = gen_points_scalars::(max_k); + + for k in min_k..=max_k { + let points = 
&points[..1 << k]; + let scalars = &scalars[..1 << k]; + + run_msm_zal_default(points, scalars, k); + run_msm_zal_custom(points, scalars, k); + } + } + + #[test] + fn test_msm_zal() { + const MSM_SIZE: usize = 12; + let (points, scalars) = gen_points_scalars::(MSM_SIZE); + run_msm_zal_default(&points, &scalars, MSM_SIZE); + run_msm_zal_custom(&points, &scalars, MSM_SIZE); + } +} diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml index e340407dd6..ba3e410436 100644 --- a/halo2_proofs/Cargo.toml +++ b/halo2_proofs/Cargo.toml @@ -1,20 +1,21 @@ [package] name = "halo2_proofs" -version = "0.3.0" +version = "0.4.0" authors = [ "Sean Bowe ", "Ying Tong Lai ", "Daira Hopwood ", "Jack Grigg ", + "Privacy Scaling Explorations team", ] edition = "2021" -rust-version = "1.66.0" +rust-version = "1.73.0" description = """ Fast PLONK-based zero-knowledge proving system with no trusted setup """ license = "MIT OR Apache-2.0" -repository = "https://github.com/zcash/halo2" -documentation = "https://docs.rs/halo2_proofs" +repository = "https://github.com/privacy-scaling-explorations/halo2" +documentation = "https://privacy-scaling-explorations.github.io/halo2/" readme = "README.md" categories = ["cryptography"] keywords = ["halo", "proofs", "zkp", "zkSNARKs"] @@ -23,10 +24,6 @@ keywords = ["halo", "proofs", "zkp", "zkSNARKs"] all-features = true rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] -[[bench]] -name = "arithmetic" -harness = false - [[bench]] name = "commit_zk" harness = false @@ -43,59 +40,52 @@ harness = false name = "dev_lookup" harness = false -[[bench]] -name = "fft" -harness = false - [dependencies] -backtrace = { version = "0.3", optional = true } +halo2_middleware = { path = "../halo2_middleware" } +halo2_backend = { path = "../halo2_backend", default-features = false } +halo2_frontend = { path = "../halo2_frontend", default-features = false } +halo2curves = { version = "0.7.0", default-features = false } +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +plotters = { version = "0.3.0", default-features = false, optional = true } +group = "0.13" + +[dev-dependencies] ff = "0.13" group = "0.13" -halo2curves = { version = "0.6.0", default-features = false } -rand_core = { version = "0.6", default-features = false } tracing = "0.1" -blake2b_simd = "1" # MSRV 1.66.0 -sha3 = "0.9.1" rand_chacha = "0.3" -serde = { version = "1", optional = true, features = ["derive"] } -serde_derive = { version = "1", optional = true} rayon = "1.8" - -# Developer tooling dependencies -plotters = { version = "0.3.0", default-features = false, optional = true } -tabbycat = { version = "0.1", features = ["attributes"], optional = true } - -# Legacy circuit compatibility -halo2_legacy_pdqsort = { version = "0.1.0", optional = true } - -[dev-dependencies] assert_matches = "1.5" criterion = "0.3" gumdrop = "0.8" proptest = "1" -rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +dhat = "0.3.2" serde_json = "1" +halo2_debug = { path = "../halo2_debug" } [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies] getrandom = { version = "0.2", features = ["js"] } [features] -default = ["batch", "bits"] -dev-graph = ["plotters", "tabbycat"] +default = ["batch", "bits", "halo2_frontend/default", "halo2_backend/default", "lookup-any-sanity-checks"] +dev-graph = ["halo2_frontend/dev-graph", "plotters"] test-dev-graph = [ + "halo2_frontend/test-dev-graph", "dev-graph", "plotters/bitmap_backend", 
"plotters/bitmap_encoder", - "plotters/ttf", + "plotters/ttf" ] -bits = ["halo2curves/bits"] -gadget-traces = ["backtrace"] -thread-safe-region = [] -sanity-checks = [] -batch = ["rand_core/getrandom"] -circuit-params = [] -cost-estimator = ["serde", "serde_derive"] -derive_serde = ["halo2curves/derive_serde"] +bits = ["halo2curves/bits", "halo2_frontend/bits", "halo2_backend/bits"] +gadget-traces = ["halo2_frontend/gadget-traces"] +thread-safe-region = ["halo2_frontend/thread-safe-region"] +sanity-checks = ["halo2_backend/sanity-checks"] +batch = ["rand_core/getrandom", "halo2_backend/batch"] +circuit-params = ["halo2_frontend/circuit-params"] +cost-estimator = ["halo2_frontend/cost-estimator"] +derive_serde = ["halo2curves/derive_serde", "halo2_frontend/derive_serde", "halo2_backend/derive_serde"] +vector-tests = [] +lookup-any-sanity-checks = ["halo2_frontend/lookup-any-sanity-checks"] [lib] bench = false diff --git a/halo2_proofs/README.md b/halo2_proofs/README.md deleted file mode 100644 index bdb9a63639..0000000000 --- a/halo2_proofs/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# halo2_proofs [![Crates.io](https://img.shields.io/crates/v/halo2_proofs.svg)](https://crates.io/crates/halo2_proofs) # - -## [Documentation](https://docs.rs/halo2_proofs) - -## Minimum Supported Rust Version - -Requires Rust **1.65.0** or higher. - -Minimum supported Rust version can be changed in the future, but it will be done with a -minor version bump. - -## Controlling parallelism - -`halo2_proofs` currently uses [rayon](https://github.com/rayon-rs/rayon) for parallel -computation. The `RAYON_NUM_THREADS` environment variable can be used to set the number of -threads. - -When compiling to WASM-targets, notice that since version `1.7`, `rayon` will fallback automatically (with no need to handle features) to require `getrandom` in order to be able to work. For more info related to WASM-compilation. - -See: [Rayon: Usage with WebAssembly](https://github.com/rayon-rs/rayon#usage-with-webassembly) for more - -## License - -Licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally -submitted for inclusion in the work by you, as defined in the Apache-2.0 -license, shall be dual licensed as above, without any additional terms or -conditions. 
diff --git a/halo2_proofs/benches/arithmetic.rs b/halo2_proofs/benches/arithmetic.rs deleted file mode 100644 index 4ae88af137..0000000000 --- a/halo2_proofs/benches/arithmetic.rs +++ /dev/null @@ -1,38 +0,0 @@ -#[macro_use] -extern crate criterion; - -use crate::arithmetic::small_multiexp; -use crate::halo2curves::pasta::{EqAffine, Fp}; -use group::ff::Field; -use halo2_proofs::*; - -use halo2_proofs::poly::{commitment::ParamsProver, ipa::commitment::ParamsIPA}; - -use criterion::{black_box, Criterion}; -use rand_core::OsRng; - -fn criterion_benchmark(c: &mut Criterion) { - let rng = OsRng; - - // small multiexp - { - let params: ParamsIPA = ParamsIPA::new(5); - let g = &mut params.get_g().to_vec(); - let len = g.len() / 2; - let (g_lo, g_hi) = g.split_at_mut(len); - - let coeff_1 = Fp::random(rng); - let coeff_2 = Fp::random(rng); - - c.bench_function("double-and-add", |b| { - b.iter(|| { - for (g_lo, g_hi) in g_lo.iter().zip(g_hi.iter()) { - small_multiexp(&[black_box(coeff_1), black_box(coeff_2)], &[*g_lo, *g_hi]); - } - }) - }); - } -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/halo2_proofs/benches/dev_lookup.rs b/halo2_proofs/benches/dev_lookup.rs index 569ffd1019..05ff08da3d 100644 --- a/halo2_proofs/benches/dev_lookup.rs +++ b/halo2_proofs/benches/dev_lookup.rs @@ -56,7 +56,7 @@ fn criterion_benchmark(c: &mut Criterion) { &self, config: MyConfig, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { layouter.assign_table( || "8-bit table", |mut table| { diff --git a/halo2_proofs/benches/fft.rs b/halo2_proofs/benches/fft.rs deleted file mode 100644 index 0de72a0380..0000000000 --- a/halo2_proofs/benches/fft.rs +++ /dev/null @@ -1,26 +0,0 @@ -#[macro_use] -extern crate criterion; - -use crate::arithmetic::best_fft; -use group::ff::Field; -use halo2_proofs::*; -use halo2curves::pasta::Fp; - -use criterion::{BenchmarkId, Criterion}; -use rand_core::OsRng; - -fn criterion_benchmark(c: &mut Criterion) { - let mut group = c.benchmark_group("fft"); - for k in 3..19 { - group.bench_function(BenchmarkId::new("k", k), |b| { - let mut a = (0..(1 << k)).map(|_| Fp::random(OsRng)).collect::>(); - let omega = Fp::random(OsRng); // would be weird if this mattered - b.iter(|| { - best_fft(&mut a, omega, k as u32); - }); - }); - } -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/halo2_proofs/benches/plonk.rs b/halo2_proofs/benches/plonk.rs index 9c9bd2618a..ce03d5171e 100644 --- a/halo2_proofs/benches/plonk.rs +++ b/halo2_proofs/benches/plonk.rs @@ -28,6 +28,7 @@ use criterion::{BenchmarkId, Criterion}; fn criterion_benchmark(c: &mut Criterion) { /// This represents an advice column at a certain row in the ConstraintSystem #[derive(Copy, Clone, Debug)] + #[allow(dead_code)] pub struct Variable(Column, usize); #[derive(Clone)] @@ -47,17 +48,22 @@ fn criterion_benchmark(c: &mut Criterion) { &self, layouter: &mut impl Layouter, f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; fn raw_add( &self, layouter: &mut impl Layouter, f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; - fn copy(&self, layouter: &mut impl Layouter, a: Cell, b: Cell) -> Result<(), Error>; + fn copy( + &self, + layouter: &mut impl Layouter, + a: Cell, + b: Cell, + ) -> Result<(), ErrorFront>; } 
#[derive(Clone)] @@ -85,7 +91,7 @@ fn criterion_benchmark(c: &mut Criterion) { &self, layouter: &mut impl Layouter, mut f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>, { @@ -127,7 +133,7 @@ fn criterion_benchmark(c: &mut Criterion) { &self, layouter: &mut impl Layouter, mut f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>, { @@ -175,7 +181,7 @@ fn criterion_benchmark(c: &mut Criterion) { layouter: &mut impl Layouter, left: Cell, right: Cell, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { layouter.assign_region(|| "copy", |mut region| region.constrain_equal(left, right)) } } @@ -237,7 +243,7 @@ fn criterion_benchmark(c: &mut Criterion) { &self, config: PlonkConfig, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let cs = StandardPlonk::new(config); for _ in 0..((1 << (self.k - 1)) - 3) { @@ -285,7 +291,7 @@ fn criterion_benchmark(c: &mut Criterion) { params, pk, &[circuit], - &[&[]], + &[vec![]], rng, &mut transcript, ) @@ -296,7 +302,7 @@ fn criterion_benchmark(c: &mut Criterion) { fn verifier(params: &ParamsIPA, vk: &VerifyingKey, proof: &[u8]) { let strategy = SingleStrategy::new(params); let mut transcript = Blake2bRead::<_, _, Challenge255<_>>::init(proof); - assert!(verify_proof(params, vk, strategy, &[&[]], &mut transcript).is_ok()); + assert!(verify_proof(params, vk, strategy, &[vec![]], &mut transcript).is_ok()); } let k_range = 8..=16; diff --git a/halo2_proofs/examples/circuit-layout.rs b/halo2_proofs/examples/circuit-layout.rs index b65adf5599..3127faa1a9 100644 --- a/halo2_proofs/examples/circuit-layout.rs +++ b/halo2_proofs/examples/circuit-layout.rs @@ -1,7 +1,7 @@ use ff::Field; use halo2_proofs::{ circuit::{Cell, Layouter, Region, SimpleFloorPlanner, Value}, - plonk::{Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Fixed, TableColumn}, + plonk::{Advice, Assigned, Circuit, Column, ConstraintSystem, ErrorFront, Fixed, TableColumn}, poly::Rotation, }; use halo2curves::pasta::Fp; @@ -10,6 +10,7 @@ use std::marker::PhantomData; /// This represents an advice column at a certain row in the ConstraintSystem #[derive(Copy, Clone, Debug)] +#[allow(dead_code)] pub struct Variable(Column, usize); #[derive(Clone)] @@ -28,14 +29,22 @@ struct PlonkConfig { } trait StandardCs { - fn raw_multiply(&self, region: &mut Region, f: F) -> Result<(Cell, Cell, Cell), Error> + fn raw_multiply( + &self, + region: &mut Region, + f: F, + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; - fn raw_add(&self, region: &mut Region, f: F) -> Result<(Cell, Cell, Cell), Error> + fn raw_add(&self, region: &mut Region, f: F) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; - fn copy(&self, region: &mut Region, a: Cell, b: Cell) -> Result<(), Error>; - fn lookup_table(&self, layouter: &mut impl Layouter, values: &[FF]) -> Result<(), Error>; + fn copy(&self, region: &mut Region, a: Cell, b: Cell) -> Result<(), ErrorFront>; + fn lookup_table( + &self, + layouter: &mut impl Layouter, + values: &[FF], + ) -> Result<(), ErrorFront>; } struct MyCircuit { @@ -62,7 +71,7 @@ impl StandardCs for StandardPlonk { &self, region: &mut Region, mut f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> 
Value<(Assigned, Assigned, Assigned)>, { @@ -99,7 +108,11 @@ impl StandardCs for StandardPlonk { region.assign_fixed(|| "a * b", self.config.sm, 0, || Value::known(FF::ONE))?; Ok((lhs.cell(), rhs.cell(), out.cell())) } - fn raw_add(&self, region: &mut Region, mut f: F) -> Result<(Cell, Cell, Cell), Error> + fn raw_add( + &self, + region: &mut Region, + mut f: F, + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>, { @@ -136,10 +149,14 @@ impl StandardCs for StandardPlonk { region.assign_fixed(|| "a * b", self.config.sm, 0, || Value::known(FF::ZERO))?; Ok((lhs.cell(), rhs.cell(), out.cell())) } - fn copy(&self, region: &mut Region, left: Cell, right: Cell) -> Result<(), Error> { + fn copy(&self, region: &mut Region, left: Cell, right: Cell) -> Result<(), ErrorFront> { region.constrain_equal(left, right) } - fn lookup_table(&self, layouter: &mut impl Layouter, values: &[FF]) -> Result<(), Error> { + fn lookup_table( + &self, + layouter: &mut impl Layouter, + values: &[FF], + ) -> Result<(), ErrorFront> { layouter.assign_table( || "", |mut table| { @@ -240,7 +257,11 @@ impl Circuit for MyCircuit { } } - fn synthesize(&self, config: PlonkConfig, mut layouter: impl Layouter) -> Result<(), Error> { + fn synthesize( + &self, + config: PlonkConfig, + mut layouter: impl Layouter, + ) -> Result<(), ErrorFront> { let cs = StandardPlonk::new(config); for i in 0..10 { diff --git a/halo2_proofs/examples/proof-size.rs b/halo2_proofs/examples/proof-size.rs index 3d5b242fb0..46f0fbd6da 100644 --- a/halo2_proofs/examples/proof-size.rs +++ b/halo2_proofs/examples/proof-size.rs @@ -1,7 +1,7 @@ use ff::Field; use halo2_proofs::{ circuit::{Layouter, SimpleFloorPlanner, Value}, - plonk::{Advice, Circuit, Column, ConstraintSystem, Error}, + plonk::{Advice, Circuit, Column, ConstraintSystem, ErrorFront}, }; use halo2curves::pasta::Fp; @@ -47,7 +47,11 @@ impl Circuit for TestCircuit { config } - fn synthesize(&self, config: MyConfig, mut layouter: impl Layouter) -> Result<(), Error> { + fn synthesize( + &self, + config: MyConfig, + mut layouter: impl Layouter, + ) -> Result<(), ErrorFront> { layouter.assign_table( || "8-bit table", |mut table| { @@ -89,7 +93,7 @@ fn main() { let circuit = TestCircuit {}; let model = from_circuit_to_model_circuit::<_, _, 56, 56>( - K, + Some(K), &circuit, vec![], CommitmentScheme::KZGGWC, diff --git a/halo2_proofs/examples/simple-example.rs b/halo2_proofs/examples/simple-example.rs index 242257a692..a938065d57 100644 --- a/halo2_proofs/examples/simple-example.rs +++ b/halo2_proofs/examples/simple-example.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use halo2_proofs::{ arithmetic::Field, circuit::{AssignedCell, Chip, Layouter, Region, SimpleFloorPlanner, Value}, - plonk::{Advice, Circuit, Column, ConstraintSystem, Error, Fixed, Instance, Selector}, + plonk::{Advice, Circuit, Column, ConstraintSystem, ErrorFront, Fixed, Instance, Selector}, poly::Rotation, }; @@ -13,10 +13,18 @@ trait NumericInstructions: Chip { type Num; /// Loads a number into the circuit as a private input. - fn load_private(&self, layouter: impl Layouter, a: Value) -> Result; + fn load_private( + &self, + layouter: impl Layouter, + a: Value, + ) -> Result; /// Loads a number into the circuit as a fixed constant. - fn load_constant(&self, layouter: impl Layouter, constant: F) -> Result; + fn load_constant( + &self, + layouter: impl Layouter, + constant: F, + ) -> Result; /// Returns `c = a * b`. 
fn mul( @@ -24,7 +32,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result; + ) -> Result; /// Exposes a number as a public input to the circuit. fn expose_public( @@ -32,7 +40,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, num: Self::Num, row: usize, - ) -> Result<(), Error>; + ) -> Result<(), ErrorFront>; } // ANCHOR_END: instructions @@ -152,7 +160,7 @@ impl NumericInstructions for FieldChip { &self, mut layouter: impl Layouter, value: Value, - ) -> Result { + ) -> Result { let config = self.config(); layouter.assign_region( @@ -169,7 +177,7 @@ impl NumericInstructions for FieldChip { &self, mut layouter: impl Layouter, constant: F, - ) -> Result { + ) -> Result { let config = self.config(); layouter.assign_region( @@ -187,7 +195,7 @@ impl NumericInstructions for FieldChip { mut layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result { + ) -> Result { let config = self.config(); layouter.assign_region( @@ -223,7 +231,7 @@ impl NumericInstructions for FieldChip { mut layouter: impl Layouter, num: Self::Num, row: usize, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let config = self.config(); layouter.constrain_instance(num.0.cell(), config.instance, row) @@ -272,7 +280,7 @@ impl Circuit for MyCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let field_chip = FieldChip::::construct(config); // Load our private values into the circuit. diff --git a/halo2_proofs/examples/two-chip.rs b/halo2_proofs/examples/two-chip.rs index 336f9c4957..b0b614cb60 100644 --- a/halo2_proofs/examples/two-chip.rs +++ b/halo2_proofs/examples/two-chip.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use halo2_proofs::{ arithmetic::Field, circuit::{AssignedCell, Chip, Layouter, Region, SimpleFloorPlanner, Value}, - plonk::{Advice, Circuit, Column, ConstraintSystem, Error, Instance, Selector}, + plonk::{Advice, Circuit, Column, ConstraintSystem, ErrorFront, Instance, Selector}, poly::Rotation, }; @@ -21,7 +21,7 @@ trait FieldInstructions: AddInstructions + MulInstructions { &self, layouter: impl Layouter, a: Value, - ) -> Result<>::Num, Error>; + ) -> Result<>::Num, ErrorFront>; /// Returns `d = (a + b) * c`. fn add_and_mul( @@ -30,7 +30,7 @@ trait FieldInstructions: AddInstructions + MulInstructions { a: >::Num, b: >::Num, c: >::Num, - ) -> Result<>::Num, Error>; + ) -> Result<>::Num, ErrorFront>; /// Exposes a number as a public input to the circuit. 
fn expose_public( @@ -38,7 +38,7 @@ trait FieldInstructions: AddInstructions + MulInstructions { layouter: impl Layouter, num: >::Num, row: usize, - ) -> Result<(), Error>; + ) -> Result<(), ErrorFront>; } // ANCHOR_END: field-instructions @@ -53,7 +53,7 @@ trait AddInstructions: Chip { layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result; + ) -> Result; } // ANCHOR_END: add-instructions @@ -68,7 +68,7 @@ trait MulInstructions: Chip { layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result; + ) -> Result; } // ANCHOR_END: mul-instructions @@ -181,7 +181,7 @@ impl AddInstructions for FieldChip { layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result { + ) -> Result { let config = self.config().add_config.clone(); let add_chip = AddChip::::construct(config, ()); @@ -197,7 +197,7 @@ impl AddInstructions for AddChip { mut layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result { + ) -> Result { let config = self.config(); layouter.assign_region( @@ -303,7 +303,7 @@ impl MulInstructions for FieldChip { layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result { + ) -> Result { let config = self.config().mul_config.clone(); let mul_chip = MulChip::::construct(config, ()); mul_chip.mul(layouter, a, b) @@ -318,7 +318,7 @@ impl MulInstructions for MulChip { mut layouter: impl Layouter, a: Self::Num, b: Self::Num, - ) -> Result { + ) -> Result { let config = self.config(); layouter.assign_region( @@ -403,7 +403,7 @@ impl FieldInstructions for FieldChip { &self, mut layouter: impl Layouter, value: Value, - ) -> Result<>::Num, Error> { + ) -> Result<>::Num, ErrorFront> { let config = self.config(); layouter.assign_region( @@ -423,7 +423,7 @@ impl FieldInstructions for FieldChip { a: >::Num, b: >::Num, c: >::Num, - ) -> Result<>::Num, Error> { + ) -> Result<>::Num, ErrorFront> { let ab = self.add(layouter.namespace(|| "a + b"), a, b)?; self.mul(layouter.namespace(|| "(a + b) * c"), ab, c) } @@ -433,7 +433,7 @@ impl FieldInstructions for FieldChip { mut layouter: impl Layouter, num: >::Num, row: usize, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let config = self.config(); layouter.constrain_instance(num.0.cell(), config.instance, row) @@ -479,7 +479,7 @@ impl Circuit for MyCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let field_chip = FieldChip::::construct(config, ()); // Load our private values into the circuit. diff --git a/halo2_proofs/examples/vector-mul.rs b/halo2_proofs/examples/vector-mul.rs index 01728fdf36..c5e4dc577c 100644 --- a/halo2_proofs/examples/vector-mul.rs +++ b/halo2_proofs/examples/vector-mul.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use halo2_proofs::{ arithmetic::Field, circuit::{AssignedCell, Chip, Layouter, Region, SimpleFloorPlanner, Value}, - plonk::{Advice, Circuit, Column, ConstraintSystem, Error, Instance, Selector}, + plonk::{Advice, Circuit, Column, ConstraintSystem, ErrorFront, Instance, Selector}, poly::Rotation, }; @@ -17,7 +17,7 @@ trait NumericInstructions: Chip { &self, layouter: impl Layouter, a: &[Value], - ) -> Result, Error>; + ) -> Result, ErrorFront>; /// Returns `c = a * b`. The caller is responsible for ensuring that `a.len() == b.len()`. fn mul( @@ -25,7 +25,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error>; + ) -> Result, ErrorFront>; /// Exposes a number as a public input to the circuit. 
fn expose_public( @@ -33,7 +33,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, num: &Self::Num, row: usize, - ) -> Result<(), Error>; + ) -> Result<(), ErrorFront>; } // ANCHOR_END: instructions @@ -150,7 +150,7 @@ impl NumericInstructions for FieldChip { &self, mut layouter: impl Layouter, values: &[Value], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); layouter.assign_region( @@ -174,7 +174,7 @@ impl NumericInstructions for FieldChip { mut layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); assert_eq!(a.len(), b.len()); @@ -208,7 +208,7 @@ impl NumericInstructions for FieldChip { mut layouter: impl Layouter, num: &Self::Num, row: usize, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let config = self.config(); layouter.constrain_instance(num.0.cell(), config.instance, row) @@ -257,7 +257,7 @@ impl Circuit for MyCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let field_chip = FieldChip::::construct(config); // Load our private values into the circuit. diff --git a/halo2_proofs/katex-header.html b/halo2_proofs/katex-header.html deleted file mode 100644 index 98e85904fa..0000000000 --- a/halo2_proofs/katex-header.html +++ /dev/null @@ -1,15 +0,0 @@ - - - - \ No newline at end of file diff --git a/halo2_proofs/src/lib.rs b/halo2_proofs/src/lib.rs index acc26aff15..03386ea0e8 100644 --- a/halo2_proofs/src/lib.rs +++ b/halo2_proofs/src/lib.rs @@ -1,4 +1,7 @@ -//! # halo2_proofs +//! Legacy halo2 API that wraps the frontend-backend split API. This crate doesn't implement any +//! core functionality, it just imports from the other crates and offers the legacy API in the same +//! module structure so that projects depending on halo2 can update their dependency towards it +//! without breaking. #![cfg_attr(docsrs, feature(doc_cfg))] // The actual lints we want to disable. @@ -8,14 +11,53 @@ #![deny(missing_docs)] #![deny(unsafe_code)] -pub mod arithmetic; -pub mod circuit; -pub use halo2curves; -mod multicore; pub mod plonk; -pub mod poly; -pub mod transcript; -pub mod dev; -mod helpers; -pub use helpers::SerdeFormat; +/// Traits and structs for implementing circuit components. +pub mod circuit { + pub use halo2_frontend::circuit::floor_planner; + pub use halo2_frontend::circuit::{ + AssignedCell, Cell, Chip, Layouter, NamespacedLayouter, Region, RegionIndex, + SimpleFloorPlanner, Table, Value, + }; +} +/// This module provides common utilities, traits and structures for group, +/// field and polynomial arithmetic. +pub mod arithmetic { + pub use halo2_backend::arithmetic::{parallelize, CurveAffine, CurveExt, Field}; +} +/// Tools for developing circuits. +pub mod dev { + pub use halo2_frontend::dev::{ + metadata, CellValue, FailureLocation, InstanceValue, MockProver, VerifyFailure, + }; + + #[cfg(feature = "cost-estimator")] + pub use halo2_frontend::dev::cost_model; + + #[cfg(feature = "dev-graph")] + pub use halo2_frontend::dev::{circuit_dot_graph, CircuitLayout}; +} +/// Contains utilities for performing arithmetic over univariate polynomials in +/// various forms, including computing commitments to them and provably opening +/// the committed polynomials at arbitrary points. 
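The point of these wrapper modules is that downstream circuits keep compiling against halo2_proofs unchanged; a minimal sketch (MyCircuit, MyConfig and the pasta field are placeholders, assuming default features), with synthesize now returning ErrorFront as in the updated examples above:

    use halo2_proofs::circuit::{Layouter, SimpleFloorPlanner};
    use halo2_proofs::halo2curves::pasta::Fp;
    use halo2_proofs::plonk::{Advice, Circuit, Column, ConstraintSystem, ErrorFront};

    #[derive(Clone)]
    struct MyConfig {
        advice: Column<Advice>,
    }

    #[derive(Default)]
    struct MyCircuit;

    impl Circuit<Fp> for MyCircuit {
        type Config = MyConfig;
        type FloorPlanner = SimpleFloorPlanner;

        fn without_witnesses(&self) -> Self {
            Self
        }

        fn configure(meta: &mut ConstraintSystem<Fp>) -> Self::Config {
            MyConfig {
                advice: meta.advice_column(),
            }
        }

        // The only visible change for most circuits: ErrorFront instead of Error.
        fn synthesize(
            &self,
            _config: Self::Config,
            _layouter: impl Layouter<Fp>,
        ) -> Result<(), ErrorFront> {
            Ok(())
        }
    }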
+pub mod poly { + pub use halo2_backend::poly::VerificationStrategy; + pub use halo2_backend::poly::{commitment, ipa, kzg, EvaluationDomain}; + pub use halo2_middleware::poly::Rotation; +} +/// This module contains utilities and traits for dealing with Fiat-Shamir +/// transcripts. +pub mod transcript { + pub use halo2_backend::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Transcript, TranscriptRead, + TranscriptReadBuffer, TranscriptWrite, TranscriptWriterBuffer, + }; +} +mod helpers { + pub use halo2_backend::helpers::SerdeFormat; +} + +pub use crate::helpers::SerdeFormat; + +pub use halo2curves; diff --git a/halo2_proofs/src/plonk.rs b/halo2_proofs/src/plonk.rs index 78bfc21501..c181afc554 100644 --- a/halo2_proofs/src/plonk.rs +++ b/halo2_proofs/src/plonk.rs @@ -5,486 +5,114 @@ //! [halo]: https://eprint.iacr.org/2019/1021 //! [plonk]: https://eprint.iacr.org/2019/953 -use blake2b_simd::Params as Blake2bParams; -use group::ff::{Field, FromUniformBytes, PrimeField}; - -use crate::arithmetic::CurveAffine; -use crate::helpers::{ - polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine, - SerdePrimeField, -}; -use crate::poly::{ - Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain, - Polynomial, -}; -use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript}; -use crate::SerdeFormat; - -mod assigned; -mod circuit; mod error; -mod evaluation; mod keygen; -mod lookup; -pub mod permutation; -mod shuffle; -mod vanishing; - mod prover; -mod verifier; +mod verifier { + pub use halo2_backend::plonk::verifier::verify_proof; +} -pub use assigned::*; -pub use circuit::*; -pub use error::*; -pub use keygen::*; -pub use prover::*; -pub use verifier::*; +use halo2_frontend::circuit::compile_circuit; +pub use keygen::{keygen_pk, keygen_pk_custom, keygen_vk, keygen_vk_custom}; -use evaluation::Evaluator; -use std::io; +pub use prover::{create_proof, create_proof_with_engine}; +pub use verifier::verify_proof; -/// This is a verifying key which allows for the verification of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct VerifyingKey { - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - /// Cached maximum degree of `cs` (which doesn't change after construction). - cs_degree: usize, - /// The representative of this `VerifyingKey` in transcripts. - transcript_repr: C::Scalar, - selectors: Vec>, - /// Whether selector compression is turned on or not. - compress_selectors: bool, -} +pub use error::Error; +pub use halo2_backend::plonk::{Error as ErrorBack, ProvingKey, VerifyingKey}; +pub use halo2_frontend::plonk::{ + Advice, Assigned, Assignment, Challenge, Circuit, Column, ColumnType, Constraint, + ConstraintSystem, Constraints, Error as ErrorFront, Expression, FirstPhase, Fixed, FixedQuery, + FloorPlanner, Instance, Phase, SecondPhase, Selector, TableColumn, ThirdPhase, VirtualCells, +}; +pub use halo2_middleware::circuit::{Any, ConstraintSystemMid}; -// Current version of the VK -const VERSION: u8 = 0x03; +use group::ff::FromUniformBytes; +use halo2_backend::helpers::{SerdeCurveAffine, SerdeFormat, SerdePrimeField}; +use std::io; -impl VerifyingKey +/// Reads a verification key from a buffer without compressed selectors. +/// +/// Reads a curve element from the buffer and parses it according to the `format`: +/// - `Processed`: Reads a compressed curve element and decompresses it. 
+/// Reads a field element in standard form, with endianness specified by the +/// `PrimeField` implementation, and checks that the element is less than the modulus. +/// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. +/// Checks that field elements are less than modulus, and then checks that the point is on the curve. +/// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; +/// does not perform any checks +pub fn vk_read>( + reader: &mut R, + format: SerdeFormat, + #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, +) -> io::Result> where C::Scalar: SerdePrimeField + FromUniformBytes<64>, { - /// Writes a verifying key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - // Version byte that will be checked on read. - writer.write_all(&[VERSION])?; - let k = &self.domain.k(); - assert!(*k <= C::Scalar::S); - // k value fits in 1 byte - writer.write_all(&[*k as u8])?; - writer.write_all(&[self.compress_selectors as u8])?; - writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?; - for commitment in &self.fixed_commitments { - commitment.write(writer, format)?; - } - self.permutation.write(writer, format)?; - - if !self.compress_selectors { - assert!(self.selectors.is_empty()); - } - // write self.selectors - for selector in &self.selectors { - // since `selector` is filled with `bool`, we pack them 8 at a time into bytes and then write - for bits in selector.chunks(8) { - writer.write_all(&[crate::helpers::pack(bits)])?; - } - } - Ok(()) - } - - /// Reads a verification key from a buffer. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
- /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let mut version_byte = [0u8; 1]; - reader.read_exact(&mut version_byte)?; - if VERSION != version_byte[0] { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected version byte", - )); - } - - let mut k = [0u8; 1]; - reader.read_exact(&mut k)?; - let k = u8::from_le_bytes(k); - if k as u32 > C::Scalar::S { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - format!( - "circuit size value (k): {} exceeds maxium: {}", - k, - C::Scalar::S - ), - )); - } - let mut compress_selectors = [0u8; 1]; - reader.read_exact(&mut compress_selectors)?; - if compress_selectors[0] != 0 && compress_selectors[0] != 1 { - return Err(io::Error::new( - io::ErrorKind::InvalidData, - "unexpected compress_selectors not boolean", - )); - } - let compress_selectors = compress_selectors[0] == 1; - let (domain, cs, _) = keygen::create_domain::( - k as u32, - #[cfg(feature = "circuit-params")] - params, - ); - let mut num_fixed_columns = [0u8; 4]; - reader.read_exact(&mut num_fixed_columns)?; - let num_fixed_columns = u32::from_le_bytes(num_fixed_columns); - - let fixed_commitments: Vec<_> = (0..num_fixed_columns) - .map(|_| C::read(reader, format)) - .collect::>()?; - - let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?; - - let (cs, selectors) = if compress_selectors { - // read selectors - let selectors: Vec> = vec![vec![false; 1 << k]; cs.num_selectors] - .into_iter() - .map(|mut selector| { - let mut selector_bytes = vec![0u8; (selector.len() + 7) / 8]; - reader.read_exact(&mut selector_bytes)?; - for (bits, byte) in selector.chunks_mut(8).zip(selector_bytes) { - crate::helpers::unpack(byte, bits); - } - Ok(selector) - }) - .collect::>()?; - let (cs, _) = cs.compress_selectors(selectors.clone()); - (cs, selectors) - } else { - // we still need to replace selectors with fixed Expressions in `cs` - let fake_selectors = vec![vec![]; cs.num_selectors]; - let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); - (cs, vec![]) - }; - - Ok(Self::from_parts( - domain, - fixed_commitments, - permutation, - cs, - selectors, - compress_selectors, - )) - } - - /// Writes a verifying key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a verification key from a slice of bytes using [`Self::read`]. 
- pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } + let mut cs = ConstraintSystem::default(); + #[cfg(feature = "circuit-params")] + ConcreteCircuit::configure_with_params(&mut cs, params); + #[cfg(not(feature = "circuit-params"))] + ConcreteCircuit::configure(&mut cs); + + // we still need to replace selectors with fixed Expressions in `cs` + let fake_selectors = vec![vec![]; cs.num_selectors()]; + let (cs, _) = cs.directly_convert_selectors_to_fixed(fake_selectors); + + let cs_mid: ConstraintSystemMid<_> = cs.into(); + VerifyingKey::read(reader, format, cs_mid.into()) } -impl VerifyingKey { - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - 10 + (self.fixed_commitments.len() * C::byte_length(format)) - + self.permutation.bytes_length(format) - + self.selectors.len() - * (self - .selectors - .get(0) - .map(|selector| (selector.len() + 7) / 8) - .unwrap_or(0)) - } - - fn from_parts( - domain: EvaluationDomain, - fixed_commitments: Vec, - permutation: permutation::VerifyingKey, - cs: ConstraintSystem, - selectors: Vec>, - compress_selectors: bool, - ) -> Self - where - C::ScalarExt: FromUniformBytes<64>, - { - // Compute cached values. - let cs_degree = cs.degree(); - - let mut vk = Self { - domain, - fixed_commitments, - permutation, - cs, - cs_degree, - // Temporary, this is not pinned. - transcript_repr: C::Scalar::ZERO, - selectors, - compress_selectors, - }; - - let mut hasher = Blake2bParams::new() - .hash_length(64) - .personal(b"Halo2-Verify-Key") - .to_state(); - - let s = format!("{:?}", vk.pinned()); - - hasher.update(&(s.len() as u64).to_le_bytes()); - hasher.update(s.as_bytes()); - - // Hash in final Blake2bState - vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array()); - - vk - } - - /// Hashes a verification key into a transcript. - pub fn hash_into, T: Transcript>( - &self, - transcript: &mut T, - ) -> io::Result<()> { - transcript.common_scalar(self.transcript_repr)?; - - Ok(()) - } - - /// Obtains a pinned representation of this verification key that contains - /// the minimal information necessary to reconstruct the verification key. - pub fn pinned(&self) -> PinnedVerificationKey<'_, C> { - PinnedVerificationKey { - base_modulus: C::Base::MODULUS, - scalar_modulus: C::Scalar::MODULUS, - domain: self.domain.pinned(), - fixed_commitments: &self.fixed_commitments, - permutation: &self.permutation, - cs: self.cs.pinned(), - } - } - - /// Returns commitments of fixed polynomials - pub fn fixed_commitments(&self) -> &Vec { - &self.fixed_commitments - } - - /// Returns `VerifyingKey` of permutation - pub fn permutation(&self) -> &permutation::VerifyingKey { - &self.permutation - } - - /// Returns `ConstraintSystem` - pub fn cs(&self) -> &ConstraintSystem { - &self.cs - } - - /// Returns representative of this `VerifyingKey` in transcripts - pub fn transcript_repr(&self) -> C::Scalar { - self.transcript_repr - } -} - -/// Minimal representation of a verification key that can be used to identify -/// its active contents. 
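To illustrate the new `vk_read` free function added above, here is a minimal sketch (editorial, not part of the diff) of deserializing a verifying key. It assumes the bn256 curve, the `RawBytes` serde format, a byte buffer previously produced by `VerifyingKey::write` without selector compression, and that the `circuit-params` feature is disabled; the helper name `load_vk` is illustrative.

use std::io::Cursor;

use halo2_proofs::halo2curves::bn256::{Fr, G1Affine};
use halo2_proofs::plonk::{vk_read, Circuit, VerifyingKey};
use halo2_proofs::SerdeFormat;

// Deserialize a verifying key for any circuit type over the bn256 scalar field.
// The circuit's `configure` is re-run internally to rebuild the constraint
// system before the backend key data is read.
fn load_vk<ConcreteCircuit: Circuit<Fr>>(
    vk_bytes: &[u8],
) -> std::io::Result<VerifyingKey<G1Affine>> {
    let mut reader = Cursor::new(vk_bytes);
    // Type parameters: curve, reader (inferred), circuit.
    vk_read::<G1Affine, _, ConcreteCircuit>(&mut reader, SerdeFormat::RawBytes)
}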
-#[allow(dead_code)] -#[derive(Debug)] -pub struct PinnedVerificationKey<'a, C: CurveAffine> { - base_modulus: &'static str, - scalar_modulus: &'static str, - domain: PinnedEvaluationDomain<'a, C::Scalar>, - cs: PinnedConstraintSystem<'a, C::Scalar>, - fixed_commitments: &'a Vec, - permutation: &'a permutation::VerifyingKey, -} -/// This is a proving key which allows for the creation of proofs for a -/// particular circuit. -#[derive(Clone, Debug)] -pub struct ProvingKey { - vk: VerifyingKey, - l0: Polynomial, - l_last: Polynomial, - l_active_row: Polynomial, - fixed_values: Vec>, - fixed_polys: Vec>, - fixed_cosets: Vec>, - permutation: permutation::ProvingKey, - ev: Evaluator, -} - -impl ProvingKey +/// Reads a verification key from a buffer with compressed selectors. +/// +/// Reads a curve element from the buffer and parses it according to the `format`: +/// - `Processed`: Reads a compressed curve element and decompresses it. +/// Reads a field element in standard form, with endianness specified by the +/// `PrimeField` implementation, and checks that the element is less than the modulus. +/// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. +/// Checks that field elements are less than modulus, and then checks that the point is on the curve. +/// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; +/// does not perform any checks +pub fn vk_read_compressed>( + reader: &mut R, + format: SerdeFormat, + k: u32, + circuit: &ConcreteCircuit, +) -> io::Result> where - C::Scalar: FromUniformBytes<64>, + C::Scalar: SerdePrimeField + FromUniformBytes<64>, { - /// Get the underlying [`VerifyingKey`]. - pub fn get_vk(&self) -> &VerifyingKey { - &self.vk - } + let (_, _, cs) = compile_circuit(k, circuit, true) + .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; - /// Gets the total number of bytes in the serialization of `self` - fn bytes_length(&self, format: SerdeFormat) -> usize - where - C: SerdeCurveAffine, - { - let scalar_len = C::Scalar::default().to_repr().as_ref().len(); - self.vk.bytes_length(format) - + 12 - + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len()) - + polynomial_slice_byte_length(&self.fixed_values) - + polynomial_slice_byte_length(&self.fixed_polys) - + polynomial_slice_byte_length(&self.fixed_cosets) - + self.permutation.bytes_length() - } + let cs_mid: ConstraintSystemMid<_> = cs.into(); + VerifyingKey::read(reader, format, cs_mid.into()) } -impl ProvingKey +/// Reads a proving key from a buffer. +/// Does so by reading verification key first, and then deserializing the rest of the file into the +/// remaining proving key data. +/// +/// Reads a curve element from the buffer and parses it according to the `format`: +/// - `Processed`: Reads a compressed curve element and decompresses it. +/// Reads a field element in standard form, with endianness specified by the +/// `PrimeField` implementation, and checks that the element is less than the modulus. +/// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. +/// Checks that field elements are less than modulus, and then checks that the point is on the curve. 
+/// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; +/// does not perform any checks +pub fn pk_read>( + reader: &mut R, + format: SerdeFormat, + k: u32, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> io::Result> where C::Scalar: SerdePrimeField + FromUniformBytes<64>, { - /// Writes a proving key to a buffer. - /// - /// Writes a curve element according to `format`: - /// - `Processed`: Writes a compressed curve element with coordinates in standard form. - /// Writes a field element in standard form, with endianness specified by the - /// `PrimeField` implementation. - /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form - /// Writes a field element into raw bytes in its internal Montgomery representation, - /// WITHOUT performing the expensive Montgomery reduction. - /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials) - pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> { - self.vk.write(writer, format)?; - self.l0.write(writer, format)?; - self.l_last.write(writer, format)?; - self.l_active_row.write(writer, format)?; - write_polynomial_slice(&self.fixed_values, writer, format)?; - write_polynomial_slice(&self.fixed_polys, writer, format)?; - write_polynomial_slice(&self.fixed_cosets, writer, format)?; - self.permutation.write(writer, format)?; - Ok(()) - } - - /// Reads a proving key from a buffer. - /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data. - /// - /// Reads a curve element from the buffer and parses it according to the `format`: - /// - `Processed`: Reads a compressed curve element and decompresses it. - /// Reads a field element in standard form, with endianness specified by the - /// `PrimeField` implementation, and checks that the element is less than the modulus. - /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form. - /// Checks that field elements are less than modulus, and then checks that the point is on the curve. - /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form; - /// does not perform any checks - pub fn read>( - reader: &mut R, - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - let vk = VerifyingKey::::read::( - reader, - format, - #[cfg(feature = "circuit-params")] - params, - )?; - let l0 = Polynomial::read(reader, format)?; - let l_last = Polynomial::read(reader, format)?; - let l_active_row = Polynomial::read(reader, format)?; - let fixed_values = read_polynomial_vec(reader, format)?; - let fixed_polys = read_polynomial_vec(reader, format)?; - let fixed_cosets = read_polynomial_vec(reader, format)?; - let permutation = permutation::ProvingKey::read(reader, format)?; - let ev = Evaluator::new(vk.cs()); - Ok(Self { - vk, - l0, - l_last, - l_active_row, - fixed_values, - fixed_polys, - fixed_cosets, - permutation, - ev, - }) - } - - /// Writes a proving key to a vector of bytes using [`Self::write`]. - pub fn to_bytes(&self, format: SerdeFormat) -> Vec { - let mut bytes = Vec::::with_capacity(self.bytes_length(format)); - Self::write(self, &mut bytes, format).expect("Writing to vector should not fail"); - bytes - } - - /// Reads a proving key from a slice of bytes using [`Self::read`]. 
- pub fn from_bytes>( - mut bytes: &[u8], - format: SerdeFormat, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, - ) -> io::Result { - Self::read::<_, ConcreteCircuit>( - &mut bytes, - format, - #[cfg(feature = "circuit-params")] - params, - ) - } -} - -impl VerifyingKey { - /// Get the underlying [`EvaluationDomain`]. - pub fn get_domain(&self) -> &EvaluationDomain { - &self.domain - } + let (_, _, cs) = compile_circuit(k, circuit, compress_selectors) + .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?; + let cs_mid: ConstraintSystemMid<_> = cs.into(); + ProvingKey::read(reader, format, cs_mid.into()) } - -#[derive(Clone, Copy, Debug)] -struct Theta; -type ChallengeTheta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Beta; -type ChallengeBeta = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Gamma; -type ChallengeGamma = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct Y; -type ChallengeY = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct X; -type ChallengeX = ChallengeScalar; diff --git a/halo2_proofs/src/plonk/circuit.rs b/halo2_proofs/src/plonk/circuit.rs deleted file mode 100644 index 5107554186..0000000000 --- a/halo2_proofs/src/plonk/circuit.rs +++ /dev/null @@ -1,2584 +0,0 @@ -use super::{lookup, permutation, shuffle, Assigned, Error}; -use crate::circuit::layouter::SyncDeps; -use crate::dev::metadata; -use crate::{ - circuit::{Layouter, Region, Value}, - poly::Rotation, -}; -use core::cmp::max; -use core::ops::{Add, Mul}; -use ff::Field; -use sealed::SealedPhase; -use std::collections::HashMap; -use std::fmt::Debug; -use std::iter::{Product, Sum}; -use std::{ - convert::TryFrom, - ops::{Neg, Sub}, -}; - -mod compress_selectors; - -/// A column type -pub trait ColumnType: - 'static + Sized + Copy + std::fmt::Debug + PartialEq + Eq + Into -{ - /// Return expression from cell - fn query_cell(&self, index: usize, at: Rotation) -> Expression; -} - -/// A column with an index and type -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Column { - index: usize, - column_type: C, -} - -impl Column { - #[cfg(test)] - pub(crate) fn new(index: usize, column_type: C) -> Self { - Column { index, column_type } - } - - /// Index of this column. - pub fn index(&self) -> usize { - self.index - } - - /// Type of this column. - pub fn column_type(&self) -> &C { - &self.column_type - } - - /// Return expression from column at a relative position - pub fn query_cell(&self, at: Rotation) -> Expression { - self.column_type.query_cell(self.index, at) - } - - /// Return expression from column at the current row - pub fn cur(&self) -> Expression { - self.query_cell(Rotation::cur()) - } - - /// Return expression from column at the next row - pub fn next(&self) -> Expression { - self.query_cell(Rotation::next()) - } - - /// Return expression from column at the previous row - pub fn prev(&self) -> Expression { - self.query_cell(Rotation::prev()) - } - - /// Return expression from column at the specified rotation - pub fn rot(&self, rotation: i32) -> Expression { - self.query_cell(Rotation(rotation)) - } -} - -impl Ord for Column { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. - match self.column_type.into().cmp(&other.column_type.into()) { - // Indices are assigned within column types. 
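The companion `pk_read` helper added above follows the same pattern, but additionally re-compiles the circuit so the proving key's constraint system matches the one used at key generation. A minimal sketch under the same assumptions (bn256, `RawBytes` format, illustrative helper name); `k` and the `compress_selectors` flag must match the values used when the key was generated and written.

use std::io::Cursor;

use halo2_proofs::halo2curves::bn256::{Fr, G1Affine};
use halo2_proofs::plonk::{pk_read, Circuit, ProvingKey};
use halo2_proofs::SerdeFormat;

// Deserialize a proving key; the circuit is compiled again (compile_circuit)
// to recover its constraint system before the remaining key data is read.
fn load_pk<ConcreteCircuit: Circuit<Fr>>(
    pk_bytes: &[u8],
    k: u32,
    circuit: &ConcreteCircuit,
) -> std::io::Result<ProvingKey<G1Affine>> {
    let mut reader = Cursor::new(pk_bytes);
    pk_read::<G1Affine, _, ConcreteCircuit>(
        &mut reader,
        SerdeFormat::RawBytes,
        k,
        circuit,
        true, // compress_selectors: must match key generation
    )
}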
- std::cmp::Ordering::Equal => self.index.cmp(&other.index), - order => order, - } - } -} - -impl PartialOrd for Column { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -pub(crate) mod sealed { - /// Phase of advice column - #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct Phase(pub(super) u8); - - impl Phase { - pub fn prev(&self) -> Option { - self.0.checked_sub(1).map(Phase) - } - } - - impl SealedPhase for Phase { - fn to_sealed(self) -> Phase { - self - } - } - - /// Sealed trait to help keep `Phase` private. - pub trait SealedPhase { - fn to_sealed(self) -> Phase; - } -} - -/// Phase of advice column -pub trait Phase: SealedPhase {} - -impl Phase for P {} - -/// First phase -#[derive(Debug)] -pub struct FirstPhase; - -impl SealedPhase for super::FirstPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(0) - } -} - -/// Second phase -#[derive(Debug)] -pub struct SecondPhase; - -impl SealedPhase for super::SecondPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(1) - } -} - -/// Third phase -#[derive(Debug)] -pub struct ThirdPhase; - -impl SealedPhase for super::ThirdPhase { - fn to_sealed(self) -> sealed::Phase { - sealed::Phase(2) - } -} - -/// An advice column -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Advice { - pub(crate) phase: sealed::Phase, -} - -impl Default for Advice { - fn default() -> Advice { - Advice { - phase: FirstPhase.to_sealed(), - } - } -} - -impl Advice { - /// Returns `Advice` in given `Phase` - pub fn new(phase: P) -> Advice { - Advice { - phase: phase.to_sealed(), - } - } - - /// Phase of this column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -impl std::fmt::Debug for Advice { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. - if self.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &self.phase); - } - debug_struct.finish() - } -} - -/// A fixed column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Fixed; - -/// An instance column -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Instance; - -/// An enum over the Advice, Fixed, Instance structs -#[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub enum Any { - /// An Advice variant - Advice(Advice), - /// A Fixed variant - Fixed, - /// An Instance variant - Instance, -} - -impl Any { - /// Returns Advice variant in `FirstPhase` - pub fn advice() -> Any { - Any::Advice(Advice::default()) - } - - /// Returns Advice variant in given `Phase` - pub fn advice_in(phase: P) -> Any { - Any::Advice(Advice::new(phase)) - } -} - -impl std::fmt::Debug for Any { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Any::Advice(advice) => { - let mut debug_struct = f.debug_struct("Advice"); - // Only show advice's phase if it's not in first phase. - if advice.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &advice.phase); - } - debug_struct.finish() - } - Any::Fixed => f.debug_struct("Fixed").finish(), - Any::Instance => f.debug_struct("Instance").finish(), - } - } -} - -impl Ord for Any { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - // This ordering is consensus-critical! The layouters rely on deterministic column - // orderings. 
- match (self, other) { - (Any::Instance, Any::Instance) | (Any::Fixed, Any::Fixed) => std::cmp::Ordering::Equal, - (Any::Advice(lhs), Any::Advice(rhs)) => lhs.phase.cmp(&rhs.phase), - // Across column types, sort Instance < Advice < Fixed. - (Any::Instance, Any::Advice(_)) - | (Any::Advice(_), Any::Fixed) - | (Any::Instance, Any::Fixed) => std::cmp::Ordering::Less, - (Any::Fixed, Any::Instance) - | (Any::Fixed, Any::Advice(_)) - | (Any::Advice(_), Any::Instance) => std::cmp::Ordering::Greater, - } - } -} - -impl PartialOrd for Any { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl ColumnType for Advice { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: self.phase, - }) - } -} -impl ColumnType for Fixed { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Instance { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }) - } -} -impl ColumnType for Any { - fn query_cell(&self, index: usize, at: Rotation) -> Expression { - match self { - Any::Advice(Advice { phase }) => Expression::Advice(AdviceQuery { - index: None, - column_index: index, - rotation: at, - phase: *phase, - }), - Any::Fixed => Expression::Fixed(FixedQuery { - index: None, - column_index: index, - rotation: at, - }), - Any::Instance => Expression::Instance(InstanceQuery { - index: None, - column_index: index, - rotation: at, - }), - } - } -} - -impl From for Any { - fn from(advice: Advice) -> Any { - Any::Advice(advice) - } -} - -impl From for Any { - fn from(_: Fixed) -> Any { - Any::Fixed - } -} - -impl From for Any { - fn from(_: Instance) -> Any { - Any::Instance - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Advice(advice.column_type), - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Fixed, - } - } -} - -impl From> for Column { - fn from(advice: Column) -> Column { - Column { - index: advice.index(), - column_type: Any::Instance, - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Advice(advice) => Ok(Column { - index: any.index(), - column_type: *advice, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Fixed => Ok(Column { - index: any.index(), - column_type: Fixed, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -impl TryFrom> for Column { - type Error = &'static str; - - fn try_from(any: Column) -> Result { - match any.column_type() { - Any::Instance => Ok(Column { - index: any.index(), - column_type: Instance, - }), - _ => Err("Cannot convert into Column"), - } - } -} - -/// A selector, representing a fixed boolean value per row of the circuit. 
-/// -/// Selectors can be used to conditionally enable (portions of) gates: -/// ``` -/// use halo2_proofs::poly::Rotation; -/// # use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let a = meta.query_advice(a, Rotation::prev()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let s = meta.query_selector(s); -/// -/// // On rows where the selector is enabled, a is constrained to equal b. -/// // On rows where the selector is disabled, a and b can take any value. -/// vec![s * (a - b)] -/// }); -/// ``` -/// -/// Selectors are disabled on all rows by default, and must be explicitly enabled on each -/// row when required: -/// ``` -/// use halo2_proofs::{ -/// circuit::{Chip, Layouter, Value}, -/// plonk::{Advice, Column, Error, Selector}, -/// }; -/// use ff::Field; -/// # use halo2_proofs::plonk::Fixed; -/// -/// struct Config { -/// a: Column, -/// b: Column, -/// s: Selector, -/// } -/// -/// fn circuit_logic>(chip: C, mut layouter: impl Layouter) -> Result<(), Error> { -/// let config = chip.config(); -/// # let config: Config = todo!(); -/// layouter.assign_region(|| "bar", |mut region| { -/// region.assign_advice(|| "a", config.a, 0, || Value::known(F::ONE))?; -/// region.assign_advice(|| "a", config.b, 1, || Value::known(F::ONE))?; -/// config.s.enable(&mut region, 1) -/// })?; -/// Ok(()) -/// } -/// ``` -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Selector(pub(crate) usize, bool); - -impl Selector { - /// Enable this selector at the given offset within the given region. - pub fn enable(&self, region: &mut Region, offset: usize) -> Result<(), Error> { - region.enable_selector(|| "", self, offset) - } - - /// Is this selector "simple"? Simple selectors can only be multiplied - /// by expressions that contain no other simple selectors. 
- pub fn is_simple(&self) -> bool { - self.1 - } - - /// Returns index of this selector - pub fn index(&self) -> usize { - self.0 - } - - /// Return expression from selector - pub fn expr(&self) -> Expression { - Expression::Selector(*self) - } -} - -/// Query of fixed column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct FixedQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl FixedQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// Query of advice column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct AdviceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, - /// Phase of this advice column - pub(crate) phase: sealed::Phase, -} - -impl AdviceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } - - /// Phase of this advice column - pub fn phase(&self) -> u8 { - self.phase.0 - } -} - -/// Query of instance column at a certain relative location -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct InstanceQuery { - /// Query index - pub(crate) index: Option, - /// Column index - pub(crate) column_index: usize, - /// Rotation of this query - pub(crate) rotation: Rotation, -} - -impl InstanceQuery { - /// Column index - pub fn column_index(&self) -> usize { - self.column_index - } - - /// Rotation of this query - pub fn rotation(&self) -> Rotation { - self.rotation - } -} - -/// A fixed column of a lookup table. -/// -/// A lookup table can be loaded into this column via [`Layouter::assign_table`]. Columns -/// can currently only contain a single table, but they may be used in multiple lookup -/// arguments via [`ConstraintSystem::lookup`]. -/// -/// Lookup table columns are always "encumbered" by the lookup arguments they are used in; -/// they cannot simultaneously be used as general fixed columns. -/// -/// [`Layouter::assign_table`]: crate::circuit::Layouter::assign_table -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct TableColumn { - /// The fixed column that this table column is stored in. - /// - /// # Security - /// - /// This inner column MUST NOT be exposed in the public API, or else chip developers - /// can load lookup tables into their circuits without default-value-filling the - /// columns, which can cause soundness bugs. - inner: Column, -} - -impl TableColumn { - /// Returns inner column - pub fn inner(&self) -> Column { - self.inner - } -} - -/// A challenge squeezed from transcript after advice columns at the phase have been committed. -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Challenge { - index: usize, - pub(crate) phase: sealed::Phase, -} - -impl Challenge { - /// Index of this challenge. - pub fn index(&self) -> usize { - self.index - } - - /// Phase of this challenge. - pub fn phase(&self) -> u8 { - self.phase.0 - } - - /// Return Expression - pub fn expr(&self) -> Expression { - Expression::Challenge(*self) - } -} - -/// This trait allows a [`Circuit`] to direct some backend to assign a witness -/// for a constraint system. 
-pub trait Assignment { - /// Creates a new region and enters into it. - /// - /// Panics if we are currently in a region (if `exit_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn enter_region(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Allows the developer to include an annotation for an specific column within a `Region`. - /// - /// This is usually useful for debugging circuit failures. - fn annotate_column(&mut self, annotation: A, column: Column) - where - A: FnOnce() -> AR, - AR: Into; - - /// Exits the current region. - /// - /// Panics if we are not currently in a region (if `enter_region` was not called). - /// - /// Not intended for downstream consumption; use [`Layouter::assign_region`] instead. - /// - /// [`Layouter::assign_region`]: crate::circuit::Layouter#method.assign_region - fn exit_region(&mut self); - - /// Enables a selector at the given row. - fn enable_selector( - &mut self, - annotation: A, - selector: &Selector, - row: usize, - ) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into; - - /// Queries the cell of an instance column at a particular absolute row. - /// - /// Returns the cell's value, if known. - fn query_instance(&self, column: Column, row: usize) -> Result, Error>; - - /// Assign an advice column value (witness) - fn assign_advice( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign a fixed value - fn assign_fixed( - &mut self, - annotation: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into; - - /// Assign two cells to have the same value - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error>; - - /// Fills a fixed `column` starting from the given `row` with value `to`. - fn fill_from_row( - &mut self, - column: Column, - row: usize, - to: Value>, - ) -> Result<(), Error>; - - /// Queries the value of the given challenge. - /// - /// Returns `Value::unknown()` if the current synthesis phase is before the challenge can be queried. - fn get_challenge(&self, challenge: Challenge) -> Value; - - /// Creates a new (sub)namespace and enters into it. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn push_namespace(&mut self, name_fn: N) - where - NR: Into, - N: FnOnce() -> NR; - - /// Exits out of the existing namespace. - /// - /// Not intended for downstream consumption; use [`Layouter::namespace`] instead. - /// - /// [`Layouter::namespace`]: crate::circuit::Layouter#method.namespace - fn pop_namespace(&mut self, gadget_name: Option); -} - -/// A floor planning strategy for a circuit. -/// -/// The floor planner is chip-agnostic and applies its strategy to the circuit it is used -/// within. -pub trait FloorPlanner { - /// Given the provided `cs`, synthesize the given circuit. - /// - /// `constants` is the list of fixed columns that the layouter may use to assign - /// global constant values. These columns will all have been equality-enabled. 
- /// - /// Internally, a floor planner will perform the following operations: - /// - Instantiate a [`Layouter`] for this floor planner. - /// - Perform any necessary setup or measurement tasks, which may involve one or more - /// calls to `Circuit::default().synthesize(config, &mut layouter)`. - /// - Call `circuit.synthesize(config, &mut layouter)` exactly once. - fn synthesize + SyncDeps, C: Circuit>( - cs: &mut CS, - circuit: &C, - config: C::Config, - constants: Vec>, - ) -> Result<(), Error>; -} - -/// This is a trait that circuits provide implementations for so that the -/// backend prover can ask the circuit to synthesize using some given -/// [`ConstraintSystem`] implementation. -pub trait Circuit { - /// This is a configuration object that stores things like columns. - type Config: Clone; - /// The floor planner used for this circuit. This is an associated type of the - /// `Circuit` trait because its behaviour is circuit-critical. - type FloorPlanner: FloorPlanner; - /// Optional circuit configuration parameters. Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - type Params: Default; - - /// Returns a copy of this circuit with no witness values (i.e. all witnesses set to - /// `None`). For most circuits, this will be equal to `Self::default()`. - fn without_witnesses(&self) -> Self; - - /// Returns a reference to the parameters that should be used to configure the circuit. - /// Requires the `circuit-params` feature. - #[cfg(feature = "circuit-params")] - fn params(&self) -> Self::Params { - Self::Params::default() - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. Takes a runtime parameter. The default - /// implementation calls `configure` ignoring the `_params` argument in order to easily support - /// circuits that don't use configuration parameters. - #[cfg(feature = "circuit-params")] - fn configure_with_params( - meta: &mut ConstraintSystem, - _params: Self::Params, - ) -> Self::Config { - Self::configure(meta) - } - - /// The circuit is given an opportunity to describe the exact gate - /// arrangement, column arrangement, etc. - fn configure(meta: &mut ConstraintSystem) -> Self::Config; - - /// Given the provided `cs`, synthesize the circuit. The concrete type of - /// the caller will be different depending on the context, and they may or - /// may not expect to have a witness present. - fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error>; -} - -/// Low-degree expression representing an identity that must hold over the committed columns. 
-#[derive(Clone, PartialEq, Eq)] -pub enum Expression { - /// This is a constant polynomial - Constant(F), - /// This is a virtual selector - Selector(Selector), - /// This is a fixed column queried at a certain relative location - Fixed(FixedQuery), - /// This is an advice (witness) column queried at a certain relative location - Advice(AdviceQuery), - /// This is an instance (external) column queried at a certain relative location - Instance(InstanceQuery), - /// This is a challenge - Challenge(Challenge), - /// This is a negated polynomial - Negated(Box>), - /// This is the sum of two polynomials - Sum(Box>, Box>), - /// This is the product of two polynomials - Product(Box>, Box>), - /// This is a scaled polynomial - Scaled(Box>, F), -} - -impl Expression { - /// Make side effects - pub fn query_cells(&mut self, cells: &mut VirtualCells<'_, F>) { - match self { - Expression::Constant(_) => (), - Expression::Selector(selector) => { - if !cells.queried_selectors.contains(selector) { - cells.queried_selectors.push(*selector); - } - } - Expression::Fixed(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Fixed, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_fixed_index(col, query.rotation)); - } - } - Expression::Advice(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Advice { phase: query.phase }, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_advice_index(col, query.rotation)); - } - } - Expression::Instance(query) => { - if query.index.is_none() { - let col = Column { - index: query.column_index, - column_type: Instance, - }; - cells.queried_cells.push((col, query.rotation).into()); - query.index = Some(cells.meta.query_instance_index(col, query.rotation)); - } - } - Expression::Challenge(_) => (), - Expression::Negated(a) => a.query_cells(cells), - Expression::Sum(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Product(a, b) => { - a.query_cells(cells); - b.query_cells(cells); - } - Expression::Scaled(a, _) => a.query_cells(cells), - }; - } - - /// Evaluate the polynomial using the provided closures to perform the - /// operations. 
- #[allow(clippy::too_many_arguments)] - pub fn evaluate( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - let b = b.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - product(a, b) - } - Expression::Scaled(a, f) => { - let a = a.evaluate( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - ); - scaled(a, *f) - } - } - } - - /// Evaluate the polynomial lazily using the provided closures to perform the - /// operations. 
- #[allow(clippy::too_many_arguments)] - pub fn evaluate_lazy( - &self, - constant: &impl Fn(F) -> T, - selector_column: &impl Fn(Selector) -> T, - fixed_column: &impl Fn(FixedQuery) -> T, - advice_column: &impl Fn(AdviceQuery) -> T, - instance_column: &impl Fn(InstanceQuery) -> T, - challenge: &impl Fn(Challenge) -> T, - negated: &impl Fn(T) -> T, - sum: &impl Fn(T, T) -> T, - product: &impl Fn(T, T) -> T, - scaled: &impl Fn(T, F) -> T, - zero: &T, - ) -> T { - match self { - Expression::Constant(scalar) => constant(*scalar), - Expression::Selector(selector) => selector_column(*selector), - Expression::Fixed(query) => fixed_column(*query), - Expression::Advice(query) => advice_column(*query), - Expression::Instance(query) => instance_column(*query), - Expression::Challenge(value) => challenge(*value), - Expression::Negated(a) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - negated(a) - } - Expression::Sum(a, b) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - let b = b.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - sum(a, b) - } - Expression::Product(a, b) => { - let (a, b) = if a.complexity() <= b.complexity() { - (a, b) - } else { - (b, a) - }; - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - - if a == *zero { - a - } else { - let b = b.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - product(a, b) - } - } - Expression::Scaled(a, f) => { - let a = a.evaluate_lazy( - constant, - selector_column, - fixed_column, - advice_column, - instance_column, - challenge, - negated, - sum, - product, - scaled, - zero, - ); - scaled(a, *f) - } - } - } - - fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> { - match self { - Expression::Constant(scalar) => write!(writer, "{scalar:?}"), - Expression::Selector(selector) => write!(writer, "selector[{}]", selector.0), - Expression::Fixed(query) => { - write!( - writer, - "fixed[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Advice(query) => { - write!( - writer, - "advice[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Instance(query) => { - write!( - writer, - "instance[{}][{}]", - query.column_index, query.rotation.0 - ) - } - Expression::Challenge(challenge) => { - write!(writer, "challenge[{}]", challenge.index()) - } - Expression::Negated(a) => { - writer.write_all(b"(-")?; - a.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Sum(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"+")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Product(a, b) => { - writer.write_all(b"(")?; - a.write_identifier(writer)?; - writer.write_all(b"*")?; - b.write_identifier(writer)?; - writer.write_all(b")") - } - Expression::Scaled(a, f) => { - a.write_identifier(writer)?; - write!(writer, "*{f:?}") - } - } - } - - /// Identifier for this expression. 
Expressions with identical identifiers - /// do the same calculation (but the expressions don't need to be exactly equal - /// in how they are composed e.g. `1 + 2` and `2 + 1` can have the same identifier). - pub fn identifier(&self) -> String { - let mut cursor = std::io::Cursor::new(Vec::new()); - self.write_identifier(&mut cursor).unwrap(); - String::from_utf8(cursor.into_inner()).unwrap() - } - - /// Compute the degree of this polynomial - pub fn degree(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.degree(), - Expression::Sum(a, b) => max(a.degree(), b.degree()), - Expression::Product(a, b) => a.degree() + b.degree(), - Expression::Scaled(poly, _) => poly.degree(), - } - } - - /// Approximate the computational complexity of this expression. - pub fn complexity(&self) -> usize { - match self { - Expression::Constant(_) => 0, - Expression::Selector(_) => 1, - Expression::Fixed(_) => 1, - Expression::Advice(_) => 1, - Expression::Instance(_) => 1, - Expression::Challenge(_) => 0, - Expression::Negated(poly) => poly.complexity() + 5, - Expression::Sum(a, b) => a.complexity() + b.complexity() + 15, - Expression::Product(a, b) => a.complexity() + b.complexity() + 30, - Expression::Scaled(poly, _) => poly.complexity() + 30, - } - } - - /// Square this expression. - pub fn square(self) -> Self { - self.clone() * self - } - - /// Returns whether or not this expression contains a simple `Selector`. - fn contains_simple_selector(&self) -> bool { - self.evaluate( - &|_| false, - &|selector| selector.is_simple(), - &|_| false, - &|_| false, - &|_| false, - &|_| false, - &|a| a, - &|a, b| a || b, - &|a, b| a || b, - &|a, _| a, - ) - } - - /// Extracts a simple selector from this gate, if present - fn extract_simple_selector(&self) -> Option { - let op = |a, b| match (a, b) { - (Some(a), None) | (None, Some(a)) => Some(a), - (Some(_), Some(_)) => panic!("two simple selectors cannot be in the same expression"), - _ => None, - }; - - self.evaluate( - &|_| None, - &|selector| { - if selector.is_simple() { - Some(selector) - } else { - None - } - }, - &|_| None, - &|_| None, - &|_| None, - &|_| None, - &|a| a, - &op, - &op, - &|a, _| a, - ) - } -} - -impl std::fmt::Debug for Expression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Expression::Constant(scalar) => f.debug_tuple("Constant").field(scalar).finish(), - Expression::Selector(selector) => f.debug_tuple("Selector").field(selector).finish(), - // Skip enum variant and print query struct directly to maintain backwards compatibility. - Expression::Fixed(query) => { - let mut debug_struct = f.debug_struct("Fixed"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Advice(query) => { - let mut debug_struct = f.debug_struct("Advice"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation); - // Only show advice's phase if it's not in first phase. 
- if query.phase != FirstPhase.to_sealed() { - debug_struct.field("phase", &query.phase); - } - debug_struct.finish() - } - Expression::Instance(query) => { - let mut debug_struct = f.debug_struct("Instance"); - match query.index { - None => debug_struct.field("query_index", &query.index), - Some(idx) => debug_struct.field("query_index", &idx), - }; - debug_struct - .field("column_index", &query.column_index) - .field("rotation", &query.rotation) - .finish() - } - Expression::Challenge(challenge) => { - f.debug_tuple("Challenge").field(challenge).finish() - } - Expression::Negated(poly) => f.debug_tuple("Negated").field(poly).finish(), - Expression::Sum(a, b) => f.debug_tuple("Sum").field(a).field(b).finish(), - Expression::Product(a, b) => f.debug_tuple("Product").field(a).field(b).finish(), - Expression::Scaled(poly, scalar) => { - f.debug_tuple("Scaled").field(poly).field(scalar).finish() - } - } - } -} - -impl Neg for Expression { - type Output = Expression; - fn neg(self) -> Self::Output { - Expression::Negated(Box::new(self)) - } -} - -impl Add for Expression { - type Output = Expression; - fn add(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in an addition"); - } - Expression::Sum(Box::new(self), Box::new(rhs)) - } -} - -impl Sub for Expression { - type Output = Expression; - fn sub(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() || rhs.contains_simple_selector() { - panic!("attempted to use a simple selector in a subtraction"); - } - Expression::Sum(Box::new(self), Box::new(-rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: Expression) -> Expression { - if self.contains_simple_selector() && rhs.contains_simple_selector() { - panic!("attempted to multiply two expressions containing simple selectors"); - } - Expression::Product(Box::new(self), Box::new(rhs)) - } -} - -impl Mul for Expression { - type Output = Expression; - fn mul(self, rhs: F) -> Expression { - Expression::Scaled(Box::new(self), rhs) - } -} - -impl Sum for Expression { - fn sum>(iter: I) -> Self { - iter.reduce(|acc, x| acc + x) - .unwrap_or(Expression::Constant(F::ZERO)) - } -} - -impl Product for Expression { - fn product>(iter: I) -> Self { - iter.reduce(|acc, x| acc * x) - .unwrap_or(Expression::Constant(F::ONE)) - } -} - -/// Represents an index into a vector where each entry corresponds to a distinct -/// point that polynomials are queried at. -#[derive(Copy, Clone, Debug)] -pub(crate) struct PointIndex(pub usize); - -/// A "virtual cell" is a PLONK cell that has been queried at a particular relative offset -/// within a custom gate. -#[derive(Clone, Debug)] -pub struct VirtualCell { - pub(crate) column: Column, - pub(crate) rotation: Rotation, -} - -impl>> From<(Col, Rotation)> for VirtualCell { - fn from((column, rotation): (Col, Rotation)) -> Self { - VirtualCell { - column: column.into(), - rotation, - } - } -} - -/// An individual polynomial constraint. -/// -/// These are returned by the closures passed to `ConstraintSystem::create_gate`. 
-#[derive(Debug)] -pub struct Constraint { - name: String, - poly: Expression, -} - -impl From> for Constraint { - fn from(poly: Expression) -> Self { - Constraint { - name: "".to_string(), - poly, - } - } -} - -impl> From<(S, Expression)> for Constraint { - fn from((name, poly): (S, Expression)) -> Self { - Constraint { - name: name.as_ref().to_string(), - poly, - } - } -} - -impl From> for Vec> { - fn from(poly: Expression) -> Self { - vec![Constraint { - name: "".to_string(), - poly, - }] - } -} - -/// A set of polynomial constraints with a common selector. -/// -/// ``` -/// use halo2_proofs::{plonk::{Constraints, Expression}, poly::Rotation}; -/// use halo2curves::pasta::Fp; -/// # use halo2_proofs::plonk::ConstraintSystem; -/// -/// # let mut meta = ConstraintSystem::::default(); -/// let a = meta.advice_column(); -/// let b = meta.advice_column(); -/// let c = meta.advice_column(); -/// let s = meta.selector(); -/// -/// meta.create_gate("foo", |meta| { -/// let next = meta.query_advice(a, Rotation::next()); -/// let a = meta.query_advice(a, Rotation::cur()); -/// let b = meta.query_advice(b, Rotation::cur()); -/// let c = meta.query_advice(c, Rotation::cur()); -/// let s_ternary = meta.query_selector(s); -/// -/// let one_minus_a = Expression::Constant(Fp::one()) - a.clone(); -/// -/// Constraints::with_selector( -/// s_ternary, -/// std::array::IntoIter::new([ -/// ("a is boolean", a.clone() * one_minus_a.clone()), -/// ("next == a ? b : c", next - (a * b + one_minus_a * c)), -/// ]), -/// ) -/// }); -/// ``` -/// -/// Note that the use of `std::array::IntoIter::new` is only necessary if you need to -/// support Rust 1.51 or 1.52. If your minimum supported Rust version is 1.53 or greater, -/// you can pass an array directly. -#[derive(Debug)] -pub struct Constraints>, Iter: IntoIterator> { - selector: Expression, - constraints: Iter, -} - -impl>, Iter: IntoIterator> Constraints { - /// Constructs a set of constraints that are controlled by the given selector. - /// - /// Each constraint `c` in `iterator` will be converted into the constraint - /// `selector * c`. - pub fn with_selector(selector: Expression, constraints: Iter) -> Self { - Constraints { - selector, - constraints, - } - } -} - -fn apply_selector_to_constraint>>( - (selector, c): (Expression, C), -) -> Constraint { - let constraint: Constraint = c.into(); - Constraint { - name: constraint.name, - poly: selector * constraint.poly, - } -} - -type ApplySelectorToConstraint = fn((Expression, C)) -> Constraint; -type ConstraintsIterator = std::iter::Map< - std::iter::Zip>, I>, - ApplySelectorToConstraint, ->; - -impl>, Iter: IntoIterator> IntoIterator - for Constraints -{ - type Item = Constraint; - type IntoIter = ConstraintsIterator; - - fn into_iter(self) -> Self::IntoIter { - std::iter::repeat(self.selector) - .zip(self.constraints) - .map(apply_selector_to_constraint) - } -} - -/// Gate -#[derive(Clone, Debug)] -pub struct Gate { - name: String, - constraint_names: Vec, - polys: Vec>, - /// We track queried selectors separately from other cells, so that we can use them to - /// trigger debug checks on gates. - queried_selectors: Vec, - queried_cells: Vec, -} - -impl Gate { - /// Returns the gate name. - pub fn name(&self) -> &str { - self.name.as_str() - } - - /// Returns the name of the constraint at index `constraint_index`. 
- pub fn constraint_name(&self, constraint_index: usize) -> &str { - self.constraint_names[constraint_index].as_str() - } - - /// Returns constraints of this gate - pub fn polynomials(&self) -> &[Expression] { - &self.polys - } - - pub(crate) fn queried_selectors(&self) -> &[Selector] { - &self.queried_selectors - } - - pub(crate) fn queried_cells(&self) -> &[VirtualCell] { - &self.queried_cells - } -} - -/// This is a description of the circuit environment, such as the gate, column and -/// permutation arrangements. -#[derive(Debug, Clone)] -pub struct ConstraintSystem { - pub(crate) num_fixed_columns: usize, - pub(crate) num_advice_columns: usize, - pub(crate) num_instance_columns: usize, - pub(crate) num_selectors: usize, - pub(crate) num_challenges: usize, - - /// Contains the index of each advice column that is left unblinded. - pub(crate) unblinded_advice_columns: Vec, - - /// Contains the phase for each advice column. Should have same length as num_advice_columns. - pub(crate) advice_column_phase: Vec, - /// Contains the phase for each challenge. Should have same length as num_challenges. - pub(crate) challenge_phase: Vec, - - /// This is a cached vector that maps virtual selectors to the concrete - /// fixed column that they were compressed into. This is just used by dev - /// tooling right now. - pub(crate) selector_map: Vec>, - - pub(crate) gates: Vec>, - pub(crate) advice_queries: Vec<(Column, Rotation)>, - // Contains an integer for each advice column - // identifying how many distinct queries it has - // so far; should be same length as num_advice_columns. - num_advice_queries: Vec, - pub(crate) instance_queries: Vec<(Column, Rotation)>, - pub(crate) fixed_queries: Vec<(Column, Rotation)>, - - // Permutation argument for performing equality constraints - pub(crate) permutation: permutation::Argument, - - // Vector of lookup arguments, where each corresponds to a sequence of - // input expressions and a sequence of table expressions involved in the lookup. - pub(crate) lookups: Vec>, - - // Vector of shuffle arguments, where each corresponds to a sequence of - // input expressions and a sequence of shuffle expressions involved in the shuffle. - pub(crate) shuffles: Vec>, - - // List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation. - pub(crate) general_column_annotations: HashMap, - - // Vector of fixed columns, which can be used to store constant values - // that are copied into advice columns. - pub(crate) constants: Vec>, - - pub(crate) minimum_degree: Option, -} - -/// Represents the minimal parameters that determine a `ConstraintSystem`. 
-#[allow(dead_code)] -pub struct PinnedConstraintSystem<'a, F: Field> { - num_fixed_columns: &'a usize, - num_advice_columns: &'a usize, - num_instance_columns: &'a usize, - num_selectors: &'a usize, - num_challenges: &'a usize, - advice_column_phase: &'a Vec, - challenge_phase: &'a Vec, - gates: PinnedGates<'a, F>, - advice_queries: &'a Vec<(Column, Rotation)>, - instance_queries: &'a Vec<(Column, Rotation)>, - fixed_queries: &'a Vec<(Column, Rotation)>, - permutation: &'a permutation::Argument, - lookups: &'a Vec>, - shuffles: &'a Vec>, - constants: &'a Vec>, - minimum_degree: &'a Option, -} - -impl<'a, F: Field> std::fmt::Debug for PinnedConstraintSystem<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut debug_struct = f.debug_struct("PinnedConstraintSystem"); - debug_struct - .field("num_fixed_columns", self.num_fixed_columns) - .field("num_advice_columns", self.num_advice_columns) - .field("num_instance_columns", self.num_instance_columns) - .field("num_selectors", self.num_selectors); - // Only show multi-phase related fields if it's used. - if *self.num_challenges > 0 { - debug_struct - .field("num_challenges", self.num_challenges) - .field("advice_column_phase", self.advice_column_phase) - .field("challenge_phase", self.challenge_phase); - } - debug_struct - .field("gates", &self.gates) - .field("advice_queries", self.advice_queries) - .field("instance_queries", self.instance_queries) - .field("fixed_queries", self.fixed_queries) - .field("permutation", self.permutation) - .field("lookups", self.lookups); - if !self.shuffles.is_empty() { - debug_struct.field("shuffles", self.shuffles); - } - debug_struct - .field("constants", self.constants) - .field("minimum_degree", self.minimum_degree); - debug_struct.finish() - } -} - -struct PinnedGates<'a, F: Field>(&'a Vec>); - -impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_list() - .entries(self.0.iter().flat_map(|gate| gate.polynomials().iter())) - .finish() - } -} - -impl Default for ConstraintSystem { - fn default() -> ConstraintSystem { - ConstraintSystem { - num_fixed_columns: 0, - num_advice_columns: 0, - num_instance_columns: 0, - num_selectors: 0, - num_challenges: 0, - unblinded_advice_columns: Vec::new(), - advice_column_phase: Vec::new(), - challenge_phase: Vec::new(), - selector_map: vec![], - gates: vec![], - fixed_queries: Vec::new(), - advice_queries: Vec::new(), - num_advice_queries: Vec::new(), - instance_queries: Vec::new(), - permutation: permutation::Argument::new(), - lookups: Vec::new(), - shuffles: Vec::new(), - general_column_annotations: HashMap::new(), - constants: vec![], - minimum_degree: None, - } - } -} - -impl ConstraintSystem { - /// Obtain a pinned version of this constraint system; a structure with the - /// minimal parameters needed to determine the rest of the constraint - /// system. 
- pub fn pinned(&self) -> PinnedConstraintSystem<'_, F> { - PinnedConstraintSystem { - num_fixed_columns: &self.num_fixed_columns, - num_advice_columns: &self.num_advice_columns, - num_instance_columns: &self.num_instance_columns, - num_selectors: &self.num_selectors, - num_challenges: &self.num_challenges, - advice_column_phase: &self.advice_column_phase, - challenge_phase: &self.challenge_phase, - gates: PinnedGates(&self.gates), - fixed_queries: &self.fixed_queries, - advice_queries: &self.advice_queries, - instance_queries: &self.instance_queries, - permutation: &self.permutation, - lookups: &self.lookups, - shuffles: &self.shuffles, - constants: &self.constants, - minimum_degree: &self.minimum_degree, - } - } - - /// Enables this fixed column to be used for global constant assignments. - /// - /// # Side-effects - /// - /// The column will be equality-enabled. - pub fn enable_constant(&mut self, column: Column) { - if !self.constants.contains(&column) { - self.constants.push(column); - self.enable_equality(column); - } - } - - /// Enable the ability to enforce equality over cells in this column - pub fn enable_equality>>(&mut self, column: C) { - let column = column.into(); - self.query_any_index(column, Rotation::cur()); - self.permutation.add_column(column); - } - - /// Add a lookup argument for some input expressions and table columns. - /// - /// `table_map` returns a map between input expressions and the table columns - /// they need to match. - pub fn lookup>( - &mut self, - name: S, - table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, TableColumn)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) - .into_iter() - .map(|(mut input, table)| { - if input.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - let mut table = cells.query_fixed(table.inner(), Rotation::cur()); - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.lookups.len(); - - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); - - index - } - - /// Add a lookup argument for some input expressions and table expressions. - /// - /// `table_map` returns a map between input expressions and the table expressions - /// they need to match. - pub fn lookup_any>( - &mut self, - name: S, - table_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let table_map = table_map(&mut cells) - .into_iter() - .map(|(mut input, mut table)| { - if input.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - if table.contains_simple_selector() { - panic!("expression containing simple selector supplied to lookup argument"); - } - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.lookups.len(); - - self.lookups - .push(lookup::Argument::new(name.as_ref(), table_map)); - - index - } - - /// Add a shuffle argument for some input expressions and table expressions. 
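A hedged sketch (not part of the diff) of a call site for the `lookup` method defined above, as it would appear in a circuit's `configure`. The column and gate names are illustrative, generic parameters are written out because the flattened diff drops them, and a complex selector is used because `lookup` rejects expressions containing simple selectors.

fn configure_range_check<F: Field>(meta: &mut ConstraintSystem<F>) {
    // Illustrative columns; any advice column and fixed lookup table work the same way.
    let value = meta.advice_column();
    let q_range = meta.complex_selector();
    let range_table = meta.lookup_table_column();

    meta.lookup("value is in range_table", |cells| {
        let q = cells.query_selector(q_range);
        let v = cells.query_advice(value, Rotation::cur());
        // When q_range is disabled the input expression collapses to zero,
        // so the table is expected to contain a zero entry.
        vec![(q * v, range_table)]
    });
}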
- pub fn shuffle>( - &mut self, - name: S, - shuffle_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression, Expression)>, - ) -> usize { - let mut cells = VirtualCells::new(self); - let shuffle_map = shuffle_map(&mut cells) - .into_iter() - .map(|(mut input, mut table)| { - input.query_cells(&mut cells); - table.query_cells(&mut cells); - (input, table) - }) - .collect(); - let index = self.shuffles.len(); - - self.shuffles - .push(shuffle::Argument::new(name.as_ref(), shuffle_map)); - - index - } - - fn query_fixed_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.fixed_queries.len(); - self.fixed_queries.push((column, at)); - - index - } - - pub(crate) fn query_advice_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.advice_queries.len(); - self.advice_queries.push((column, at)); - self.num_advice_queries[column.index] += 1; - - index - } - - fn query_instance_index(&mut self, column: Column, at: Rotation) -> usize { - // Return existing query, if it exists - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - // Make a new query - let index = self.instance_queries.len(); - self.instance_queries.push((column, at)); - - index - } - - fn query_any_index(&mut self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.query_advice_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => self.query_fixed_index(Column::::try_from(column).unwrap(), at), - Any::Instance => { - self.query_instance_index(Column::::try_from(column).unwrap(), at) - } - } - } - - pub(crate) fn get_advice_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, advice_query) in self.advice_queries.iter().enumerate() { - if advice_query == &(column, at) { - return index; - } - } - - panic!("get_advice_query_index called for non-existent query"); - } - - pub(crate) fn get_fixed_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, fixed_query) in self.fixed_queries.iter().enumerate() { - if fixed_query == &(column, at) { - return index; - } - } - - panic!("get_fixed_query_index called for non-existent query"); - } - - pub(crate) fn get_instance_query_index(&self, column: Column, at: Rotation) -> usize { - for (index, instance_query) in self.instance_queries.iter().enumerate() { - if instance_query == &(column, at) { - return index; - } - } - - panic!("get_instance_query_index called for non-existent query"); - } - - pub(crate) fn get_any_query_index(&self, column: Column, at: Rotation) -> usize { - match column.column_type() { - Any::Advice(_) => { - self.get_advice_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Fixed => { - self.get_fixed_query_index(Column::::try_from(column).unwrap(), at) - } - Any::Instance => { - self.get_instance_query_index(Column::::try_from(column).unwrap(), at) - } - } - } - - /// Sets the minimum degree required by the circuit, which can be set to a - /// larger amount than actually needed. 
This can be used, for example, to - /// force the permutation argument to involve more columns in the same set. - pub fn set_minimum_degree(&mut self, degree: usize) { - self.minimum_degree = Some(degree); - } - - /// Creates a new gate. - /// - /// # Panics - /// - /// A gate is required to contain polynomial constraints. This method will panic if - /// `constraints` returns an empty iterator. - pub fn create_gate>, Iter: IntoIterator, S: AsRef>( - &mut self, - name: S, - constraints: impl FnOnce(&mut VirtualCells<'_, F>) -> Iter, - ) { - let mut cells = VirtualCells::new(self); - let constraints = constraints(&mut cells); - let (constraint_names, polys): (_, Vec<_>) = constraints - .into_iter() - .map(|c| c.into()) - .map(|mut c: Constraint| { - c.poly.query_cells(&mut cells); - (c.name, c.poly) - }) - .unzip(); - - let queried_selectors = cells.queried_selectors; - let queried_cells = cells.queried_cells; - - assert!( - !polys.is_empty(), - "Gates must contain at least one constraint." - ); - - self.gates.push(Gate { - name: name.as_ref().to_string(), - constraint_names, - polys, - queried_selectors, - queried_cells, - }); - } - - /// This will compress selectors together depending on their provided - /// assignments. This `ConstraintSystem` will then be modified to add new - /// fixed columns (representing the actual selectors) and will return the - /// polynomials for those columns. Finally, an internal map is updated to - /// find which fixed column corresponds with a given `Selector`. - /// - /// Do not call this twice. Yes, this should be a builder pattern instead. - pub fn compress_selectors(mut self, selectors: Vec>) -> (Self, Vec>) { - // The number of provided selector assignments must be the number we - // counted for this constraint system. - assert_eq!(selectors.len(), self.num_selectors); - - // Compute the maximal degree of every selector. We only consider the - // expressions in gates, as lookup arguments cannot support simple - // selectors. Selectors that are complex or do not appear in any gates - // will have degree zero. - let mut degrees = vec![0; selectors.len()]; - for expr in self.gates.iter().flat_map(|gate| gate.polys.iter()) { - if let Some(selector) = expr.extract_simple_selector() { - degrees[selector.0] = max(degrees[selector.0], expr.degree()); - } - } - - // We will not increase the degree of the constraint system, so we limit - // ourselves to the largest existing degree constraint. 
- let max_degree = self.degree(); - - let mut new_columns = vec![]; - let (polys, selector_assignment) = compress_selectors::process( - selectors - .into_iter() - .zip(degrees) - .enumerate() - .map( - |(i, (activations, max_degree))| compress_selectors::SelectorDescription { - selector: i, - activations, - max_degree, - }, - ) - .collect(), - max_degree, - || { - let column = self.fixed_column(); - new_columns.push(column); - Expression::Fixed(FixedQuery { - index: Some(self.query_fixed_index(column, Rotation::cur())), - column_index: column.index, - rotation: Rotation::cur(), - }) - }, - ); - - let mut selector_map = vec![None; selector_assignment.len()]; - let mut selector_replacements = vec![None; selector_assignment.len()]; - for assignment in selector_assignment { - selector_replacements[assignment.selector] = Some(assignment.expression); - selector_map[assignment.selector] = Some(new_columns[assignment.combination_index]); - } - - self.selector_map = selector_map - .into_iter() - .map(|a| a.unwrap()) - .collect::>(); - let selector_replacements = selector_replacements - .into_iter() - .map(|a| a.unwrap()) - .collect::>(); - self.replace_selectors_with_fixed(&selector_replacements); - - (self, polys) - } - - /// Does not combine selectors and directly replaces them everywhere with fixed columns. - pub fn directly_convert_selectors_to_fixed( - mut self, - selectors: Vec>, - ) -> (Self, Vec>) { - // The number of provided selector assignments must be the number we - // counted for this constraint system. - assert_eq!(selectors.len(), self.num_selectors); - - let (polys, selector_replacements): (Vec<_>, Vec<_>) = selectors - .into_iter() - .map(|selector| { - let poly = selector - .iter() - .map(|b| if *b { F::ONE } else { F::ZERO }) - .collect::>(); - let column = self.fixed_column(); - let rotation = Rotation::cur(); - let expr = Expression::Fixed(FixedQuery { - index: Some(self.query_fixed_index(column, rotation)), - column_index: column.index, - rotation, - }); - (poly, expr) - }) - .unzip(); - - self.replace_selectors_with_fixed(&selector_replacements); - self.num_selectors = 0; - - (self, polys) - } - - fn replace_selectors_with_fixed(&mut self, selector_replacements: &[Expression]) { - fn replace_selectors( - expr: &mut Expression, - selector_replacements: &[Expression], - must_be_nonsimple: bool, - ) { - *expr = expr.evaluate( - &|constant| Expression::Constant(constant), - &|selector| { - if must_be_nonsimple { - // Simple selectors are prohibited from appearing in - // expressions in the lookup argument by - // `ConstraintSystem`. 
- assert!(!selector.is_simple()); - } - - selector_replacements[selector.0].clone() - }, - &|query| Expression::Fixed(query), - &|query| Expression::Advice(query), - &|query| Expression::Instance(query), - &|challenge| Expression::Challenge(challenge), - &|a| -a, - &|a, b| a + b, - &|a, b| a * b, - &|a, f| a * f, - ); - } - - // Substitute selectors for the real fixed columns in all gates - for expr in self.gates.iter_mut().flat_map(|gate| gate.polys.iter_mut()) { - replace_selectors(expr, selector_replacements, false); - } - - // Substitute non-simple selectors for the real fixed columns in all - // lookup expressions - for expr in self.lookups.iter_mut().flat_map(|lookup| { - lookup - .input_expressions - .iter_mut() - .chain(lookup.table_expressions.iter_mut()) - }) { - replace_selectors(expr, selector_replacements, true); - } - - for expr in self.shuffles.iter_mut().flat_map(|shuffle| { - shuffle - .input_expressions - .iter_mut() - .chain(shuffle.shuffle_expressions.iter_mut()) - }) { - replace_selectors(expr, selector_replacements, true); - } - } - - /// Allocate a new (simple) selector. Simple selectors cannot be added to - /// expressions nor multiplied by other expressions containing simple - /// selectors. Also, simple selectors may not appear in lookup argument - /// inputs. - pub fn selector(&mut self) -> Selector { - let index = self.num_selectors; - self.num_selectors += 1; - Selector(index, true) - } - - /// Allocate a new complex selector that can appear anywhere - /// within expressions. - pub fn complex_selector(&mut self) -> Selector { - let index = self.num_selectors; - self.num_selectors += 1; - Selector(index, false) - } - - /// Allocates a new fixed column that can be used in a lookup table. - pub fn lookup_table_column(&mut self) -> TableColumn { - TableColumn { - inner: self.fixed_column(), - } - } - - /// Annotate a Lookup column. - pub fn annotate_lookup_column(&mut self, column: TableColumn, annotation: A) - where - A: Fn() -> AR, - AR: Into, - { - // We don't care if the table has already an annotation. If it's the case we keep the new one. - self.general_column_annotations.insert( - metadata::Column::from((Any::Fixed, column.inner().index)), - annotation().into(), - ); - } - - /// Annotate an Instance column. - pub fn annotate_lookup_any_column(&mut self, column: T, annotation: A) - where - A: Fn() -> AR, - AR: Into, - T: Into>, - { - let col_any = column.into(); - // We don't care if the table has already an annotation. If it's the case we keep the new one. - self.general_column_annotations.insert( - metadata::Column::from((col_any.column_type, col_any.index)), - annotation().into(), - ); - } - - /// Allocate a new fixed column - pub fn fixed_column(&mut self) -> Column { - let tmp = Column { - index: self.num_fixed_columns, - column_type: Fixed, - }; - self.num_fixed_columns += 1; - tmp - } - - /// Allocate a new unblinded advice column at `FirstPhase` - pub fn unblinded_advice_column(&mut self) -> Column { - self.unblinded_advice_column_in(FirstPhase) - } - - /// Allocate a new advice column at `FirstPhase` - pub fn advice_column(&mut self) -> Column { - self.advice_column_in(FirstPhase) - } - - /// Allocate a new unblinded advice column in given phase. This allows for the generation of deterministic commitments to advice columns - /// which can be used to split large circuits into smaller ones, whose proofs can then be "joined" together by their common witness commitments. 
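A hedged sketch (not part of the diff) showing how the allocators above (`advice_column`, `selector`) feed into `create_gate`. Names are illustrative and generic parameters are spelled out because the flattened diff drops them; the returned constraint list must be non-empty or `create_gate` panics.

fn configure_mul_gate<F: Field>(meta: &mut ConstraintSystem<F>) {
    // Illustrative columns for a single multiplication gate.
    let a = meta.advice_column();
    let b = meta.advice_column();
    let out = meta.advice_column();
    let s_mul = meta.selector();

    meta.create_gate("out = a * b", |cells| {
        let s = cells.query_selector(s_mul);
        let a = cells.query_advice(a, Rotation::cur());
        let b = cells.query_advice(b, Rotation::cur());
        let out = cells.query_advice(out, Rotation::cur());
        // A simple selector may multiply the constraint, but must not appear
        // inside the multiplied expression itself.
        vec![s * (a * b - out)]
    });
}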
- pub fn unblinded_advice_column_in(&mut self, phase: P) -> Column { - let phase = phase.to_sealed(); - if let Some(previous_phase) = phase.prev() { - self.assert_phase_exists( - previous_phase, - format!("Column in later phase {phase:?}").as_str(), - ); - } - - let tmp = Column { - index: self.num_advice_columns, - column_type: Advice { phase }, - }; - self.unblinded_advice_columns.push(tmp.index); - self.num_advice_columns += 1; - self.num_advice_queries.push(0); - self.advice_column_phase.push(phase); - tmp - } - - /// Allocate a new advice column in given phase - /// - /// # Panics - /// - /// It panics if previous phase before the given one doesn't have advice column allocated. - pub fn advice_column_in(&mut self, phase: P) -> Column { - let phase = phase.to_sealed(); - if let Some(previous_phase) = phase.prev() { - self.assert_phase_exists( - previous_phase, - format!("Column in later phase {phase:?}").as_str(), - ); - } - - let tmp = Column { - index: self.num_advice_columns, - column_type: Advice { phase }, - }; - self.num_advice_columns += 1; - self.num_advice_queries.push(0); - self.advice_column_phase.push(phase); - tmp - } - - /// Allocate a new instance column - pub fn instance_column(&mut self) -> Column { - let tmp = Column { - index: self.num_instance_columns, - column_type: Instance, - }; - self.num_instance_columns += 1; - tmp - } - - /// Requests a challenge that is usable after the given phase. - /// - /// # Panics - /// - /// It panics if the given phase doesn't have advice column allocated. - pub fn challenge_usable_after(&mut self, phase: P) -> Challenge { - let phase = phase.to_sealed(); - self.assert_phase_exists( - phase, - format!("Challenge usable after phase {phase:?}").as_str(), - ); - - let tmp = Challenge { - index: self.num_challenges, - phase, - }; - self.num_challenges += 1; - self.challenge_phase.push(phase); - tmp - } - - /// Helper funciotn to assert phase exists, to make sure phase-aware resources - /// are allocated in order, and to avoid any phase to be skipped accidentally - /// to cause unexpected issue in the future. - fn assert_phase_exists(&self, phase: sealed::Phase, resource: &str) { - self.advice_column_phase - .iter() - .find(|advice_column_phase| **advice_column_phase == phase) - .unwrap_or_else(|| { - panic!( - "No Column is used in phase {phase:?} while allocating a new {resource:?}" - ) - }); - } - - pub(crate) fn phases(&self) -> impl Iterator { - let max_phase = self - .advice_column_phase - .iter() - .max() - .map(|phase| phase.0) - .unwrap_or_default(); - (0..=max_phase).map(sealed::Phase) - } - - /// Compute the degree of the constraint system (the maximum degree of all - /// constraints). - pub fn degree(&self) -> usize { - // The permutation argument will serve alongside the gates, so must be - // accounted for. - let mut degree = self.permutation.required_degree(); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.lookups - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // The lookup argument also serves alongside the gates and must be accounted - // for. - degree = std::cmp::max( - degree, - self.shuffles - .iter() - .map(|l| l.required_degree()) - .max() - .unwrap_or(1), - ); - - // Account for each gate to ensure our quotient polynomial is the - // correct degree and that our extended domain is the right size. 
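A hedged sketch (not part of the diff) of phase-aware allocation with `advice_column_in` and `challenge_usable_after` above. It only illustrates the ordering that `assert_phase_exists` enforces; `SecondPhase` is assumed to be in scope alongside the `FirstPhase` marker already used in this file.

fn configure_two_phases<F: Field>(meta: &mut ConstraintSystem<F>) {
    // A FirstPhase advice column must exist before any later-phase resource is requested.
    let w0 = meta.advice_column_in(FirstPhase);
    // The challenge becomes available once the FirstPhase advice has been committed.
    let theta = meta.challenge_usable_after(FirstPhase);
    // `SecondPhase` is assumed here; allocation succeeds because FirstPhase is populated.
    let w1 = meta.advice_column_in(SecondPhase);
    let _ = (w0, theta, w1);
}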
- degree = std::cmp::max( - degree, - self.gates - .iter() - .flat_map(|gate| gate.polynomials().iter().map(|poly| poly.degree())) - .max() - .unwrap_or(0), - ); - - std::cmp::max(degree, self.minimum_degree.unwrap_or(1)) - } - - /// Compute the number of blinding factors necessary to perfectly blind - /// each of the prover's witness polynomials. - pub fn blinding_factors(&self) -> usize { - // All of the prover's advice columns are evaluated at no more than - let factors = *self.num_advice_queries.iter().max().unwrap_or(&1); - // distinct points during gate checks. - - // - The permutation argument witness polynomials are evaluated at most 3 times. - // - Each lookup argument has independent witness polynomials, and they are - // evaluated at most 2 times. - let factors = std::cmp::max(3, factors); - - // Each polynomial is evaluated at most an additional time during - // multiopen (at x_3 to produce q_evals): - let factors = factors + 1; - - // h(x) is derived by the other evaluations so it does not reveal - // anything; in fact it does not even appear in the proof. - - // h(x_3) is also not revealed; the verifier only learns a single - // evaluation of a polynomial in x_1 which has h(x_3) and another random - // polynomial evaluated at x_3 as coefficients -- this random polynomial - // is "random_poly" in the vanishing argument. - - // Add an additional blinding factor as a slight defense against - // off-by-one errors. - factors + 1 - } - - /// Returns the minimum necessary rows that need to exist in order to - /// account for e.g. blinding factors. - pub fn minimum_rows(&self) -> usize { - self.blinding_factors() // m blinding factors - + 1 // for l_{-(m + 1)} (l_last) - + 1 // for l_0 (just for extra breathing room for the permutation - // argument, to essentially force a separation in the - // permutation polynomial between the roles of l_last, l_0 - // and the interstitial values.) 
- + 1 // for at least one row - } - - /// Returns number of fixed columns - pub fn num_fixed_columns(&self) -> usize { - self.num_fixed_columns - } - - /// Returns number of advice columns - pub fn num_advice_columns(&self) -> usize { - self.num_advice_columns - } - - /// Returns number of instance columns - pub fn num_instance_columns(&self) -> usize { - self.num_instance_columns - } - - /// Returns number of selectors - pub fn num_selectors(&self) -> usize { - self.num_selectors - } - - /// Returns number of challenges - pub fn num_challenges(&self) -> usize { - self.num_challenges - } - - /// Returns phase of advice columns - pub fn advice_column_phase(&self) -> Vec { - self.advice_column_phase - .iter() - .map(|phase| phase.0) - .collect() - } - - /// Returns phase of challenges - pub fn challenge_phase(&self) -> Vec { - self.challenge_phase.iter().map(|phase| phase.0).collect() - } - - /// Returns gates - pub fn gates(&self) -> &Vec> { - &self.gates - } - - /// Returns general column annotations - pub fn general_column_annotations(&self) -> &HashMap { - &self.general_column_annotations - } - - /// Returns advice queries - pub fn advice_queries(&self) -> &Vec<(Column, Rotation)> { - &self.advice_queries - } - - /// Returns instance queries - pub fn instance_queries(&self) -> &Vec<(Column, Rotation)> { - &self.instance_queries - } - - /// Returns fixed queries - pub fn fixed_queries(&self) -> &Vec<(Column, Rotation)> { - &self.fixed_queries - } - - /// Returns permutation argument - pub fn permutation(&self) -> &permutation::Argument { - &self.permutation - } - - /// Returns lookup arguments - pub fn lookups(&self) -> &Vec> { - &self.lookups - } - - /// Returns shuffle arguments - pub fn shuffles(&self) -> &Vec> { - &self.shuffles - } - - /// Returns constants - pub fn constants(&self) -> &Vec> { - &self.constants - } -} - -/// Exposes the "virtual cells" that can be queried while creating a custom gate or lookup -/// table. -#[derive(Debug)] -pub struct VirtualCells<'a, F: Field> { - meta: &'a mut ConstraintSystem, - queried_selectors: Vec, - queried_cells: Vec, -} - -impl<'a, F: Field> VirtualCells<'a, F> { - fn new(meta: &'a mut ConstraintSystem) -> Self { - VirtualCells { - meta, - queried_selectors: vec![], - queried_cells: vec![], - } - } - - /// Query a selector at the current position. 
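A hedged sketch (not part of the diff) checking the `blinding_factors` / `minimum_rows` arithmetic above: with at most two distinct rotations on any advice column, max(3, 2) = 3 evaluation points, plus one multiopen evaluation, plus the extra safety factor gives 5 blinding rows, and `minimum_rows` adds rows for l_last, l_0 and at least one usable row. The expected values follow from the code above; `Fr` is the same field used in the test module below.

#[test]
fn blinding_factor_arithmetic() {
    use halo2curves::bn256::Fr;

    let mut cs = ConstraintSystem::<Fr>::default();
    let a = cs.advice_column();
    cs.create_gate("advice queried at two rotations", |cells| {
        let cur = cells.query_advice(a, Rotation::cur());
        let next = cells.query_advice(a, Rotation::next());
        vec![cur * next]
    });

    // max(3, 2) + 1 (multiopen) + 1 (safety margin) = 5
    assert_eq!(cs.blinding_factors(), 5);
    // 5 blinding rows + l_last + l_0 + one usable row = 8
    assert_eq!(cs.minimum_rows(), 8);
}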
- pub fn query_selector(&mut self, selector: Selector) -> Expression { - self.queried_selectors.push(selector); - Expression::Selector(selector) - } - - /// Query a fixed column at a relative position - pub fn query_fixed(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Fixed(FixedQuery { - index: Some(self.meta.query_fixed_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an advice column at a relative position - pub fn query_advice(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Advice(AdviceQuery { - index: Some(self.meta.query_advice_index(column, at)), - column_index: column.index, - rotation: at, - phase: column.column_type().phase, - }) - } - - /// Query an instance column at a relative position - pub fn query_instance(&mut self, column: Column, at: Rotation) -> Expression { - self.queried_cells.push((column, at).into()); - Expression::Instance(InstanceQuery { - index: Some(self.meta.query_instance_index(column, at)), - column_index: column.index, - rotation: at, - }) - } - - /// Query an Any column at a relative position - pub fn query_any>>(&mut self, column: C, at: Rotation) -> Expression { - let column = column.into(); - match column.column_type() { - Any::Advice(_) => self.query_advice(Column::::try_from(column).unwrap(), at), - Any::Fixed => self.query_fixed(Column::::try_from(column).unwrap(), at), - Any::Instance => self.query_instance(Column::::try_from(column).unwrap(), at), - } - } - - /// Query a challenge - pub fn query_challenge(&mut self, challenge: Challenge) -> Expression { - Expression::Challenge(challenge) - } -} - -#[cfg(test)] -mod tests { - use super::Expression; - use halo2curves::bn256::Fr; - - #[test] - fn iter_sum() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().sum(); - let expected: Expression = Expression::Sum( - Box::new(Expression::Sum( - Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } - - #[test] - fn iter_product() { - let exprs: Vec> = vec![ - Expression::Constant(1.into()), - Expression::Constant(2.into()), - Expression::Constant(3.into()), - ]; - let happened: Expression = exprs.into_iter().product(); - let expected: Expression = Expression::Product( - Box::new(Expression::Product( - Box::new(Expression::Constant(1.into())), - Box::new(Expression::Constant(2.into())), - )), - Box::new(Expression::Constant(3.into())), - ); - - assert_eq!(happened, expected); - } -} diff --git a/halo2_proofs/src/plonk/error.rs b/halo2_proofs/src/plonk/error.rs index e6caded801..a7a78a1ab2 100644 --- a/halo2_proofs/src/plonk/error.rs +++ b/halo2_proofs/src/plonk/error.rs @@ -1,133 +1,32 @@ -use std::error; +use super::{ErrorBack, ErrorFront}; use std::fmt; -use std::io; - -use super::TableColumn; -use super::{Any, Column}; /// This is an error that could occur during proving or circuit synthesis. -// TODO: these errors need to be cleaned up #[derive(Debug)] pub enum Error { - /// This is an error that can occur during synthesis of the circuit, for - /// example, when the witness is not present. - Synthesis, - /// The provided instances do not match the circuit parameters. 
- InvalidInstances, - /// The constraint system is not satisfied. - ConstraintSystemFailure, - /// Out of bounds index passed to a backend - BoundsFailure, - /// Opening error - Opening, - /// Transcript error - Transcript(io::Error), - /// `k` is too small for the given circuit. - NotEnoughRowsAvailable { - /// The current value of `k` being used. - current_k: u32, - }, - /// Instance provided exceeds number of available rows - InstanceTooLarge, - /// Circuit synthesis requires global constants, but circuit configuration did not - /// call [`ConstraintSystem::enable_constant`] on fixed columns with sufficient space. - /// - /// [`ConstraintSystem::enable_constant`]: crate::plonk::ConstraintSystem::enable_constant - NotEnoughColumnsForConstants, - /// The instance sets up a copy constraint involving a column that has not been - /// included in the permutation. - ColumnNotInPermutation(Column), - /// An error relating to a lookup table. - TableError(TableError), -} - -impl From for Error { - fn from(error: io::Error) -> Self { - // The only place we can get io::Error from is the transcript. - Error::Transcript(error) - } -} - -impl Error { - /// Constructs an `Error::NotEnoughRowsAvailable`. - pub(crate) fn not_enough_rows_available(current_k: u32) -> Self { - Error::NotEnoughRowsAvailable { current_k } - } + /// Frontend error case + Frontend(ErrorFront), + /// Backend error case + Backend(ErrorBack), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Error::Synthesis => write!(f, "General synthesis error"), - Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"), - Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"), - Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"), - Error::Opening => write!(f, "Multi-opening proof was invalid"), - Error::Transcript(e) => write!(f, "Transcript error: {e}"), - Error::NotEnoughRowsAvailable { current_k } => write!( - f, - "k = {current_k} is too small for the given circuit. Try using a larger value of k", - ), - Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"), - Error::NotEnoughColumnsForConstants => { - write!( - f, - "Too few fixed columns are enabled for global constants usage" - ) - } - Error::ColumnNotInPermutation(column) => write!( - f, - "Column {column:?} must be included in the permutation. Help: try applying `meta.enable_equalty` on the column", - ), - Error::TableError(error) => write!(f, "{error}") + Error::Frontend(err) => write!(f, "Frontend: {err}"), + Error::Backend(err) => write!(f, "Backend: {err}"), } } } -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match self { - Error::Transcript(e) => Some(e), - _ => None, - } +impl From for Error { + fn from(err: ErrorFront) -> Self { + Error::Frontend(err) } } -/// This is an error that could occur during table synthesis. -#[derive(Debug)] -pub enum TableError { - /// A `TableColumn` has not been assigned. - ColumnNotAssigned(TableColumn), - /// A Table has columns of uneven lengths. 
- UnevenColumnLengths((TableColumn, usize), (TableColumn, usize)), - /// Attempt to assign a used `TableColumn` - UsedColumn(TableColumn), - /// Attempt to overwrite a default value - OverwriteDefault(TableColumn, String, String), -} - -impl fmt::Display for TableError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableError::ColumnNotAssigned(col) => { - write!( - f, - "{col:?} not fully assigned. Help: assign a value at offset 0.", - ) - } - TableError::UnevenColumnLengths((col, col_len), (table, table_len)) => write!( - f, - "{col:?} has length {col_len} while {table:?} has length {table_len}", - ), - TableError::UsedColumn(col) => { - write!(f, "{col:?} has already been used") - } - TableError::OverwriteDefault(col, default, val) => { - write!( - f, - "Attempted to overwrite default value {default} with {val} in {col:?}", - ) - } - } +impl From for Error { + fn from(err: ErrorBack) -> Self { + Error::Backend(err) } } diff --git a/halo2_proofs/src/plonk/keygen.rs b/halo2_proofs/src/plonk/keygen.rs index 984eecb9e8..58e49390de 100644 --- a/halo2_proofs/src/plonk/keygen.rs +++ b/halo2_proofs/src/plonk/keygen.rs @@ -1,407 +1,93 @@ -#![allow(clippy::int_plus_one)] - -use std::ops::Range; - -use ff::{Field, FromUniformBytes}; -use group::Curve; - -use super::{ - circuit::{ - Advice, Any, Assignment, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, Instance, - Selector, - }, - evaluation::Evaluator, - permutation, Assigned, Challenge, Error, LagrangeCoeff, Polynomial, ProvingKey, VerifyingKey, +use crate::plonk::Error; +use halo2_backend::plonk::{ + keygen::{keygen_pk as backend_keygen_pk, keygen_vk as backend_keygen_vk}, + ProvingKey, VerifyingKey, }; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - circuit::Value, - poly::{ - batch_invert_assigned, - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; - -pub(crate) fn create_domain( - k: u32, - #[cfg(feature = "circuit-params")] params: ConcreteCircuit::Params, -) -> ( - EvaluationDomain, - ConstraintSystem, - ConcreteCircuit::Config, -) -where - C: CurveAffine, - ConcreteCircuit: Circuit, -{ - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, params); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - - let degree = cs.degree(); - - let domain = EvaluationDomain::new(degree as u32, k); - - (domain, cs, config) -} - -/// Assembly to be used in circuit synthesis. -#[derive(Debug)] -struct Assembly { - k: u32, - fixed: Vec, LagrangeCoeff>>, - permutation: permutation::keygen::Assembly, - selectors: Vec>, - // A range of available rows for assignment and copies. - usable_rows: Range, - _marker: std::marker::PhantomData, -} - -impl Assignment for Assembly { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. - } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. 
- } - - fn enable_selector(&mut self, _: A, selector: &Selector, row: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.selectors[selector.0][row] = true; - - Ok(()) - } - - fn query_instance(&self, _: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - // There is no instance in this context. - Ok(Value::unknown()) - } - - fn assign_advice( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about fixed columns here - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .fixed - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? = to().into_field().assign()?; - - Ok(()) - } - - fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&left_row) || !self.usable_rows.contains(&right_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.permutation - .copy(left_column, left_row, right_column, right_row) - } - - fn fill_from_row( - &mut self, - column: Column, - from_row: usize, - to: Value>, - ) -> Result<(), Error> { - if !self.usable_rows.contains(&from_row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - let col = self - .fixed - .get_mut(column.index()) - .ok_or(Error::BoundsFailure)?; - - let filler = to.assign()?; - for row in self.usable_rows.clone().skip(from_row) { - col[row] = filler; - } - - Ok(()) - } - - fn get_challenge(&self, _: Challenge) -> Value { - Value::unknown() - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } -} +use halo2_backend::{arithmetic::CurveAffine, poly::commitment::Params}; +use halo2_frontend::circuit::compile_circuit; +use halo2_frontend::plonk::Circuit; +use halo2_middleware::ff::FromUniformBytes; /// Generate a `VerifyingKey` from an instance of `Circuit`. -/// By default, selector compression is turned **off**. -pub fn keygen_vk<'params, C, P, ConcreteCircuit>( +/// By default, selector compression is turned **ON**. +/// +/// **NOTE**: This `keygen_vk` is legacy one, assuming that `compress_selector: true`. +/// Hence, it is HIGHLY recommended to pair this util with `keygen_pk`. +/// In addition, when using this for key generation, user MUST use `compress_selectors: true`. +pub fn keygen_vk( params: &P, circuit: &ConcreteCircuit, ) -> Result, Error> where C: CurveAffine, - P: Params<'params, C>, + P: Params, ConcreteCircuit: Circuit, C::Scalar: FromUniformBytes<64>, { - keygen_vk_custom(params, circuit, true) + keygen_vk_custom(params, circuit, false) } /// Generate a `VerifyingKey` from an instance of `Circuit`. 
/// /// The selector compression optimization is turned on only if `compress_selectors` is `true`. -pub fn keygen_vk_custom<'params, C, P, ConcreteCircuit>( +/// +/// **NOTE**: This `keygen_vk_custom` MUST share the same `compress_selectors` with +/// `ProvingKey` generation process. +/// Otherwise, the user could get unmatching pk/vk pair. +/// Hence, it is HIGHLY recommended to pair this util with `keygen_pk_custom`. +pub fn keygen_vk_custom( params: &P, circuit: &ConcreteCircuit, compress_selectors: bool, ) -> Result, Error> where C: CurveAffine, - P: Params<'params, C>, + P: Params, ConcreteCircuit: Circuit, C::Scalar: FromUniformBytes<64>, { - let (domain, cs, config) = create_domain::( - params.k(), - #[cfg(feature = "circuit-params")] - circuit.params(), - ); - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let mut assembly: Assembly = Assembly { - k: params.k(), - fixed: vec![domain.empty_lagrange_assigned(); cs.num_fixed_columns], - permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), - selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], - usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config, - cs.constants.clone(), - )?; - - let mut fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if compress_selectors { - cs.compress_selectors(assembly.selectors.clone()) - } else { - // After this, the ConstraintSystem should not have any selectors: `verify` does not need them, and `keygen_pk` regenerates `cs` from scratch anyways. - let selectors = std::mem::take(&mut assembly.selectors); - cs.directly_convert_selectors_to_fixed(selectors) - }; - fixed.extend( - selector_polys - .into_iter() - .map(|poly| domain.lagrange_from_vec(poly)), - ); - - let permutation_vk = assembly - .permutation - .build_vk(params, &domain, &cs.permutation); - - let fixed_commitments = fixed - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default()).to_affine()) - .collect(); - - Ok(VerifyingKey::from_parts( - domain, - fixed_commitments, - permutation_vk, - cs, - assembly.selectors, - compress_selectors, - )) + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + Ok(backend_keygen_vk(params, &compiled_circuit)?) } /// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `Circuit`. -pub fn keygen_pk<'params, C, P, ConcreteCircuit>( +/// By default, selector compression is turned **ON**. +/// +/// **NOTE**: This `keygen_pk` is legacy one, assuming that `compress_selector: true`. +/// Hence, it is HIGHLY recommended to pair this util with `keygen_vk`. +/// In addition, when using this for key generation, user MUST use `compress_selectors: true`. 
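A hedged sketch (not part of the diff) of how `keygen_vk_custom` above and `keygen_pk_custom` below are meant to be combined: a single `compress_selectors` flag passed to both calls, which is the invariant the NOTEs insist on. The helper name is illustrative, the generic bounds are reconstructed from the surrounding signatures because the flattened diff drops their parameters, and only the imports already at the top of this file are assumed.

fn keygen_pair<C, P, ConcreteCircuit>(
    params: &P,
    circuit: &ConcreteCircuit,
    compress_selectors: bool,
) -> Result<ProvingKey<C>, Error>
where
    C: CurveAffine,
    P: Params<C>, // bound reconstructed; the flattened diff shows only `P: Params`
    ConcreteCircuit: Circuit<C::Scalar>,
    C::Scalar: FromUniformBytes<64>,
{
    // Both halves see the same flag, so the resulting pk/vk pair stays consistent.
    let vk = keygen_vk_custom(params, circuit, compress_selectors)?;
    keygen_pk_custom(params, vk, circuit, compress_selectors)
}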
+pub fn keygen_pk( params: &P, vk: VerifyingKey, circuit: &ConcreteCircuit, ) -> Result, Error> where C: CurveAffine, - P: Params<'params, C>, + P: Params, ConcreteCircuit: Circuit, { - let mut cs = ConstraintSystem::default(); - #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut cs, circuit.params()); - #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut cs); - - let cs = cs; - - if (params.n() as usize) < cs.minimum_rows() { - return Err(Error::not_enough_rows_available(params.k())); - } - - let mut assembly: Assembly = Assembly { - k: params.k(), - fixed: vec![vk.domain.empty_lagrange_assigned(); cs.num_fixed_columns], - permutation: permutation::keygen::Assembly::new(params.n() as usize, &cs.permutation), - selectors: vec![vec![false; params.n() as usize]; cs.num_selectors], - usable_rows: 0..params.n() as usize - (cs.blinding_factors() + 1), - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain URS - ConcreteCircuit::FloorPlanner::synthesize( - &mut assembly, - circuit, - config, - cs.constants.clone(), - )?; - - let mut fixed = batch_invert_assigned(assembly.fixed); - let (cs, selector_polys) = if vk.compress_selectors { - cs.compress_selectors(assembly.selectors) - } else { - cs.directly_convert_selectors_to_fixed(assembly.selectors) - }; - fixed.extend( - selector_polys - .into_iter() - .map(|poly| vk.domain.lagrange_from_vec(poly)), - ); - - let fixed_polys: Vec<_> = fixed - .iter() - .map(|poly| vk.domain.lagrange_to_coeff(poly.clone())) - .collect(); - - let fixed_cosets = fixed_polys - .iter() - .map(|poly| vk.domain.coeff_to_extended(poly.clone())) - .collect(); - - let permutation_pk = assembly - .permutation - .build_pk(params, &vk.domain, &cs.permutation); - - // Compute l_0(X) - // TODO: this can be done more efficiently - let mut l0 = vk.domain.empty_lagrange(); - l0[0] = C::Scalar::ONE; - let l0 = vk.domain.lagrange_to_coeff(l0); - let l0 = vk.domain.coeff_to_extended(l0); - - // Compute l_blind(X) which evaluates to 1 for each blinding factor row - // and 0 otherwise over the domain. - let mut l_blind = vk.domain.empty_lagrange(); - for evaluation in l_blind[..].iter_mut().rev().take(cs.blinding_factors()) { - *evaluation = C::Scalar::ONE; - } - let l_blind = vk.domain.lagrange_to_coeff(l_blind); - let l_blind = vk.domain.coeff_to_extended(l_blind); - - // Compute l_last(X) which evaluates to 1 on the first inactive row (just - // before the blinding factors) and 0 otherwise over the domain - let mut l_last = vk.domain.empty_lagrange(); - l_last[params.n() as usize - cs.blinding_factors() - 1] = C::Scalar::ONE; - let l_last = vk.domain.lagrange_to_coeff(l_last); - let l_last = vk.domain.coeff_to_extended(l_last); - - // Compute l_active_row(X) - let one = C::Scalar::ONE; - let mut l_active_row = vk.domain.empty_extended(); - parallelize(&mut l_active_row, |values, start| { - for (i, value) in values.iter_mut().enumerate() { - let idx = i + start; - *value = one - (l_last[idx] + l_blind[idx]); - } - }); - - // Compute the optimized evaluation data structure - let ev = Evaluator::new(&vk.cs); + keygen_pk_custom(params, vk, circuit, false) +} - Ok(ProvingKey { - vk, - l0, - l_last, - l_active_row, - fixed_values: fixed, - fixed_polys, - fixed_cosets, - permutation: permutation_pk, - ev, - }) +/// Generate a `ProvingKey` from an instance of `Circuit`. +/// +/// The selector compression optimization is turned on only if `compress_selectors` is `true`. 
+/// +/// **NOTE**: This `keygen_pk_custom` MUST share the same `compress_selectors` with +/// `VerifyingKey` generation process. +/// Otherwise, the user could get unmatching pk/vk pair. +/// Hence, it is HIGHLY recommended to pair this util with `keygen_vk_custom`. +pub fn keygen_pk_custom( + params: &P, + vk: VerifyingKey, + circuit: &ConcreteCircuit, + compress_selectors: bool, +) -> Result, Error> +where + C: CurveAffine, + P: Params, + ConcreteCircuit: Circuit, +{ + let (compiled_circuit, _, _) = compile_circuit(params.k(), circuit, compress_selectors)?; + Ok(backend_keygen_pk(params, vk, &compiled_circuit)?) } diff --git a/halo2_proofs/src/plonk/permutation/keygen.rs b/halo2_proofs/src/plonk/permutation/keygen.rs deleted file mode 100644 index 0d78f00ac5..0000000000 --- a/halo2_proofs/src/plonk/permutation/keygen.rs +++ /dev/null @@ -1,460 +0,0 @@ -use ff::{Field, PrimeField}; -use group::Curve; - -use super::{Argument, ProvingKey, VerifyingKey}; -use crate::{ - arithmetic::{parallelize, CurveAffine}, - plonk::{Any, Column, Error}, - poly::{ - commitment::{Blind, Params}, - EvaluationDomain, - }, -}; - -#[cfg(feature = "thread-safe-region")] -use crate::multicore::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; - -#[cfg(not(feature = "thread-safe-region"))] -use crate::multicore::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; - -#[cfg(feature = "thread-safe-region")] -use std::collections::{BTreeSet, HashMap}; - -#[cfg(not(feature = "thread-safe-region"))] -/// Struct that accumulates all the necessary data in order to construct the permutation argument. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Assembly { - /// Columns that participate on the copy permutation argument. - columns: Vec>, - /// Mapping of the actual copies done. - mapping: Vec>, - /// Some aux data used to swap positions directly when sorting. - aux: Vec>, - /// More aux data - sizes: Vec>, -} - -#[cfg(not(feature = "thread-safe-region"))] -impl Assembly { - pub(crate) fn new(n: usize, p: &Argument) -> Self { - // Initialize the copy vector to keep track of copy constraints in all - // the permutation arguments. - let mut columns = vec![]; - for i in 0..p.columns.len() { - // Computes [(i, 0), (i, 1), ..., (i, n - 1)] - columns.push((0..n).map(|j| (i, j)).collect()); - } - - // Before any equality constraints are applied, every cell in the permutation is - // in a 1-cycle; therefore mapping and aux are identical, because every cell is - // its own distinguished element. - Assembly { - columns: p.columns.clone(), - mapping: columns.clone(), - aux: columns, - sizes: vec![vec![1usize; n]; p.columns.len()], - } - } - - pub(crate) fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - let left_column = self - .columns - .iter() - .position(|c| c == &left_column) - .ok_or(Error::ColumnNotInPermutation(left_column))?; - let right_column = self - .columns - .iter() - .position(|c| c == &right_column) - .ok_or(Error::ColumnNotInPermutation(right_column))?; - - // Check bounds - if left_row >= self.mapping[left_column].len() - || right_row >= self.mapping[right_column].len() - { - return Err(Error::BoundsFailure); - } - - // See book/src/design/permutation.md for a description of this algorithm. - - let mut left_cycle = self.aux[left_column][left_row]; - let mut right_cycle = self.aux[right_column][right_row]; - - // If left and right are in the same cycle, do nothing. 
- if left_cycle == right_cycle { - return Ok(()); - } - - if self.sizes[left_cycle.0][left_cycle.1] < self.sizes[right_cycle.0][right_cycle.1] { - std::mem::swap(&mut left_cycle, &mut right_cycle); - } - - // Merge the right cycle into the left one. - self.sizes[left_cycle.0][left_cycle.1] += self.sizes[right_cycle.0][right_cycle.1]; - let mut i = right_cycle; - loop { - self.aux[i.0][i.1] = left_cycle; - i = self.mapping[i.0][i.1]; - if i == right_cycle { - break; - } - } - - let tmp = self.mapping[left_column][left_row]; - self.mapping[left_column][left_row] = self.mapping[right_column][right_row]; - self.mapping[right_column][right_row] = tmp; - - Ok(()) - } - - pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> VerifyingKey { - build_vk(params, domain, p, |i, j| self.mapping[i][j]) - } - - pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> ProvingKey { - build_pk(params, domain, p, |i, j| self.mapping[i][j]) - } - - /// Returns columns that participate in the permutation argument. - pub fn columns(&self) -> &[Column] { - &self.columns - } - - /// Returns mappings of the copies. - pub fn mapping( - &self, - ) -> impl Iterator + '_> { - self.mapping.iter().map(|c| c.par_iter().copied()) - } -} - -#[cfg(feature = "thread-safe-region")] -/// Struct that accumulates all the necessary data in order to construct the permutation argument. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Assembly { - /// Columns that participate on the copy permutation argument. - columns: Vec>, - /// Mapping of the actual copies done. - cycles: Vec>, - /// Mapping of the actual copies done. - ordered_cycles: Vec>, - /// Mapping of the actual copies done. 
- aux: HashMap<(usize, usize), usize>, - /// total length of a column - col_len: usize, - /// number of columns - num_cols: usize, -} - -#[cfg(feature = "thread-safe-region")] -impl Assembly { - pub(crate) fn new(n: usize, p: &Argument) -> Self { - Assembly { - columns: p.columns.clone(), - cycles: Vec::with_capacity(n), - ordered_cycles: Vec::with_capacity(n), - aux: HashMap::new(), - col_len: n, - num_cols: p.columns.len(), - } - } - - pub(crate) fn copy( - &mut self, - left_column: Column, - left_row: usize, - right_column: Column, - right_row: usize, - ) -> Result<(), Error> { - let left_column = self - .columns - .iter() - .position(|c| c == &left_column) - .ok_or(Error::ColumnNotInPermutation(left_column))?; - let right_column = self - .columns - .iter() - .position(|c| c == &right_column) - .ok_or(Error::ColumnNotInPermutation(right_column))?; - - // Check bounds - if left_row >= self.col_len || right_row >= self.col_len { - return Err(Error::BoundsFailure); - } - - let left_cycle = self.aux.get(&(left_column, left_row)); - let right_cycle = self.aux.get(&(right_column, right_row)); - - // extract cycle elements - let right_cycle_elems = match right_cycle { - Some(i) => { - let entry = self.cycles[*i].clone(); - self.cycles[*i] = vec![]; - entry - } - None => [(right_column, right_row)].into(), - }; - - assert!(right_cycle_elems.contains(&(right_column, right_row))); - - // merge cycles - let cycle_idx = match left_cycle { - Some(i) => { - let entry = &mut self.cycles[*i]; - entry.extend(right_cycle_elems.clone()); - *i - } - // if they were singletons -- create a new cycle entry - None => { - let mut set: Vec<(usize, usize)> = right_cycle_elems.clone(); - set.push((left_column, left_row)); - self.cycles.push(set); - let cycle_idx = self.cycles.len() - 1; - self.aux.insert((left_column, left_row), cycle_idx); - cycle_idx - } - }; - - let index_updates = vec![cycle_idx; right_cycle_elems.len()].into_iter(); - let updates = right_cycle_elems.into_iter().zip(index_updates); - - self.aux.extend(updates); - - Ok(()) - } - - /// Builds the ordered mapping of the cycles. - /// This will only get executed once. 
- pub fn build_ordered_mapping(&mut self) { - use crate::multicore::IntoParallelRefMutIterator; - - // will only get called once - if self.ordered_cycles.is_empty() && !self.cycles.is_empty() { - self.ordered_cycles = self - .cycles - .par_iter_mut() - .map(|col| { - let mut set = BTreeSet::new(); - set.extend(col.clone()); - // free up memory - *col = vec![]; - set - }) - .collect(); - } - } - - fn mapping_at_idx(&self, col: usize, row: usize) -> (usize, usize) { - assert!( - !self.ordered_cycles.is_empty() || self.cycles.is_empty(), - "cycles have not been ordered" - ); - - if let Some(cycle_idx) = self.aux.get(&(col, row)) { - let cycle = &self.ordered_cycles[*cycle_idx]; - let mut cycle_iter = cycle.range(( - std::ops::Bound::Excluded((col, row)), - std::ops::Bound::Unbounded, - )); - // point to the next node in the cycle - match cycle_iter.next() { - Some((i, j)) => (*i, *j), - // wrap back around to the first element which SHOULD exist - None => *(cycle.iter().next().unwrap()), - } - // is a singleton - } else { - (col, row) - } - } - - pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - &mut self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> VerifyingKey { - self.build_ordered_mapping(); - build_vk(params, domain, p, |i, j| self.mapping_at_idx(i, j)) - } - - pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - &mut self, - params: &P, - domain: &EvaluationDomain, - p: &Argument, - ) -> ProvingKey { - self.build_ordered_mapping(); - build_pk(params, domain, p, |i, j| self.mapping_at_idx(i, j)) - } - - /// Returns columns that participate in the permutation argument. - pub fn columns(&self) -> &[Column] { - &self.columns - } - - /// Returns mappings of the copies. - pub fn mapping( - &self, - ) -> impl Iterator + '_> { - (0..self.num_cols).map(move |i| { - (0..self.col_len) - .into_par_iter() - .map(move |j| self.mapping_at_idx(i, j)) - }) - } -} - -pub(crate) fn build_pk<'params, C: CurveAffine, P: Params<'params, C>>( - params: &P, - domain: &EvaluationDomain, - p: &Argument, - mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, -) -> ProvingKey { - // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] - let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; - { - let omega = domain.get_omega(); - parallelize(&mut omega_powers, |o, start| { - let mut cur = omega.pow_vartime([start as u64]); - for v in o.iter_mut() { - *v = cur; - cur *= ω - } - }) - } - - // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] - let mut deltaomega = vec![omega_powers; p.columns.len()]; - { - parallelize(&mut deltaomega, |o, start| { - let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); - for omega_powers in o.iter_mut() { - for v in omega_powers { - *v *= &cur; - } - cur *= &C::Scalar::DELTA; - } - }); - } - - // Compute permutation polynomials, convert to coset form. 
- let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; - { - parallelize(&mut permutations, |o, start| { - for (x, permutation_poly) in o.iter_mut().enumerate() { - let i = start + x; - for (j, p) in permutation_poly.iter_mut().enumerate() { - let (permuted_i, permuted_j) = mapping(i, j); - *p = deltaomega[permuted_i][permuted_j]; - } - } - }); - } - - let mut polys = vec![domain.empty_coeff(); p.columns.len()]; - { - parallelize(&mut polys, |o, start| { - for (x, poly) in o.iter_mut().enumerate() { - let i = start + x; - let permutation_poly = permutations[i].clone(); - *poly = domain.lagrange_to_coeff(permutation_poly); - } - }); - } - - let mut cosets = vec![domain.empty_extended(); p.columns.len()]; - { - parallelize(&mut cosets, |o, start| { - for (x, coset) in o.iter_mut().enumerate() { - let i = start + x; - let poly = polys[i].clone(); - *coset = domain.coeff_to_extended(poly); - } - }); - } - - ProvingKey { - permutations, - polys, - cosets, - } -} - -pub(crate) fn build_vk<'params, C: CurveAffine, P: Params<'params, C>>( - params: &P, - domain: &EvaluationDomain, - p: &Argument, - mapping: impl Fn(usize, usize) -> (usize, usize) + Sync, -) -> VerifyingKey { - // Compute [omega^0, omega^1, ..., omega^{params.n - 1}] - let mut omega_powers = vec![C::Scalar::ZERO; params.n() as usize]; - { - let omega = domain.get_omega(); - parallelize(&mut omega_powers, |o, start| { - let mut cur = omega.pow_vartime([start as u64]); - for v in o.iter_mut() { - *v = cur; - cur *= ω - } - }) - } - - // Compute [omega_powers * \delta^0, omega_powers * \delta^1, ..., omega_powers * \delta^m] - let mut deltaomega = vec![omega_powers; p.columns.len()]; - { - parallelize(&mut deltaomega, |o, start| { - let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]); - for omega_powers in o.iter_mut() { - for v in omega_powers { - *v *= &cur; - } - cur *= &::DELTA; - } - }); - } - - // Computes the permutation polynomial based on the permutation - // description in the assembly. - let mut permutations = vec![domain.empty_lagrange(); p.columns.len()]; - { - parallelize(&mut permutations, |o, start| { - for (x, permutation_poly) in o.iter_mut().enumerate() { - let i = start + x; - for (j, p) in permutation_poly.iter_mut().enumerate() { - let (permuted_i, permuted_j) = mapping(i, j); - *p = deltaomega[permuted_i][permuted_j]; - } - } - }); - } - - // Pre-compute commitments for the URS. 
- let mut commitments = Vec::with_capacity(p.columns.len()); - for permutation in &permutations { - // Compute commitment to permutation polynomial - commitments.push( - params - .commit_lagrange(permutation, Blind::default()) - .to_affine(), - ); - } - - VerifyingKey { commitments } -} diff --git a/halo2_proofs/src/plonk/permutation/prover.rs b/halo2_proofs/src/plonk/permutation/prover.rs deleted file mode 100644 index d6b108554d..0000000000 --- a/halo2_proofs/src/plonk/permutation/prover.rs +++ /dev/null @@ -1,329 +0,0 @@ -use ff::PrimeField; -use group::{ - ff::{BatchInvert, Field}, - Curve, -}; -use rand_core::RngCore; -use std::iter::{self, ExactSizeIterator}; - -use super::super::{circuit::Any, ChallengeBeta, ChallengeGamma, ChallengeX}; -use super::{Argument, ProvingKey}; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - plonk::{self, Error}, - poly::{ - commitment::{Blind, Params}, - Coeff, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; - -pub(crate) struct CommittedSet { - pub(crate) permutation_product_poly: Polynomial, - pub(crate) permutation_product_coset: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Committed { - pub(crate) sets: Vec>, -} - -pub struct ConstructedSet { - permutation_product_poly: Polynomial, - permutation_product_blind: Blind, -} - -pub(crate) struct Constructed { - sets: Vec>, -} - -pub(crate) struct Evaluated { - constructed: Constructed, -} - -impl Argument { - #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit< - 'params, - C: CurveAffine, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - params: &P, - pk: &plonk::ProvingKey, - pkey: &ProvingKey, - advice: &[Polynomial], - fixed: &[Polynomial], - instance: &[Polynomial], - beta: ChallengeBeta, - gamma: ChallengeGamma, - mut rng: R, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - - // How many columns can be included in a single permutation polynomial? - // We need to multiply by z(X) and (1 - (l_last(X) + l_blind(X))). This - // will never underflow because of the requirement of at least a degree - // 3 circuit for the permutation argument. - assert!(pk.vk.cs_degree >= 3); - let chunk_len = pk.vk.cs_degree - 2; - let blinding_factors = pk.vk.cs.blinding_factors(); - - // Each column gets its own delta power. - let mut deltaomega = C::Scalar::ONE; - - // Track the "last" value from the previous column set - let mut last_z = C::Scalar::ONE; - - let mut sets = vec![]; - - for (columns, permutations) in self - .columns - .chunks(chunk_len) - .zip(pkey.permutations.chunks(chunk_len)) - { - // Goal is to compute the products of fractions - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where p_j(X) is the jth column in this permutation, - // and i is the ith row of the column. 
- - let mut modified_values = vec![C::Scalar::ONE; params.n() as usize]; - - // Iterate over each column of the permutation - for (&column, permuted_column_values) in columns.iter().zip(permutations.iter()) { - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - for ((modified_values, value), permuted_value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - .zip(permuted_column_values[start..].iter()) - { - *modified_values *= &(*beta * permuted_value + &*gamma + value); - } - }); - } - - // Invert to obtain the denominator for the permutation product polynomial - modified_values.batch_invert(); - - // Iterate over each column again, this time finishing the computation - // of the entire fraction by computing the numerators - for &column in columns.iter() { - let omega = domain.get_omega(); - let values = match column.column_type() { - Any::Advice(_) => advice, - Any::Fixed => fixed, - Any::Instance => instance, - }; - parallelize(&mut modified_values, |modified_values, start| { - let mut deltaomega = deltaomega * &omega.pow_vartime([start as u64, 0, 0, 0]); - for (modified_values, value) in modified_values - .iter_mut() - .zip(values[column.index()][start..].iter()) - { - // Multiply by p_j(\omega^i) + \delta^j \omega^i \beta - *modified_values *= &(deltaomega * &*beta + &*gamma + value); - deltaomega *= ω - } - }); - deltaomega *= &::DELTA; - } - - // The modified_values vector is a vector of products of fractions - // of the form - // - // (p_j(\omega^i) + \delta^j \omega^i \beta + \gamma) / - // (p_j(\omega^i) + \beta s_j(\omega^i) + \gamma) - // - // where i is the index into modified_values, for the jth column in - // the permutation - - // Compute the evaluations of the permutation product polynomial - // over our domain, starting with z[0] = 1 - let mut z = vec![last_z]; - for row in 1..(params.n() as usize) { - let mut tmp = z[row - 1]; - - tmp *= &modified_values[row - 1]; - z.push(tmp); - } - let mut z = domain.lagrange_from_vec(z); - // Set blinding factors - for z in &mut z[params.n() as usize - blinding_factors..] 
{ - *z = C::Scalar::random(&mut rng); - } - // Set new last_z - last_z = z[params.n() as usize - (blinding_factors + 1)]; - - let blind = Blind(C::Scalar::random(&mut rng)); - - let permutation_product_commitment_projective = params.commit_lagrange(&z, blind); - let permutation_product_blind = blind; - let z = domain.lagrange_to_coeff(z); - let permutation_product_poly = z.clone(); - - let permutation_product_coset = domain.coeff_to_extended(z.clone()); - - let permutation_product_commitment = - permutation_product_commitment_projective.to_affine(); - - // Hash the permutation product commitment - transcript.write_point(permutation_product_commitment)?; - - sets.push(CommittedSet { - permutation_product_poly, - permutation_product_coset, - permutation_product_blind, - }); - } - - Ok(Committed { sets }) - } -} - -impl Committed { - pub(in crate::plonk) fn construct(self) -> Constructed { - Constructed { - sets: self - .sets - .iter() - .map(|set| ConstructedSet { - permutation_product_poly: set.permutation_product_poly.clone(), - permutation_product_blind: set.permutation_product_blind, - }) - .collect(), - } - } -} - -impl super::ProvingKey { - pub(in crate::plonk) fn open( - &self, - x: ChallengeX, - ) -> impl Iterator> + Clone { - self.polys.iter().map(move |poly| ProverQuery { - point: *x, - poly, - blind: Blind::default(), - }) - } - - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - &self, - x: ChallengeX, - transcript: &mut T, - ) -> Result<(), Error> { - // Hash permutation evals - for eval in self.polys.iter().map(|poly| eval_polynomial(poly, *x)) { - transcript.write_scalar(eval)?; - } - - Ok(()) - } -} - -impl Constructed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &plonk::ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let blinding_factors = pk.vk.cs.blinding_factors(); - - { - let mut sets = self.sets.iter(); - - while let Some(set) = sets.next() { - let permutation_product_eval = eval_polynomial(&set.permutation_product_poly, *x); - - let permutation_product_next_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation::next()), - ); - - // Hash permutation product evals - for eval in iter::empty() - .chain(Some(&permutation_product_eval)) - .chain(Some(&permutation_product_next_eval)) - { - transcript.write_scalar(*eval)?; - } - - // If we have any remaining sets to process, evaluate this set at omega^u - // so we can constrain the last value of its running product to equal the - // first value of the next set's running product, chaining them together. 
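// Hedged note on the rotation used just below: with b blinding factors the
// last usable row of a column set is n - (b + 1), so the evaluation point
//   x_last = omega^{-(b + 1)} * x
// reads off z(omega^{n - (b + 1)}) once the constraint is specialised to the
// first row. That is exactly the `last_z` value above, i.e. the value the next
// set's running product starts from, which is what chains the sets together.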
- if sets.len() > 0 { - let permutation_product_last_eval = eval_polynomial( - &set.permutation_product_poly, - domain.rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))), - ); - - transcript.write_scalar(permutation_product_last_eval)?; - } - } - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a plonk::ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let blinding_factors = pk.vk.cs.blinding_factors(); - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - let x_last = pk - .vk - .domain - .rotate_omega(*x, Rotation(-((blinding_factors + 1) as i32))); - - iter::empty() - .chain(self.constructed.sets.iter().flat_map(move |set| { - iter::empty() - // Open permutation product commitments at x and \omega x - .chain(Some(ProverQuery { - point: *x, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - .chain(Some(ProverQuery { - point: x_next, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - })) - })) - // Open it at \omega^{last} x for all but the last set. This rotation is only - // sensical for the first row, but we only use this rotation in a constraint - // that is gated on l_0. - .chain( - self.constructed - .sets - .iter() - .rev() - .skip(1) - .flat_map(move |set| { - Some(ProverQuery { - point: x_last, - poly: &set.permutation_product_poly, - blind: set.permutation_product_blind, - }) - }), - ) - } -} diff --git a/halo2_proofs/src/plonk/prover.rs b/halo2_proofs/src/plonk/prover.rs index cd0d7306a9..1c6a3a7e0b 100644 --- a/halo2_proofs/src/plonk/prover.rs +++ b/halo2_proofs/src/plonk/prover.rs @@ -1,729 +1,115 @@ -use ff::{Field, FromUniformBytes, WithSmallOrderMulGroup}; -use group::Curve; -use rand_core::RngCore; -use std::collections::{BTreeSet, HashSet}; -use std::ops::RangeTo; -use std::{collections::HashMap, iter}; - -use super::{ - circuit::{ - sealed::{self}, - Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, Fixed, FloorPlanner, - Instance, Selector, - }, - lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, - ChallengeX, ChallengeY, Error, ProvingKey, -}; - -use crate::{ - arithmetic::{eval_polynomial, CurveAffine}, - circuit::Value, - plonk::Assigned, - poly::{ - commitment::{Blind, CommitmentScheme, Params, Prover}, - Basis, Coeff, LagrangeCoeff, Polynomial, ProverQuery, - }, -}; -use crate::{ - poly::batch_invert_assigned, - transcript::{EncodedChallenge, TranscriptWrite}, +use crate::plonk::{Error, ErrorBack}; +use crate::poly::commitment::{self, CommitmentScheme, Params}; +use crate::transcript::{EncodedChallenge, TranscriptWrite}; +use halo2_backend::plonk::{prover::Prover, ProvingKey}; +use halo2_frontend::circuit::WitnessCalculator; +use halo2_frontend::plonk::{Circuit, ConstraintSystem}; +use halo2_middleware::ff::{FromUniformBytes, WithSmallOrderMulGroup}; +use halo2_middleware::zal::{ + impls::{PlonkEngine, PlonkEngineConfig}, + traits::MsmAccel, }; -use group::prime::PrimeCurveAffine; +use rand_core::RngCore; +use std::collections::HashMap; /// This creates a proof for the provided `circuit` when given the public /// parameters `params` and the proving key [`ProvingKey`] that was /// generated previously for the same circuit. The provided `instances` /// are zero-padded internally. 
-pub fn create_proof< +pub fn create_proof_with_engine< 'params, Scheme: CommitmentScheme, - P: Prover<'params, Scheme>, + P: commitment::Prover<'params, Scheme>, E: EncodedChallenge, R: RngCore, T: TranscriptWrite, ConcreteCircuit: Circuit, + M: MsmAccel, >( + engine: PlonkEngine, params: &'params Scheme::ParamsProver, pk: &ProvingKey, circuits: &[ConcreteCircuit], - instances: &[&[&[Scheme::Scalar]]], - mut rng: R, + instances: &[Vec>], + rng: R, transcript: &mut T, ) -> Result<(), Error> where Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { if circuits.len() != instances.len() { - return Err(Error::InvalidInstances); - } - - for instance in instances.iter() { - if instance.len() != pk.vk.cs.num_instance_columns { - return Err(Error::InvalidInstances); - } + return Err(Error::Backend(ErrorBack::InvalidInstances)); } - // Hash verification key into transcript - pk.vk.hash_into(transcript)?; - - let domain = &pk.vk.domain; - let mut meta = ConstraintSystem::default(); + let mut cs = ConstraintSystem::default(); #[cfg(feature = "circuit-params")] - let config = ConcreteCircuit::configure_with_params(&mut meta, circuits[0].params()); + let config = ConcreteCircuit::configure_with_params(&mut cs, circuits[0].params()); #[cfg(not(feature = "circuit-params"))] - let config = ConcreteCircuit::configure(&mut meta); - - // Selector optimizations cannot be applied here; use the ConstraintSystem - // from the verification key. - let meta = &pk.vk.cs; - - struct InstanceSingle { - pub instance_values: Vec>, - pub instance_polys: Vec>, - } - - let instance: Vec> = instances - .iter() - .map(|instance| -> Result, Error> { - let instance_values = instance - .iter() - .map(|values| { - let mut poly = domain.empty_lagrange(); - assert_eq!(poly.len(), params.n() as usize); - if values.len() > (poly.len() - (meta.blinding_factors() + 1)) { - return Err(Error::InstanceTooLarge); - } - for (poly, value) in poly.iter_mut().zip(values.iter()) { - if !P::QUERY_INSTANCE { - transcript.common_scalar(*value)?; - } - *poly = *value; - } - Ok(poly) - }) - .collect::, _>>()?; - - if P::QUERY_INSTANCE { - let instance_commitments_projective: Vec<_> = instance_values - .iter() - .map(|poly| params.commit_lagrange(poly, Blind::default())) - .collect(); - let mut instance_commitments = - vec![Scheme::Curve::identity(); instance_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &instance_commitments_projective, - &mut instance_commitments, - ); - let instance_commitments = instance_commitments; - drop(instance_commitments_projective); - - for commitment in &instance_commitments { - transcript.common_point(*commitment)?; - } - } - - let instance_polys: Vec<_> = instance_values - .iter() - .map(|poly| { - let lagrange_vec = domain.lagrange_from_vec(poly.to_vec()); - domain.lagrange_to_coeff(lagrange_vec) - }) - .collect(); - - Ok(InstanceSingle { - instance_values, - instance_polys, - }) - }) - .collect::, _>>()?; - - #[derive(Clone)] - struct AdviceSingle { - pub advice_polys: Vec>, - pub advice_blinds: Vec>, - } - - struct WitnessCollection<'a, F: Field> { - k: u32, - current_phase: sealed::Phase, - advice: Vec, LagrangeCoeff>>, - unblinded_advice: HashSet, - challenges: &'a HashMap, - instances: &'a [&'a [F]], - usable_rows: RangeTo, - _marker: std::marker::PhantomData, - } - - impl<'a, F: Field> Assignment for WitnessCollection<'a, F> { - fn enter_region(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about regions in this context. 
- } - - fn exit_region(&mut self) { - // Do nothing; we don't care about regions in this context. - } - - fn enable_selector(&mut self, _: A, _: &Selector, _: usize) -> Result<(), Error> - where - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn annotate_column(&mut self, _annotation: A, _column: Column) - where - A: FnOnce() -> AR, - AR: Into, - { - // Do nothing - } - - fn query_instance(&self, column: Column, row: usize) -> Result, Error> { - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - self.instances - .get(column.index()) - .and_then(|column| column.get(row)) - .map(|v| Value::known(*v)) - .ok_or(Error::BoundsFailure) - } - - fn assign_advice( - &mut self, - _: A, - column: Column, - row: usize, - to: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // Ignore assignment of advice column in different phase than current one. - if self.current_phase != column.column_type().phase { - return Ok(()); - } - - if !self.usable_rows.contains(&row) { - return Err(Error::not_enough_rows_available(self.k)); - } - - *self - .advice - .get_mut(column.index()) - .and_then(|v| v.get_mut(row)) - .ok_or(Error::BoundsFailure)? = to().into_field().assign()?; - - Ok(()) - } - - fn assign_fixed( - &mut self, - _: A, - _: Column, - _: usize, - _: V, - ) -> Result<(), Error> - where - V: FnOnce() -> Value, - VR: Into>, - A: FnOnce() -> AR, - AR: Into, - { - // We only care about advice columns here - - Ok(()) - } - - fn copy( - &mut self, - _: Column, - _: usize, - _: Column, - _: usize, - ) -> Result<(), Error> { - // We only care about advice columns here - - Ok(()) - } - - fn fill_from_row( - &mut self, - _: Column, - _: usize, - _: Value>, - ) -> Result<(), Error> { - Ok(()) - } - - fn get_challenge(&self, challenge: Challenge) -> Value { - self.challenges - .get(&challenge.index()) - .cloned() - .map(Value::known) - .unwrap_or_else(Value::unknown) - } - - fn push_namespace(&mut self, _: N) - where - NR: Into, - N: FnOnce() -> NR, - { - // Do nothing; we don't care about namespaces in this context. - } - - fn pop_namespace(&mut self, _: Option) { - // Do nothing; we don't care about namespaces in this context. - } - } - - let (advice, challenges) = { - let mut advice = vec![ - AdviceSingle:: { - advice_polys: vec![domain.empty_lagrange(); meta.num_advice_columns], - advice_blinds: vec![Blind::default(); meta.num_advice_columns], - }; - instances.len() - ]; - let mut challenges = HashMap::::with_capacity(meta.num_challenges); - - let unusable_rows_start = params.n() as usize - (meta.blinding_factors() + 1); - for current_phase in pk.vk.cs.phases() { - let column_indices = meta - .advice_column_phase - .iter() - .enumerate() - .filter_map(|(column_index, phase)| { - if current_phase == *phase { - Some(column_index) - } else { - None - } - }) - .collect::>(); - - for ((circuit, advice), instances) in - circuits.iter().zip(advice.iter_mut()).zip(instances) - { - let mut witness = WitnessCollection { - k: params.k(), - current_phase, - advice: vec![domain.empty_lagrange_assigned(); meta.num_advice_columns], - unblinded_advice: HashSet::from_iter(meta.unblinded_advice_columns.clone()), - instances, - challenges: &challenges, - // The prover will not be allowed to assign values to advice - // cells that exist within inactive rows, which include some - // number of blinding factors and an extra row for use in the - // permutation argument. 
- usable_rows: ..unusable_rows_start, - _marker: std::marker::PhantomData, - }; - - // Synthesize the circuit to obtain the witness and other information. - ConcreteCircuit::FloorPlanner::synthesize( - &mut witness, - circuit, - config.clone(), - meta.constants.clone(), - )?; - - let mut advice_values = batch_invert_assigned::( - witness - .advice - .into_iter() - .enumerate() - .filter_map(|(column_index, advice)| { - if column_indices.contains(&column_index) { - Some(advice) - } else { - None - } - }) - .collect(), - ); - - // Add blinding factors to advice columns - for (column_index, advice_values) in column_indices.iter().zip(&mut advice_values) { - if !witness.unblinded_advice.contains(column_index) { - for cell in &mut advice_values[unusable_rows_start..] { - *cell = Scheme::Scalar::random(&mut rng); - } - } else { - #[cfg(feature = "sanity-checks")] - for cell in &advice_values[unusable_rows_start..] { - assert_eq!(*cell, Scheme::Scalar::ZERO); - } - } - } - - // Compute commitments to advice column polynomials - let blinds: Vec<_> = column_indices - .iter() - .map(|i| { - if witness.unblinded_advice.contains(i) { - Blind::default() - } else { - Blind(Scheme::Scalar::random(&mut rng)) - } - }) - .collect(); - let advice_commitments_projective: Vec<_> = advice_values - .iter() - .zip(blinds.iter()) - .map(|(poly, blind)| params.commit_lagrange(poly, *blind)) - .collect(); - let mut advice_commitments = - vec![Scheme::Curve::identity(); advice_commitments_projective.len()]; - ::CurveExt::batch_normalize( - &advice_commitments_projective, - &mut advice_commitments, - ); - let advice_commitments = advice_commitments; - drop(advice_commitments_projective); - - for commitment in &advice_commitments { - transcript.write_point(*commitment)?; - } - for ((column_index, advice_values), blind) in - column_indices.iter().zip(advice_values).zip(blinds) - { - advice.advice_polys[*column_index] = advice_values; - advice.advice_blinds[*column_index] = blind; - } - } + let config = ConcreteCircuit::configure(&mut cs); + let cs = cs; - for (index, phase) in meta.challenge_phase.iter().enumerate() { - if current_phase == *phase { - let existing = - challenges.insert(index, *transcript.squeeze_challenge_scalar::<()>()); - assert!(existing.is_none()); - } - } - } - - assert_eq!(challenges.len(), meta.num_challenges); - let challenges = (0..meta.num_challenges) - .map(|index| challenges.remove(&index).unwrap()) - .collect::>(); - - (advice, challenges) - }; - - // Sample theta challenge for keeping lookup columns linearly independent - let theta: ChallengeTheta<_> = transcript.squeeze_challenge_scalar(); - - let lookups: Vec>> = instance + let mut witness_calcs: Vec<_> = circuits .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, Error> { - // Construct and commit to permuted values for each lookup - pk.vk - .cs - .lookups - .iter() - .map(|lookup| { - lookup.commit_permuted( - pk, - params, - domain, - theta, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - transcript, - ) - }) - .collect() + .enumerate() + .map(|(i, circuit)| { + WitnessCalculator::new(params.k(), circuit, &config, &cs, instances[i].as_slice()) }) - .collect::, _>>()?; - - // Sample beta challenge - let beta: ChallengeBeta<_> = transcript.squeeze_challenge_scalar(); - - // Sample gamma challenge - let gamma: ChallengeGamma<_> = transcript.squeeze_challenge_scalar(); - - // Commit to permutations. 
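// Hedged reminder of what the call below produces: the permutation argument
// splits its columns into chunks of at most cs_degree - 2 columns and commits
// to one running-product polynomial z per chunk, chaining consecutive chunks
// through the last_z / x_last mechanism shown in permutation/prover.rs above.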
- let permutations: Vec> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| { - pk.vk.cs.permutation.commit( - params, - pk, - &pk.permutation, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - beta, - gamma, - &mut rng, - transcript, - ) - }) - .collect::, _>>()?; - - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - // Construct and commit to products for each lookup - lookups - .into_iter() - .map(|lookup| lookup.commit_product(pk, params, beta, gamma, &mut rng, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let shuffles: Vec>> = instance - .iter() - .zip(advice.iter()) - .map(|(instance, advice)| -> Result, _> { - // Compress expressions for each shuffle - pk.vk - .cs - .shuffles - .iter() - .map(|shuffle| { - shuffle.commit_product( - pk, - params, - domain, - theta, - gamma, - &advice.advice_polys, - &pk.fixed_values, - &instance.instance_values, - &challenges, - &mut rng, - transcript, - ) - }) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Commit to the vanishing argument's random polynomial for blinding h(x_3) - let vanishing = vanishing::Argument::commit(params, domain, &mut rng, transcript)?; - - // Obtain challenge for keeping all separate gates linearly independent - let y: ChallengeY<_> = transcript.squeeze_challenge_scalar(); - - // Calculate the advice polys - let advice: Vec> = advice - .into_iter() - .map( - |AdviceSingle { - advice_polys, - advice_blinds, - }| { - AdviceSingle { - advice_polys: advice_polys - .into_iter() - .map(|poly| domain.lagrange_to_coeff(poly)) - .collect::>(), - advice_blinds, - } - }, - ) .collect(); - - // Evaluate the h(X) polynomial - let h_poly = pk.ev.evaluate_h( - pk, - &advice - .iter() - .map(|a| a.advice_polys.as_slice()) - .collect::>(), - &instance - .iter() - .map(|i| i.instance_polys.as_slice()) - .collect::>(), - &challenges, - *y, - *beta, - *gamma, - *theta, - &lookups, - &shuffles, - &permutations, - ); - - // Construct the vanishing argument's h(X) commitments - let vanishing = vanishing.construct(params, domain, h_poly, &mut rng, transcript)?; - - let x: ChallengeX<_> = transcript.squeeze_challenge_scalar(); - let xn = x.pow([params.n()]); - - if P::QUERY_INSTANCE { - // Compute and hash instance evals for each circuit instance - for instance in instance.iter() { - // Evaluate polynomials at omega^i x - let instance_evals: Vec<_> = meta - .instance_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &instance.instance_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each instance column evaluation - for eval in instance_evals.iter() { - transcript.write_scalar(*eval)?; - } - } - } - - // Compute and hash advice evals for each circuit instance - for advice in advice.iter() { - // Evaluate polynomials at omega^i x - let advice_evals: Vec<_> = meta - .advice_queries - .iter() - .map(|&(column, at)| { - eval_polynomial( - &advice.advice_polys[column.index()], - domain.rotate_omega(*x, at), - ) - }) - .collect(); - - // Hash each advice column evaluation - for eval in advice_evals.iter() { - transcript.write_scalar(*eval)?; - } - } - - // Compute and hash fixed evals (shared across all circuit instances) - let fixed_evals: Vec<_> = meta - .fixed_queries - .iter() - .map(|&(column, at)| { - eval_polynomial(&pk.fixed_polys[column.index()], domain.rotate_omega(*x, at)) - }) - .collect(); - - // Hash each fixed column evaluation - for eval in fixed_evals.iter() { - 
transcript.write_scalar(*eval)?; + let mut prover = Prover::::new_with_engine( + engine, params, pk, instances, rng, transcript, + )?; + let mut challenges = HashMap::new(); + let phases = prover.phases().to_vec(); + for phase in phases.iter() { + let mut witnesses = Vec::with_capacity(circuits.len()); + for witness_calc in witness_calcs.iter_mut() { + witnesses.push(witness_calc.calc(*phase, &challenges)?); + } + challenges = prover.commit_phase(*phase, witnesses).unwrap(); } - - let vanishing = vanishing.evaluate(x, xn, domain, transcript)?; - - // Evaluate common permutation data - pk.permutation.evaluate(x, transcript)?; - - // Evaluate the permutations, if any, at omega^i x. - let permutations: Vec> = permutations - .into_iter() - .map(|permutation| -> Result<_, _> { permutation.construct().evaluate(pk, x, transcript) }) - .collect::, _>>()?; - - // Evaluate the lookups, if any, at omega^i x. - let lookups: Vec>> = lookups - .into_iter() - .map(|lookups| -> Result, _> { - lookups - .into_iter() - .map(|p| p.evaluate(pk, x, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - // Evaluate the shuffles, if any, at omega^i x. - let shuffles: Vec>> = shuffles - .into_iter() - .map(|shuffles| -> Result, _> { - shuffles - .into_iter() - .map(|p| p.evaluate(pk, x, transcript)) - .collect::, _>>() - }) - .collect::, _>>()?; - - let instances = instance - .iter() - .zip(advice.iter()) - .zip(permutations.iter()) - .zip(lookups.iter()) - .zip(shuffles.iter()) - .flat_map(|((((instance, advice), permutation), lookups), shuffles)| { - iter::empty() - .chain( - P::QUERY_INSTANCE - .then_some(pk.vk.cs.instance_queries.iter().map(move |&(column, at)| { - ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &instance.instance_polys[column.index()], - blind: Blind::default(), - } - })) - .into_iter() - .flatten(), - ) - .chain( - pk.vk - .cs - .advice_queries - .iter() - .map(move |&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &advice.advice_polys[column.index()], - blind: advice.advice_blinds[column.index()], - }), - ) - .chain(permutation.open(pk, x)) - .chain(lookups.iter().flat_map(move |p| p.open(pk, x))) - .chain(shuffles.iter().flat_map(move |p| p.open(pk, x))) - }) - .chain( - pk.vk - .cs - .fixed_queries - .iter() - .map(|&(column, at)| ProverQuery { - point: domain.rotate_omega(*x, at), - poly: &pk.fixed_polys[column.index()], - blind: Blind::default(), - }), - ) - .chain(pk.permutation.open(x)) - // We query the h(X) polynomial at x - .chain(vanishing.open(x)); - - let prover = P::new(params); - prover - .create_proof(rng, transcript, instances) - .map_err(|_| Error::ConstraintSystemFailure) + Ok(prover.create_proof()?) +} +/// This creates a proof for the provided `circuit` when given the public +/// parameters `params` and the proving key [`ProvingKey`] that was +/// generated previously for the same circuit. The provided `instances` +/// are zero-padded internally. 
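// Hedged note on the wrapper below: it only differs from
// create_proof_with_engine in that it builds the default PlonkEngine via
// PlonkEngineConfig::build_default() and forwards everything else unchanged.
// Callers that want to plug in their own MsmAccel implementation call
// create_proof_with_engine directly, as test_create_proof_custom below and the
// H2cEngine setup in the compress_selectors test further down do.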
+pub fn create_proof< + 'params, + Scheme: CommitmentScheme, + P: commitment::Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWrite, + ConcreteCircuit: Circuit, +>( + params: &'params Scheme::ParamsProver, + pk: &ProvingKey, + circuits: &[ConcreteCircuit], + instances: &[Vec>], + rng: R, + transcript: &mut T, +) -> Result<(), Error> +where + Scheme::Scalar: WithSmallOrderMulGroup<3> + FromUniformBytes<64>, +{ + let engine = PlonkEngineConfig::build_default(); + create_proof_with_engine::( + engine, params, pk, circuits, instances, rng, transcript, + ) } #[test] fn test_create_proof() { use crate::{ circuit::SimpleFloorPlanner, - plonk::{keygen_pk, keygen_vk}, + plonk::{keygen_pk, keygen_vk, ConstraintSystem, ErrorFront}, poly::kzg::{ commitment::{KZGCommitmentScheme, ParamsKZG}, multiopen::ProverSHPLONK, }, transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, }; + use halo2_middleware::ff::Field; use halo2curves::bn256::Bn256; use rand_core::OsRng; @@ -746,7 +132,7 @@ fn test_create_proof() { &self, _config: Self::Config, _layouter: impl crate::circuit::Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { Ok(()) } } @@ -765,14 +151,77 @@ fn test_create_proof() { OsRng, &mut transcript, ); - assert!(matches!(proof.unwrap_err(), Error::InvalidInstances)); + assert!(matches!( + proof.unwrap_err(), + Error::Backend(ErrorBack::InvalidInstances) + )); // Create proof with correct number of instances create_proof::, ProverSHPLONK<_>, _, _, _, _>( ¶ms, &pk, &[MyCircuit, MyCircuit], - &[&[], &[]], + &[vec![], vec![]], + OsRng, + &mut transcript, + ) + .expect("proof generation should not fail"); +} + +#[test] +fn test_create_proof_custom() { + use crate::{ + circuit::SimpleFloorPlanner, + plonk::{keygen_pk_custom, keygen_vk_custom, ConstraintSystem, ErrorFront}, + poly::kzg::{ + commitment::{KZGCommitmentScheme, ParamsKZG}, + multiopen::ProverSHPLONK, + }, + transcript::{Blake2bWrite, Challenge255, TranscriptWriterBuffer}, + }; + use halo2_middleware::ff::Field; + use halo2curves::bn256::Bn256; + use rand_core::OsRng; + + #[derive(Clone, Copy)] + struct MyCircuit; + + impl Circuit for MyCircuit { + type Config = (); + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + *self + } + + fn configure(_meta: &mut ConstraintSystem) -> Self::Config {} + + fn synthesize( + &self, + _config: Self::Config, + _layouter: impl crate::circuit::Layouter, + ) -> Result<(), ErrorFront> { + Ok(()) + } + } + + let params: ParamsKZG = ParamsKZG::setup(3, OsRng); + let compress_selectors = true; + let vk = keygen_vk_custom(¶ms, &MyCircuit, compress_selectors) + .expect("keygen_vk_custom should not fail"); + let pk = keygen_pk_custom(¶ms, vk, &MyCircuit, compress_selectors) + .expect("keygen_pk_custom should not fail"); + let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); + let engine = PlonkEngineConfig::build_default(); + + create_proof_with_engine::, ProverSHPLONK<_>, _, _, _, _, _>( + engine, + ¶ms, + &pk, + &[MyCircuit, MyCircuit], + &[vec![], vec![]], OsRng, &mut transcript, ) diff --git a/halo2_proofs/src/plonk/shuffle/prover.rs b/halo2_proofs/src/plonk/shuffle/prover.rs deleted file mode 100644 index fd30436a47..0000000000 --- a/halo2_proofs/src/plonk/shuffle/prover.rs +++ /dev/null @@ -1,250 +0,0 @@ -use super::super::{ - circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX, Error, ProvingKey, -}; -use super::Argument; -use 
crate::plonk::evaluation::evaluate; -use crate::{ - arithmetic::{eval_polynomial, parallelize, CurveAffine}, - poly::{ - commitment::{Blind, Params}, - Coeff, EvaluationDomain, LagrangeCoeff, Polynomial, ProverQuery, Rotation, - }, - transcript::{EncodedChallenge, TranscriptWrite}, -}; -use ff::WithSmallOrderMulGroup; -use group::{ff::BatchInvert, Curve}; -use rand_core::RngCore; -use std::{ - iter, - ops::{Mul, MulAssign}, -}; - -#[derive(Debug)] -struct Compressed { - input_expression: Polynomial, - shuffle_expression: Polynomial, -} - -#[derive(Debug)] -pub(in crate::plonk) struct Committed { - pub(in crate::plonk) product_poly: Polynomial, - product_blind: Blind, -} - -pub(in crate::plonk) struct Evaluated { - constructed: Committed, -} - -impl> Argument { - /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions - /// [S_0, S_1, ..., S_{m-1}], this method - /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1} - /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1}, - #[allow(clippy::too_many_arguments)] - fn compress<'a, 'params: 'a, C, P: Params<'params, C>>( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - ) -> Compressed - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - // Closure to get values of expressions and compress them - let compress_expressions = |expressions: &[Expression]| { - let compressed_expression = expressions - .iter() - .map(|expression| { - pk.vk.domain.lagrange_from_vec(evaluate( - expression, - params.n() as usize, - 1, - fixed_values, - advice_values, - instance_values, - challenges, - )) - }) - .fold(domain.empty_lagrange(), |acc, expression| { - acc * *theta + &expression - }); - compressed_expression - }; - - // Get values of input expressions involved in the shuffle and compress them - let input_expression = compress_expressions(&self.input_expressions); - - // Get values of table expressions involved in the shuffle and compress them - let shuffle_expression = compress_expressions(&self.shuffle_expressions); - - Compressed { - input_expression, - shuffle_expression, - } - } - - /// Given a Shuffle with input expressions and table expressions this method - /// constructs the grand product polynomial over the shuffle. - /// The grand product polynomial is used to populate the Product struct. - /// The Product struct is added to the Shuffle and finally returned by the method. 
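// Hedged sketch of the product this method builds, over the usable rows:
//   z(omega^0) = 1,
//   z(omega^{i+1}) = z(omega^i) * (gamma + A(omega^i)) / (gamma + S(omega^i)),
// where A and S are the compressed input and shuffle expressions from
// compress() above. z returns to 1 at the end of the usable region whenever
// the multiset {A(omega^i)} equals {S(omega^i)}; soundness of the converse
// relies on gamma being a random challenge. The sanity-checks block below
// re-verifies exactly this recurrence row by row.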
- #[allow(clippy::too_many_arguments)] - pub(in crate::plonk) fn commit_product< - 'a, - 'params: 'a, - C, - P: Params<'params, C>, - E: EncodedChallenge, - R: RngCore, - T: TranscriptWrite, - >( - &self, - pk: &ProvingKey, - params: &P, - domain: &EvaluationDomain, - theta: ChallengeTheta, - gamma: ChallengeGamma, - advice_values: &'a [Polynomial], - fixed_values: &'a [Polynomial], - instance_values: &'a [Polynomial], - challenges: &'a [C::Scalar], - mut rng: R, - transcript: &mut T, - ) -> Result, Error> - where - C: CurveAffine, - C::Curve: Mul + MulAssign, - { - let compressed = self.compress( - pk, - params, - domain, - theta, - advice_values, - fixed_values, - instance_values, - challenges, - ); - - let blinding_factors = pk.vk.cs.blinding_factors(); - - let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize]; - parallelize(&mut shuffle_product, |shuffle_product, start| { - for (shuffle_product, shuffle_value) in shuffle_product - .iter_mut() - .zip(compressed.shuffle_expression[start..].iter()) - { - *shuffle_product = *gamma + shuffle_value; - } - }); - - shuffle_product.iter_mut().batch_invert(); - - parallelize(&mut shuffle_product, |product, start| { - for (i, product) in product.iter_mut().enumerate() { - let i = i + start; - *product *= &(*gamma + compressed.input_expression[i]); - } - }); - - // Compute the evaluations of the shuffle product polynomial - // over our domain, starting with z[0] = 1 - let z = iter::once(C::Scalar::ONE) - .chain(shuffle_product) - .scan(C::Scalar::ONE, |state, cur| { - *state *= &cur; - Some(*state) - }) - // Take all rows including the "last" row which should - // be a boolean (and ideally 1, else soundness is broken) - .take(params.n() as usize - blinding_factors) - // Chain random blinding factors. 
- .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng))) - .collect::>(); - assert_eq!(z.len(), params.n() as usize); - let z = pk.vk.domain.lagrange_from_vec(z); - - #[cfg(feature = "sanity-checks")] - { - // While in Lagrange basis, check that product is correctly constructed - let u = (params.n() as usize) - (blinding_factors + 1); - assert_eq!(z[0], C::Scalar::ONE); - for i in 0..u { - let mut left = z[i + 1]; - let input_value = &compressed.input_expression[i]; - let shuffle_value = &compressed.shuffle_expression[i]; - left *= &(*gamma + shuffle_value); - let mut right = z[i]; - right *= &(*gamma + input_value); - assert_eq!(left, right); - } - assert_eq!(z[u], C::Scalar::ONE); - } - - let product_blind = Blind(C::Scalar::random(rng)); - let product_commitment = params.commit_lagrange(&z, product_blind).to_affine(); - let z = pk.vk.domain.lagrange_to_coeff(z); - - // Hash product commitment - transcript.write_point(product_commitment)?; - - Ok(Committed:: { - product_poly: z, - product_blind, - }) - } -} - -impl Committed { - pub(in crate::plonk) fn evaluate, T: TranscriptWrite>( - self, - pk: &ProvingKey, - x: ChallengeX, - transcript: &mut T, - ) -> Result, Error> { - let domain = &pk.vk.domain; - let x_next = domain.rotate_omega(*x, Rotation::next()); - - let product_eval = eval_polynomial(&self.product_poly, *x); - let product_next_eval = eval_polynomial(&self.product_poly, x_next); - - // Hash each advice evaluation - for eval in iter::empty() - .chain(Some(product_eval)) - .chain(Some(product_next_eval)) - { - transcript.write_scalar(eval)?; - } - - Ok(Evaluated { constructed: self }) - } -} - -impl Evaluated { - pub(in crate::plonk) fn open<'a>( - &'a self, - pk: &'a ProvingKey, - x: ChallengeX, - ) -> impl Iterator> + Clone { - let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next()); - - iter::empty() - // Open shuffle product commitments at x - .chain(Some(ProverQuery { - point: *x, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - // Open shuffle product commitments at x_next - .chain(Some(ProverQuery { - point: x_next, - poly: &self.constructed.product_poly, - blind: self.constructed.product_blind, - })) - } -} diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc.rs b/halo2_proofs/src/poly/kzg/multiopen/gwc.rs deleted file mode 100644 index 3fd28dd00a..0000000000 --- a/halo2_proofs/src/poly/kzg/multiopen/gwc.rs +++ /dev/null @@ -1,50 +0,0 @@ -mod prover; -mod verifier; - -pub use prover::ProverGWC; -pub use verifier::VerifierGWC; - -use crate::{poly::query::Query, transcript::ChallengeScalar}; -use ff::Field; -use std::marker::PhantomData; - -#[derive(Clone, Copy, Debug)] -struct U {} -type ChallengeU = ChallengeScalar; - -#[derive(Clone, Copy, Debug)] -struct V {} -type ChallengeV = ChallengeScalar; - -struct CommitmentData> { - queries: Vec, - point: F, - _marker: PhantomData, -} - -fn construct_intermediate_sets>(queries: I) -> Vec> -where - I: IntoIterator + Clone, -{ - let mut point_query_map: Vec<(F, Vec)> = Vec::new(); - for query in queries { - if let Some(pos) = point_query_map - .iter() - .position(|(point, _)| *point == query.get_point()) - { - let (_, queries) = &mut point_query_map[pos]; - queries.push(query); - } else { - point_query_map.push((query.get_point(), vec![query])); - } - } - - point_query_map - .into_iter() - .map(|(point, queries)| CommitmentData { - queries, - point, - _marker: PhantomData, - }) - .collect() -} diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc/prover.rs 
b/halo2_proofs/src/poly/kzg/multiopen/gwc/prover.rs deleted file mode 100644 index ecea01cb01..0000000000 --- a/halo2_proofs/src/poly/kzg/multiopen/gwc/prover.rs +++ /dev/null @@ -1,89 +0,0 @@ -use super::{construct_intermediate_sets, ChallengeV, Query}; -use crate::arithmetic::{kate_division, powers}; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::ParamsProver; -use crate::poly::commitment::Prover; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::query::ProverQuery; -use crate::poly::{commitment::Blind, Polynomial}; -use crate::transcript::{EncodedChallenge, TranscriptWrite}; - -use group::Curve; -use halo2curves::pairing::Engine; -use halo2curves::CurveExt; -use rand_core::RngCore; -use std::fmt::Debug; -use std::io; -use std::marker::PhantomData; - -/// Concrete KZG prover with GWC variant -#[derive(Debug)] -pub struct ProverGWC<'params, E: Engine> { - params: &'params ParamsKZG, -} - -/// Create a multi-opening proof -impl<'params, E: Engine + Debug> Prover<'params, KZGCommitmentScheme> for ProverGWC<'params, E> -where - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - /// Create a multi-opening proof - fn create_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptWrite, - R, - I, - >( - &self, - _: R, - transcript: &mut T, - queries: I, - ) -> io::Result<()> - where - I: IntoIterator> + Clone, - R: RngCore, - { - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - let commitment_data = construct_intermediate_sets(queries); - - for commitment_at_a_point in commitment_data.iter() { - let z = commitment_at_a_point.point; - let (poly_batch, eval_batch) = commitment_at_a_point - .queries - .iter() - .zip(powers(*v)) - .map(|(query, power_of_v)| { - assert_eq!(query.get_point(), z); - - let poly = query.get_commitment().poly; - let eval = query.get_eval(); - - (poly.clone() * power_of_v, eval * power_of_v) - }) - .reduce(|(poly_acc, eval_acc), (poly, eval)| (poly_acc + &poly, eval_acc + eval)) - .unwrap(); - - let poly_batch = &poly_batch - eval_batch; - let witness_poly = Polynomial { - values: kate_division(&poly_batch.values, z), - _marker: PhantomData, - }; - let w = self - .params - .commit(&witness_poly, Blind::default()) - .to_affine(); - - transcript.write_point(w)?; - } - Ok(()) - } -} diff --git a/halo2_proofs/src/poly/kzg/multiopen/gwc/verifier.rs b/halo2_proofs/src/poly/kzg/multiopen/gwc/verifier.rs deleted file mode 100644 index fcfda6941f..0000000000 --- a/halo2_proofs/src/poly/kzg/multiopen/gwc/verifier.rs +++ /dev/null @@ -1,124 +0,0 @@ -use std::fmt::Debug; - -use super::{construct_intermediate_sets, ChallengeU, ChallengeV}; -use crate::arithmetic::powers; -use crate::helpers::SerdeCurveAffine; -use crate::poly::commitment::Verifier; -use crate::poly::commitment::MSM; -use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; -use crate::poly::kzg::msm::{DualMSM, MSMKZG}; -use crate::poly::kzg::strategy::GuardKZG; -use crate::poly::query::Query; -use crate::poly::query::{CommitmentReference, VerifierQuery}; -use crate::poly::Error; -use crate::transcript::{EncodedChallenge, TranscriptRead}; - -use ff::Field; -use halo2curves::pairing::{Engine, MultiMillerLoop}; -use halo2curves::CurveExt; - -#[derive(Debug)] -/// Concrete KZG verifier with GWC variant -pub struct VerifierGWC<'params, E: Engine> { - params: &'params 
ParamsKZG, -} - -impl<'params, E> Verifier<'params, KZGCommitmentScheme> for VerifierGWC<'params, E> -where - E: MultiMillerLoop + Debug, - E::G1Affine: SerdeCurveAffine::Fr, CurveExt = ::G1>, - E::G1: CurveExt, - E::G2Affine: SerdeCurveAffine, -{ - type Guard = GuardKZG<'params, E>; - type MSMAccumulator = DualMSM<'params, E>; - - const QUERY_INSTANCE: bool = false; - - fn new(params: &'params ParamsKZG) -> Self { - Self { params } - } - - fn verify_proof< - 'com, - Ch: EncodedChallenge, - T: TranscriptRead, - I, - >( - &self, - transcript: &mut T, - queries: I, - mut msm_accumulator: DualMSM<'params, E>, - ) -> Result - where - I: IntoIterator>> + Clone, - { - let v: ChallengeV<_> = transcript.squeeze_challenge_scalar(); - - let commitment_data = construct_intermediate_sets(queries); - - let w: Vec = (0..commitment_data.len()) - .map(|_| transcript.read_point().map_err(|_| Error::SamplingError)) - .collect::, Error>>()?; - - let u: ChallengeU<_> = transcript.squeeze_challenge_scalar(); - - let mut commitment_multi = MSMKZG::::new(); - let mut eval_multi = E::Fr::ZERO; - - let mut witness = MSMKZG::::new(); - let mut witness_with_aux = MSMKZG::::new(); - - for ((commitment_at_a_point, wi), power_of_u) in - commitment_data.iter().zip(w.into_iter()).zip(powers(*u)) - { - assert!(!commitment_at_a_point.queries.is_empty()); - let z = commitment_at_a_point.point; - - let (mut commitment_batch, eval_batch) = commitment_at_a_point - .queries - .iter() - .zip(powers(*v)) - .map(|(query, power_of_v)| { - assert_eq!(query.get_point(), z); - - let commitment = match query.get_commitment() { - CommitmentReference::Commitment(c) => { - let mut msm = MSMKZG::::new(); - msm.append_term(power_of_v, (*c).into()); - msm - } - CommitmentReference::MSM(msm) => { - let mut msm = msm.clone(); - msm.scale(power_of_v); - msm - } - }; - let eval = power_of_v * query.get_eval(); - - (commitment, eval) - }) - .reduce(|(mut commitment_acc, eval_acc), (commitment, eval)| { - commitment_acc.add_msm(&commitment); - (commitment_acc, eval_acc + eval) - }) - .unwrap(); - - commitment_batch.scale(power_of_u); - commitment_multi.add_msm(&commitment_batch); - eval_multi += power_of_u * eval_batch; - - witness_with_aux.append_term(power_of_u * z, wi.into()); - witness.append_term(power_of_u, wi.into()); - } - - msm_accumulator.left.add_msm(&witness); - - msm_accumulator.right.add_msm(&witness_with_aux); - msm_accumulator.right.add_msm(&commitment_multi); - let g0: E::G1 = self.params.g[0].into(); - msm_accumulator.right.append_term(eval_multi, -g0); - - Ok(Self::Guard::new(msm_accumulator)) - } -} diff --git a/halo2_proofs/tests/compress_selectors.rs b/halo2_proofs/tests/compress_selectors.rs new file mode 100644 index 0000000000..b335634f78 --- /dev/null +++ b/halo2_proofs/tests/compress_selectors.rs @@ -0,0 +1,516 @@ +#![allow(non_snake_case)] + +use std::marker::PhantomData; + +use ff::PrimeField; +use halo2_debug::display::expr_disp_names; +use halo2_debug::{test_result, test_rng}; +use halo2_frontend::circuit::compile_circuit; +use halo2_frontend::plonk::Error; +use halo2_proofs::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; +use halo2_proofs::poly::Rotation; + +use halo2_backend::transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, +}; +use halo2_middleware::circuit::{Any, ColumnMid}; +use halo2_middleware::zal::impls::{H2cEngine, PlonkEngineConfig}; +use halo2_proofs::arithmetic::Field; +use halo2_proofs::plonk::{ + create_proof_with_engine, keygen_pk_custom, 
keygen_vk_custom, verify_proof, Advice, Assigned, + Circuit, Column, ConstraintSystem, Instance, Selector, +}; +use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; +use halo2_proofs::poly::kzg::strategy::SingleStrategy; +use halo2curves::bn256::{Bn256, Fr, G1Affine}; + +#[derive(Debug, Clone)] +struct MyCircuitConfig { + l: Column, + r: Column, + o: Column, + + s_add: Selector, + s_mul: Selector, + #[allow(dead_code)] + s_cubed: Selector, + + PI: Column, +} + +#[derive(Debug)] +struct MyCircuitChip { + config: MyCircuitConfig, + marker: PhantomData, +} + +trait MyCircuitComposer { + fn raw_multiply( + &self, + layouter: &mut impl Layouter, + f: FM, + ) -> Result<(Cell, Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned, Assigned)>; + + fn raw_add( + &self, + layouter: &mut impl Layouter, + f: FM, + ) -> Result<(Cell, Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned, Assigned)>; + + fn copy(&self, layouter: &mut impl Layouter, a: Cell, b: Cell) -> Result<(), Error>; + + fn expose_public( + &self, + layouter: &mut impl Layouter, + cell: Cell, + row: usize, + ) -> Result<(), Error>; + + #[allow(dead_code)] + fn cube(&self, layouter: &mut impl Layouter, f: FM) -> Result<(Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned)>; +} + +impl MyCircuitChip { + fn construct(config: MyCircuitConfig) -> Self { + Self { + config, + marker: PhantomData, + } + } + + fn configure(meta: &mut ConstraintSystem) -> MyCircuitConfig { + let l = meta.advice_column(); + let r = meta.advice_column(); + let o = meta.advice_column(); + meta.annotate_column(l, || "l"); + meta.annotate_column(r, || "r"); + meta.annotate_column(o, || "o"); + + let s_add = meta.selector(); + let s_mul = meta.selector(); + let s_cubed = meta.selector(); + + let PI = meta.instance_column(); + meta.annotate_column(PI, || "pi"); + + meta.enable_equality(l); + meta.enable_equality(r); + meta.enable_equality(o); + + meta.enable_equality(PI); + + meta.create_gate("add", |meta| { + let l = meta.query_advice(l, Rotation::cur()); + let r = meta.query_advice(r, Rotation::cur()); + let o = meta.query_advice(o, Rotation::cur()); + + let s_add = meta.query_selector(s_add); + + vec![s_add * (l + r - o)] + }); + + meta.create_gate("mul", |meta| { + let l = meta.query_advice(l, Rotation::cur()); + let r = meta.query_advice(r, Rotation::cur()); + let o = meta.query_advice(o, Rotation::cur()); + + let s_mul = meta.query_selector(s_mul); + + vec![s_mul * (l * r - o)] + }); + + // NOTE: This gate is placement for "compress_selectors" logic testing. Not really used. 
+ meta.create_gate("cubed", |meta| { + let l = meta.query_advice(l, Rotation::cur()); + let o = meta.query_advice(o, Rotation::cur()); + + let s_cubed = meta.query_selector(s_cubed); + + vec![s_cubed * (l.clone() * l.clone() * l - o)] + }); + + MyCircuitConfig { + l, + r, + o, + s_add, + s_mul, + s_cubed, + PI, + } + } +} + +impl MyCircuitComposer for MyCircuitChip { + fn raw_multiply( + &self, + layouter: &mut impl Layouter, + mut f: FM, + ) -> Result<(Cell, Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned, Assigned)>, + { + let mut values = None; + layouter.assign_region( + || "multiply", + |mut region| { + let lhs = region.assign_advice( + || "lhs", + self.config.l, + 0, + || { + values = Some(f()); + values.unwrap().map(|x| x.0) + }, + )?; + let rhs = region.assign_advice( + || "rhs", + self.config.r, + 0, + || values.unwrap().map(|x| x.1), + )?; + let out = region.assign_advice( + || "out", + self.config.o, + 0, + || values.unwrap().map(|x| x.2), + )?; + + region.enable_selector(|| "mul", &self.config.s_mul, 0)?; + + Ok((lhs.cell(), rhs.cell(), out.cell())) + }, + ) + } + + fn raw_add( + &self, + layouter: &mut impl Layouter, + mut f: FM, + ) -> Result<(Cell, Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned, Assigned)>, + { + let mut values = None; + layouter.assign_region( + || "add", + |mut region| { + let lhs = region.assign_advice( + || "lhs", + self.config.l, + 0, + || { + values = Some(f()); + values.unwrap().map(|x| x.0) + }, + )?; + let rhs = region.assign_advice( + || "rhs", + self.config.r, + 0, + || values.unwrap().map(|x| x.1), + )?; + let out = region.assign_advice( + || "out", + self.config.o, + 0, + || values.unwrap().map(|x| x.2), + )?; + + region.enable_selector(|| "add", &self.config.s_add, 0)?; + + Ok((lhs.cell(), rhs.cell(), out.cell())) + }, + ) + } + + fn copy(&self, layouter: &mut impl Layouter, a: Cell, b: Cell) -> Result<(), Error> { + layouter.assign_region(|| "copy values", |mut region| region.constrain_equal(a, b)) + } + + fn expose_public( + &self, + layouter: &mut impl Layouter, + cell: Cell, + row: usize, + ) -> Result<(), Error> { + layouter.constrain_instance(cell, self.config.PI, row) + } + + fn cube(&self, layouter: &mut impl Layouter, mut f: FM) -> Result<(Cell, Cell), Error> + where + FM: FnMut() -> Value<(Assigned, Assigned)>, + { + let mut values = None; + layouter.assign_region( + || "cube", + |mut region| { + let lhs = region.assign_advice( + || "lhs", + self.config.l, + 0, + || { + values = Some(f()); + values.unwrap().map(|x| x.0) + }, + )?; + let out = region.assign_advice( + || "out", + self.config.o, + 0, + || values.unwrap().map(|x| x.1), + )?; + + region.enable_selector(|| "cube", &self.config.s_cubed, 0)?; + + Ok((lhs.cell(), out.cell())) + }, + ) + } +} + +#[derive(Debug, Clone, Default)] +struct MyCircuit { + x: Value, + y: Value, + constant: F, +} + +impl Circuit for MyCircuit { + type Config = MyCircuitConfig; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + Self::default() + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + MyCircuitChip::configure(meta) + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + let cs = MyCircuitChip::construct(config); + + let x: Value> = self.x.into(); + let y: Value> = self.y.into(); + let consty = Assigned::from(self.constant); + + let (a0, b0, c0) = cs.raw_multiply(&mut layouter, || 
x.map(|x| (x, x, x * x)))?; + cs.copy(&mut layouter, a0, b0)?; + + let (a1, b1, c1) = cs.raw_multiply(&mut layouter, || y.map(|y| (y, y, y * y)))?; + cs.copy(&mut layouter, a1, b1)?; + + let (a2, b2, c2) = cs.raw_add(&mut layouter, || { + x.zip(y).map(|(x, y)| (x * x, y * y, x * x + y * y)) + })?; + cs.copy(&mut layouter, a2, c0)?; + cs.copy(&mut layouter, b2, c1)?; + + let (a3, b3, c3) = cs.raw_add(&mut layouter, || { + x.zip(y) + .map(|(x, y)| (x * x + y * y, consty, x * x + y * y + consty)) + })?; + cs.copy(&mut layouter, a3, c2)?; + cs.expose_public(&mut layouter, b3, 0)?; + + cs.expose_public(&mut layouter, c3, 1)?; + + Ok(()) + } +} + +fn test_mycircuit( + vk_keygen_compress_selectors: bool, + pk_keygen_compress_selectors: bool, +) -> Result, halo2_proofs::plonk::Error> { + let engine = PlonkEngineConfig::new() + .set_curve::() + .set_msm(H2cEngine::new()) + .build(); + let k = 4; + let circuit: MyCircuit = MyCircuit { + x: Value::known(Fr::one()), + y: Value::known(Fr::one()), + constant: Fr::one(), + }; + + let mut rng = test_rng(); + + // Setup + let params = ParamsKZG::::setup(k, &mut rng); + let verifier_params = params.verifier_params(); + let vk = keygen_vk_custom(¶ms, &circuit, vk_keygen_compress_selectors)?; + let pk = keygen_pk_custom(¶ms, vk.clone(), &circuit, pk_keygen_compress_selectors)?; + + // Proving + #[allow(clippy::useless_vec)] + let instances = vec![vec![vec![Fr::one(), Fr::from_u128(3)]]]; + + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + create_proof_with_engine::, ProverSHPLONK<'_, Bn256>, _, _, _, _, _>( + engine, + ¶ms, + &pk, + &[circuit], + instances.as_slice(), + &mut rng, + &mut transcript, + )?; + let proof = transcript.finalize(); + + // Verify + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof::, VerifierSHPLONK, _, _, _>( + &verifier_params, + &vk, + strategy, + instances.as_slice(), + &mut verifier_transcript, + ) + .map_err(halo2_proofs::plonk::Error::Backend)?; + + Ok(proof) +} + +/* + +How the `compress_selectors` works in `MyCircuit` under the hood: + +# compress = false + + selector `s_add` -> fixed `s_add` + - 1 when `s_add` enabled, 0 otherwise + + selector `s_mul` -> fixed `s_mul` + - 1 when `s_mul` enabled, 0 otherwise + + selector `s_cubed` -> fixed `s_cubed` + - 1 when `s_cubed` enabled, 0 otherwise + + Selector queries in expressions become the corresponding fixed column queries + at rotation 0. + + +# compress = true + + selector `s_add`, `s_mul` -> fixed `s_add_mul` + - 0 when `s_add` disabled and `s_mul` disabled + - 1 when only `s_add` enabled + - 2 when only `s_mul` enabled + + selector `s_cubed` -> fixed `s_cubed` + - 1 when `s_cubed` enabled, 0 otherwise + - NOTE: `s_cubed` is not compressed to avoid growing the max degree which is 3 + + Selector query for `s_add` becomes (`s_add_mul`)*(2 - `s_add_mul`) + Selector query for `s_mul` becomes (`s_add_mul`)*(1 - `s_add_mul`) + Selector query for `s_cubed` becomes the corresponding fixed column query + at rotation 0. 
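  Worked example (illustrative only, derived from the expressions above):

  - Row with only `s_add` enabled: the combined fixed column `s_add_mul` holds 1,
    so the `s_add` query evaluates to 1 * (2 - 1) = 1 (gate active) and the
    `s_mul` query evaluates to 1 * (1 - 1) = 0 (gate inactive).
  - Row with only `s_mul` enabled: `s_add_mul` holds 2, so the `s_add` query is
    2 * (2 - 2) = 0 while the `s_mul` query is 2 * (1 - 2) = -2, which is
    nonzero and therefore still forces l * r - o = 0.
  - Row with both disabled: `s_add_mul` holds 0 and both queries evaluate to 0.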
+ +*/ + +#[test] +fn test_compress_gates() { + let k = 4; + let circuit: MyCircuit = MyCircuit { + x: Value::known(Fr::one()), + y: Value::known(Fr::one()), + constant: Fr::one(), + }; + + // Without compression + + let (mut compress_false, _, _) = compile_circuit(k, &circuit, false).unwrap(); + + let names = &mut compress_false.cs.general_column_annotations; + names.insert(ColumnMid::new(Any::Fixed, 0), "s_add".to_string()); + names.insert(ColumnMid::new(Any::Fixed, 1), "s_mul".to_string()); + names.insert(ColumnMid::new(Any::Fixed, 2), "s_cubed".to_string()); + let cs = &compress_false.cs; + let names = &cs.general_column_annotations; + assert_eq!(3, cs.gates.len()); + assert_eq!( + "s_add * (l + r - o)", + format!("{}", expr_disp_names(&cs.gates[0].poly, names)) + ); + assert_eq!( + "s_mul * (l * r - o)", + format!("{}", expr_disp_names(&cs.gates[1].poly, names)) + ); + assert_eq!( + "s_cubed * (l * l * l - o)", + format!("{}", expr_disp_names(&cs.gates[2].poly, names)) + ); + + // With compression + + let (mut compress_true, _, _) = compile_circuit(k, &circuit, true).unwrap(); + + let names = &mut compress_true.cs.general_column_annotations; + names.insert(ColumnMid::new(Any::Fixed, 0), "s_add_mul".to_string()); + names.insert(ColumnMid::new(Any::Fixed, 1), "s_cubed".to_string()); + let cs = &compress_true.cs; + let names = &cs.general_column_annotations; + assert_eq!(3, cs.gates.len()); + assert_eq!( + "s_add_mul * (2 - s_add_mul) * (l + r - o)", + format!("{}", expr_disp_names(&cs.gates[0].poly, names)) + ); + assert_eq!( + "s_add_mul * (1 - s_add_mul) * (l * r - o)", + format!("{}", expr_disp_names(&cs.gates[1].poly, names)) + ); + assert_eq!( + "s_cubed * (l * l * l - o)", + format!("{}", expr_disp_names(&cs.gates[2].poly, names)) + ); +} + +#[test] +fn test_key_compression() -> Result<(), halo2_proofs::plonk::Error> { + // vk & pk keygen both WITH compression + test_result( + || test_mycircuit(true, true).expect("should pass"), + "acae50508de5ead584170dd83b139daf40e1026b6debbb78eb05d515173fc2dd", + ); + + // vk & pk keygen both WITHOUT compression + test_result( + || test_mycircuit(false, false).expect("should pass"), + "f9c99bd341705ac6a13724a526dd28df0bac1c745e0cde40ab39cab3e1b95309", + ); + + Ok(()) +} + +#[should_panic] +#[test] +fn test_key_compression_failure_1() { + // vk keygen WITH compress + // pk keygen WITHOUT compress + assert!(test_mycircuit(false, true).is_err()); +} + +#[test] +fn test_key_compression_failure_2() { + // vk keygen WITHOUT compress + // pk keygen WITH compress + assert!(test_mycircuit(true, false).is_err()); +} diff --git a/halo2_proofs/tests/frontend_backend_split.rs b/halo2_proofs/tests/frontend_backend_split.rs new file mode 100644 index 0000000000..e6e4024ab1 --- /dev/null +++ b/halo2_proofs/tests/frontend_backend_split.rs @@ -0,0 +1,624 @@ +#![allow(clippy::many_single_char_names)] +#![allow(clippy::op_ref)] + +use halo2_backend::{ + plonk::{ + keygen::{keygen_pk, keygen_vk}, + prover::ProverSingle, + verifier::{verify_proof, verify_proof_single}, + }, + transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, + }, +}; +use halo2_debug::test_rng; +use halo2_frontend::{ + circuit::{ + compile_circuit, AssignedCell, Layouter, Region, SimpleFloorPlanner, Value, + WitnessCalculator, + }, + dev::MockProver, + plonk::{ + circuit::{Challenge, Column}, + Advice, Circuit, ConstraintSystem, Error as ErrorFront, Expression, FirstPhase, Fixed, + Instance, SecondPhase, Selector, + }, +}; +use 
halo2_middleware::{ff::Field, poly::Rotation}; +use std::collections::HashMap; + +#[derive(Clone)] +struct MyCircuitConfig { + // A gate that uses selector, fixed, advice, has addition, multiplication and rotation + // s_gate[0] * (a[0] + b[0] * c[0] * d[0] - a[1]) + s_gate: Selector, + a: Column, + b: Column, + c: Column, + d: Column, + + // Copy constraints between columns (a, b) and (a, d) + + // A dynamic lookup: s_lookup * [1, a[0], b[0]] in s_ltable * [1, d[0], c[0]] + s_lookup: Column, + s_ltable: Column, + + // A shuffle: s_shufle * [1, a[0]] shuffle_of s_stable * [1, b[0]] + s_shuffle: Column, + s_stable: Column, + + // A FirstPhase challenge and SecondPhase column. We define the following gates: + // s_rlc * (a[0] + challenge * b[0] - e[0]) + // s_rlc * (c[0] + challenge * d[0] - e[0]) + s_rlc: Selector, + e: Column, + challenge: Challenge, + + // Instance with a gate: s_instance * (a[0] - instance[0]) + s_instance: Selector, + instance: Column, +} + +impl MyCircuitConfig { + #[allow(clippy::type_complexity)] + fn assign_gate>( + &self, + region: &mut Region<'_, F>, + offset: &mut usize, + a_assigned: Option>, + abcd: [u64; 4], + ) -> Result<(AssignedCell, [AssignedCell; 4]), ErrorFront> { + let [a, b, c, d] = abcd; + self.s_gate.enable(region, *offset)?; + let a_assigned = if let Some(a_assigned) = a_assigned { + a_assigned + } else { + region.assign_advice(|| "", self.a, *offset, || Value::known(F::from(a)))? + }; + let a = a_assigned.value(); + let [b, c, d] = [b, c, d].map(|v| Value::known(F::from(v))); + let b_assigned = region.assign_advice(|| "", self.b, *offset, || b)?; + let c_assigned = region.assign_advice(|| "", self.c, *offset, || c)?; + let d_assigned = region.assign_fixed(|| "", self.d, *offset, || d)?; + *offset += 1; + // let res = a + b * c * d; + let res = a + .zip(b.zip(c.zip(d))) + .map(|(a, (b, (c, d)))| *a + b * c * d); + let res_assigned = region.assign_advice(|| "", self.a, *offset, || res)?; + Ok(( + res_assigned, + [a_assigned, b_assigned, c_assigned, d_assigned], + )) + } +} + +#[derive(Clone)] +struct MyCircuit { + k: u32, + input: u64, + _marker: std::marker::PhantomData, +} + +impl, const WIDTH_FACTOR: usize> MyCircuit { + fn new(k: u32, input: u64) -> Self { + Self { + k, + input, + _marker: std::marker::PhantomData {}, + } + } + + fn instance(&self) -> Vec { + let mut instance = Vec::new(); + let res = F::from(self.input); + instance.push(res); + let (b, c, d) = (3, 4, 1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + let (b, c, d) = (6, 7, 1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + let (b, c, d) = (8, 9, 1); + let res = res + F::from(b) * F::from(c) * F::from(d); + instance.push(res); + instance.push(F::from(2)); + instance.push(F::from(2)); + instance + } + + fn instances(&self) -> Vec> { + let instance = self.instance(); + (0..WIDTH_FACTOR).map(|_| instance.clone()).collect() + } + + fn configure_single(meta: &mut ConstraintSystem, id: usize) -> MyCircuitConfig { + let s_gate = meta.selector(); + let a = meta.advice_column(); + let b = meta.advice_column(); + let c = meta.advice_column(); + let d = meta.fixed_column(); + + meta.enable_equality(a); + meta.enable_equality(b); + meta.enable_equality(d); + + let s_lookup = meta.fixed_column(); + let s_ltable = meta.fixed_column(); + + let s_shuffle = meta.fixed_column(); + let s_stable = meta.fixed_column(); + + let s_rlc = meta.selector(); + let e = meta.advice_column_in(SecondPhase); + let challenge = 
meta.challenge_usable_after(FirstPhase); + + let s_instance = meta.selector(); + let instance = meta.instance_column(); + meta.enable_equality(instance); + + let one = Expression::Constant(F::ONE); + + meta.create_gate(format!("gate_a.{id}"), |meta| { + let s_gate = meta.query_selector(s_gate); + let b = meta.query_advice(b, Rotation::cur()); + let a1 = meta.query_advice(a, Rotation::next()); + let a0 = meta.query_advice(a, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + + vec![s_gate * (a0 + b * c * d - a1)] + }); + + meta.lookup_any(format!("lookup.{id}"), |meta| { + let s_lookup = meta.query_fixed(s_lookup, Rotation::cur()); + let s_ltable = meta.query_fixed(s_ltable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let lhs = [one.clone(), a, b].map(|c| c * s_lookup.clone()); + let rhs = [one.clone(), d, c].map(|c| c * s_ltable.clone()); + lhs.into_iter().zip(rhs).collect() + }); + + meta.shuffle(format!("shuffle.{id}"), |meta| { + let s_shuffle = meta.query_fixed(s_shuffle, Rotation::cur()); + let s_stable = meta.query_fixed(s_stable, Rotation::cur()); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let lhs = [one.clone(), a].map(|c| c * s_shuffle.clone()); + let rhs = [one.clone(), b].map(|c| c * s_stable.clone()); + lhs.into_iter().zip(rhs).collect() + }); + + meta.create_gate(format!("gate_rlc.{id}"), |meta| { + let s_rlc = meta.query_selector(s_rlc); + let a = meta.query_advice(a, Rotation::cur()); + let b = meta.query_advice(b, Rotation::cur()); + let c = meta.query_advice(c, Rotation::cur()); + let d = meta.query_fixed(d, Rotation::cur()); + let e = meta.query_advice(e, Rotation::cur()); + let challenge = meta.query_challenge(challenge); + + vec![ + s_rlc.clone() * (a + challenge.clone() * b - e.clone()), + s_rlc * (c + challenge * d - e), + ] + }); + + MyCircuitConfig { + s_gate, + a, + b, + c, + d, + s_lookup, + s_ltable, + s_rlc, + e, + challenge, + s_shuffle, + s_stable, + s_instance, + instance, + } + } + + fn synthesize_unit( + &self, + config: &MyCircuitConfig, + layouter: &mut impl Layouter, + id: usize, + unit_id: usize, + ) -> Result<(usize, Vec>), ErrorFront> { + let challenge = layouter.get_challenge(config.challenge); + let (rows, instance_copy) = layouter.assign_region( + || format!("unit.{id}-{unit_id}"), + |mut region| { + // Column annotations + region.name_column(|| format!("a.{id}"), config.a); + region.name_column(|| format!("b.{id}"), config.b); + region.name_column(|| format!("c.{id}"), config.c); + region.name_column(|| format!("d.{id}"), config.d); + region.name_column(|| format!("e.{id}"), config.e); + region.name_column(|| format!("instance.{id}"), config.instance); + region.name_column(|| format!("s_lookup.{id}"), config.s_lookup); + region.name_column(|| format!("s_ltable.{id}"), config.s_ltable); + region.name_column(|| format!("s_shuffle.{id}"), config.s_shuffle); + region.name_column(|| format!("s_stable.{id}"), config.s_stable); + + let mut offset = 0; + let mut instance_copy = Vec::new(); + // First "a" value comes from instance + config.s_instance.enable(&mut region, offset).expect("todo"); + let res = region + .assign_advice_from_instance(|| "", config.instance, 0, config.a, offset) + .expect("todo"); + // Enable the gate on a few consecutive rows with rotations + 
let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 3, 4, 1]) + .expect("todo"); + instance_copy.push(res.clone()); + let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 6, 7, 1]) + .expect("todo"); + instance_copy.push(res.clone()); + let (res, _) = config + .assign_gate(&mut region, &mut offset, Some(res), [0, 8, 9, 1]) + .expect("todo"); + instance_copy.push(res.clone()); + let (res, _) = config + .assign_gate( + &mut region, + &mut offset, + Some(res), + [0, 0xffffffff, 0xdeadbeef, 1], + ) + .expect("todo"); + let _ = config + .assign_gate( + &mut region, + &mut offset, + Some(res), + [0, 0xabad1d3a, 0x12345678, 0x42424242], + ) + .expect("todo"); + offset += 1; + + // Enable the gate on non-consecutive rows with advice-advice copy constraints enabled + let (_, abcd1) = config + .assign_gate(&mut region, &mut offset, None, [5, 2, 1, 1]) + .expect("todo"); + offset += 1; + let (_, abcd2) = config + .assign_gate(&mut region, &mut offset, None, [2, 3, 1, 1]) + .expect("todo"); + offset += 1; + let (_, abcd3) = config + .assign_gate(&mut region, &mut offset, None, [4, 2, 1, 1]) + .expect("todo"); + offset += 1; + region + .constrain_equal(abcd1[1].cell(), abcd2[0].cell()) + .expect("todo"); + region + .constrain_equal(abcd2[0].cell(), abcd3[1].cell()) + .expect("todo"); + instance_copy.push(abcd1[1].clone()); + instance_copy.push(abcd2[0].clone()); + + // Enable the gate on non-consecutive rows with advice-fixed copy constraints enabled + let (_, abcd1) = config + .assign_gate(&mut region, &mut offset, None, [5, 9, 1, 9]) + .expect("todo"); + offset += 1; + let (_, abcd2) = config + .assign_gate(&mut region, &mut offset, None, [2, 9, 1, 1]) + .expect("todo"); + offset += 1; + let (_, abcd3) = config + .assign_gate(&mut region, &mut offset, None, [9, 2, 1, 1]) + .expect("todo"); + offset += 1; + region + .constrain_equal(abcd1[1].cell(), abcd1[3].cell()) + .expect("todo"); + region + .constrain_equal(abcd2[1].cell(), abcd1[3].cell()) + .expect("todo"); + region + .constrain_equal(abcd3[0].cell(), abcd1[3].cell()) + .expect("todo"); + + // Enable a dynamic lookup (powers of two) + let table: Vec<_> = (0u64..=10).map(|exp| (exp, 2u64.pow(exp as u32))).collect(); + let lookups = [(2, 4), (2, 4), (10, 1024), (0, 1), (2, 4)]; + for (table_row, lookup_row) in table + .iter() + .zip(lookups.iter().chain(std::iter::repeat(&(0, 1)))) + { + region + .assign_fixed(|| "", config.s_lookup, offset, || Value::known(F::ONE)) + .expect("todo"); + region + .assign_fixed(|| "", config.s_ltable, offset, || Value::known(F::ONE)) + .expect("todo"); + let lookup_row0 = Value::known(F::from(lookup_row.0)); + let lookup_row1 = Value::known(F::from(lookup_row.1)); + region + .assign_advice(|| "", config.a, offset, || lookup_row0) + .expect("todo"); + region + .assign_advice(|| "", config.b, offset, || lookup_row1) + .expect("todo"); + let table_row0 = Value::known(F::from(table_row.0)); + let table_row1 = Value::known(F::from(table_row.1)); + region + .assign_fixed(|| "", config.d, offset, || table_row0) + .expect("todo"); + region + .assign_advice(|| "", config.c, offset, || table_row1) + .expect("todo"); + offset += 1; + } + + // Enable RLC gate 3 times + for abcd in [[3, 5, 3, 5], [8, 9, 8, 9], [111, 222, 111, 222]] { + config.s_rlc.enable(&mut region, offset)?; + let (_, _) = config + .assign_gate(&mut region, &mut offset, None, abcd) + .expect("todo"); + let rlc = challenge.map(|ch| { + let [a, b, ..] 
= abcd; + F::from(a) + ch * F::from(b) + }); + region + .assign_advice(|| "", config.e, offset - 1, || rlc) + .expect("todo"); + offset += 1; + } + + // Enable a dynamic shuffle (sequence from 0 to 15) + let table: Vec<_> = (0u64..16).collect(); + let shuffle = [0u64, 2, 4, 6, 8, 10, 12, 14, 1, 3, 5, 7, 9, 11, 13, 15]; + assert_eq!(table.len(), shuffle.len()); + + for (table_row, shuffle_row) in table.iter().zip(shuffle.iter()) { + region + .assign_fixed(|| "", config.s_shuffle, offset, || Value::known(F::ONE)) + .expect("todo"); + region + .assign_fixed(|| "", config.s_stable, offset, || Value::known(F::ONE)) + .expect("todo"); + let shuffle_row0 = Value::known(F::from(*shuffle_row)); + region + .assign_advice(|| "", config.a, offset, || shuffle_row0) + .expect("todo"); + let table_row0 = Value::known(F::from(*table_row)); + region + .assign_advice(|| "", config.b, offset, || table_row0) + .expect("todo"); + offset += 1; + } + + Ok((offset, instance_copy)) + }, + )?; + + Ok((rows, instance_copy)) + } +} + +impl, const WIDTH_FACTOR: usize> Circuit for MyCircuit { + type Config = Vec; + type FloorPlanner = SimpleFloorPlanner; + #[cfg(feature = "circuit-params")] + type Params = (); + + fn without_witnesses(&self) -> Self { + self.clone() + } + + fn configure(meta: &mut ConstraintSystem) -> Vec { + assert!(WIDTH_FACTOR > 0); + (0..WIDTH_FACTOR) + .map(|id| Self::configure_single(meta, id)) + .collect() + } + + fn synthesize( + &self, + config: Vec, + mut layouter: impl Layouter, + ) -> Result<(), ErrorFront> { + // - 2 queries from first gate + // - 3 for permutation argument + // - 1 for multipoen + // - 1 for the last row of grand product poly to check that the product result is 1 + // - 1 for off-by-one errors + let unusable_rows = 2 + 3 + 1 + 1 + 1; + let max_rows = 2usize.pow(self.k) - unusable_rows; + for (id, config) in config.iter().enumerate() { + let mut total_rows = 0; + let mut unit_id = 0; + loop { + let (rows, instance_copy) = self + .synthesize_unit(config, &mut layouter, id, unit_id) + .expect("todo"); + if total_rows == 0 { + for (i, instance) in instance_copy.iter().enumerate() { + layouter.constrain_instance(instance.cell(), config.instance, 1 + i)?; + } + } + total_rows += rows; + if total_rows + rows > max_rows { + break; + } + unit_id += 1; + } + assert!(total_rows <= max_rows); + } + Ok(()) + } +} + +use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; +use halo2_proofs::poly::kzg::strategy::SingleStrategy; +use halo2curves::bn256::{Bn256, Fr, G1Affine}; + +#[test] +fn test_mycircuit_mock() { + let k = 6; + const WIDTH_FACTOR: usize = 2; + let circuit: MyCircuit = MyCircuit::new(k, 42); + let instances = circuit.instances(); + let prover = MockProver::run(k, &circuit, instances).unwrap(); + prover.assert_satisfied(); +} + +use std::time::Instant; + +const K: u32 = 6; +const WIDTH_FACTOR: usize = 1; + +#[test] +fn test_mycircuit_full_legacy() { + halo2_debug::test_result( + || { + use halo2_proofs::plonk::{ + create_proof, keygen_pk as keygen_pk_legacy, keygen_vk as keygen_vk_legacy, + }; + + let k = K; + let circuit: MyCircuit = MyCircuit::new(k, 42); + + // Setup + let mut rng = test_rng(); + let params = ParamsKZG::::setup(k, &mut rng); + let start = Instant::now(); + let vk = keygen_vk_legacy(¶ms, &circuit).expect("keygen_vk should not fail"); + let pk = + keygen_pk_legacy(¶ms, vk.clone(), &circuit).expect("keygen_pk should not fail"); + println!("Keygen: {:?}", 
start.elapsed()); + + // Proving + let instances = vec![circuit.instances()]; + + let start = Instant::now(); + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + create_proof::, ProverSHPLONK<'_, Bn256>, _, _, _, _>( + ¶ms, + &pk, + &[circuit], + instances.as_slice(), + &mut rng, + &mut transcript, + ) + .expect("proof generation should not fail"); + let proof = transcript.finalize(); + println!("Prove: {:?}", start.elapsed()); + + // Verify + let start = Instant::now(); + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let verifier_params = params.verifier_params(); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof::, VerifierSHPLONK, _, _, _>( + &verifier_params, + &vk, + strategy, + instances.as_slice(), + &mut verifier_transcript, + ) + .expect("verify succeeds"); + println!("Verify: {:?}", start.elapsed()); + + proof + }, + "062603b4c2ab114921ffbfa95c3365b1d0d1037512062f96db1f1e4311dc0109", + ); +} + +#[test] +fn test_mycircuit_full_split() { + halo2_debug::test_result( + || { + use halo2_middleware::zal::impls::{H2cEngine, PlonkEngineConfig}; + + let engine = PlonkEngineConfig::new() + .set_curve::() + .set_msm(H2cEngine::new()) + .build(); + let k = K; + let circuit: MyCircuit = MyCircuit::new(k, 42); + let (compiled_circuit, config, cs) = compile_circuit(k, &circuit, true).unwrap(); + + // Setup + let mut rng = test_rng(); + let params = ParamsKZG::::setup(k, &mut rng); + let start = Instant::now(); + let vk = keygen_vk(¶ms, &compiled_circuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk.clone(), &compiled_circuit) + .expect("keygen_pk should not fail"); + println!("Keygen: {:?}", start.elapsed()); + drop(compiled_circuit); + + let instances = circuit.instances(); + // Proving + println!("Proving..."); + let start = Instant::now(); + let mut witness_calc = WitnessCalculator::new(k, &circuit, &config, &cs, &instances); + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + let mut prover = ProverSingle::< + KZGCommitmentScheme, + ProverSHPLONK<'_, Bn256>, + _, + _, + _, + _, + >::new_with_engine( + engine, + ¶ms, + &pk, + instances.clone(), + &mut rng, + &mut transcript, + ) + .unwrap(); + let mut challenges = HashMap::new(); + for phase in 0..cs.phases().count() { + println!("phase {phase}"); + let witness = witness_calc.calc(phase as u8, &challenges).unwrap(); + challenges = prover.commit_phase(phase as u8, witness).unwrap(); + } + prover.create_proof().unwrap(); + let proof = transcript.finalize(); + println!("Prove: {:?}", start.elapsed()); + + // Verify + let start = Instant::now(); + println!("Verifying..."); + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let verifier_params = params.verifier_params(); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof_single::, VerifierSHPLONK, _, _, _>( + &verifier_params, + &vk, + strategy, + instances, + &mut verifier_transcript, + ) + .expect("verify succeeds"); + println!("Verify: {:?}", start.elapsed()); + + proof + }, + "78aadfd46b5cc58b90d832ee47e4df57af3dfc28d1457c4ceeb5d0323a72f130", + ); +} diff --git a/halo2_proofs/tests/plonk_api.rs b/halo2_proofs/tests/plonk_api.rs index 28ffb399ff..f7277ba88a 100644 --- a/halo2_proofs/tests/plonk_api.rs +++ b/halo2_proofs/tests/plonk_api.rs @@ -3,13 +3,18 @@ use assert_matches::assert_matches; use ff::{FromUniformBytes, WithSmallOrderMulGroup}; +use 
halo2_debug::test_rng; +use halo2_middleware::zal::{ + impls::{PlonkEngine, PlonkEngineConfig}, + traits::MsmAccel, +}; use halo2_proofs::arithmetic::Field; use halo2_proofs::circuit::{Cell, Layouter, SimpleFloorPlanner, Value}; use halo2_proofs::dev::MockProver; use halo2_proofs::plonk::{ - create_proof as create_plonk_proof, keygen_pk, keygen_vk, verify_proof as verify_plonk_proof, - Advice, Assigned, Circuit, Column, ConstraintSystem, Error, Fixed, ProvingKey, TableColumn, - VerifyingKey, + create_proof_with_engine as create_plonk_proof_with_engine, keygen_pk, keygen_vk, + verify_proof as verify_plonk_proof, Advice, Assigned, Circuit, Column, ConstraintSystem, Error, + ErrorFront, Fixed, ProvingKey, TableColumn, VerifyingKey, }; use halo2_proofs::poly::commitment::{CommitmentScheme, ParamsProver, Prover, Verifier}; use halo2_proofs::poly::Rotation; @@ -18,7 +23,7 @@ use halo2_proofs::transcript::{ Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptReadBuffer, TranscriptWriterBuffer, }; -use rand_core::{OsRng, RngCore}; +use rand_core::RngCore; use std::marker::PhantomData; #[test] @@ -27,6 +32,7 @@ fn plonk_api() { /// This represents an advice column at a certain row in the ConstraintSystem #[derive(Copy, Clone, Debug)] + #[allow(dead_code)] pub struct Variable(Column, usize); #[derive(Clone)] @@ -51,25 +57,34 @@ fn plonk_api() { &self, layouter: &mut impl Layouter, f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; fn raw_add( &self, layouter: &mut impl Layouter, f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>; - fn copy(&self, layouter: &mut impl Layouter, a: Cell, b: Cell) -> Result<(), Error>; - fn public_input(&self, layouter: &mut impl Layouter, f: F) -> Result + fn copy( + &self, + layouter: &mut impl Layouter, + a: Cell, + b: Cell, + ) -> Result<(), ErrorFront>; + fn public_input( + &self, + layouter: &mut impl Layouter, + f: F, + ) -> Result where F: FnMut() -> Value; fn lookup_table( &self, layouter: &mut impl Layouter, values: &[FF], - ) -> Result<(), Error>; + ) -> Result<(), ErrorFront>; } #[derive(Clone)] @@ -97,7 +112,7 @@ fn plonk_api() { &self, layouter: &mut impl Layouter, mut f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>, { @@ -151,7 +166,7 @@ fn plonk_api() { &self, layouter: &mut impl Layouter, mut f: F, - ) -> Result<(Cell, Cell, Cell), Error> + ) -> Result<(Cell, Cell, Cell), ErrorFront> where F: FnMut() -> Value<(Assigned, Assigned, Assigned)>, { @@ -211,7 +226,7 @@ fn plonk_api() { layouter: &mut impl Layouter, left: Cell, right: Cell, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { layouter.assign_region( || "copy", |mut region| { @@ -220,7 +235,11 @@ fn plonk_api() { }, ) } - fn public_input(&self, layouter: &mut impl Layouter, mut f: F) -> Result + fn public_input( + &self, + layouter: &mut impl Layouter, + mut f: F, + ) -> Result where F: FnMut() -> Value, { @@ -243,7 +262,7 @@ fn plonk_api() { &self, layouter: &mut impl Layouter, values: &[FF], - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { layouter.assign_table( || "", |mut table| { @@ -369,7 +388,7 @@ fn plonk_api() { &self, config: PlonkConfig, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let cs = 
StandardPlonk::new(config); let _ = cs.public_input(&mut layouter, || Value::known(F::ONE + F::ONE))?; @@ -421,9 +440,9 @@ fn plonk_api() { let much_too_small_params= <$scheme as CommitmentScheme>::ParamsProver::new(1); assert_matches!( keygen_vk(&much_too_small_params, &empty_circuit), - Err(Error::NotEnoughRowsAvailable { + Err(Error::Frontend(ErrorFront::NotEnoughRowsAvailable { current_k, - }) if current_k == 1 + })) if current_k == 1 ); // Check that we get an error if we try to initialize the proving key with a value of @@ -431,9 +450,9 @@ fn plonk_api() { let slightly_too_small_params = <$scheme as CommitmentScheme>::ParamsProver::new(K-1); assert_matches!( keygen_vk(&slightly_too_small_params, &empty_circuit), - Err(Error::NotEnoughRowsAvailable { + Err(Error::Frontend(ErrorFront::NotEnoughRowsAvailable { current_k, - }) if current_k == K - 1 + })) if current_k == K - 1 ); }}; } @@ -454,14 +473,16 @@ fn plonk_api() { keygen_pk(params, vk, &empty_circuit).expect("keygen_pk should not fail") } - fn create_proof< + fn create_proof_with_engine< 'params, Scheme: CommitmentScheme, P: Prover<'params, Scheme>, E: EncodedChallenge, R: RngCore, T: TranscriptWriterBuffer, Scheme::Curve, E>, + M: MsmAccel, >( + engine: PlonkEngine, rng: R, params: &'params Scheme::ParamsProver, pk: &ProvingKey, @@ -469,7 +490,7 @@ fn plonk_api() { where Scheme::Scalar: Ord + WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { - let (a, instance, lookup_table) = common!(Scheme); + let (a, instance_val, lookup_table) = common!(Scheme); let circuit: MyCircuit = MyCircuit { a: Value::known(a), @@ -478,18 +499,20 @@ fn plonk_api() { let mut transcript = T::init(vec![]); - create_plonk_proof::( + let instance = [vec![vec![instance_val]], vec![vec![instance_val]]]; + create_plonk_proof_with_engine::( + engine, params, pk, &[circuit.clone(), circuit.clone()], - &[&[&[instance]], &[&[instance]]], + &instance, rng, &mut transcript, ) .expect("proof generation should not fail"); // Check this circuit is satisfied. 
- let prover = match MockProver::run(K, &circuit, vec![vec![instance]]) { + let prover = match MockProver::run(K, &circuit, vec![vec![instance_val]]) { Ok(prover) => prover, Err(e) => panic!("{e:?}"), }; @@ -498,6 +521,25 @@ fn plonk_api() { transcript.finalize() } + fn create_proof< + 'params, + Scheme: CommitmentScheme, + P: Prover<'params, Scheme>, + E: EncodedChallenge, + R: RngCore, + T: TranscriptWriterBuffer, Scheme::Curve, E>, + >( + rng: R, + params: &'params Scheme::ParamsProver, + pk: &ProvingKey, + ) -> Vec + where + Scheme::Scalar: Ord + WithSmallOrderMulGroup<3> + FromUniformBytes<64>, + { + let engine = PlonkEngineConfig::build_default(); + create_proof_with_engine::(engine, rng, params, pk) + } + fn verify_proof< 'a, 'params, @@ -513,80 +555,91 @@ fn plonk_api() { ) where Scheme::Scalar: Ord + WithSmallOrderMulGroup<3> + FromUniformBytes<64>, { - let (_, instance, _) = common!(Scheme); - let pubinputs = [instance]; + let (_, instance_val, _) = common!(Scheme); let mut transcript = T::init(proof); + let instance = [vec![vec![instance_val]], vec![vec![instance_val]]]; let strategy = Strategy::new(params_verifier); - let strategy = verify_plonk_proof( - params_verifier, - vk, - strategy, - &[&[&pubinputs[..]], &[&pubinputs[..]]], - &mut transcript, - ) - .unwrap(); + let strategy = + verify_plonk_proof(params_verifier, vk, strategy, &instance, &mut transcript).unwrap(); assert!(strategy.finalize()); } fn test_plonk_api_gwc() { - use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; - use halo2_proofs::poly::kzg::multiopen::{ProverGWC, VerifierGWC}; - use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy; - use halo2curves::bn256::Bn256; + halo2_debug::test_result( + || { + use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; + use halo2_proofs::poly::kzg::multiopen::{ProverGWC, VerifierGWC}; + use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy; + use halo2curves::bn256::Bn256; - type Scheme = KZGCommitmentScheme; - bad_keys!(Scheme); + type Scheme = KZGCommitmentScheme; - let params = ParamsKZG::::new(K); - let rng = OsRng; + bad_keys!(Scheme); - let pk = keygen::>(¶ms); + let mut rng = test_rng(); - let proof = create_proof::<_, ProverGWC<_>, _, _, Blake2bWrite<_, _, Challenge255<_>>>( - rng, ¶ms, &pk, - ); + let params = ParamsKZG::::setup(K, &mut rng); + let pk = keygen::>(¶ms); - let verifier_params = params.verifier_params(); + let proof = + create_proof::<_, ProverGWC<_>, _, _, Blake2bWrite<_, _, Challenge255<_>>>( + &mut rng, ¶ms, &pk, + ); - verify_proof::< - _, - VerifierGWC<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, pk.get_vk(), &proof[..]); + let verifier_params = params.verifier_params(); + + verify_proof::< + _, + VerifierGWC<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(&verifier_params, pk.get_vk(), &proof[..]); + + proof + }, + "d9c2fa0fa42f7ce1f156647069cf20f41ef01f783640410098aef91bc47d7da4", + ); } fn test_plonk_api_shplonk() { - use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; - use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; - use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy; - use halo2curves::bn256::Bn256; + halo2_debug::test_result( + || { + use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; + use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; + use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy; + use 
halo2curves::bn256::Bn256; - type Scheme = KZGCommitmentScheme; - bad_keys!(Scheme); + type Scheme = KZGCommitmentScheme; + bad_keys!(Scheme); - let params = ParamsKZG::::new(K); - let rng = OsRng; + let mut rng = test_rng(); + let params = ParamsKZG::::setup(K, &mut rng); - let pk = keygen::>(¶ms); + let pk = keygen::>(¶ms); - let proof = create_proof::<_, ProverSHPLONK<_>, _, _, Blake2bWrite<_, _, Challenge255<_>>>( - rng, ¶ms, &pk, - ); + let proof = + create_proof::<_, ProverSHPLONK<_>, _, _, Blake2bWrite<_, _, Challenge255<_>>>( + rng, ¶ms, &pk, + ); - let verifier_params = params.verifier_params(); + let verifier_params = params.verifier_params(); - verify_proof::< - _, - VerifierSHPLONK<_>, - _, - Blake2bRead<_, _, Challenge255<_>>, - AccumulatorStrategy<_>, - >(verifier_params, pk.get_vk(), &proof[..]); + verify_proof::< + _, + VerifierSHPLONK<_>, + _, + Blake2bRead<_, _, Challenge255<_>>, + AccumulatorStrategy<_>, + >(&verifier_params, pk.get_vk(), &proof[..]); + + proof + }, + "0fc67d890faef0ef8ea7ef680cc566b2ab7dabef12fcceb74d3655a0fb08c708", + ); } fn test_plonk_api_ipa() { @@ -598,16 +651,16 @@ fn plonk_api() { type Scheme = IPACommitmentScheme; bad_keys!(Scheme); + let mut rng = test_rng(); let params = ParamsIPA::::new(K); - let rng = OsRng; let pk = keygen::>(¶ms); let proof = create_proof::<_, ProverIPA<_>, _, _, Blake2bWrite<_, _, Challenge255<_>>>( - rng, ¶ms, &pk, + &mut rng, ¶ms, &pk, ); - let verifier_params = params.verifier_params(); + let verifier_params = params; verify_proof::< _, @@ -615,14 +668,14 @@ fn plonk_api() { _, Blake2bRead<_, _, Challenge255<_>>, AccumulatorStrategy<_>, - >(verifier_params, pk.get_vk(), &proof[..]); + >(&verifier_params, pk.get_vk(), &proof[..]); // Check that the verification key has not changed unexpectedly { - //panic!("{:#?}", pk.get_vk().pinned()); + // panic!("{:#?}", pk.get_vk().pinned()); assert_eq!( format!("{:#?}", pk.get_vk().pinned()), - r#####"PinnedVerificationKey { + r#"PinnedVerificationKey { base_modulus: "0x40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001", scalar_modulus: "0x40000000000000000000000000000000224698fc094cf91b992d30ed00000001", domain: PinnedEvaluationDomain { @@ -634,203 +687,286 @@ fn plonk_api() { num_fixed_columns: 7, num_advice_columns: 5, num_instance_columns: 1, - num_selectors: 0, + num_challenges: 0, + advice_column_phase: [ + 0, + 0, + 0, + 0, + 0, + ], + challenge_phase: [], gates: [ Sum( Sum( Sum( Sum( Product( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 0, + column_index: 1, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, - Fixed { - query_index: 2, - column_index: 2, - rotation: Rotation( - 0, + ), + Var( + Query( + QueryBack { + index: 0, + column_index: 2, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), Product( - Advice { - query_index: 1, - column_index: 2, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 1, + column_index: 2, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, - Fixed { - query_index: 3, - column_index: 3, - rotation: Rotation( - 0, + ), + Var( + Query( + QueryBack { + index: 1, + column_index: 3, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), ), Product( Product( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 0, + column_index: 1, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, - Advice { - 
query_index: 1, - column_index: 2, - rotation: Rotation( - 0, + ), + Var( + Query( + QueryBack { + index: 1, + column_index: 2, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), - Fixed { - query_index: 5, - column_index: 1, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 2, + column_index: 1, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), ), Negated( Product( - Advice { - query_index: 2, - column_index: 3, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 2, + column_index: 3, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, - Fixed { - query_index: 4, - column_index: 4, - rotation: Rotation( - 0, + ), + Var( + Query( + QueryBack { + index: 3, + column_index: 4, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), ), ), Product( - Fixed { - query_index: 1, - column_index: 0, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 4, + column_index: 0, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), Product( - Advice { - query_index: 3, - column_index: 4, - rotation: Rotation( - 1, + Var( + Query( + QueryBack { + index: 3, + column_index: 4, + column_type: Advice, + rotation: Rotation( + 1, + ), + }, ), - }, - Advice { - query_index: 4, - column_index: 0, - rotation: Rotation( - -1, + ), + Var( + Query( + QueryBack { + index: 4, + column_index: 0, + column_type: Advice, + rotation: Rotation( + -1, + ), + }, ), - }, + ), ), ), ), Product( - Fixed { - query_index: 6, - column_index: 5, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 5, + column_index: 5, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), Sum( - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 0, + column_index: 1, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, + ), Negated( - Instance { - query_index: 0, - column_index: 0, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 0, + column_index: 0, + column_type: Instance, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ), ), ), ], advice_queries: [ ( - Column { - index: 1, + ColumnMid { column_type: Advice, + index: 1, }, Rotation( 0, ), ), ( - Column { - index: 2, + ColumnMid { column_type: Advice, + index: 2, }, Rotation( 0, ), ), ( - Column { - index: 3, + ColumnMid { column_type: Advice, + index: 3, }, Rotation( 0, ), ), ( - Column { - index: 4, + ColumnMid { column_type: Advice, + index: 4, }, Rotation( 1, ), ), ( - Column { - index: 0, + ColumnMid { column_type: Advice, + index: 0, }, Rotation( -1, ), ), ( - Column { - index: 0, + ColumnMid { column_type: Advice, + index: 0, }, Rotation( 0, ), ), ( - Column { - index: 4, + ColumnMid { column_type: Advice, + index: 4, }, Rotation( 0, @@ -839,9 +975,9 @@ fn plonk_api() { ], instance_queries: [ ( - Column { - index: 0, + ColumnMid { column_type: Instance, + index: 0, }, Rotation( 0, @@ -850,144 +986,155 @@ fn plonk_api() { ], fixed_queries: [ ( - Column { - index: 6, + ColumnMid { column_type: Fixed, + index: 2, }, Rotation( 0, ), ), ( - Column { - index: 0, + ColumnMid { column_type: Fixed, + index: 3, }, Rotation( 0, ), ), ( - Column { - index: 2, + ColumnMid { column_type: Fixed, + index: 1, }, Rotation( 0, ), ), ( - Column { - index: 3, + ColumnMid { column_type: Fixed, + index: 4, }, Rotation( 0, ), ), ( - Column { - index: 4, + ColumnMid { column_type: Fixed, + index: 0, }, Rotation( 0, ), ), ( - Column { - index: 1, + 
ColumnMid { column_type: Fixed, + index: 5, }, Rotation( 0, ), ), ( - Column { - index: 5, + ColumnMid { column_type: Fixed, + index: 6, }, Rotation( 0, ), ), ], - permutation: Argument { + permutation: ArgumentMid { columns: [ - Column { - index: 1, + ColumnMid { column_type: Advice, + index: 1, }, - Column { - index: 2, + ColumnMid { column_type: Advice, + index: 2, }, - Column { - index: 3, + ColumnMid { column_type: Advice, + index: 3, }, - Column { - index: 0, + ColumnMid { column_type: Fixed, - }, - Column { index: 0, + }, + ColumnMid { column_type: Advice, + index: 0, }, - Column { - index: 4, + ColumnMid { column_type: Advice, + index: 4, }, - Column { - index: 0, + ColumnMid { column_type: Instance, + index: 0, }, - Column { - index: 1, + ColumnMid { column_type: Fixed, + index: 1, }, - Column { - index: 2, + ColumnMid { column_type: Fixed, + index: 2, }, - Column { - index: 3, + ColumnMid { column_type: Fixed, + index: 3, }, - Column { - index: 4, + ColumnMid { column_type: Fixed, + index: 4, }, - Column { - index: 5, + ColumnMid { column_type: Fixed, + index: 5, }, ], }, lookups: [ Argument { + name: "lookup", input_expressions: [ - Advice { - query_index: 0, - column_index: 1, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 0, + column_index: 1, + column_type: Advice, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ], table_expressions: [ - Fixed { - query_index: 0, - column_index: 6, - rotation: Rotation( - 0, + Var( + Query( + QueryBack { + index: 6, + column_index: 6, + column_type: Fixed, + rotation: Rotation( + 0, + ), + }, ), - }, + ), ], }, ], - constants: [], + shuffles: [], minimum_degree: None, }, fixed_commitments: [ @@ -1015,7 +1162,7 @@ fn plonk_api() { (0x3d907e0591343bd285c2c846f3e871a6ac70d80ec29e9500b8cb57f544e60202, 0x1034e48df35830244cabea076be8a16d67d7896e27c6ac22b285d017105da9c3), ], }, -}"##### +}"# ); } } diff --git a/halo2_proofs/examples/serialization.rs b/halo2_proofs/tests/serialization.rs similarity index 55% rename from halo2_proofs/examples/serialization.rs rename to halo2_proofs/tests/serialization.rs index 39b6b1192f..9fc5997a52 100644 --- a/halo2_proofs/examples/serialization.rs +++ b/halo2_proofs/tests/serialization.rs @@ -4,11 +4,12 @@ use std::{ }; use ff::Field; +use halo2_debug::test_rng; use halo2_proofs::{ circuit::{Layouter, SimpleFloorPlanner, Value}, plonk::{ - create_proof, keygen_pk, keygen_vk, verify_proof, Advice, Circuit, Column, - ConstraintSystem, Error, Fixed, Instance, ProvingKey, + create_proof, keygen_pk, keygen_vk_custom, pk_read, verify_proof, Advice, Circuit, Column, + ConstraintSystem, ErrorFront, Fixed, Instance, }, poly::{ kzg::{ @@ -24,7 +25,6 @@ use halo2_proofs::{ SerdeFormat, }; use halo2curves::bn256::{Bn256, Fr, G1Affine}; -use rand_core::OsRng; #[derive(Clone, Copy)] struct StandardPlonkConfig { @@ -101,7 +101,7 @@ impl Circuit for StandardPlonk { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { layouter.assign_region( || "", |mut region| { @@ -128,65 +128,80 @@ impl Circuit for StandardPlonk { } } -fn main() { - let k = 4; - let circuit = StandardPlonk(Fr::random(OsRng)); - let params = ParamsKZG::::setup(k, OsRng); - let vk = keygen_vk(¶ms, &circuit).expect("vk should not fail"); - let pk = keygen_pk(¶ms, vk, &circuit).expect("pk should not fail"); - - let f = File::create("serialization-test.pk").unwrap(); - let mut writer = BufWriter::new(f); - pk.write(&mut writer, SerdeFormat::RawBytes).unwrap(); - 
writer.flush().unwrap(); - - let f = File::open("serialization-test.pk").unwrap(); - let mut reader = BufReader::new(f); - #[allow(clippy::unit_arg)] - let pk = ProvingKey::::read::<_, StandardPlonk>( - &mut reader, - SerdeFormat::RawBytes, - #[cfg(feature = "circuit-params")] - circuit.params(), - ) - .unwrap(); - - std::fs::remove_file("serialization-test.pk").unwrap(); - - let instances: &[&[Fr]] = &[&[circuit.0]]; - let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); - create_proof::< - KZGCommitmentScheme, - ProverGWC<'_, Bn256>, - Challenge255, - _, - Blake2bWrite, G1Affine, Challenge255<_>>, - _, - >( - ¶ms, - &pk, - &[circuit], - &[instances], - OsRng, - &mut transcript, - ) - .expect("prover should not fail"); - let proof = transcript.finalize(); - - let strategy = SingleStrategy::new(¶ms); - let mut transcript = Blake2bRead::<_, _, Challenge255<_>>::init(&proof[..]); - assert!(verify_proof::< - KZGCommitmentScheme, - VerifierGWC<'_, Bn256>, - Challenge255, - Blake2bRead<&[u8], G1Affine, Challenge255>, - SingleStrategy<'_, Bn256>, - >( - ¶ms, - pk.get_vk(), - strategy, - &[instances], - &mut transcript - ) - .is_ok()); +#[test] +fn test_serialization() { + halo2_debug::test_result( + || { + let k = 4; + + let mut rng = test_rng(); + + let circuit = StandardPlonk(Fr::random(&mut rng)); + let params = ParamsKZG::::setup(k, &mut rng); + let compress_selectors = true; + let vk = keygen_vk_custom(¶ms, &circuit, compress_selectors) + .expect("vk should not fail"); + let pk = keygen_pk(¶ms, vk, &circuit).expect("pk should not fail"); + + let f = File::create("serialization-test.pk").unwrap(); + let mut writer = BufWriter::new(f); + pk.write(&mut writer, SerdeFormat::RawBytes).unwrap(); + writer.flush().unwrap(); + + let f = File::open("serialization-test.pk").unwrap(); + let mut reader = BufReader::new(f); + #[allow(clippy::unit_arg)] + let pk = pk_read::( + &mut reader, + SerdeFormat::RawBytes, + k, + &circuit, + compress_selectors, + ) + .unwrap(); + + std::fs::remove_file("serialization-test.pk").unwrap(); + + let instances: Vec>> = vec![vec![vec![circuit.0]]]; + let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); + create_proof::< + KZGCommitmentScheme, + ProverGWC<'_, Bn256>, + Challenge255, + _, + Blake2bWrite, G1Affine, Challenge255<_>>, + _, + >( + ¶ms, + &pk, + &[circuit], + instances.as_slice(), + test_rng(), + &mut transcript, + ) + .expect("prover should not fail"); + let proof = transcript.finalize(); + + let verifier_params = params.verifier_params(); + let strategy = SingleStrategy::new(&verifier_params); + let mut transcript = Blake2bRead::<_, _, Challenge255<_>>::init(&proof[..]); + assert!(verify_proof::< + KZGCommitmentScheme, + VerifierGWC, + Challenge255, + Blake2bRead<&[u8], G1Affine, Challenge255>, + SingleStrategy, + >( + &verifier_params, + pk.get_vk(), + strategy, + instances.as_slice(), + &mut transcript + ) + .is_ok()); + + proof + }, + "e0f8ba887b3d617a982f27b9b89780f6421285c396258f65cc47eec3a910295f", + ); } diff --git a/halo2_proofs/examples/shuffle.rs b/halo2_proofs/tests/shuffle.rs similarity index 93% rename from halo2_proofs/examples/shuffle.rs rename to halo2_proofs/tests/shuffle.rs index 35a85cb9f0..b5f114e31f 100644 --- a/halo2_proofs/examples/shuffle.rs +++ b/halo2_proofs/tests/shuffle.rs @@ -1,4 +1,5 @@ use ff::{BatchInvert, FromUniformBytes}; +use halo2_debug::test_rng; use halo2_proofs::{ arithmetic::{CurveAffine, Field}, circuit::{floor_planner::V1, Layouter, Value}, @@ -18,7 +19,7 @@ use 
halo2_proofs::{ Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, }, }; -use rand_core::{OsRng, RngCore}; +use rand_core::RngCore; use std::iter; fn rand_2d_array(rng: &mut R) -> [[F; H]; W] { @@ -145,7 +146,7 @@ impl Circuit for MyCircuit &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let theta = layouter.get_challenge(config.theta); let gamma = layouter.get_challenge(config.gamma); @@ -273,9 +274,12 @@ fn test_prover( k: u32, circuit: MyCircuit, expected: bool, -) where +) -> Vec +where C::Scalar: FromUniformBytes<64>, { + let rng = test_rng(); + let params = ParamsIPA::::new(k); let vk = keygen_vk(¶ms, &circuit).unwrap(); let pk = keygen_pk(¶ms, vk, &circuit).unwrap(); @@ -287,8 +291,8 @@ fn test_prover( ¶ms, &pk, &[circuit], - &[&[]], - OsRng, + &[vec![]], + rng, &mut transcript, ) .expect("proof generation should not fail"); @@ -304,7 +308,7 @@ fn test_prover( ¶ms, pk.get_vk(), strategy, - &[&[]], + &[vec![]], &mut transcript, ) .map(|strategy| strategy.finalize()) @@ -312,19 +316,24 @@ fn test_prover( }; assert_eq!(accepted, expected); + + proof } -fn main() { +#[test] +fn test_shuffle() { const W: usize = 4; const H: usize = 32; const K: u32 = 8; - let circuit = &MyCircuit::<_, W, H>::rand(&mut OsRng); + let circuit = &MyCircuit::<_, W, H>::rand(&mut test_rng()); - { - test_mock_prover(K, circuit.clone(), Ok(())); - test_prover::(K, circuit.clone(), true); - } + test_mock_prover(K, circuit.clone(), Ok(())); + + halo2_debug::test_result( + || test_prover::(K, circuit.clone(), true), + "f0b00695b05569895ca28526be6211f58cb5d3375b7143e905edc75397d4fa3c", + ); #[cfg(not(feature = "sanity-checks"))] { @@ -347,6 +356,9 @@ fn main() { }, )]), ); - test_prover::(K, circuit, false); + halo2_debug::test_result( + || test_prover::(K, circuit.clone(), false), + "27ad558ee60a6675911b87a0df5de49d7c9b5673d723bb05a9811aa33bf486d1", + ); } } diff --git a/halo2_proofs/examples/shuffle_api.rs b/halo2_proofs/tests/shuffle_api.rs similarity index 80% rename from halo2_proofs/examples/shuffle_api.rs rename to halo2_proofs/tests/shuffle_api.rs index 259e038d06..7dc8d73bc5 100644 --- a/halo2_proofs/examples/shuffle_api.rs +++ b/halo2_proofs/tests/shuffle_api.rs @@ -1,16 +1,17 @@ use std::{marker::PhantomData, vec}; use ff::FromUniformBytes; +use halo2_debug::test_rng; +use halo2_proofs::poly::commitment::ParamsProver; use halo2_proofs::{ arithmetic::Field, circuit::{Layouter, SimpleFloorPlanner, Value}, plonk::{ create_proof, keygen_pk, keygen_vk, verify_proof, Advice, Circuit, Column, - ConstraintSystem, Error, Fixed, Selector, + ConstraintSystem, ErrorFront, Fixed, Selector, }, poly::Rotation, poly::{ - commitment::ParamsProver, ipa::{ commitment::{IPACommitmentScheme, ParamsIPA}, multiopen::{ProverIPA, VerifierIPA}, @@ -23,7 +24,6 @@ use halo2_proofs::{ }, }; use halo2curves::{pasta::EqAffine, CurveAffine}; -use rand_core::OsRng; struct ShuffleChip { config: ShuffleConfig, @@ -111,7 +111,7 @@ impl Circuit for MyCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let ch = ShuffleChip::::construct(config); layouter.assign_region( || "load inputs", @@ -148,10 +148,12 @@ impl Circuit for MyCircuit { } } -fn test_prover(k: u32, circuit: MyCircuit, expected: bool) +fn test_prover(k: u32, circuit: MyCircuit, expected: bool) -> Vec where C::Scalar: FromUniformBytes<64>, { + let rng = test_rng(); + let params = ParamsIPA::::new(k); 
let vk = keygen_vk(¶ms, &circuit).unwrap(); let pk = keygen_pk(¶ms, vk, &circuit).unwrap(); @@ -163,8 +165,8 @@ where ¶ms, &pk, &[circuit], - &[&[]], - OsRng, + &[vec![]], + rng, &mut transcript, ) .expect("proof generation should not fail"); @@ -180,7 +182,7 @@ where ¶ms, pk.get_vk(), strategy, - &[&[]], + &[vec![]], &mut transcript, ) .map(|strategy| strategy.finalize()) @@ -188,29 +190,37 @@ where }; assert_eq!(accepted, expected); + + proof } -fn main() { - use halo2_proofs::dev::MockProver; - use halo2curves::pasta::Fp; - const K: u32 = 4; - let input_0 = [1, 2, 4, 1] - .map(|e: u64| Value::known(Fp::from(e))) - .to_vec(); - let input_1 = [10, 20, 40, 10].map(Fp::from).to_vec(); - let shuffle_0 = [4, 1, 1, 2] - .map(|e: u64| Value::known(Fp::from(e))) - .to_vec(); - let shuffle_1 = [40, 10, 10, 20] - .map(|e: u64| Value::known(Fp::from(e))) - .to_vec(); - let circuit = MyCircuit { - input_0, - input_1, - shuffle_0, - shuffle_1, - }; - let prover = MockProver::run(K, &circuit, vec![]).unwrap(); - prover.assert_satisfied(); - test_prover::(K, circuit, true); +#[test] +fn test_shuffle_api() { + halo2_debug::test_result( + || { + use halo2_proofs::dev::MockProver; + use halo2curves::pasta::Fp; + const K: u32 = 4; + let input_0 = [1, 2, 4, 1] + .map(|e: u64| Value::known(Fp::from(e))) + .to_vec(); + let input_1 = [10, 20, 40, 10].map(Fp::from).to_vec(); + let shuffle_0 = [4, 1, 1, 2] + .map(|e: u64| Value::known(Fp::from(e))) + .to_vec(); + let shuffle_1 = [40, 10, 10, 20] + .map(|e: u64| Value::known(Fp::from(e))) + .to_vec(); + let circuit = MyCircuit { + input_0, + input_1, + shuffle_0, + shuffle_1, + }; + let prover = MockProver::run(K, &circuit, vec![]).unwrap(); + prover.assert_satisfied(); + test_prover::(K, circuit, true) + }, + "54f4fec1776178aadf8816754d7877f1de685e0ffb5b6af4db20f557d87550d6", + ); } diff --git a/halo2_proofs/examples/vector-ops-unblinded.rs b/halo2_proofs/tests/vector-ops-unblinded.rs similarity index 92% rename from halo2_proofs/examples/vector-ops-unblinded.rs rename to halo2_proofs/tests/vector-ops-unblinded.rs index 7e9ebd1d81..6a3f54a4a9 100644 --- a/halo2_proofs/examples/vector-ops-unblinded.rs +++ b/halo2_proofs/tests/vector-ops-unblinded.rs @@ -4,6 +4,7 @@ use std::marker::PhantomData; use ff::FromUniformBytes; +use halo2_debug::test_rng; use halo2_proofs::{ arithmetic::{CurveAffine, Field}, circuit::{AssignedCell, Chip, Layouter, Region, SimpleFloorPlanner, Value}, @@ -21,7 +22,6 @@ use halo2_proofs::{ Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, }, }; -use rand_core::OsRng; // ANCHOR: instructions trait NumericInstructions: Chip { @@ -33,7 +33,7 @@ trait NumericInstructions: Chip { &self, layouter: impl Layouter, a: &[Value], - ) -> Result, Error>; + ) -> Result, ErrorFront>; /// Returns `c = a * b`. The caller is responsible for ensuring that `a.len() == b.len()`. fn mul( @@ -41,7 +41,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error>; + ) -> Result, ErrorFront>; /// Returns `c = a + b`. The caller is responsible for ensuring that `a.len() == b.len()`. fn add( @@ -49,7 +49,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error>; + ) -> Result, ErrorFront>; /// Exposes a number as a public input to the circuit. 
fn expose_public( @@ -57,7 +57,7 @@ trait NumericInstructions: Chip { layouter: impl Layouter, num: &Self::Num, row: usize, - ) -> Result<(), Error>; + ) -> Result<(), ErrorFront>; } // ANCHOR_END: instructions @@ -204,7 +204,7 @@ impl NumericInstructions for MultChip { &self, mut layouter: impl Layouter, values: &[Value], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); layouter.assign_region( @@ -228,7 +228,7 @@ impl NumericInstructions for MultChip { _: impl Layouter, _: &[Self::Num], _: &[Self::Num], - ) -> Result, Error> { + ) -> Result, ErrorFront> { panic!("Not implemented") } @@ -237,7 +237,7 @@ impl NumericInstructions for MultChip { mut layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); assert_eq!(a.len(), b.len()); @@ -271,7 +271,7 @@ impl NumericInstructions for MultChip { mut layouter: impl Layouter, num: &Self::Num, row: usize, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let config = self.config(); layouter.constrain_instance(num.0.cell(), config.instance, row) @@ -286,7 +286,7 @@ impl NumericInstructions for AddChip { &self, mut layouter: impl Layouter, values: &[Value], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); layouter.assign_region( @@ -310,7 +310,7 @@ impl NumericInstructions for AddChip { _: impl Layouter, _: &[Self::Num], _: &[Self::Num], - ) -> Result, Error> { + ) -> Result, ErrorFront> { panic!("Not implemented") } @@ -319,7 +319,7 @@ impl NumericInstructions for AddChip { mut layouter: impl Layouter, a: &[Self::Num], b: &[Self::Num], - ) -> Result, Error> { + ) -> Result, ErrorFront> { let config = self.config(); assert_eq!(a.len(), b.len()); @@ -353,14 +353,14 @@ impl NumericInstructions for AddChip { mut layouter: impl Layouter, num: &Self::Num, row: usize, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let config = self.config(); layouter.constrain_instance(num.0.cell(), config.instance, row) } } -#[derive(Default)] +#[derive(Default, Clone)] struct MulCircuit { a: Vec>, b: Vec>, @@ -395,7 +395,7 @@ impl Circuit for MulCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let field_chip = MultChip::::construct(config); // Load our unblinded values into the circuit. @@ -413,7 +413,7 @@ impl Circuit for MulCircuit { } // ANCHOR_END: circuit -#[derive(Default)] +#[derive(Default, Clone)] struct AddCircuit { a: Vec>, b: Vec>, @@ -448,7 +448,7 @@ impl Circuit for AddCircuit { &self, config: Self::Config, mut layouter: impl Layouter, - ) -> Result<(), Error> { + ) -> Result<(), ErrorFront> { let field_chip = AddChip::::construct(config); // Load our unblinded values into the circuit. 
@@ -475,10 +475,13 @@ fn test_prover( where C::Scalar: FromUniformBytes<64>, { + let rng = test_rng(); + let params = ParamsIPA::::new(k); let vk = keygen_vk(¶ms, &circuit).unwrap(); let pk = keygen_pk(¶ms, vk, &circuit).unwrap(); + let instances = vec![vec![instances]]; let proof = { let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]); @@ -486,8 +489,8 @@ where ¶ms, &pk, &[circuit], - &[&[&instances]], - OsRng, + &instances, + rng, &mut transcript, ) .expect("proof generation should not fail"); @@ -503,7 +506,7 @@ where ¶ms, pk.get_vk(), strategy, - &[&[&instances]], + &instances, &mut transcript, ) .map(|strategy| strategy.finalize()) @@ -515,7 +518,8 @@ where proof } -fn main() { +#[test] +fn test_vector_ops_unbinded() { use halo2curves::pasta::Fp; const N: usize = 10; @@ -543,9 +547,16 @@ fn main() { }; // the commitments will be the first columns of the proof transcript so we can compare them easily - let proof_1 = test_prover::(k, mul_circuit, true, c_mul); + let proof_1 = halo2_debug::test_result( + || test_prover::(k, mul_circuit.clone(), true, c_mul.clone()), + "1f726eaddd926057e6c2aa8a364d1b4192da27f53c38c9f21d8924ef3eb0f0ab", + ); + // the commitments will be the first columns of the proof transcript so we can compare them easily - let proof_2 = test_prover::(k, add_circuit, true, c_add); + let proof_2 = halo2_debug::test_result( + || test_prover::(k, add_circuit.clone(), true, c_add.clone()), + "a42eb2f3e4761e6588bfd8db7e7035ead1cc1331017b6b09a7b75ddfbefefc58", + ); // the commitments will be the first columns of the proof transcript so we can compare them easily // here we compare the first 10 bytes of the commitments diff --git a/p3_frontend/Cargo.toml b/p3_frontend/Cargo.toml new file mode 100644 index 0000000000..34dceddde5 --- /dev/null +++ b/p3_frontend/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "p3_frontend" +version = "0.4.0" +authors = [ + "Privacy Scaling Explorations team", +] +edition = "2021" +description = """ +Plonky3 frontend implementation. Allows using a circuit defined with the Air trait from plonky3 to be proved with a halo2 backend. 
+""" +license = "MIT OR Apache-2.0" +categories = ["cryptography"] +keywords = ["halo", "proofs", "zkp", "zkSNARKs", "plonky3"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"] + +[dependencies] +p3-air = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-util = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-matrix = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-field = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-uni-stark = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +halo2_middleware = { path = "../halo2_middleware" } +halo2_debug = { path = "../halo2_debug" } +serde = { version = "1.0", default-features = false, features = ["derive", "alloc"] } +num-bigint = { version = "0.4.3", default-features = false } + +[dev-dependencies] +halo2curves = { version = "0.7.0", default-features = false } +rand_core = { version = "0.6", default-features = false, features = ["getrandom"] } +halo2_backend = { path = "../halo2_backend" } +serde_test = { version = "1.0" } +p3-keccak-air = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-keccak = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +p3-util = { git = "https://github.com/Plonky3/Plonky3", rev = "7b5b8a6" } +rand = "0.8.5" diff --git a/p3_frontend/src/air.rs b/p3_frontend/src/air.rs new file mode 100644 index 0000000000..171d3e9efd --- /dev/null +++ b/p3_frontend/src/air.rs @@ -0,0 +1,7 @@ +//! Alternative `AirBuilderWithPublicValues` trait that uses `Self::Var` instead of `Self::F`. + +use p3_air::AirBuilder; + +pub trait AirBuilderWithPublicValues: AirBuilder { + fn public_values(&self) -> &[Self::Var]; +} diff --git a/p3_frontend/src/fwrap.rs b/p3_frontend/src/fwrap.rs new file mode 100644 index 0000000000..4106027da3 --- /dev/null +++ b/p3_frontend/src/fwrap.rs @@ -0,0 +1,362 @@ +//! `FWrap` is a Wrapper type over `ff::Field` (halo2 compatible field type) that satisfies the +//! plonky3 field traits. 
+ +use halo2_middleware::ff::{Field, PrimeField}; +use num_bigint::BigUint; +use p3_field::{ + AbstractField, Field as p3Field, Packable, PrimeField as p3PrimeField, PrimeField64, +}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use std::fmt; +use std::hash::Hash; +use std::iter::{Product, Sum}; +use std::marker::PhantomData; +use std::ops::{Add, AddAssign, Div, Mul, MulAssign, Neg, Sub, SubAssign}; + +#[derive(Default, Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct FWrap(pub F); + +unsafe impl Send for FWrap {} +unsafe impl Sync for FWrap {} + +impl Serialize for FWrap { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_bytes(self.0.to_repr().as_ref()) + } +} + +struct FWrapVisitor(PhantomData); + +impl<'de, F: PrimeField> de::Visitor<'de> for FWrapVisitor { + type Value = FWrap; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a field repr as bytes") + } + + fn visit_bytes(self, value: &[u8]) -> Result + where + E: de::Error, + { + let mut repr = F::Repr::default(); + repr.as_mut().copy_from_slice(value); + let v: Option = F::from_repr(repr).into(); + v.map(|v| FWrap(v)) + .ok_or(E::custom("invalid field repr: {:value?}")) + } +} + +impl<'de, F: PrimeField> Deserialize<'de> for FWrap { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_bytes(FWrapVisitor::(PhantomData {})) + } +} + +impl fmt::Display for FWrap { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl AbstractField for FWrap { + type F = Self; + + fn zero() -> Self { + Self(F::ZERO) + } + fn one() -> Self { + Self(F::ONE) + } + fn two() -> Self { + Self(F::from(2u64)) + } + fn neg_one() -> Self { + Self(F::ZERO - F::ONE) + } + + #[inline] + fn from_f(f: Self::F) -> Self { + f + } + + fn from_bool(b: bool) -> Self { + Self(F::from(u64::from(b))) + } + + fn from_canonical_u8(n: u8) -> Self { + Self(F::from(u64::from(n))) + } + + fn from_canonical_u16(n: u16) -> Self { + Self(F::from(u64::from(n))) + } + + fn from_canonical_u32(n: u32) -> Self { + Self(F::from(u64::from(n))) + } + + fn from_canonical_u64(n: u64) -> Self { + Self(F::from(n)) + } + + fn from_canonical_usize(n: usize) -> Self { + Self(F::from(n as u64)) + } + + fn from_wrapped_u32(n: u32) -> Self { + // A u32 must be canonical, plus we don't store canonical encodings anyway, so there's no + // need for a reduction. + Self(F::from(u64::from(n))) + } + + fn from_wrapped_u64(n: u64) -> Self { + // There's no need to reduce `n` to canonical form, as our internal encoding is + // non-canonical, so there's no need for a reduction. 
+ Self(F::from(n)) + } + + fn generator() -> Self { + Self(F::MULTIPLICATIVE_GENERATOR) + } +} + +impl Add for FWrap { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + Self(self.0.add(rhs.0)) + } +} + +impl AddAssign for FWrap { + fn add_assign(&mut self, rhs: Self) { + self.0.add_assign(rhs.0) + } +} + +impl Sum for FWrap { + fn sum>(iter: I) -> Self { + FWrap(iter.map(|x| x.0).sum::()) + } +} + +impl Sub for FWrap { + type Output = Self; + + fn sub(self, rhs: Self) -> Self { + Self(self.0.sub(rhs.0)) + } +} + +impl SubAssign for FWrap { + fn sub_assign(&mut self, rhs: Self) { + self.0.sub_assign(rhs.0) + } +} + +impl Neg for FWrap { + type Output = Self; + + fn neg(self) -> Self::Output { + Self(self.0.neg()) + } +} + +impl Mul for FWrap { + type Output = Self; + + fn mul(self, rhs: Self) -> Self { + Self(self.0.mul(rhs.0)) + } +} + +impl MulAssign for FWrap { + fn mul_assign(&mut self, rhs: Self) { + self.0.mul_assign(rhs.0) + } +} + +impl Product for FWrap { + fn product>(iter: I) -> Self { + iter.reduce(|x, y| x * y).unwrap_or(Self(F::ONE)) + } +} + +impl Div for FWrap { + type Output = Self; + + fn div(self, rhs: Self) -> Self { + let rhs_inv = rhs.0.invert().expect("division by 0"); + #[allow(clippy::suspicious_arithmetic_impl)] + Self(self.0 * rhs_inv) + } +} + +impl Packable for FWrap {} +impl p3Field for FWrap { + type Packing = Self; + + fn is_zero(&self) -> bool { + self.0.is_zero().into() + } + + fn try_inverse(&self) -> Option { + let inverse: Option = self.0.invert().into(); + inverse.map(|u| FWrap(u)) + } + + #[allow(clippy::let_and_return)] + fn order() -> BigUint { + let minus_one = F::ZERO - F::ONE; + let minus_one_repr = minus_one.to_repr(); + let le = F::ONE.to_repr().as_ref()[0] == 1; + let mut minus_one = if le { + BigUint::from_bytes_le(minus_one_repr.as_ref()) + } else { + BigUint::from_bytes_be(minus_one_repr.as_ref()) + }; + minus_one += 1u64; + let p = minus_one; + p + } +} + +impl p3PrimeField for FWrap { + fn as_canonical_biguint(&self) -> BigUint { + let le = F::ONE.to_repr().as_ref()[0] == 1; + if le { + BigUint::from_bytes_le(self.0.to_repr().as_ref()) + } else { + BigUint::from_bytes_be(self.0.to_repr().as_ref()) + } + } +} + +// HACK: In general an `FWrap` will need more than 64 bits. This trait is only implemented in +// order to use `FWrap` with witness generation from plonky3 that requries this trait but doesn't +// use the order. Do not use an `ff::PrimeField` on a circuit that requires a 64 bit prime field +// (i.e. relies on the `ORDER_U64` value), only use it on circuits that always assign less than 64 +// bit values on the field elements. 
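+// For example (hedged illustration): `FWrap(Fr::from(5)).as_canonical_u64()` returns `5`,
+// calling `as_canonical_u64` on a value wider than 64 bits panics with "field fits in
+// u64", and `ORDER_U64` below is a dummy `0` rather than the real modulus.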
+impl PrimeField64 for FWrap { + const ORDER_U64: u64 = 0; + + fn as_canonical_u64(&self) -> u64 { + self.as_canonical_biguint() + .try_into() + .expect("field fits in u64") + } +} + +#[allow(clippy::bool_assert_comparison)] +#[cfg(test)] +mod test { + use super::*; + use halo2curves::bn256::Fr; + use serde_test::{assert_tokens, Token}; + + type F = FWrap; + + #[test] + fn test_fwrap() { + // Serialize & Deserialize + + let v = F::two(); + assert_tokens( + &v, + &[Token::Bytes(&[ + 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, + ])], + ); + + // AbstractField + + assert_eq!(F::zero().as_canonical_u64(), 0); + assert_eq!(F::one().as_canonical_u64(), 1); + assert_eq!(F::two().as_canonical_u64(), 2); + assert_eq!( + format!("{}", F::neg_one()), + "0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000" + ); + assert_eq!(F::from_f(F::two()).as_canonical_u64(), 2); + assert_eq!(F::from_bool(true).as_canonical_u64(), 1); + assert_eq!(F::from_canonical_u8(0x12).as_canonical_u64(), 0x12); + assert_eq!(F::from_canonical_u16(0x1234).as_canonical_u64(), 0x1234); + assert_eq!(F::from_canonical_u32(0x123456).as_canonical_u64(), 0x123456); + assert_eq!( + F::from_canonical_u64(0xfffffff12).as_canonical_u64(), + 0xfffffff12 + ); + assert_eq!( + F::from_canonical_usize(0xfffffff12).as_canonical_u64(), + 0xfffffff12 + ); + assert_eq!(F::from_wrapped_u32(0x123456).as_canonical_u64(), 0x123456); + assert_eq!( + F::from_wrapped_u64(0xfffffff12).as_canonical_u64(), + 0xfffffff12 + ); + assert_eq!( + format!("{}", F::generator()), + "0x0000000000000000000000000000000000000000000000000000000000000007" + ); + + // Arithmetic operators + + assert_eq!(F::one() + F::one(), F::two()); + let mut v = F::one(); + v += F::one(); + assert_eq!(v, F::two()); + assert_eq!([F::one(), F::one()].into_iter().sum::(), F::two()); + + assert_eq!(F::two() - F::one(), F::one()); + let mut v = F::two(); + v -= F::one(); + assert_eq!(v, F::one()); + + assert_eq!(-F::one(), F::neg_one()); + + assert_eq!(F::two() + F::two(), F::from_canonical_u64(4)); + let mut v = F::two(); + v *= F::two(); + assert_eq!(v, F::from_canonical_u64(4)); + assert_eq!( + [F::two(), F::two()].into_iter().product::(), + F::from_canonical_u64(4) + ); + + assert_eq!( + F::from_canonical_u64(10) / F::from_canonical_u64(5), + F::from_canonical_u64(2) + ); + + // p3Field + assert_eq!(F::zero().is_zero(), true); + assert_eq!(F::one().is_zero(), false); + assert_eq!( + format!("{}", F::two().try_inverse().unwrap()), + "0x183227397098d014dc2822db40c0ac2e9419f4243cdcb848a1f0fac9f8000001" + ); + assert_eq!( + format!("{}", F::order()), + "21888242871839275222246405745257275088548364400416034343698204186575808495617" + ); + + // p3PrimeField + + assert_eq!( + format!("{}", F::from_canonical_u64(1234).as_canonical_biguint()), + "1234" + ); + + // PrimeField64 + + assert_eq!(F::from_canonical_u64(1234).as_canonical_u64(), 1234); + } +} diff --git a/p3_frontend/src/lib.rs b/p3_frontend/src/lib.rs new file mode 100644 index 0000000000..5ba49e25c7 --- /dev/null +++ b/p3_frontend/src/lib.rs @@ -0,0 +1,357 @@ +//! Conversion from a circuit that implements the `Air` trait into a halo2_backend compatible +//! circuit. Includes helper functions to convert plonky3 witness format into halo2_backend +//! witness format. 
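+//!
+//! Hedged sketch of the intended flow using the helpers defined below; `air`, `k`, `size`,
+//! `trace` and `num_public_values` are placeholders for a concrete plonky3 AIR and its
+//! trace, and `Fr` stands for any `PrimeField + Hash` field (e.g. `halo2curves::bn256::Fr`),
+//! as exercised by the crate tests:
+//!
+//! ```ignore
+//! let params = CompileParams::default();
+//! let (cs, info) = compile_circuit_cs::<Fr, _>(&air, &params, num_public_values);
+//! let preprocessing = compile_preprocessing::<Fr, _>(k, size, &info, &air);
+//! let circuit = CompiledCircuit { cs, preprocessing };
+//! let witness = trace_to_wit(k, trace);
+//! let pis = get_public_inputs(&info, size, &witness);
+//! check_witness(&circuit, k, &witness, &pis);
+//! // `circuit`, `witness` and `pis` can then be passed to halo2_backend keygen/prover/verifier.
+//! ```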
+ +extern crate alloc; + +use halo2_middleware::circuit::{ + Any, Cell, ColumnMid, CompiledCircuit, ConstraintSystemMid, ExpressionMid, GateMid, + Preprocessing, QueryMid, VarMid, +}; +use halo2_middleware::ff::{Field, PrimeField}; +use halo2_middleware::permutation; +use halo2_middleware::poly::Rotation; +use p3_air::Air; +use p3_matrix::dense::RowMajorMatrix; +use std::collections::HashMap; +use std::hash::Hash; + +mod air; +mod fwrap; +mod symbolic_builder; +mod symbolic_expression; +mod symbolic_variable; + +pub use air::*; +pub use fwrap::*; +pub use symbolic_builder::*; +pub use symbolic_expression::*; +pub use symbolic_variable::*; + +fn fixed_query_r0(index: usize) -> ExpressionMid { + ExpressionMid::Var(VarMid::Query(QueryMid { + column_index: index, + column_type: Any::Fixed, + rotation: Rotation(0), + })) +} + +const LOCATION_COLUMNS: usize = 3; // First, Last, Transition +const COL_FIRST: usize = 0; +const COL_LAST: usize = 1; +const COL_TRANS: usize = 2; + +// If the gate is enabled everywhere, transform it to only be enabled in usable rows so that it +// gets disabled in poisoned rows. +fn disable_in_unusable_rows( + e: &SymbolicExpression>, +) -> SymbolicExpression> { + use SymbolicExpression as SE; + if let SE::Mul(lhs, _) = e { + if let SE::Location(_) = &**lhs { + return e.clone(); + } + } + let usable_location = SE::Location(Location::Transition) + SE::Location(Location::LastRow); + usable_location * e.clone() +} + +fn sym_to_expr(e: &SymbolicExpression>) -> ExpressionMid { + use SymbolicExpression as SE; + match e { + SE::Variable(SymbolicVariable(Var::Query(query), _)) => { + ExpressionMid::Var(VarMid::Query(QueryMid { + column_index: query.column, + column_type: Any::Advice, + rotation: if query.is_next { + Rotation(1) + } else { + Rotation(0) + }, + })) + } + SE::Variable(SymbolicVariable(Var::Public(public), _)) => { + panic!("unexpected public variable {:?} in expression", public) + } + SE::Location(Location::FirstRow) => fixed_query_r0(COL_FIRST), + SE::Location(Location::LastRow) => fixed_query_r0(COL_LAST), + SE::Location(Location::Transition) => fixed_query_r0(COL_TRANS), + SE::Constant(c) => ExpressionMid::Constant(c.0), + SE::Add(lhs, rhs) => sym_to_expr(lhs) + sym_to_expr(rhs), + SE::Sub(lhs, rhs) => sym_to_expr(lhs) - sym_to_expr(rhs), + SE::Neg(e) => -sym_to_expr(e), + SE::Mul(lhs, rhs) => sym_to_expr(lhs) * sym_to_expr(rhs), + } +} + +pub fn compile_preprocessing( + k: u32, + size: usize, + pre: &PreprocessingInfo, + _air: &A, +) -> Preprocessing +where + F: PrimeField + Hash, + A: Air>>, +{ + let n = 2usize.pow(k); + let num_fixed_columns = LOCATION_COLUMNS; + let mut fixed = vec![vec![F::ZERO; n]; num_fixed_columns]; + + // From the ethSTARK paper section 3.3: + // > An execution trace is valid if (1) certain boundary constraints hold and (2) each pair + // > of consecutive states satisfies the constraints dictated by the computation. + // We enable the constraints with fixed columns used as selectors: + // For (1) we have "first" and "last". + // For (2) we have "trans(ition)". 
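+ // For instance (hedged illustration), with `size = 4` and `n = 8` the three columns are
+ // first = [1,0,0,0,0,0,0,0], last = [0,0,0,1,0,0,0,0] and trans = [1,1,1,0,0,0,0,0], so
+ // the padding rows beyond `size` never activate any constraint.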
+ + fixed[COL_FIRST][0] = F::ONE; + fixed[COL_LAST][size - 1] = F::ONE; + + for i in 0..size - 1 { + fixed[COL_TRANS][i] = F::ONE; + } + + let mut copies = Vec::new(); + for (cell, public) in &pre.copy_public { + let advice_row = match cell.1 { + Location::FirstRow => 0, + Location::LastRow => size - 1, + Location::Transition => unreachable!(), + }; + copies.push(( + Cell { + column: ColumnMid { + column_type: Any::Advice, + index: cell.0, + }, + row: advice_row, + }, + Cell { + column: ColumnMid { + column_type: Any::Instance, + index: 0, + }, + row: *public, + }, + )); + } + + Preprocessing { + permutation: permutation::AssemblyMid { copies }, + fixed, + } +} + +// Check if the constraint is an equality against a public input and extract the copy constraint as +// `(advice_column_index, Location)` and `public_index`. If there's no copy constriant, return +// None. +fn extract_copy_public( + e: &SymbolicExpression>, +) -> Option<((usize, Location), usize)> { + use SymbolicExpression as SE; + use SymbolicVariable as SV; + // Example: + // Mul(Location(FirstRow), + // Sub(Variable(SymbolicVariable(Query(Query { is_next: false, column: 0 }))), + // Variable(SymbolicVariable(Public(Public { index: 0 }))))) + let (mul_lhs, mul_rhs) = match e { + SE::Mul(lhs, rhs) => (&**lhs, &**rhs), + _ => return None, + }; + let (cell_location, (sub_lhs, sub_rhs)) = match (mul_lhs, mul_rhs) { + (SE::Location(location @ (Location::FirstRow | Location::LastRow)), SE::Sub(lhs, rhs)) => { + (*location, (&**lhs, &**rhs)) + } + _ => return None, + }; + let (cell_column, public) = match (sub_lhs, sub_rhs) { + ( + SE::Variable(SV( + Var::Query(Query { + is_next: false, + column, + }), + _, + )), + SE::Variable(SV(Var::Public(Public { index }), _)), + ) => (*column, *index), + _ => return None, + }; + Some(((cell_column, cell_location), public)) +} + +pub fn get_public_inputs( + preprocessing_info: &PreprocessingInfo, + size: usize, + witness: &[Option>], +) -> Vec> { + if preprocessing_info.num_public_values == 0 { + return Vec::new(); + } + let mut public_inputs = vec![F::ZERO; preprocessing_info.num_public_values]; + for (cell, public_index) in &preprocessing_info.copy_public { + let offset = match cell.1 { + Location::FirstRow => 0, + Location::LastRow => size - 1, + Location::Transition => unreachable!(), + }; + public_inputs[*public_index] = witness[cell.0].as_ref().unwrap()[offset] + } + vec![public_inputs] +} + +#[derive(Debug, Clone)] +pub struct PreprocessingInfo { + copy_public: Vec<((usize, Location), usize)>, + num_public_values: usize, +} + +#[derive(Debug, Clone, Default)] +pub struct CompileParams { + pub disable_zk: bool, +} + +pub fn compile_circuit_cs( + air: &A, + params: &CompileParams, + num_public_values: usize, +) -> (ConstraintSystemMid, PreprocessingInfo) +where + F: PrimeField + Hash, + A: Air>>, +{ + let mut builder = SymbolicAirBuilder::new(air.width(), num_public_values); + air.eval(&mut builder); + + let num_fixed_columns = LOCATION_COLUMNS; + let num_advice_columns = air.width(); + + let mut gates: Vec> = Vec::new(); + // copy between `(advice_column_index, Location)` and `public_index`. + let mut copy_public: Vec<((usize, Location), usize)> = Vec::new(); + let mut copy_columns: Vec = Vec::new(); + for (i, constraint) in builder.constraints.iter().enumerate() { + // Check if the constraint is an equality against a public input and store it as a copy + // constraint. Otherwise it's a gate that can't have public variables. 
+ if let Some((cell, public)) = extract_copy_public(constraint) { + copy_public.push((cell, public)); + let column = ColumnMid { + column_type: Any::Advice, + index: cell.0, + }; + if !copy_columns.contains(&column) { + copy_columns.push(column); + } + continue; + }; + let constraint = if params.disable_zk { + constraint.clone() + } else { + disable_in_unusable_rows(constraint) + }; + gates.push(GateMid { + name: format!("constraint{i}"), + poly: sym_to_expr(&constraint), + }); + } + let mut num_instance_columns = 0; + if !copy_public.is_empty() { + copy_columns.push(ColumnMid { + column_type: Any::Instance, + index: 0, + }); + num_instance_columns += 1; + } + let unblinded_advice_columns = if params.disable_zk { + (0..num_advice_columns).collect() + } else { + Vec::new() + }; + + let cs = ConstraintSystemMid:: { + num_fixed_columns, + num_advice_columns, + num_instance_columns, + num_challenges: 0, + unblinded_advice_columns, + advice_column_phase: (0..num_advice_columns).map(|_| 0).collect(), + challenge_phase: Vec::new(), + gates, + permutation: permutation::ArgumentMid { + columns: copy_columns, + }, + lookups: Vec::new(), + shuffles: Vec::new(), + general_column_annotations: HashMap::new(), + minimum_degree: None, + }; + let preprocessing_info = PreprocessingInfo { + copy_public, + num_public_values, + }; + (cs, preprocessing_info) +} + +pub fn trace_to_wit(k: u32, trace: RowMajorMatrix>) -> Vec>> { + let n = 2usize.pow(k); + let num_columns = trace.width; + let mut witness = vec![vec![F::ZERO; n]; num_columns]; + for (row_offset, row) in trace.rows().enumerate() { + for column_index in 0..num_columns { + witness[column_index][row_offset] = row[column_index].0; + } + } + witness.into_iter().map(Some).collect() +} + +// TODO: Move to middleware +pub fn check_witness( + circuit: &CompiledCircuit, + k: u32, + witness: &[Option>], + public: &[Vec], +) { + let n = 2usize.pow(k); + let cs = &circuit.cs; + let preprocessing = &circuit.preprocessing; + // TODO: Simulate blinding rows + // Verify all gates + for (i, gate) in cs.gates.iter().enumerate() { + for offset in 0..n { + let res = gate.poly.evaluate( + &|s| s, + &|v| match v { + VarMid::Query(q) => { + let offset = offset as i32 + q.rotation.0; + // TODO: Try to do mod n with a rust function + let offset = if offset < 0 { + (offset + n as i32) as usize + } else if offset >= n as i32 { + (offset - n as i32) as usize + } else { + offset as usize + }; + match q.column_type { + Any::Instance => public[q.column_index][offset], + Any::Advice => witness[q.column_index].as_ref().unwrap()[offset], + Any::Fixed => preprocessing.fixed[q.column_index][offset], + } + } + VarMid::Challenge(_c) => unimplemented!(), + }, + &|ne| -ne, + &|a, b| a + b, + &|a, b| a * b, + ); + if !res.is_zero_vartime() { + println!( + "Unsatisfied gate {} \"{}\" at offset {}", + i, gate.name, offset + ); + panic!("KO"); + } + } + } + println!("Check witness: OK"); +} diff --git a/p3_frontend/src/symbolic_builder.rs b/p3_frontend/src/symbolic_builder.rs new file mode 100644 index 0000000000..428204e21a --- /dev/null +++ b/p3_frontend/src/symbolic_builder.rs @@ -0,0 +1,98 @@ +//! `SymbolicAirBuilder` copied from plonky3 and adapted for the Air to Plonkish usecase, at commit +//! `7b5b8a69f633bc61c530f3722701e5f701b11963`. 
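+//!
+//! `SymbolicAirBuilder::new(width, num_public_values)` seeds the main matrix with query
+//! variables for the local row (`w0`, `w1`, ...) and the next row (`w0'`, `w1'`, ...) plus
+//! public variables (`p0`, ...); running `Air::eval` against it then records every asserted
+//! expression into `constraints` for later conversion into halo2 gates.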
+ +// The MIT License (MIT) +// +// Copyright (c) 2022 The Plonky3 Authors +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +use alloc::vec; +use alloc::vec::Vec; + +use p3_air::AirBuilder; +use p3_field::Field; +use p3_matrix::dense::RowMajorMatrix; + +use crate::air::AirBuilderWithPublicValues; +use crate::symbolic_expression::{Location, SymbolicExpression}; +use crate::symbolic_variable::SymbolicVariable; + +/// An `AirBuilder` for evaluating constraints symbolically, and recording them for later use. +pub struct SymbolicAirBuilder { + pub(crate) main: RowMajorMatrix>, + pub(crate) public_values: Vec>, + pub(crate) constraints: Vec>, +} + +impl SymbolicAirBuilder { + pub(crate) fn new(width: usize, num_public_values: usize) -> Self { + let values = [false, true] + .into_iter() + .flat_map(|is_next| { + (0..width).map(move |column| SymbolicVariable::new_query(is_next, column)) + }) + .collect(); + Self { + main: RowMajorMatrix::new(values, width), + public_values: (0..num_public_values) + .map(|i| SymbolicVariable::new_public(i)) + .collect(), + constraints: vec![], + } + } +} + +impl AirBuilder for SymbolicAirBuilder { + type F = F; + type Expr = SymbolicExpression; + type Var = SymbolicVariable; + type M = RowMajorMatrix; + + fn main(&self) -> Self::M { + self.main.clone() + } + + fn is_first_row(&self) -> Self::Expr { + SymbolicExpression::Location(Location::FirstRow) + } + + fn is_last_row(&self) -> Self::Expr { + SymbolicExpression::Location(Location::LastRow) + } + + // TODO: Figure out what's a window size > 2. + fn is_transition_window(&self, size: usize) -> Self::Expr { + if size == 2 { + SymbolicExpression::Location(Location::Transition) + } else { + panic!("uni-stark only supports a window size of 2") + } + } + + fn assert_zero>(&mut self, x: I) { + self.constraints.push(x.into()); + } +} + +impl AirBuilderWithPublicValues for SymbolicAirBuilder { + fn public_values(&self) -> &[Self::Var] { + self.public_values.as_slice() + } +} diff --git a/p3_frontend/src/symbolic_expression.rs b/p3_frontend/src/symbolic_expression.rs new file mode 100644 index 0000000000..cc416f2f2a --- /dev/null +++ b/p3_frontend/src/symbolic_expression.rs @@ -0,0 +1,427 @@ +//! `SymbolicExpression` copied from plonky3 and adapted for the Air to Plonkish usecase, at commit +//! `7b5b8a69f633bc61c530f3722701e5f701b11963`. 
+ +// The MIT License (MIT) +// +// Copyright (c) 2022 The Plonky3 Authors +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +use alloc::rc::Rc; +use core::fmt::{self, Debug}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; + +use p3_field::{AbstractField, Field}; + +use crate::symbolic_variable::SymbolicVariable; + +#[derive(Clone, Copy, Debug)] +pub enum Location { + FirstRow, + LastRow, + Transition, +} + +impl fmt::Display for Location { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::FirstRow => write!(f, "fst"), + Self::LastRow => write!(f, "lst"), + Self::Transition => write!(f, "trn"), + } + } +} + +/// An expression over `SymbolicVariable`s. 
+#[derive(Clone, Debug)] +pub enum SymbolicExpression { + Variable(SymbolicVariable), + Location(Location), + Constant(F), + Add(Rc, Rc), + Sub(Rc, Rc), + Neg(Rc), + Mul(Rc, Rc), +} + +impl SymbolicExpression { + pub fn is_zero(&self) -> bool { + match self { + Self::Constant(c) => c.is_zero(), + _ => false, + } + } + pub fn is_one(&self) -> bool { + match self { + Self::Constant(c) => c.is_one(), + _ => false, + } + } +} + +impl fmt::Display for SymbolicExpression { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Variable(var) => write!(f, "{}", var), + Self::Location(loc) => write!(f, "{}", loc), + Self::Constant(c) => { + if *c == F::zero() { + write!(f, "0") + } else if *c == F::one() { + write!(f, "1") + } else if *c == F::two() { + write!(f, "2") + } else { + write!(f, "{}", c) + } + } + Self::Add(lhs, rhs) => { + if let Self::Neg(neg_rhs) = &**rhs { + write!(f, "({} - {})", lhs, neg_rhs) + } else { + write!(f, "({} + {})", lhs, rhs) + } + } + Self::Sub(lhs, rhs) => { + write!(f, "({} - {})", lhs, rhs) + } + Self::Neg(neg) => write!(f, "-({})", neg), + Self::Mul(lhs, rhs) => write!(f, "{} * {}", lhs, rhs), + } + } +} + +impl Default for SymbolicExpression { + fn default() -> Self { + Self::Constant(F::zero()) + } +} + +impl From for SymbolicExpression { + fn from(value: F) -> Self { + Self::Constant(value) + } +} + +impl AbstractField for SymbolicExpression { + type F = F; + + fn zero() -> Self { + Self::Constant(F::zero()) + } + fn one() -> Self { + Self::Constant(F::one()) + } + fn two() -> Self { + Self::Constant(F::two()) + } + fn neg_one() -> Self { + Self::Constant(F::neg_one()) + } + + #[inline] + fn from_f(f: Self::F) -> Self { + f.into() + } + + fn from_bool(b: bool) -> Self { + Self::Constant(F::from_bool(b)) + } + + fn from_canonical_u8(n: u8) -> Self { + Self::Constant(F::from_canonical_u8(n)) + } + + fn from_canonical_u16(n: u16) -> Self { + Self::Constant(F::from_canonical_u16(n)) + } + + fn from_canonical_u32(n: u32) -> Self { + Self::Constant(F::from_canonical_u32(n)) + } + + fn from_canonical_u64(n: u64) -> Self { + Self::Constant(F::from_canonical_u64(n)) + } + + fn from_canonical_usize(n: usize) -> Self { + Self::Constant(F::from_canonical_usize(n)) + } + + fn from_wrapped_u32(n: u32) -> Self { + Self::Constant(F::from_wrapped_u32(n)) + } + + fn from_wrapped_u64(n: u64) -> Self { + Self::Constant(F::from_wrapped_u64(n)) + } + + fn generator() -> Self { + Self::Constant(F::generator()) + } + + fn double(&self) -> Self { + self.clone() * F::from_canonical_u64(2) + } +} + +impl Add for SymbolicExpression { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + if rhs.is_zero() { + self + } else if self.is_zero() { + rhs + } else { + Self::Add(Rc::new(self), Rc::new(rhs)) + } + } +} + +impl Add for SymbolicExpression { + type Output = Self; + + fn add(self, rhs: F) -> Self { + if rhs.is_zero() { + self + } else if self.is_zero() { + Self::Constant(rhs) + } else { + self + Self::from(rhs) + } + } +} + +impl AddAssign for SymbolicExpression { + fn add_assign(&mut self, rhs: Self) { + *self = self.clone() + rhs; + } +} + +impl AddAssign for SymbolicExpression { + fn add_assign(&mut self, rhs: F) { + *self += Self::from(rhs); + } +} + +impl Sum for SymbolicExpression { + fn sum>(iter: I) -> Self { + iter.reduce(|x, y| x + y).unwrap_or(Self::zero()) + } +} + +impl Sum for SymbolicExpression { + fn sum>(iter: I) -> Self { + iter.map(|x| Self::from(x)).sum() + } +} + +impl Sub for SymbolicExpression { + type Output = Self; + + fn 
sub(self, rhs: Self) -> Self { + Self::Sub(Rc::new(self), Rc::new(rhs)) + } +} + +impl Sub for SymbolicExpression { + type Output = Self; + + fn sub(self, rhs: F) -> Self { + self - Self::from(rhs) + } +} + +impl SubAssign for SymbolicExpression { + fn sub_assign(&mut self, rhs: Self) { + *self = self.clone() - rhs; + } +} + +impl SubAssign for SymbolicExpression { + fn sub_assign(&mut self, rhs: F) { + *self -= Self::from(rhs); + } +} + +impl Neg for SymbolicExpression { + type Output = Self; + + fn neg(self) -> Self { + Self::Neg(Rc::new(self)) + } +} + +impl Mul for SymbolicExpression { + type Output = Self; + + fn mul(self, rhs: Self) -> Self { + if rhs.is_zero() || self.is_zero() { + Self::Constant(F::zero()) + } else if rhs.is_one() { + self + } else { + Self::Mul(Rc::new(self), Rc::new(rhs)) + } + } +} + +impl Mul for SymbolicExpression { + type Output = Self; + + fn mul(self, rhs: F) -> Self { + if rhs.is_zero() || self.is_zero() { + Self::Constant(F::zero()) + } else if rhs.is_one() { + self + } else { + self * Self::from(rhs) + } + } +} + +impl MulAssign for SymbolicExpression { + fn mul_assign(&mut self, rhs: Self) { + *self = self.clone() * rhs; + } +} + +impl MulAssign for SymbolicExpression { + fn mul_assign(&mut self, rhs: F) { + *self *= Self::from(rhs); + } +} + +impl Product for SymbolicExpression { + fn product>(iter: I) -> Self { + iter.reduce(|x, y| x * y).unwrap_or(Self::one()) + } +} + +impl Product for SymbolicExpression { + fn product>(iter: I) -> Self { + iter.map(|x| Self::from(x)).product() + } +} + +#[allow(clippy::bool_assert_comparison)] +#[cfg(test)] +mod test { + use super::*; + use crate::fwrap::FWrap; + use halo2curves::bn256::Fr; + + type F = FWrap; + type V = SymbolicVariable; + type E = SymbolicExpression; + + #[test] + fn test_symbolic_expression() { + assert_eq!(E::from(F::zero()).is_zero(), true); + assert_eq!(E::from(F::one()).is_zero(), false); + assert_eq!(E::from(F::one()).is_one(), true); + assert_eq!(E::from(F::zero()).is_one(), false); + + assert_eq!(format!("{}", E::default()), "0"); + + // AbstractField + + assert_eq!(format!("{}", E::zero()), "0"); + assert_eq!(format!("{}", E::one()), "1"); + assert_eq!(format!("{}", E::two()), "2"); + assert_eq!( + format!("{}", E::neg_one()), + "0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000" + ); + assert_eq!(format!("{}", E::from_f(F::two())), "2"); + assert_eq!(format!("{}", E::from_bool(true)), "1"); + assert_eq!( + format!("{}", E::from_canonical_u8(0x12)), + "0x0000000000000000000000000000000000000000000000000000000000000012" + ); + assert_eq!( + format!("{}", E::from_canonical_u16(0x1234)), + "0x0000000000000000000000000000000000000000000000000000000000001234" + ); + assert_eq!( + format!("{}", E::from_canonical_u32(0x123456)), + "0x0000000000000000000000000000000000000000000000000000000000123456" + ); + assert_eq!( + format!("{}", E::from_canonical_u64(0xfffffff12)), + "0x0000000000000000000000000000000000000000000000000000000fffffff12" + ); + assert_eq!( + format!("{}", E::from_canonical_usize(0xfffffff12)), + "0x0000000000000000000000000000000000000000000000000000000fffffff12" + ); + assert_eq!( + format!("{}", E::from_wrapped_u32(0x123456)), + "0x0000000000000000000000000000000000000000000000000000000000123456" + ); + assert_eq!( + format!("{}", E::from_wrapped_u64(0xfffffff12)), + "0x0000000000000000000000000000000000000000000000000000000fffffff12" + ); + assert_eq!( + format!("{}", E::generator()), + "0x0000000000000000000000000000000000000000000000000000000000000007" + 
); + assert_eq!(format!("{}", E::two().double()), "2 * 2"); + + // Arithmetic operators + + let w1 = E::from(V::new_query(false, 1)); + let w2 = E::from(V::new_query(false, 2)); + let f = F::two(); + assert_eq!(format!("{}", w1.clone() + w2.clone()), "(w1 + w2)"); + assert_eq!(format!("{}", w1.clone() + f), "(w1 + 2)"); + let mut v = w1.clone(); + v += w2.clone(); + v += f; + assert_eq!(format!("{}", v), "((w1 + w2) + 2)"); + assert_eq!( + format!("{}", [w1.clone(), w2.clone()].into_iter().sum::()), + "(w1 + w2)" + ); + assert_eq!(format!("{}", [f, f].into_iter().sum::()), "(2 + 2)"); + + assert_eq!(format!("{}", w1.clone() - w2.clone()), "(w1 - w2)"); + assert_eq!(format!("{}", w1.clone() - f), "(w1 - 2)"); + let mut v = w1.clone(); + v -= w2.clone(); + v -= f; + assert_eq!(format!("{}", v), "((w1 - w2) - 2)"); + assert_eq!(format!("{}", -w1.clone()), "-(w1)"); + + assert_eq!(format!("{}", w1.clone() * w2.clone()), "w1 * w2"); + assert_eq!(format!("{}", w1.clone() * f), "w1 * 2"); + let mut v = w1.clone(); + v *= w2.clone(); + v *= f; + assert_eq!(format!("{}", v), "w1 * w2 * 2"); + assert_eq!( + format!("{}", [w1.clone(), w2.clone()].into_iter().product::()), + "w1 * w2" + ); + assert_eq!(format!("{}", [f, f].into_iter().product::()), "2 * 2"); + } +} diff --git a/p3_frontend/src/symbolic_variable.rs b/p3_frontend/src/symbolic_variable.rs new file mode 100644 index 0000000000..da0ba236df --- /dev/null +++ b/p3_frontend/src/symbolic_variable.rs @@ -0,0 +1,214 @@ +//! `SymbolicVariable` copied from plonky3 and adapted for the Air to Plonkish usecase, at commit +//! `7b5b8a69f633bc61c530f3722701e5f701b11963`. + +// The MIT License (MIT) +// +// Copyright (c) 2022 The Plonky3 Authors +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +use core::fmt; +use core::marker::PhantomData; +use core::ops::{Add, Mul, Sub}; + +use p3_field::Field; + +use crate::symbolic_expression::SymbolicExpression; + +/// A variable within the evaluation window, i.e. a column in either the local or next row. 
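+/// Displayed as `w{column}` for the local row, `w{column}'` for the next row and `p{index}`
+/// for public values.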
+#[derive(Copy, Clone, Debug)] +pub struct SymbolicVariable(pub Var, pub PhantomData); + +impl fmt::Display for SymbolicVariable { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.0 { + Var::Query(q) => write!(f, "w{}{}", q.column, if q.is_next { "'" } else { "" }), + Var::Public(p) => write!(f, "p{}", p.index), + } + } +} + +#[derive(Copy, Clone, Debug)] +pub enum Var { + Query(Query), + Public(Public), +} + +#[derive(Copy, Clone, Debug)] +pub struct Query { + pub is_next: bool, + pub column: usize, +} + +#[derive(Copy, Clone, Debug)] +pub struct Public { + pub index: usize, +} + +impl SymbolicVariable { + pub fn new_query(is_next: bool, column: usize) -> Self { + Self(Var::Query(Query { is_next, column }), PhantomData) + } + pub fn new_public(index: usize) -> Self { + Self(Var::Public(Public { index }), PhantomData) + } +} + +impl From> for SymbolicExpression { + fn from(value: SymbolicVariable) -> Self { + SymbolicExpression::Variable(value) + } +} + +impl Add for SymbolicVariable { + type Output = SymbolicExpression; + + fn add(self, rhs: Self) -> Self::Output { + SymbolicExpression::from(self) + SymbolicExpression::from(rhs) + } +} + +impl Add for SymbolicVariable { + type Output = SymbolicExpression; + + fn add(self, rhs: F) -> Self::Output { + SymbolicExpression::from(self) + SymbolicExpression::from(rhs) + } +} + +impl Add> for SymbolicVariable { + type Output = SymbolicExpression; + + fn add(self, rhs: SymbolicExpression) -> Self::Output { + SymbolicExpression::from(self) + rhs + } +} + +impl Add> for SymbolicExpression { + type Output = Self; + + fn add(self, rhs: SymbolicVariable) -> Self::Output { + self + Self::from(rhs) + } +} + +impl Sub for SymbolicVariable { + type Output = SymbolicExpression; + + fn sub(self, rhs: Self) -> Self::Output { + SymbolicExpression::from(self) - SymbolicExpression::from(rhs) + } +} + +impl Sub for SymbolicVariable { + type Output = SymbolicExpression; + + fn sub(self, rhs: F) -> Self::Output { + SymbolicExpression::from(self) - SymbolicExpression::from(rhs) + } +} + +impl Sub> for SymbolicVariable { + type Output = SymbolicExpression; + + fn sub(self, rhs: SymbolicExpression) -> Self::Output { + SymbolicExpression::from(self) - rhs + } +} + +impl Sub> for SymbolicExpression { + type Output = Self; + + fn sub(self, rhs: SymbolicVariable) -> Self::Output { + self - Self::from(rhs) + } +} + +impl Mul for SymbolicVariable { + type Output = SymbolicExpression; + + fn mul(self, rhs: Self) -> Self::Output { + SymbolicExpression::from(self) * SymbolicExpression::from(rhs) + } +} + +impl Mul for SymbolicVariable { + type Output = SymbolicExpression; + + fn mul(self, rhs: F) -> Self::Output { + SymbolicExpression::from(self) * SymbolicExpression::from(rhs) + } +} + +impl Mul> for SymbolicVariable { + type Output = SymbolicExpression; + + fn mul(self, rhs: SymbolicExpression) -> Self::Output { + SymbolicExpression::from(self) * rhs + } +} + +impl Mul> for SymbolicExpression { + type Output = Self; + + fn mul(self, rhs: SymbolicVariable) -> Self::Output { + self * Self::from(rhs) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::fwrap::FWrap; + use halo2curves::bn256::Fr; + use p3_field::AbstractField; + + type F = FWrap; + type V = SymbolicVariable; + type E = SymbolicExpression; + + #[test] + fn test_symbolic_variable() { + assert_eq!(format!("{}", V::new_query(false, 1)), "w1"); + assert_eq!(format!("{}", V::new_query(true, 1)), "w1'"); + assert_eq!(format!("{}", V::new_public(1)), "p1"); + + let w1 = 
V::new_query(false, 1); + let w2 = V::new_query(false, 2); + let f = F::two(); + assert_eq!(format!("{}", E::from(w1)), "w1"); + + // Arithmetic operators + + assert_eq!(format!("{}", w1 + w2), "(w1 + w2)"); + assert_eq!(format!("{}", w1 + E::from(w2)), "(w1 + w2)"); + assert_eq!(format!("{}", E::from(w1) + w2), "(w1 + w2)"); + assert_eq!(format!("{}", w1 + f), "(w1 + 2)"); + + assert_eq!(format!("{}", w1 - w2), "(w1 - w2)"); + assert_eq!(format!("{}", w1 - E::from(w2)), "(w1 - w2)"); + assert_eq!(format!("{}", E::from(w1) - w2), "(w1 - w2)"); + assert_eq!(format!("{}", w1 - f), "(w1 - 2)"); + + assert_eq!(format!("{}", w1 * w2), "w1 * w2"); + assert_eq!(format!("{}", w1 * E::from(w2)), "w1 * w2"); + assert_eq!(format!("{}", E::from(w1) * w2), "w1 * w2"); + assert_eq!(format!("{}", w1 * f), "w1 * 2"); + } +} diff --git a/p3_frontend/tests/common/mod.rs b/p3_frontend/tests/common/mod.rs new file mode 100644 index 0000000000..d762b5f2d5 --- /dev/null +++ b/p3_frontend/tests/common/mod.rs @@ -0,0 +1,107 @@ +use halo2_backend::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG}; +use halo2_backend::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK}; +use halo2_backend::poly::kzg::strategy::SingleStrategy; +use halo2_backend::{ + plonk::{ + keygen::{keygen_pk, keygen_vk}, + prover::ProverSingle, + verifier::verify_proof_single, + }, + transcript::{ + Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer, + }, +}; +use halo2_debug::test_rng; +use halo2_middleware::circuit::CompiledCircuit; +use halo2_middleware::zal::impls::H2cEngine; +use halo2curves::bn256::{Bn256, Fr, G1Affine}; +use p3_air::Air; +use p3_frontend::{ + check_witness, compile_circuit_cs, compile_preprocessing, get_public_inputs, trace_to_wit, + CompileParams, FWrap, SymbolicAirBuilder, +}; +use p3_matrix::dense::RowMajorMatrix; +use std::time::Instant; + +#[allow(clippy::type_complexity)] +pub(crate) fn compile_witgen( + air: A, + params: &CompileParams, + k: u32, + size: usize, + num_public_values: usize, + trace: RowMajorMatrix>, +) -> (CompiledCircuit, Vec>>, Vec>) +where + A: Air>>, +{ + let n = 2usize.pow(k); + println!("k = {k}"); + println!("n = {n}"); + println!("size = {size}"); + println!("columns = {}", A::width(&air)); + let (cs, preprocessing_info) = compile_circuit_cs::(&air, params, num_public_values); + println!( + "degree = {}", + cs.gates.iter().map(|g| g.poly.degree()).max().unwrap() + ); + let preprocessing = compile_preprocessing::(k, size, &preprocessing_info, &air); + let compiled_circuit = CompiledCircuit { cs, preprocessing }; + let witness = trace_to_wit(k, trace); + let pis = get_public_inputs(&preprocessing_info, size, &witness); + + check_witness(&compiled_circuit, k, &witness, &pis); + (compiled_circuit, witness, pis) +} + +pub(crate) fn setup_prove_verify( + compiled_circuit: &CompiledCircuit, + k: u32, + pis: &[Vec], + witness: Vec>>, +) { + // Setup + let mut rng = test_rng(); + let params = ParamsKZG::::setup(k, &mut rng); + let verifier_params = params.verifier_params(); + let start = Instant::now(); + let vk = keygen_vk(¶ms, compiled_circuit).expect("keygen_vk should not fail"); + let pk = keygen_pk(¶ms, vk.clone(), compiled_circuit).expect("keygen_pk should not fail"); + println!("Keygen: {:?}", start.elapsed()); + + // Proving + println!("Proving..."); + let start = Instant::now(); + let mut transcript = Blake2bWrite::<_, G1Affine, Challenge255<_>>::init(vec![]); + let mut prover = ProverSingle::< + KZGCommitmentScheme, + ProverSHPLONK<'_, Bn256>, + 
_, + _, + _, + H2cEngine, + >::new(¶ms, &pk, pis.to_vec(), &mut rng, &mut transcript) + .unwrap(); + println!("phase 0"); + prover.commit_phase(0, witness).unwrap(); + prover.create_proof().unwrap(); + let proof = transcript.finalize(); + println!("Prove: {:?}", start.elapsed()); + + // Verify + let start = Instant::now(); + println!("Verifying..."); + let mut verifier_transcript = + Blake2bRead::<_, G1Affine, Challenge255<_>>::init(proof.as_slice()); + let strategy = SingleStrategy::new(&verifier_params); + + verify_proof_single::, VerifierSHPLONK, _, _, _>( + &verifier_params, + &vk, + strategy, + pis.to_vec(), + &mut verifier_transcript, + ) + .expect("verify succeeds"); + println!("Verify: {:?}", start.elapsed()); +} diff --git a/p3_frontend/tests/fib_air.rs b/p3_frontend/tests/fib_air.rs new file mode 100644 index 0000000000..7f70a5d4e5 --- /dev/null +++ b/p3_frontend/tests/fib_air.rs @@ -0,0 +1,134 @@ +// The MIT License (MIT) +// +// Copyright (c) 2022 The Plonky3 Authors +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. 
+ +use std::borrow::Borrow; + +use p3_air::{Air, AirBuilder, BaseAir}; +use p3_field::PrimeField; +use p3_frontend::AirBuilderWithPublicValues; +use p3_matrix::dense::RowMajorMatrix; +use p3_matrix::MatrixRowSlices; + +/// For testing the public values feature + +pub struct FibonacciAir {} + +impl BaseAir for FibonacciAir { + fn width(&self) -> usize { + NUM_FIBONACCI_COLS + } +} + +impl Air for FibonacciAir { + fn eval(&self, builder: &mut AB) { + let main = builder.main(); + let pis = builder.public_values(); + + let a = pis[0]; + let b = pis[1]; + let x = pis[2]; + + let local: &FibonacciRow = main.row_slice(0).borrow(); + let next: &FibonacciRow = main.row_slice(1).borrow(); + + let mut when_first_row = builder.when_first_row(); + + when_first_row.assert_eq(local.left, a); + when_first_row.assert_eq(local.right, b); + + let mut when_transition = builder.when_transition(); + + // a' <- b + when_transition.assert_eq(local.right, next.left); + + // b' <- a + b + when_transition.assert_eq(local.left + local.right, next.right); + + builder.when_last_row().assert_eq(local.right, x); + } +} + +pub fn generate_trace_rows(a: u64, b: u64, n: usize) -> RowMajorMatrix { + assert!(n.is_power_of_two()); + + let mut trace = + RowMajorMatrix::new(vec![F::zero(); n * NUM_FIBONACCI_COLS], NUM_FIBONACCI_COLS); + + let (prefix, rows, suffix) = unsafe { trace.values.align_to_mut::>() }; + assert!(prefix.is_empty(), "Alignment should match"); + assert!(suffix.is_empty(), "Alignment should match"); + assert_eq!(rows.len(), n); + + rows[0] = FibonacciRow::new(F::from_canonical_u64(a), F::from_canonical_u64(b)); + + for i in 1..n { + rows[i].left = rows[i - 1].right; + rows[i].right = rows[i - 1].left + rows[i - 1].right; + } + + trace +} + +const NUM_FIBONACCI_COLS: usize = 2; + +pub struct FibonacciRow { + pub left: F, + pub right: F, +} + +impl FibonacciRow { + fn new(left: F, right: F) -> FibonacciRow { + FibonacciRow { left, right } + } +} + +impl Borrow> for [F] { + fn borrow(&self) -> &FibonacciRow { + debug_assert_eq!(self.len(), NUM_FIBONACCI_COLS); + let (prefix, shorts, suffix) = unsafe { self.align_to::>() }; + debug_assert!(prefix.is_empty(), "Alignment should match"); + debug_assert!(suffix.is_empty(), "Alignment should match"); + debug_assert_eq!(shorts.len(), 1); + &shorts[0] + } +} + +use halo2curves::bn256::Fr; +use p3_frontend::{CompileParams, FWrap}; + +mod common; + +#[test] +fn test_fib() { + let k = 5; + let n = 2usize.pow(k); + // TODO: 6 must be bigger than unusable rows. Add a helper function to calculate this + let size = n - 6; + let air = FibonacciAir {}; + let num_public_values = 3; + let params = CompileParams::default(); + let trace = generate_trace_rows::>(0, 1, n); + let (compiled_circuit, witness, pis) = + common::compile_witgen(air, ¶ms, k, size, num_public_values, trace); + + common::setup_prove_verify(&compiled_circuit, k, &pis, witness); +} diff --git a/p3_frontend/tests/keccak_air.rs b/p3_frontend/tests/keccak_air.rs new file mode 100644 index 0000000000..83b0be44f3 --- /dev/null +++ b/p3_frontend/tests/keccak_air.rs @@ -0,0 +1,28 @@ +use p3_keccak_air::{generate_trace_rows, KeccakAir, NUM_ROUNDS}; +use p3_util::log2_ceil_usize; + +use halo2curves::bn256::Fr; +use p3_frontend::{CompileParams, FWrap}; +use rand::random; + +mod common; + +#[test] +fn test_keccak() { + let num_hashes = 4; + // TODO: Replace `random()` with a pseudorandom generator with known seed for deterministic + // results. 
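+ // A seeded alternative could look like this (hedged sketch, not applied here):
+ // use rand::{Rng, SeedableRng};
+ // let mut rng = rand::rngs::StdRng::seed_from_u64(0);
+ // let inputs = (0..num_hashes).map(|_| rng.gen()).collect::<Vec<_>>();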
+ let inputs = (0..num_hashes).map(|_| random()).collect::<Vec<_>>();
+ let size = inputs.len() * NUM_ROUNDS;
+ // TODO: 6 must be bigger than unusable rows. Add a helper function to calculate this
+ let n = (size + 6).next_power_of_two();
+ let k = log2_ceil_usize(n) as u32;
+ let air = KeccakAir {};
+ let num_public_values = 0;
+ let params = CompileParams { disable_zk: false };
+ let trace = generate_trace_rows::<FWrap<Fr>>(inputs);
+ let (compiled_circuit, witness, pis) =
+ common::compile_witgen(air, &params, k, size, num_public_values, trace);
+
+ common::setup_prove_verify(&compiled_circuit, k, &pis, witness);
+}
diff --git a/rust-toolchain b/rust-toolchain
index 65ee095984..aaceec04e0 100644
--- a/rust-toolchain
+++ b/rust-toolchain
@@ -1 +1 @@
-1.67.0
+1.80.0