diff --git a/halo2/katex-header.html b/.github/katex-header.html
similarity index 99%
rename from halo2/katex-header.html
rename to .github/katex-header.html
index 98e85904fa..32ac35a411 100644
--- a/halo2/katex-header.html
+++ b/.github/katex-header.html
@@ -12,4 +12,4 @@
]
});
});
-
\ No newline at end of file
+
diff --git a/.github/scripts/run-examples.sh b/.github/scripts/run-examples.sh
new file mode 100755
index 0000000000..c96c68e1d0
--- /dev/null
+++ b/.github/scripts/run-examples.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+# Get the list of examples from "examples" dir & Cargo.toml
+EXAMPLES_WITH_FEATURES=$(awk '/^\[\[example\]\]/ { getline; name=$3; name=substr(name, 2, length(name)-2); getline; if ($1 == "required-features") { features=$NF; gsub(/["\[\]]/, "", features); print name "#" features } }' ./halo2_proofs/Cargo.toml)
+EXAMPLES_WITHOUT_FEATURES=$(ls ./halo2_proofs/examples/*.rs | xargs -n1 basename -s .rs)
+
+# Remove examples with features listed in Cargo.toml from examples without features
+EXAMPLES_WITHOUT_FEATURES=$(echo "$EXAMPLES_WITHOUT_FEATURES" | grep -vFx "$(echo "$EXAMPLES_WITH_FEATURES" | cut -d '#' -f 1)")
+
+# Combine examples with and without features
+EXAMPLES=$(echo "$EXAMPLES_WITH_FEATURES $EXAMPLES_WITHOUT_FEATURES" | tr ' ' '\n' | sort -u | tr '\n' ' ')
+
+# Run the examples
+for example in $EXAMPLES; do
+ if [ "$(echo "$example" | grep '#')" ]; then
+ name=$(echo $example | cut -d '#' -f 1)
+ features=$(echo $example | cut -d '#' -f 2)
+ cargo run --package halo2_proofs --example $name --features $features
+ else
+ cargo run --package halo2_proofs --example $example
+ fi
+done
diff --git a/.github/scripts/wasm-target-test-build.sh b/.github/scripts/wasm-target-test-build.sh
new file mode 100755
index 0000000000..eb486aed8e
--- /dev/null
+++ b/.github/scripts/wasm-target-test-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+GIT_ROOT=$(pwd)
+
+cd /tmp
+
+# create test project
+cargo new foobar
+cd foobar
+
+# set rust-toolchain same as "halo2"
+cp "${GIT_ROOT}/rust-toolchain" .
+
+# add wasm32-* targets
+rustup target add wasm32-unknown-unknown wasm32-wasi
+
+# add dependencies
+cargo add --path "${GIT_ROOT}/halo2_proofs" --features batch,dev-graph,gadget-traces,lookup-any-sanity-checks
+cargo add getrandom --features js --target wasm32-unknown-unknown
+
+# test build for wasm32-* targets
+cargo build --release --target wasm32-unknown-unknown
+cargo build --release --target wasm32-wasi
+
+# delete test project
+cd ../
+rm -rf foobar
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index cdce1546c0..5d92f02ffb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,10 +17,9 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]
include:
- feature_set: basic
- features: batch,dev-graph,gadget-traces
+ features: --features batch,dev-graph,gadget-traces
- feature_set: all
- features: batch,dev-graph,gadget-traces,test-dev-graph,thread-safe-region,sanity-checks,circuit-params
-
+ features: --all-features
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
@@ -30,7 +29,18 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: test
- args: --verbose --release --workspace --no-default-features --features "${{ matrix.features }}"
+ args: --verbose --release --workspace --no-default-features ${{ matrix.features }}
+
+ examples:
+ name: Run the examples
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions-rs/toolchain@v1
+ - name: Run examples
+ run: |
+ .github/scripts/run-examples.sh
build:
name: Build target ${{ matrix.target }}
@@ -46,13 +56,12 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
override: false
+ default: true
- name: Add target
run: rustup target add ${{ matrix.target }}
- - name: cargo build
- uses: actions-rs/cargo@v1
- with:
- command: build
- args: --no-default-features --features batch,dev-graph,gadget-traces --target ${{ matrix.target }}
+ - name: Run script file
+ run: |
+ .github/scripts/wasm-target-test-build.sh
bitrot:
name: Bitrot check
diff --git a/.github/workflows/docs-ghpages.yml b/.github/workflows/docs-ghpages.yml
index 358c8f2950..56dd07cbd9 100644
--- a/.github/workflows/docs-ghpages.yml
+++ b/.github/workflows/docs-ghpages.yml
@@ -16,9 +16,17 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
- toolchain: nightly
+ toolchain: nightly-2024-05-17
override: true
+ - name: Copy the html file to workspace crates
+ run: |
+ for cargo_toml in $(find . -name Cargo.toml); do
+ crate_dir=$(dirname $cargo_toml)
+ cp .github/katex-header.html $crate_dir
+ echo "Copied html file to $crate_dir"
+ done
+
- name: Build latest rustdocs
uses: actions-rs/cargo@v1
with:
@@ -33,8 +41,16 @@ jobs:
cp -R ./target/doc ./docs
echo "" > ./docs/index.html
+ - name: Delete the html files copied to every crate
+ run: |
+ for cargo_toml in $(find . -name Cargo.toml); do
+ crate_dir=$(dirname $cargo_toml)
+ rm -f $crate_dir/katex-header.html
+ echo "Deleted html file in $crate_dir"
+ done
+
- name: Deploy to GitHub Pages
- uses: peaceiris/actions-gh-pages@v3
+ uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./docs
diff --git a/.gitignore b/.gitignore
index f2af733bf1..123de7b254 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,6 @@ Cargo.lock
.vscode
**/*.html
.DS_Store
+
+layout.png
+serialization-test.pk
diff --git a/Cargo.toml b/Cargo.toml
index b44700ec43..1e75bcb87e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,4 +2,10 @@
members = [
"halo2",
"halo2_proofs",
+ "halo2_frontend",
+ "halo2_middleware",
+ "halo2_backend",
+ "halo2_debug",
+ "p3_frontend",
]
+resolver = "2"
diff --git a/README.md b/README.md
index 3c7aa59572..da1d24b174 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,15 @@
-# halo2 [![Crates.io](https://img.shields.io/crates/v/halo2.svg)](https://crates.io/crates/halo2) #
+# halo2
## [Documentation](https://privacy-scaling-explorations.github.io/halo2/halo2_proofs)
+This repository contains the [halo2](https://github.com/zcash/halo2) fork from
+PSE and includes contributions from the community.
+
+We use the `main` branch for development, which means it may contain
+unstable/unfinished features. For end-users we recommend using the tag releases
+which can be seen as curated checkpoints with some level of guarantee of
+stability.
+
For experimental features `privacy-scaling-explorations/halo2` fork adds, please refer to [`experimental-features.md`](./book/src/user/experimental-features.md).
## Minimum Supported Rust Version
diff --git a/book/src/user/experimental-features.md b/book/src/user/experimental-features.md
index 78ea802ce4..5677fd38b9 100644
--- a/book/src/user/experimental-features.md
+++ b/book/src/user/experimental-features.md
@@ -138,3 +138,38 @@ For some use cases that want to keep configured `ConstraintSystem` unchanged the
## `Evaluator` and `evaluate_h`
They are introduced to improve quotient computation speed and memory usage for circuit with complicated `Expression`.
+
+## Modular design (frontend-backend split)
+
+The halo2 implementation has been split into two separate parts: the frontend
+and the backend, following these definitions:
+- frontend: allows the user to specify the circuit logic and its satisfying
+ witness. It must provide a way to translate this logic into a low level
+ arithmetization format specified in the middleware module.
+- backend: the proving system implementation that receives the middleware
+ circuit arithmetization and performs the following tasks:
+ - Generate the setup (proving and verifying keys)
+ - Generate a proof (with witness as input)
+ - Verify a proof
+
+A note on naming: "halo2" can mean different things:
+- halo2 proof system, the protocol
+- halo2 proof system implementation, the backend
+- halo2 circuit library, the frontend (includes the halo2 circuit API, the
+ layouter, the selector to fixed column transformation, etc.)
+- halo2 full-stack, the proof system full stack (the combination of the backend
+ and frontend)
+
+Currently the backend implements the "original" halo2 proof system extended
+with the features discussed in this document. Nevertheless, the public
+interface that the backend uses is generic for plonkish arithmetization. This
+allows for alternative frontend implementations as well as alternative plonkish
+proof system implementations. The middleware contains the type definitions
+used to connect the frontend and backend.
+
+Summary of crates:
+- `halo2_frontend`: library used to define circuits and calculate their witness.
+- `halo2_backend`: implementation of the halo2 proof system (the protocol).
+- `halo2_middleware`: type definitions used to interface the backend with the frontend.
+- `halo2_proofs`: legacy API built by re-exporting from the frontend and
+ backend as well as function wrappers.
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000000..ec65ebc105
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,8 @@
+ignore:
+ - halo2_proofs/benches
+ - halo2_proofs/examples
+ - halo2_proofs/tests
+ - halo2_frontend/src/dev/graph
+ - halo2_frontend/src/dev/graph.rs
+ - halo2_frontend/src/dev/costs.rs
+ - halo2_frontend/src/dev/cost_model.rs
\ No newline at end of file
diff --git a/halo2/Cargo.toml b/halo2/Cargo.toml
index 5618165271..bea2227877 100644
--- a/halo2/Cargo.toml
+++ b/halo2/Cargo.toml
@@ -5,11 +5,11 @@ authors = [
"Jack Grigg ",
]
edition = "2021"
-rust-version = "1.56.1"
+rust-version = "1.73.0"
description = "[BETA] Fast zero-knowledge proof-carrying data implementation with no trusted setup"
license = "MIT OR Apache-2.0"
-repository = "https://github.com/zcash/halo2"
-documentation = "https://docs.rs/halo2"
+repository = "https://github.com/privacy-scaling-explorations/halo2"
+documentation = "https://privacy-scaling-explorations.github.io/halo2/"
readme = "../README.md"
categories = ["cryptography"]
keywords = ["halo", "proofs", "recursive", "zkp", "zkSNARKs"]
@@ -19,7 +19,7 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"]
[dependencies]
-halo2_proofs = { version = "0.3", path = "../halo2_proofs", default-features = false }
+halo2_proofs = { version = "0.4", path = "../halo2_proofs", default-features = false }
[lib]
bench = false
diff --git a/halo2_backend/Cargo.toml b/halo2_backend/Cargo.toml
new file mode 100644
index 0000000000..4ef90ae843
--- /dev/null
+++ b/halo2_backend/Cargo.toml
@@ -0,0 +1,62 @@
+[package]
+name = "halo2_backend"
+version = "0.4.0"
+authors = [
+ "Sean Bowe ",
+ "Ying Tong Lai ",
+ "Daira Hopwood ",
+ "Jack Grigg ",
+ "Privacy Scaling Explorations team",
+]
+edition = "2021"
+rust-version = "1.73.0"
+description = """
+Halo2 backend implementation. This package implements the halo2 proof system which includes setup (key generation), proving and verifying.
+"""
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/privacy-scaling-explorations/halo2"
+documentation = "https://privacy-scaling-explorations.github.io/halo2/"
+readme = "README.md"
+categories = ["cryptography"]
+keywords = ["halo", "proofs", "zkp", "zkSNARKs"]
+
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs", "--html-in-header", "katex-header.html"]
+
+[dependencies]
+backtrace = { version = "0.3", optional = true }
+ff = "0.13"
+group = "0.13"
+halo2curves = { version = "0.7.0", default-features = false }
+rand_core = { version = "0.6", default-features = false }
+tracing = "0.1"
+blake2b_simd = "1"
+sha3 = "0.9.1"
+rand_chacha = "0.3"
+serde = { version = "1", optional = true, features = ["derive"] }
+serde_derive = { version = "1", optional = true}
+rayon = "1.8"
+halo2_middleware = { path = "../halo2_middleware" }
+num-bigint = "0.4.6"
+
+[dev-dependencies]
+assert_matches = "1.5"
+criterion = "0.3"
+gumdrop = "0.8"
+proptest = "1"
+rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
+serde_json = "1"
+
+[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies]
+getrandom = { version = "0.2", features = ["js"] }
+
+[features]
+default = ["batch", "bits"]
+bits = ["halo2curves/bits"]
+sanity-checks = []
+batch = ["rand_core/getrandom"]
+derive_serde = ["halo2curves/derive_serde"]
+
+[lib]
+bench = false
diff --git a/halo2_proofs/src/arithmetic.rs b/halo2_backend/src/arithmetic.rs
similarity index 51%
rename from halo2_proofs/src/arithmetic.rs
rename to halo2_backend/src/arithmetic.rs
index 0163e355eb..34e746f327 100644
--- a/halo2_proofs/src/arithmetic.rs
+++ b/halo2_backend/src/arithmetic.rs
@@ -1,19 +1,21 @@
//! This module provides common utilities, traits and structures for group,
//! field and polynomial arithmetic.
-use super::multicore;
-pub use ff::Field;
use group::{
ff::{BatchInvert, PrimeField},
- Curve, Group, GroupOpsOwned, ScalarMulOwned,
+ Curve, GroupOpsOwned, ScalarMulOwned,
};
+pub use halo2_middleware::ff::Field;
+use halo2_middleware::multicore;
+use halo2curves::fft::best_fft;
pub use halo2curves::{CurveAffine, CurveExt};
/// This represents an element of a group with basic operations that can be
/// performed. This allows an FFT implementation (for example) to operate
/// generically over either a field or elliptic curve group.
-pub trait FftGroup:
+#[allow(dead_code)]
+pub(crate) trait FftGroup:
Copy + Send + Sync + 'static + GroupOpsOwned + ScalarMulOwned
{
}
@@ -25,271 +27,8 @@ where
{
}
-fn multiexp_serial(coeffs: &[C::Scalar], bases: &[C], acc: &mut C::Curve) {
- let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect();
-
- let c = if bases.len() < 4 {
- 1
- } else if bases.len() < 32 {
- 3
- } else {
- (f64::from(bases.len() as u32)).ln().ceil() as usize
- };
-
- fn get_at(segment: usize, c: usize, bytes: &F::Repr) -> usize {
- let skip_bits = segment * c;
- let skip_bytes = skip_bits / 8;
-
- if skip_bytes >= (F::NUM_BITS as usize + 7) / 8 {
- return 0;
- }
-
- let mut v = [0; 8];
- for (v, o) in v.iter_mut().zip(bytes.as_ref()[skip_bytes..].iter()) {
- *v = *o;
- }
-
- let mut tmp = u64::from_le_bytes(v);
- tmp >>= skip_bits - (skip_bytes * 8);
- tmp %= 1 << c;
-
- tmp as usize
- }
-
- let segments = (C::Scalar::NUM_BITS as usize / c) + 1;
-
- for current_segment in (0..segments).rev() {
- for _ in 0..c {
- *acc = acc.double();
- }
-
- #[derive(Clone, Copy)]
- enum Bucket {
- None,
- Affine(C),
- Projective(C::Curve),
- }
-
- impl Bucket {
- fn add_assign(&mut self, other: &C) {
- *self = match *self {
- Bucket::None => Bucket::Affine(*other),
- Bucket::Affine(a) => Bucket::Projective(a + *other),
- Bucket::Projective(mut a) => {
- a += *other;
- Bucket::Projective(a)
- }
- }
- }
-
- fn add(self, mut other: C::Curve) -> C::Curve {
- match self {
- Bucket::None => other,
- Bucket::Affine(a) => {
- other += a;
- other
- }
- Bucket::Projective(a) => other + &a,
- }
- }
- }
-
- let mut buckets: Vec> = vec![Bucket::None; (1 << c) - 1];
-
- for (coeff, base) in coeffs.iter().zip(bases.iter()) {
- let coeff = get_at::(current_segment, c, coeff);
- if coeff != 0 {
- buckets[coeff - 1].add_assign(base);
- }
- }
-
- // Summation by parts
- // e.g. 3a + 2b + 1c = a +
- // (a) + b +
- // ((a) + b) + c
- let mut running_sum = C::Curve::identity();
- for exp in buckets.into_iter().rev() {
- running_sum = exp.add(running_sum);
- *acc += &running_sum;
- }
- }
-}
-
-/// Performs a small multi-exponentiation operation.
-/// Uses the double-and-add algorithm with doublings shared across points.
-pub fn small_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve {
- let coeffs: Vec<_> = coeffs.iter().map(|a| a.to_repr()).collect();
- let mut acc = C::Curve::identity();
-
- // for byte idx
- for byte_idx in (0..((C::Scalar::NUM_BITS as usize + 7) / 8)).rev() {
- // for bit idx
- for bit_idx in (0..8).rev() {
- acc = acc.double();
- // for each coeff
- for coeff_idx in 0..coeffs.len() {
- let byte = coeffs[coeff_idx].as_ref()[byte_idx];
- if ((byte >> bit_idx) & 1) != 0 {
- acc += bases[coeff_idx];
- }
- }
- }
- }
-
- acc
-}
-
-/// Performs a multi-exponentiation operation.
-///
-/// This function will panic if coeffs and bases have a different length.
-///
-/// This will use multithreading if beneficial.
-pub fn best_multiexp(coeffs: &[C::Scalar], bases: &[C]) -> C::Curve {
- assert_eq!(coeffs.len(), bases.len());
-
- let num_threads = multicore::current_num_threads();
- if coeffs.len() > num_threads {
- let chunk = coeffs.len() / num_threads;
- let num_chunks = coeffs.chunks(chunk).len();
- let mut results = vec![C::Curve::identity(); num_chunks];
- multicore::scope(|scope| {
- let chunk = coeffs.len() / num_threads;
-
- for ((coeffs, bases), acc) in coeffs
- .chunks(chunk)
- .zip(bases.chunks(chunk))
- .zip(results.iter_mut())
- {
- scope.spawn(move |_| {
- multiexp_serial(coeffs, bases, acc);
- });
- }
- });
- results.iter().fold(C::Curve::identity(), |a, b| a + b)
- } else {
- let mut acc = C::Curve::identity();
- multiexp_serial(coeffs, bases, &mut acc);
- acc
- }
-}
-
-/// Performs a radix-$2$ Fast-Fourier Transformation (FFT) on a vector of size
-/// $n = 2^k$, when provided `log_n` = $k$ and an element of multiplicative
-/// order $n$ called `omega` ($\omega$). The result is that the vector `a`, when
-/// interpreted as the coefficients of a polynomial of degree $n - 1$, is
-/// transformed into the evaluations of this polynomial at each of the $n$
-/// distinct powers of $\omega$. This transformation is invertible by providing
-/// $\omega^{-1}$ in place of $\omega$ and dividing each resulting field element
-/// by $n$.
-///
-/// This will use multithreading if beneficial.
-pub fn best_fft>(a: &mut [G], omega: Scalar, log_n: u32) {
- fn bitreverse(mut n: usize, l: usize) -> usize {
- let mut r = 0;
- for _ in 0..l {
- r = (r << 1) | (n & 1);
- n >>= 1;
- }
- r
- }
-
- let threads = multicore::current_num_threads();
- let log_threads = log2_floor(threads);
- let n = a.len();
- assert_eq!(n, 1 << log_n);
-
- for k in 0..n {
- let rk = bitreverse(k, log_n as usize);
- if k < rk {
- a.swap(rk, k);
- }
- }
-
- // precompute twiddle factors
- let twiddles: Vec<_> = (0..(n / 2))
- .scan(Scalar::ONE, |w, _| {
- let tw = *w;
- *w *= ω
- Some(tw)
- })
- .collect();
-
- if log_n <= log_threads {
- let mut chunk = 2_usize;
- let mut twiddle_chunk = n / 2;
- for _ in 0..log_n {
- a.chunks_mut(chunk).for_each(|coeffs| {
- let (left, right) = coeffs.split_at_mut(chunk / 2);
-
- // case when twiddle factor is one
- let (a, left) = left.split_at_mut(1);
- let (b, right) = right.split_at_mut(1);
- let t = b[0];
- b[0] = a[0];
- a[0] += &t;
- b[0] -= &t;
-
- left.iter_mut()
- .zip(right.iter_mut())
- .enumerate()
- .for_each(|(i, (a, b))| {
- let mut t = *b;
- t *= &twiddles[(i + 1) * twiddle_chunk];
- *b = *a;
- *a += &t;
- *b -= &t;
- });
- });
- chunk *= 2;
- twiddle_chunk /= 2;
- }
- } else {
- recursive_butterfly_arithmetic(a, n, 1, &twiddles)
- }
-}
-
-/// This perform recursive butterfly arithmetic
-pub fn recursive_butterfly_arithmetic>(
- a: &mut [G],
- n: usize,
- twiddle_chunk: usize,
- twiddles: &[Scalar],
-) {
- if n == 2 {
- let t = a[1];
- a[1] = a[0];
- a[0] += &t;
- a[1] -= &t;
- } else {
- let (left, right) = a.split_at_mut(n / 2);
- multicore::join(
- || recursive_butterfly_arithmetic(left, n / 2, twiddle_chunk * 2, twiddles),
- || recursive_butterfly_arithmetic(right, n / 2, twiddle_chunk * 2, twiddles),
- );
-
- // case when twiddle factor is one
- let (a, left) = left.split_at_mut(1);
- let (b, right) = right.split_at_mut(1);
- let t = b[0];
- b[0] = a[0];
- a[0] += &t;
- b[0] -= &t;
-
- left.iter_mut()
- .zip(right.iter_mut())
- .enumerate()
- .for_each(|(i, (a, b))| {
- let mut t = *b;
- t *= &twiddles[(i + 1) * twiddle_chunk];
- *b = *a;
- *a += &t;
- *b -= &t;
- });
- }
-}
-
/// Convert coefficient bases group elements to lagrange basis by inverse FFT.
-pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec {
+pub(crate) fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec {
let n_inv = C::Scalar::TWO_INV.pow_vartime([k as u64, 0, 0, 0]);
let mut omega_inv = C::Scalar::ROOT_OF_UNITY_INV;
for _ in k..C::Scalar::S {
@@ -316,7 +55,7 @@ pub fn g_to_lagrange(g_projective: Vec, k: u32) -> Vec
}
/// This evaluates a provided polynomial (in coefficient form) at `point`.
-pub fn eval_polynomial(poly: &[F], point: F) -> F {
+pub(crate) fn eval_polynomial(poly: &[F], point: F) -> F {
fn evaluate(poly: &[F], point: F) -> F {
poly.iter()
.rev()
@@ -346,7 +85,7 @@ pub fn eval_polynomial(poly: &[F], point: F) -> F {
/// This computes the inner product of two vectors `a` and `b`.
///
/// This function will panic if the two vectors are not the same size.
-pub fn compute_inner_product(a: &[F], b: &[F]) -> F {
+pub(crate) fn compute_inner_product(a: &[F], b: &[F]) -> F {
// TODO: parallelize?
assert_eq!(a.len(), b.len());
@@ -360,7 +99,7 @@ pub fn compute_inner_product(a: &[F], b: &[F]) -> F {
/// Divides polynomial `a` in `X` by `X - b` with
/// no remainder.
-pub fn kate_division<'a, F: Field, I: IntoIterator- >(a: I, mut b: F) -> Vec
+pub(crate) fn kate_division<'a, F: Field, I: IntoIterator
- >(a: I, mut b: F) -> Vec
where
I::IntoIter: DoubleEndedIterator + ExactSizeIterator,
{
@@ -433,22 +172,10 @@ pub fn parallelize(v: &mu
});
}
-fn log2_floor(num: usize) -> u32 {
- assert!(num > 0);
-
- let mut pow = 0;
-
- while (1 << (pow + 1)) <= num {
- pow += 1;
- }
-
- pow
-}
-
/// Returns coefficients of an n - 1 degree polynomial given a set of n points
/// and their evaluations. This function will panic if two values in `points`
/// are the same.
-pub fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec {
+pub(crate) fn lagrange_interpolate(points: &[F], evals: &[F]) -> Vec {
assert_eq!(points.len(), evals.len());
if points.len() == 1 {
// Constant polynomial
@@ -527,11 +254,24 @@ pub(crate) fn powers(base: F) -> impl Iterator
- {
std::iter::successors(Some(F::ONE), move |power| Some(base * power))
}
+pub(crate) fn truncate(scalar: F) -> F {
+ let nb_bytes = F::NUM_BITS.div_ceil(8).div_ceil(2) as usize;
+ let bytes = scalar.to_repr().as_ref()[..nb_bytes].to_vec();
+ let bi = BigUint::from_bytes_le(&bytes);
+ F::from_str_vartime(&BigUint::to_string(&bi)).unwrap()
+}
+
+pub(crate) fn truncated_powers(base: F) -> impl Iterator
- {
+ powers(base).map(truncate)
+}
+
+use num_bigint::BigUint;
+
#[cfg(test)]
use rand_core::OsRng;
#[cfg(test)]
-use crate::halo2curves::pasta::Fp;
+use halo2curves::pasta::Fp;
#[test]
fn test_lagrange_interpolate() {
diff --git a/halo2_proofs/src/helpers.rs b/halo2_backend/src/helpers.rs
similarity index 84%
rename from halo2_proofs/src/helpers.rs
rename to halo2_backend/src/helpers.rs
index faf7351a3e..ce69fd0516 100644
--- a/halo2_proofs/src/helpers.rs
+++ b/halo2_backend/src/helpers.rs
@@ -1,8 +1,9 @@
-use crate::poly::Polynomial;
-use ff::PrimeField;
+use halo2_middleware::ff::PrimeField;
use halo2curves::{serde::SerdeObject, CurveAffine};
use std::io;
+use crate::poly::Polynomial;
+
/// This enum specifies how various types are serialized and deserialized.
#[derive(Clone, Copy, Debug)]
pub enum SerdeFormat {
@@ -20,7 +21,7 @@ pub enum SerdeFormat {
}
// Keep this trait for compatibility with IPA serialization
-pub(crate) trait CurveRead: CurveAffine {
+pub trait CurveRead: CurveAffine {
/// Reads a compressed element from the buffer and attempts to parse it
/// using `from_bytes`.
fn read(reader: &mut R) -> io::Result {
@@ -36,9 +37,9 @@ pub trait SerdeCurveAffine: CurveAffine + SerdeObject {
/// Reads an element from the buffer and parses it according to the `format`:
/// - `Processed`: Reads a compressed curve element and decompress it
/// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form.
- /// Checks that field elements are less than modulus, and then checks that the point is on the curve.
+ /// Checks that field elements are less than modulus, and then checks that the point is on the curve.
/// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form;
- /// does not perform any checks
+ /// does not perform any checks
fn read(reader: &mut R, format: SerdeFormat) -> io::Result {
match format {
SerdeFormat::Processed => ::read(reader),
@@ -69,9 +70,9 @@ impl SerdeCurveAffine for C {}
pub trait SerdePrimeField: PrimeField + SerdeObject {
/// Reads a field element as bytes from the buffer according to the `format`:
/// - `Processed`: Reads a field element in standard form, with endianness specified by the
- /// `PrimeField` implementation, and checks that the element is less than the modulus.
+ /// `PrimeField` implementation, and checks that the element is less than the modulus.
/// - `RawBytes`: Reads a field element from raw bytes in its internal Montgomery representations,
- /// and checks that the element is less than the modulus.
+ /// and checks that the element is less than the modulus.
/// - `RawBytesUnchecked`: Reads a field element in Montgomery form and performs no checks.
fn read(reader: &mut R, format: SerdeFormat) -> io::Result {
match format {
@@ -89,9 +90,9 @@ pub trait SerdePrimeField: PrimeField + SerdeObject {
/// Writes a field element as bytes to the buffer according to the `format`:
/// - `Processed`: Writes a field element in standard form, with endianness specified by the
- /// `PrimeField` implementation.
+ /// `PrimeField` implementation.
/// - Otherwise: Writes a field element into raw bytes in its internal Montgomery representation,
- /// WITHOUT performing the expensive Montgomery reduction.
+ /// WITHOUT performing the expensive Montgomery reduction.
fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> {
match format {
SerdeFormat::Processed => writer.write_all(self.to_repr().as_ref()),
@@ -101,25 +102,6 @@ pub trait SerdePrimeField: PrimeField + SerdeObject {
}
impl SerdePrimeField for F {}
-/// Convert a slice of `bool` into a `u8`.
-///
-/// Panics if the slice has length greater than 8.
-pub fn pack(bits: &[bool]) -> u8 {
- let mut value = 0u8;
- assert!(bits.len() <= 8);
- for (bit_index, bit) in bits.iter().enumerate() {
- value |= (*bit as u8) << bit_index;
- }
- value
-}
-
-/// Writes the first `bits.len()` bits of a `u8` into `bits`.
-pub fn unpack(byte: u8, bits: &mut [bool]) {
- for (bit_index, bit) in bits.iter_mut().enumerate() {
- *bit = (byte >> bit_index) & 1 == 1;
- }
-}
-
/// Reads a vector of polynomials from buffer
pub(crate) fn read_polynomial_vec(
reader: &mut R,
@@ -150,5 +132,5 @@ pub(crate) fn write_polynomial_slice(
/// Gets the total number of bytes of a slice of polynomials, assuming all polynomials are the same length
pub(crate) fn polynomial_slice_byte_length(slice: &[Polynomial]) -> usize {
let field_len = F::default().to_repr().as_ref().len();
- 4 + slice.len() * (4 + field_len * slice.get(0).map(|poly| poly.len()).unwrap_or(0))
+ 4 + slice.len() * (4 + field_len * slice.first().map(|poly| poly.len()).unwrap_or(0))
}
diff --git a/halo2_backend/src/lib.rs b/halo2_backend/src/lib.rs
new file mode 100644
index 0000000000..e11dc54525
--- /dev/null
+++ b/halo2_backend/src/lib.rs
@@ -0,0 +1,8 @@
+pub mod arithmetic;
+pub mod helpers;
+pub mod plonk;
+pub mod poly;
+pub mod transcript;
+
+// Internal re-exports
+pub use halo2_middleware::multicore;
diff --git a/halo2_backend/src/plonk.rs b/halo2_backend/src/plonk.rs
new file mode 100644
index 0000000000..62223ab7cf
--- /dev/null
+++ b/halo2_backend/src/plonk.rs
@@ -0,0 +1,410 @@
+//! This module provides an implementation of a variant of (Turbo)[PLONK][plonk]
+//! that is designed specifically for the polynomial commitment scheme described
+//! in the [Halo][halo] paper.
+//!
+//! [halo]: https://eprint.iacr.org/2019/1021
+//! [plonk]: https://eprint.iacr.org/2019/953
+
+use blake2b_simd::Params as Blake2bParams;
+use group::ff::{Field, FromUniformBytes, PrimeField};
+
+use crate::arithmetic::CurveAffine;
+use crate::helpers::{
+ polynomial_slice_byte_length, read_polynomial_vec, write_polynomial_slice, SerdeCurveAffine,
+ SerdeFormat, SerdePrimeField,
+};
+pub use crate::plonk::circuit::ConstraintSystemBack;
+use crate::plonk::circuit::PinnedConstraintSystem;
+use crate::poly::{
+ Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, PinnedEvaluationDomain,
+ Polynomial,
+};
+use crate::transcript::{ChallengeScalar, EncodedChallenge, Transcript};
+pub use circuit::{ExpressionBack, VarBack};
+pub(crate) use evaluation::Evaluator;
+
+use std::io;
+
+mod circuit;
+mod error;
+mod evaluation;
+pub mod keygen;
+mod lookup;
+mod permutation;
+pub mod prover;
+mod shuffle;
+mod vanishing;
+pub mod verifier;
+
+pub use error::*;
+
+/// This is a verifying key which allows for the verification of proofs for a
+/// particular circuit.
+#[derive(Clone, Debug)]
+pub struct VerifyingKey {
+ /// Evaluation domain
+ domain: EvaluationDomain,
+ /// Commitments to fixed columns
+ fixed_commitments: Vec,
+ /// Permutation verifying key
+ permutation: permutation::VerifyingKey,
+ /// Constraint system
+ cs: ConstraintSystemBack,
+ /// Cached maximum degree of `cs` (which doesn't change after construction).
+ cs_degree: usize,
+ /// The representative of this `VerifyingKey` in transcripts.
+ transcript_repr: C::Scalar,
+}
+
+// Current version of the VK
+const VERSION: u8 = 0x04;
+
+impl VerifyingKey
+where
+ C::Scalar: SerdePrimeField + FromUniformBytes<64>,
+{
+ /// Writes a verifying key to a buffer.
+ ///
+ /// Writes a curve element according to `format`:
+ /// - `Processed`: Writes a compressed curve element with coordinates in standard form.
+ /// Writes a field element in standard form, with endianness specified by the
+ /// `PrimeField` implementation.
+ /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form
+ /// Writes a field element into raw bytes in its internal Montgomery representation,
+ /// WITHOUT performing the expensive Montgomery reduction.
+ pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> {
+ // Version byte that will be checked on read.
+ writer.write_all(&[VERSION])?;
+ let k = &self.domain.k();
+ assert!(*k <= C::Scalar::S);
+ // k value fits in 1 byte
+ writer.write_all(&[*k as u8])?;
+ writer.write_all(&(self.fixed_commitments.len() as u32).to_le_bytes())?;
+ for commitment in &self.fixed_commitments {
+ commitment.write(writer, format)?;
+ }
+ self.permutation.write(writer, format)?;
+
+ Ok(())
+ }
+
+ /// Reads a verification key from a buffer.
+ ///
+ /// Reads a curve element from the buffer and parses it according to the `format`:
+ /// - `Processed`: Reads a compressed curve element and decompresses it.
+ /// Reads a field element in standard form, with endianness specified by the
+ /// `PrimeField` implementation, and checks that the element is less than the modulus.
+ /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form.
+ /// Checks that field elements are less than modulus, and then checks that the point is on the curve.
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form;
+ /// does not perform any checks
+ pub fn read(
+ reader: &mut R,
+ format: SerdeFormat,
+ cs: ConstraintSystemBack,
+ ) -> io::Result {
+ let mut version_byte = [0u8; 1];
+ reader.read_exact(&mut version_byte)?;
+ if VERSION != version_byte[0] {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ "unexpected version byte",
+ ));
+ }
+
+ let mut k = [0u8; 1];
+ reader.read_exact(&mut k)?;
+ let k = u8::from_le_bytes(k);
+ if k as u32 > C::Scalar::S {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!(
+ "circuit size value (k): {} exceeds maximum: {}",
+ k,
+ C::Scalar::S
+ ),
+ ));
+ }
+ let domain = keygen::create_domain::(&cs, k as u32);
+ let mut num_fixed_columns = [0u8; 4];
+ reader.read_exact(&mut num_fixed_columns)?;
+ let num_fixed_columns = u32::from_le_bytes(num_fixed_columns);
+
+ let fixed_commitments: Vec<_> = (0..num_fixed_columns)
+ .map(|_| C::read(reader, format))
+ .collect::>()?;
+
+ let permutation = permutation::VerifyingKey::read(reader, &cs.permutation, format)?;
+
+ Ok(Self::from_parts(domain, fixed_commitments, permutation, cs))
+ }
+
+ /// Writes a verifying key to a vector of bytes using [`Self::write`].
+ pub fn to_bytes(&self, format: SerdeFormat) -> Vec {
+ let mut bytes = Vec::::with_capacity(self.bytes_length(format));
+ Self::write(self, &mut bytes, format).expect("Writing to vector should not fail");
+ bytes
+ }
+
+ /// Reads a verification key from a slice of bytes using [`Self::read`].
+ pub fn from_bytes(
+ mut bytes: &[u8],
+ format: SerdeFormat,
+ cs: ConstraintSystemBack,
+ ) -> io::Result {
+ Self::read(&mut bytes, format, cs)
+ }
+}
+
+impl VerifyingKey {
+ fn bytes_length(&self, format: SerdeFormat) -> usize
+ where
+ C: SerdeCurveAffine,
+ {
+ 6 // bytes used for encoding VERSION(u8), "domain.k"(u8) & num_fixed_columns(u32)
+ + (self.fixed_commitments.len() * C::byte_length(format))
+ + self.permutation.bytes_length(format)
+ }
+
+ fn from_parts(
+ domain: EvaluationDomain,
+ fixed_commitments: Vec,
+ permutation: permutation::VerifyingKey,
+ cs: ConstraintSystemBack,
+ ) -> Self
+ where
+ C::ScalarExt: FromUniformBytes<64>,
+ {
+ // Compute cached values.
+ let cs_degree = cs.degree();
+
+ let mut vk = Self {
+ domain,
+ fixed_commitments,
+ permutation,
+ cs,
+ cs_degree,
+ // Temporary, this is not pinned.
+ transcript_repr: C::Scalar::ZERO,
+ };
+
+ let mut hasher = Blake2bParams::new()
+ .hash_length(64)
+ .personal(b"Halo2-Verify-Key")
+ .to_state();
+
+ let s = format!("{:?}", vk.pinned());
+
+ hasher.update(&(s.len() as u64).to_le_bytes());
+ hasher.update(s.as_bytes());
+
+ // Hash in final Blake2bState
+ vk.transcript_repr = C::Scalar::from_uniform_bytes(hasher.finalize().as_array());
+
+ vk
+ }
+
+ /// Hashes a verification key into a transcript.
+ pub fn hash_into, T: Transcript>(
+ &self,
+ transcript: &mut T,
+ ) -> io::Result<()> {
+ transcript.common_scalar(self.transcript_repr)?;
+
+ Ok(())
+ }
+
+ /// Obtains a pinned representation of this verification key that contains
+ /// the minimal information necessary to reconstruct the verification key.
+ pub fn pinned(&self) -> PinnedVerificationKey<'_, C> {
+ PinnedVerificationKey {
+ base_modulus: C::Base::MODULUS,
+ scalar_modulus: C::Scalar::MODULUS,
+ domain: self.domain.pinned(),
+ fixed_commitments: &self.fixed_commitments,
+ permutation: &self.permutation,
+ cs: self.cs.pinned(),
+ }
+ }
+
+ /// Returns commitments of fixed polynomials
+ pub fn fixed_commitments(&self) -> &Vec {
+ &self.fixed_commitments
+ }
+
+ /// Returns the permutation commitments
+ pub fn permutation(&self) -> &permutation::VerifyingKey {
+ &self.permutation
+ }
+
+ /// Returns `ConstraintSystem`
+ pub(crate) fn cs(&self) -> &ConstraintSystemBack {
+ &self.cs
+ }
+
+ /// Returns representative of this `VerifyingKey` in transcripts
+ pub fn transcript_repr(&self) -> C::Scalar {
+ self.transcript_repr
+ }
+}
+
+/// Minimal representation of a verification key that can be used to identify
+/// its active contents.
+#[allow(dead_code)]
+#[derive(Debug)]
+pub struct PinnedVerificationKey<'a, C: CurveAffine> {
+ base_modulus: &'static str,
+ scalar_modulus: &'static str,
+ domain: PinnedEvaluationDomain<'a, C::Scalar>,
+ cs: PinnedConstraintSystem<'a, C::Scalar>,
+ fixed_commitments: &'a Vec,
+ permutation: &'a permutation::VerifyingKey,
+}
+
+/// This is a proving key which allows for the creation of proofs for a
+/// particular circuit.
+#[derive(Clone, Debug)]
+pub struct ProvingKey {
+ vk: VerifyingKey,
+ l0: Polynomial,
+ l_last: Polynomial,
+ l_active_row: Polynomial,
+ fixed_values: Vec>,
+ fixed_polys: Vec>,
+ fixed_cosets: Vec>,
+ permutation: permutation::ProvingKey,
+ ev: Evaluator,
+}
+
+impl ProvingKey
+where
+ C::Scalar: FromUniformBytes<64>,
+{
+ /// Get the underlying [`VerifyingKey`].
+ pub fn get_vk(&self) -> &VerifyingKey {
+ &self.vk
+ }
+
+ /// Gets the total number of bytes in the serialization of `self`
+ fn bytes_length(&self, format: SerdeFormat) -> usize
+ where
+ C: SerdeCurveAffine,
+ {
+ let scalar_len = C::Scalar::default().to_repr().as_ref().len();
+ self.vk.bytes_length(format)
+ + 12 // bytes used for encoding the length(u32) of "l0", "l_last" & "l_active_row" polys
+ + scalar_len * (self.l0.len() + self.l_last.len() + self.l_active_row.len())
+ + polynomial_slice_byte_length(&self.fixed_values)
+ + polynomial_slice_byte_length(&self.fixed_polys)
+ + polynomial_slice_byte_length(&self.fixed_cosets)
+ + self.permutation.bytes_length()
+ }
+}
+
+impl ProvingKey
+where
+ C::Scalar: SerdePrimeField + FromUniformBytes<64>,
+{
+ /// Writes a proving key to a buffer.
+ ///
+ /// Writes a curve element according to `format`:
+ /// - `Processed`: Writes a compressed curve element with coordinates in standard form.
+ /// Writes a field element in standard form, with endianness specified by the
+ /// `PrimeField` implementation.
+ /// - Otherwise: Writes an uncompressed curve element with coordinates in Montgomery form
+ /// Writes a field element into raw bytes in its internal Montgomery representation,
+ /// WITHOUT performing the expensive Montgomery reduction.
+ /// Does so by first writing the verifying key and then serializing the rest of the data (in the form of field polynomials)
+ pub fn write(&self, writer: &mut W, format: SerdeFormat) -> io::Result<()> {
+ self.vk.write(writer, format)?;
+ self.l0.write(writer, format)?;
+ self.l_last.write(writer, format)?;
+ self.l_active_row.write(writer, format)?;
+ write_polynomial_slice(&self.fixed_values, writer, format)?;
+ write_polynomial_slice(&self.fixed_polys, writer, format)?;
+ write_polynomial_slice(&self.fixed_cosets, writer, format)?;
+ self.permutation.write(writer, format)?;
+ Ok(())
+ }
+
+ /// Reads a proving key from a buffer.
+ /// Does so by reading verification key first, and then deserializing the rest of the file into the remaining proving key data.
+ ///
+ /// Reads a curve element from the buffer and parses it according to the `format`:
+ /// - `Processed`: Reads a compressed curve element and decompresses it.
+ /// Reads a field element in standard form, with endianness specified by the
+ /// `PrimeField` implementation, and checks that the element is less than the modulus.
+ /// - `RawBytes`: Reads an uncompressed curve element with coordinates in Montgomery form.
+ /// Checks that field elements are less than modulus, and then checks that the point is on the curve.
+ /// - `RawBytesUnchecked`: Reads an uncompressed curve element with coordinates in Montgomery form;
+ /// does not perform any checks
+ pub fn read(
+ reader: &mut R,
+ format: SerdeFormat,
+ cs: ConstraintSystemBack,
+ ) -> io::Result {
+ let vk = VerifyingKey::::read::(reader, format, cs)?;
+ let l0 = Polynomial::read(reader, format)?;
+ let l_last = Polynomial::read(reader, format)?;
+ let l_active_row = Polynomial::read(reader, format)?;
+ let fixed_values = read_polynomial_vec(reader, format)?;
+ let fixed_polys = read_polynomial_vec(reader, format)?;
+ let fixed_cosets = read_polynomial_vec(reader, format)?;
+ let permutation = permutation::ProvingKey::read(reader, format)?;
+ let ev = Evaluator::new(vk.cs());
+ Ok(Self {
+ vk,
+ l0,
+ l_last,
+ l_active_row,
+ fixed_values,
+ fixed_polys,
+ fixed_cosets,
+ permutation,
+ ev,
+ })
+ }
+
+ /// Writes a proving key to a vector of bytes using [`Self::write`].
+ pub fn to_bytes(&self, format: SerdeFormat) -> Vec {
+ let mut bytes = Vec::::with_capacity(self.bytes_length(format));
+ Self::write(self, &mut bytes, format).expect("Writing to vector should not fail");
+ bytes
+ }
+
+ /// Reads a proving key from a slice of bytes using [`Self::read`].
+ pub fn from_bytes(
+ mut bytes: &[u8],
+ format: SerdeFormat,
+ cs: ConstraintSystemBack,
+ ) -> io::Result {
+ Self::read(&mut bytes, format, cs)
+ }
+}
+
+impl VerifyingKey {
+ /// Get the underlying [`EvaluationDomain`].
+ pub fn get_domain(&self) -> &EvaluationDomain {
+ &self.domain
+ }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct Theta;
+pub(crate) type ChallengeTheta = ChallengeScalar;
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct Beta;
+pub(crate) type ChallengeBeta = ChallengeScalar;
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct Gamma;
+pub(crate) type ChallengeGamma = ChallengeScalar;
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct Y;
+pub(crate) type ChallengeY = ChallengeScalar;
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct X;
+pub(crate) type ChallengeX = ChallengeScalar;
diff --git a/halo2_backend/src/plonk/circuit.rs b/halo2_backend/src/plonk/circuit.rs
new file mode 100644
index 0000000000..ec4972e164
--- /dev/null
+++ b/halo2_backend/src/plonk/circuit.rs
@@ -0,0 +1,421 @@
+use group::ff::Field;
+use halo2_middleware::circuit::{Any, ChallengeMid, ColumnMid, Gate};
+use halo2_middleware::expression::{Expression, Variable};
+use halo2_middleware::poly::Rotation;
+use halo2_middleware::{lookup, permutation::ArgumentMid, shuffle};
+
+// TODO: Reuse ColumnMid inside this.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct QueryBack {
+ /// Query index
+ pub(crate) index: usize,
+ /// Column index
+ pub(crate) column_index: usize,
+ /// The type of the column.
+ pub(crate) column_type: Any,
+ /// Rotation of this query
+ pub(crate) rotation: Rotation,
+}
+
+impl QueryBack {
+ /// Query index
+ pub fn index(&self) -> usize {
+ self.index
+ }
+
+ /// Column index
+ pub fn column_index(&self) -> usize {
+ self.column_index
+ }
+
+ /// The type of the column
+ pub fn column_type(&self) -> Any {
+ self.column_type
+ }
+
+ /// Rotation of this query
+ pub fn rotation(&self) -> Rotation {
+ self.rotation
+ }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum VarBack {
+ /// This is a generic column query
+ Query(QueryBack),
+ /// This is a challenge
+ Challenge(ChallengeMid),
+}
+
+impl std::fmt::Display for VarBack {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+impl Variable for VarBack {
+ fn degree(&self) -> usize {
+ match self {
+ VarBack::Query(_) => 1,
+ VarBack::Challenge(_) => 0,
+ }
+ }
+
+ fn complexity(&self) -> usize {
+ match self {
+ VarBack::Query(_) => 1,
+ VarBack::Challenge(_) => 0,
+ }
+ }
+
+ fn write_identifier(&self, writer: &mut W) -> std::io::Result<()> {
+ write!(writer, "{}", self)
+ }
+}
+
+pub type ExpressionBack = Expression;
+pub(crate) type GateBack = Gate;
+pub(crate) type LookupArgumentBack = lookup::Argument;
+pub(crate) type ShuffleArgumentBack = shuffle::Argument;
+pub type PermutationArgumentBack = ArgumentMid;
+
+/// This is a description of the circuit environment, such as the gate, column and permutation
+/// arrangements. This type is internal to the backend and will appear in the verifying key.
+#[derive(Debug, Clone)]
+pub struct ConstraintSystemBack {
+ pub(crate) num_fixed_columns: usize,
+ pub(crate) num_advice_columns: usize,
+ pub(crate) num_instance_columns: usize,
+ pub(crate) num_challenges: usize,
+
+ /// Contains the index of each advice column that is left unblinded.
+ pub(crate) unblinded_advice_columns: Vec,
+
+ /// Contains the phase for each advice column. Should have same length as num_advice_columns.
+ pub(crate) advice_column_phase: Vec,
+ /// Contains the phase for each challenge. Should have same length as num_challenges.
+ pub(crate) challenge_phase: Vec,
+
+ pub(crate) gates: Vec>,
+ pub advice_queries: Vec<(ColumnMid, Rotation)>,
+ // Contains an integer for each advice column
+ // identifying how many distinct queries it has
+ // so far; should be same length as num_advice_columns.
+ pub(crate) num_advice_queries: Vec,
+ pub(crate) instance_queries: Vec<(ColumnMid, Rotation)>,
+ pub fixed_queries: Vec<(ColumnMid, Rotation)>,
+
+ // Permutation argument for performing equality constraints
+ pub(crate) permutation: PermutationArgumentBack,
+
+ // Vector of lookup arguments, where each corresponds to a sequence of
+ // input expressions and a sequence of table expressions involved in the lookup.
+ pub(crate) lookups: Vec>,
+
+ // Vector of shuffle arguments, where each corresponds to a sequence of
+ // input expressions and a sequence of shuffle expressions involved in the shuffle.
+ pub(crate) shuffles: Vec>,
+
+ // The minimum degree required by the circuit, which can be set to a
+ // larger amount than actually needed. This can be used, for example, to
+ // force the permutation argument to involve more columns in the same set.
+ pub(crate) minimum_degree: Option,
+}
+
+impl ConstraintSystemBack {
+ /// Compute the degree of the constraint system (the maximum degree of all
+ /// constraints).
+ pub fn degree(&self) -> usize {
+ // The permutation argument will serve alongside the gates, so must be
+ // accounted for.
+ let mut degree = permutation_argument_required_degree();
+
+ // The lookup argument also serves alongside the gates and must be accounted
+ // for.
+ degree = std::cmp::max(
+ degree,
+ self.lookups
+ .iter()
+ .map(|l| lookup_argument_required_degree(l))
+ .max()
+ .unwrap_or(1),
+ );
+
+ // The lookup argument also serves alongside the gates and must be accounted
+ // for.
+ degree = std::cmp::max(
+ degree,
+ self.shuffles
+ .iter()
+ .map(|l| shuffle_argument_required_degree(l))
+ .max()
+ .unwrap_or(1),
+ );
+
+ // Account for each gate to ensure our quotient polynomial is the
+ // correct degree and that our extended domain is the right size.
+ degree = std::cmp::max(
+ degree,
+ self.gates
+ .iter()
+ .map(|gate| gate.poly.degree())
+ .max()
+ .unwrap_or(0),
+ );
+
+ std::cmp::max(degree, self.minimum_degree.unwrap_or(1))
+ }
+
+ /// Compute the number of blinding factors necessary to perfectly blind
+ /// each of the prover's witness polynomials.
+ pub fn blinding_factors(&self) -> usize {
+ // All of the prover's advice columns are evaluated at no more than
+ let factors = *self.num_advice_queries.iter().max().unwrap_or(&1);
+ // distinct points during gate checks.
+
+ // - The permutation argument witness polynomials are evaluated at most 3 times.
+ // - Each lookup argument has independent witness polynomials, and they are
+ // evaluated at most 2 times.
+ let factors = std::cmp::max(3, factors);
+
+ // Each polynomial is evaluated at most an additional time during
+ // multiopen (at x_3 to produce q_evals):
+ let factors = factors + 1;
+
+ // h(x) is derived by the other evaluations so it does not reveal
+ // anything; in fact it does not even appear in the proof.
+
+ // h(x_3) is also not revealed; the verifier only learns a single
+ // evaluation of a polynomial in x_1 which has h(x_3) and another random
+ // polynomial evaluated at x_3 as coefficients -- this random polynomial
+ // is "random_poly" in the vanishing argument.
+
+ // Add an additional blinding factor as a slight defense against
+ // off-by-one errors.
+ factors + 1
+ }
+
+ /// Returns the minimum necessary rows that need to exist in order to
+ /// account for e.g. blinding factors.
+ pub(crate) fn minimum_rows(&self) -> usize {
+ self.blinding_factors() // m blinding factors
+ + 1 // for l_{-(m + 1)} (l_last)
+ + 1 // for l_0 (just for extra breathing room for the permutation
+ // argument, to essentially force a separation in the
+ // permutation polynomial between the roles of l_last, l_0
+ // and the interstitial values.)
+ + 1 // for at least one row
+ }
+
+ pub(crate) fn get_any_query_index(&self, column: ColumnMid, at: Rotation) -> usize {
+ let queries = match column.column_type {
+ Any::Advice => &self.advice_queries,
+ Any::Fixed => &self.fixed_queries,
+ Any::Instance => &self.instance_queries,
+ };
+ for (index, instance_query) in queries.iter().enumerate() {
+ if instance_query == &(column, at) {
+ return index;
+ }
+ }
+ panic!("get_any_query_index called for non-existent query");
+ }
+
+ /// Returns the list of phases
+ pub fn phases(&self) -> impl Iterator
+ {
+ let max_phase = self
+ .advice_column_phase
+ .iter()
+ .max()
+ .copied()
+ .unwrap_or_default();
+ 0..=max_phase
+ }
+
+ /// Number of fixed columns
+ pub fn num_fixed_columns(&self) -> usize {
+ self.num_fixed_columns
+ }
+ /// Number of advice columns
+ pub fn num_advice_columns(&self) -> usize {
+ self.num_advice_columns
+ }
+ /// Number of instance columns
+ pub fn num_instance_columns(&self) -> usize {
+ self.num_instance_columns
+ }
+ /// Return gates of the constraint system
+ pub fn gates(&self) -> &Vec> {
+ &self.gates
+ }
+ /// Returns the instance queries
+ pub fn instance_queries(&self) -> &Vec<(ColumnMid, Rotation)> {
+ &self.instance_queries
+ }
+ // Permutation argument for performing equality constraints
+ pub fn permutation(&self) -> &PermutationArgumentBack {
+ &self.permutation
+ }
+
+ // Vector of lookup arguments, where each corresponds to a sequence of
+ // input expressions and a sequence of table expressions involved in the lookup.
+ pub fn lookups(&self) -> &Vec> {
+ &self.lookups
+ }
+
+ /// Obtain a pinned version of this constraint system; a structure with the
+ /// minimal parameters needed to determine the rest of the constraint
+ /// system.
+ pub(crate) fn pinned(&self) -> PinnedConstraintSystem<'_, F> {
+ PinnedConstraintSystem {
+ num_fixed_columns: &self.num_fixed_columns,
+ num_advice_columns: &self.num_advice_columns,
+ num_instance_columns: &self.num_instance_columns,
+ num_challenges: &self.num_challenges,
+ advice_column_phase: &self.advice_column_phase,
+ challenge_phase: &self.challenge_phase,
+ gates: PinnedGates(&self.gates),
+ fixed_queries: &self.fixed_queries,
+ advice_queries: &self.advice_queries,
+ instance_queries: &self.instance_queries,
+ permutation: &self.permutation,
+ lookups: &self.lookups,
+ shuffles: &self.shuffles,
+ minimum_degree: &self.minimum_degree,
+ }
+ }
+}
+
+struct PinnedGates<'a, F: Field>(&'a Vec>);
+
+impl<'a, F: Field> std::fmt::Debug for PinnedGates<'a, F> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ f.debug_list()
+ .entries(self.0.iter().map(|gate| &gate.poly))
+ .finish()
+ }
+}
+
+/// Represents the minimal parameters that determine a `ConstraintSystem`.
+#[allow(dead_code)]
+#[derive(Debug)]
+pub(crate) struct PinnedConstraintSystem<'a, F: Field> {
+ num_fixed_columns: &'a usize,
+ num_advice_columns: &'a usize,
+ num_instance_columns: &'a usize,
+ num_challenges: &'a usize,
+ advice_column_phase: &'a Vec,
+ challenge_phase: &'a Vec,
+ gates: PinnedGates<'a, F>,
+ advice_queries: &'a Vec<(ColumnMid, Rotation)>,
+ instance_queries: &'a Vec<(ColumnMid, Rotation)>,
+ fixed_queries: &'a Vec<(ColumnMid, Rotation)>,
+ permutation: &'a PermutationArgumentBack,
+ lookups: &'a Vec>,
+ shuffles: &'a Vec>,
+ minimum_degree: &'a Option,
+}
+
+// Cost functions: arguments required degree
+
+/// Returns the minimum circuit degree required by the permutation argument.
+/// The argument may use larger degree gates depending on the actual
+/// circuit's degree and how many columns are involved in the permutation.
+fn permutation_argument_required_degree() -> usize {
+ // degree 2:
+ // l_0(X) * (1 - z(X)) = 0
+ //
+ // We will fit as many polynomials p_i(X) as possible
+ // into the required degree of the circuit, so the
+ // following will not affect the required degree of
+ // this middleware.
+ //
+ // (1 - (l_last(X) + l_blind(X))) * (
+ // z(\omega X) \prod (p(X) + \beta s_i(X) + \gamma)
+ // - z(X) \prod (p(X) + \delta^i \beta X + \gamma)
+ // )
+ //
+ // On the first sets of columns, except the first
+ // set, we will do
+ //
+ // l_0(X) * (z(X) - z'(\omega^(last) X)) = 0
+ //
+ // where z'(X) is the permutation for the previous set
+ // of columns.
+ //
+ // On the final set of columns, we will do
+ //
+ // degree 3:
+ // l_last(X) * (z'(X)^2 - z'(X)) = 0
+ //
+ // which will allow the last value to be zero to
+ // ensure the argument is perfectly complete.
+
+ // There are constraints of degree 3 regardless of the
+ // number of columns involved.
+ 3
+}
+
+fn lookup_argument_required_degree(arg: &lookup::Argument) -> usize {
+ assert_eq!(arg.input_expressions.len(), arg.table_expressions.len());
+
+ // The first value in the permutation poly should be one.
+ // degree 2:
+ // l_0(X) * (1 - z(X)) = 0
+ //
+ // The "last" value in the permutation poly should be a boolean, for
+ // completeness and soundness.
+ // degree 3:
+ // l_last(X) * (z(X)^2 - z(X)) = 0
+ //
+ // Enable the permutation argument for only the rows involved.
+ // degree (2 + input_degree + table_degree) or 4, whichever is larger:
+ // (1 - (l_last(X) + l_blind(X))) * (
+ // z(\omega X) (a'(X) + \beta) (s'(X) + \gamma)
+ // - z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma)
+ // ) = 0
+ //
+ // The first two values of a' and s' should be the same.
+ // degree 2:
+ // l_0(X) * (a'(X) - s'(X)) = 0
+ //
+ // Either the two values are the same, or the previous
+ // value of a' is the same as the current value.
+ // degree 3:
+ // (1 - (l_last(X) + l_blind(X))) * (a′(X) − s′(X))⋅(a′(X) − a′(\omega^{-1} X)) = 0
+ let mut input_degree = 1;
+ for expr in arg.input_expressions.iter() {
+ input_degree = std::cmp::max(input_degree, expr.degree());
+ }
+ let mut table_degree = 1;
+ for expr in arg.table_expressions.iter() {
+ table_degree = std::cmp::max(table_degree, expr.degree());
+ }
+
+ // In practice because input_degree and table_degree are initialized to
+ // one, the latter half of this max() invocation is at least 4 always,
+ // rendering this call pointless except to be explicit in case we change
+ // the initialization of input_degree/table_degree in the future.
+ std::cmp::max(
+ // (1 - (l_last + l_blind)) z(\omega X) (a'(X) + \beta) (s'(X) + \gamma)
+ 4,
+ // (1 - (l_last + l_blind)) z(X) (\theta^{m-1} a_0(X) + ... + a_{m-1}(X) + \beta) (\theta^{m-1} s_0(X) + ... + s_{m-1}(X) + \gamma)
+ 2 + input_degree + table_degree,
+ )
+}
+
+fn shuffle_argument_required_degree(arg: &shuffle::Argument) -> usize {
+ assert_eq!(arg.input_expressions.len(), arg.shuffle_expressions.len());
+
+ let mut input_degree = 1;
+ for expr in arg.input_expressions.iter() {
+ input_degree = std::cmp::max(input_degree, expr.degree());
+ }
+ let mut shuffle_degree = 1;
+ for expr in arg.shuffle_expressions.iter() {
+ shuffle_degree = std::cmp::max(shuffle_degree, expr.degree());
+ }
+
+ // (1 - (l_last + l_blind)) (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma))
+ std::cmp::max(2 + shuffle_degree, 2 + input_degree)
+}
diff --git a/halo2_backend/src/plonk/error.rs b/halo2_backend/src/plonk/error.rs
new file mode 100644
index 0000000000..716e466d1b
--- /dev/null
+++ b/halo2_backend/src/plonk/error.rs
@@ -0,0 +1,76 @@
+use std::error;
+use std::fmt;
+use std::io;
+
+use halo2_middleware::circuit::ColumnMid;
+
+/// This is an error that could occur during proving.
+#[derive(Debug)]
+pub enum Error {
+ /// The provided instances do not match the circuit parameters.
+ InvalidInstances,
+ /// The constraint system is not satisfied.
+ ConstraintSystemFailure,
+ /// Out of bounds index passed to a backend
+ BoundsFailure,
+ /// Opening error
+ Opening,
+ /// Transcript error
+ Transcript(io::Error),
+ /// `k` is too small for the given circuit.
+ NotEnoughRowsAvailable {
+ /// The current value of `k` being used.
+ current_k: u32,
+ },
+ /// Instance provided exceeds number of available rows
+ InstanceTooLarge,
+ /// The instance sets up a copy constraint involving a column that has not been
+ /// included in the permutation.
+ ColumnNotInPermutation(ColumnMid),
+ /// Generic error not covered by previous cases
+ Other(String),
+}
+
+impl From for Error {
+ fn from(error: io::Error) -> Self {
+ // The only place we can get io::Error from is the transcript.
+ Error::Transcript(error)
+ }
+}
+
+impl Error {
+ /// Constructs an `Error::NotEnoughRowsAvailable`.
+ pub fn not_enough_rows_available(current_k: u32) -> Self {
+ Error::NotEnoughRowsAvailable { current_k }
+ }
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Error::InvalidInstances => write!(f, "Provided instances do not match the circuit"),
+ Error::ConstraintSystemFailure => write!(f, "The constraint system is not satisfied"),
+ Error::BoundsFailure => write!(f, "An out-of-bounds index was passed to the backend"),
+ Error::Opening => write!(f, "Multi-opening proof was invalid"),
+ Error::Transcript(e) => write!(f, "Transcript error: {e}"),
+ Error::NotEnoughRowsAvailable { current_k } => write!(
+ f,
+ "k = {current_k} is too small for the given circuit. Try using a larger value of k",
+ ),
+ Error::InstanceTooLarge => write!(f, "Instance vectors are larger than the circuit"),
+ Error::ColumnNotInPermutation(column) => {
+ write!(f, "Column {column:?} must be included in the permutation",)
+ }
+ Error::Other(error) => write!(f, "Other: {error}"),
+ }
+ }
+}
+
+impl error::Error for Error {
+ fn source(&self) -> Option<&(dyn error::Error + 'static)> {
+ match self {
+ Error::Transcript(e) => Some(e),
+ _ => None,
+ }
+ }
+}
diff --git a/halo2_proofs/src/plonk/evaluation.rs b/halo2_backend/src/plonk/evaluation.rs
similarity index 69%
rename from halo2_proofs/src/plonk/evaluation.rs
rename to halo2_backend/src/plonk/evaluation.rs
index 431c487c7e..09d8b452d3 100644
--- a/halo2_proofs/src/plonk/evaluation.rs
+++ b/halo2_backend/src/plonk/evaluation.rs
@@ -1,22 +1,31 @@
+//! This module:
+//! - Evaluates the h polynomial: Evaluator::new(ConstraintSystem).evaluate_h(...)
+//! - Evaluates an Expression using Lagrange basis
+
use crate::multicore;
-use crate::plonk::{lookup, permutation, Any, ProvingKey};
-use crate::poly::Basis;
+use crate::plonk::{
+ circuit::{ConstraintSystemBack, ExpressionBack, VarBack},
+ lookup, permutation, ProvingKey,
+};
+use crate::poly::{Basis, LagrangeBasis};
use crate::{
arithmetic::{parallelize, CurveAffine},
- poly::{Coeff, ExtendedLagrangeCoeff, Polynomial, Rotation},
+ poly::{Coeff, ExtendedLagrangeCoeff, Polynomial},
};
use group::ff::{Field, PrimeField, WithSmallOrderMulGroup};
+use halo2_middleware::circuit::Any;
+use halo2_middleware::poly::Rotation;
-use super::{shuffle, ConstraintSystem, Expression};
+use super::shuffle;
/// Return the index in the polynomial of size `isize` after rotation `rot`.
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize {
(((idx as i32) + (rot * rot_scale)).rem_euclid(isize)) as usize
}
-/// Value used in a calculation
+/// Value used in [`Calculation`]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd)]
-pub enum ValueSource {
+enum ValueSource {
/// This is a constant value
Constant(usize),
/// This is an intermediate value
@@ -50,7 +59,7 @@ impl Default for ValueSource {
impl ValueSource {
/// Get the value for this source
#[allow(clippy::too_many_arguments)]
- pub fn get(
+ fn get(
&self,
rotations: &[usize],
constants: &[F],
@@ -89,7 +98,7 @@ impl ValueSource {
/// Calculation
#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum Calculation {
+enum Calculation {
/// This is an addition
Add(ValueSource, ValueSource),
/// This is a subtraction
@@ -111,7 +120,7 @@ pub enum Calculation {
impl Calculation {
/// Get the resulting value of this calculation
#[allow(clippy::too_many_arguments)]
- pub fn evaluate(
+ fn evaluate(
&self,
rotations: &[usize],
constants: &[F],
@@ -164,59 +173,67 @@ impl Calculation {
/// Evaluator
#[derive(Clone, Default, Debug)]
-pub struct Evaluator {
+pub(crate) struct Evaluator {
/// Custom gates evalution
- pub custom_gates: GraphEvaluator,
+ custom_gates: GraphEvaluator,
/// Lookups evalution
- pub lookups: Vec>,
+ lookups: Vec>,
/// Shuffle evalution
- pub shuffles: Vec>,
+ shuffles: Vec>,
}
-/// GraphEvaluator
+/// The purpose of GraphEvaluator is to collect a set of computations and compute them by making a graph of
+/// its internal operations to avoid repeating computations.
+///
+/// Computations can be added in two ways:
+///
+/// - using [`Self::add_expression`] where expressions are added and internally turned into a graph.
+/// A reference to the computation is returned in the form of a [`ValueSource::Intermediate`] reference
+/// index.
+/// - using [`Self::add_calculation`] where you can add only a single operation or a
+/// [Horner polynomial evaluation](https://en.wikipedia.org/wiki/Horner's_method) by using
+/// Calculation::Horner
+///
+/// Finally, call [`Self::evaluate`] to get the result of the last calculation added.
+///
#[derive(Clone, Debug)]
-pub struct GraphEvaluator {
+struct GraphEvaluator {
/// Constants
- pub constants: Vec,
+ constants: Vec,
/// Rotations
- pub rotations: Vec,
+ rotations: Vec,
/// Calculations
- pub calculations: Vec,
+ calculations: Vec,
/// Number of intermediates
- pub num_intermediates: usize,
+ num_intermediates: usize,
}
/// EvaluationData
#[derive(Default, Debug)]
-pub struct EvaluationData {
+struct EvaluationData {
/// Intermediates
- pub intermediates: Vec,
+ intermediates: Vec,
/// Rotations
- pub rotations: Vec,
+ rotations: Vec,
}
-/// CaluclationInfo
+/// CalculationInfo contains a calculation to perform and, in [`Self::target`], the index into [`EvaluationData::intermediates`] where the resulting value is going to be stored.
#[derive(Clone, Debug)]
-pub struct CalculationInfo {
+struct CalculationInfo {
/// Calculation
- pub calculation: Calculation,
+ calculation: Calculation,
/// Target
- pub target: usize,
+ target: usize,
}
impl Evaluator {
- /// Creates a new evaluation structure
- pub fn new(cs: &ConstraintSystem) -> Self {
+ /// Creates a new evaluation structure from a [`ConstraintSystemBack`]
+ pub fn new(cs: &ConstraintSystemBack) -> Self {
let mut ev = Evaluator::default();
- // Custom gates
let mut parts = Vec::new();
for gate in cs.gates.iter() {
- parts.extend(
- gate.polynomials()
- .iter()
- .map(|poly| ev.custom_gates.add_expression(poly)),
- );
+ parts.push(ev.custom_gates.add_expression(&gate.poly));
}
ev.custom_gates.add_calculation(Calculation::Horner(
ValueSource::PreviousValue(),
@@ -228,7 +245,7 @@ impl Evaluator {
for lookup in cs.lookups.iter() {
let mut graph = GraphEvaluator::default();
- let mut evaluate_lc = |expressions: &Vec>| {
+ let mut evaluate_lc = |expressions: &Vec>| {
let parts = expressions
.iter()
.map(|expr| graph.add_expression(expr))
@@ -260,7 +277,8 @@ impl Evaluator {
// Shuffles
for shuffle in cs.shuffles.iter() {
- let evaluate_lc = |expressions: &Vec>, graph: &mut GraphEvaluator| {
+ let evaluate_lc = |expressions: &Vec>,
+ graph: &mut GraphEvaluator| {
let parts = expressions
.iter()
.map(|expr| graph.add_expression(expr))
@@ -388,10 +406,18 @@ impl Evaluator {
let blinding_factors = pk.vk.cs.blinding_factors();
let last_rotation = Rotation(-((blinding_factors + 1) as i32));
let chunk_len = pk.vk.cs.degree() - 2;
- let delta_start = beta * &C::Scalar::ZETA;
+ let delta_start = beta * C::Scalar::ZETA;
+
+ let permutation_product_cosets: Vec<
+ Polynomial,
+ > = sets
+ .iter()
+ .map(|set| domain.coeff_to_extended(set.permutation_product_poly.clone()))
+ .collect();
- let first_set = sets.first().unwrap();
- let last_set = sets.last().unwrap();
+ let first_set_permutation_product_coset =
+ permutation_product_cosets.first().unwrap();
+ let last_set_permutation_product_coset = permutation_product_cosets.last().unwrap();
// Permutation constraints
parallelize(&mut values, |values, start| {
@@ -404,22 +430,21 @@ impl Evaluator {
// Enforce only for the first set.
// l_0(X) * (1 - z_0(X)) = 0
*value = *value * y
- + ((one - first_set.permutation_product_coset[idx]) * l0[idx]);
+ + ((one - first_set_permutation_product_coset[idx]) * l0[idx]);
// Enforce only for the last set.
// l_last(X) * (z_l(X)^2 - z_l(X)) = 0
*value = *value * y
- + ((last_set.permutation_product_coset[idx]
- * last_set.permutation_product_coset[idx]
- - last_set.permutation_product_coset[idx])
+ + ((last_set_permutation_product_coset[idx]
+ * last_set_permutation_product_coset[idx]
+ - last_set_permutation_product_coset[idx])
* l_last[idx]);
// Except for the first set, enforce.
// l_0(X) * (z_i(X) - z_{i-1}(\omega^(last) X)) = 0
- for (set_idx, set) in sets.iter().enumerate() {
+ for set_idx in 0..sets.len() {
if set_idx != 0 {
*value = *value * y
- + ((set.permutation_product_coset[idx]
- - permutation.sets[set_idx - 1].permutation_product_coset
- [r_last])
+ + ((permutation_product_cosets[set_idx][idx]
+ - permutation_product_cosets[set_idx - 1][r_last])
* l0[idx]);
}
}
@@ -429,29 +454,30 @@ impl Evaluator {
// - z_i(X) \prod_j (p(X) + \delta^j \beta X + \gamma)
// )
let mut current_delta = delta_start * beta_term;
- for ((set, columns), cosets) in sets
- .iter()
- .zip(p.columns.chunks(chunk_len))
- .zip(pk.permutation.cosets.chunks(chunk_len))
+ for ((permutation_product_coset, columns), cosets) in
+ permutation_product_cosets
+ .iter()
+ .zip(p.columns.chunks(chunk_len))
+ .zip(pk.permutation.cosets.chunks(chunk_len))
{
- let mut left = set.permutation_product_coset[r_next];
+ let mut left = permutation_product_coset[r_next];
for (values, permutation) in columns
.iter()
- .map(|&column| match column.column_type() {
- Any::Advice(_) => &advice[column.index()],
- Any::Fixed => &fixed[column.index()],
- Any::Instance => &instance[column.index()],
+ .map(|&column| match column.column_type {
+ Any::Advice => &advice[column.index],
+ Any::Fixed => &fixed[column.index],
+ Any::Instance => &instance[column.index],
})
.zip(cosets.iter())
{
left *= values[idx] + beta * permutation[idx] + gamma;
}
- let mut right = set.permutation_product_coset[idx];
- for values in columns.iter().map(|&column| match column.column_type() {
- Any::Advice(_) => &advice[column.index()],
- Any::Fixed => &fixed[column.index()],
- Any::Instance => &instance[column.index()],
+ let mut right = permutation_product_coset[idx];
+ for values in columns.iter().map(|&column| match column.column_type {
+ Any::Advice => &advice[column.index],
+ Any::Fixed => &fixed[column.index],
+ Any::Instance => &instance[column.index],
}) {
right *= values[idx] + current_delta + gamma;
current_delta *= &C::Scalar::DELTA;
@@ -670,36 +696,30 @@ impl GraphEvaluator {
}
/// Generates an optimized evaluation for the expression
- fn add_expression(&mut self, expr: &Expression) -> ValueSource {
+ fn add_expression(&mut self, expr: &ExpressionBack) -> ValueSource {
match expr {
- Expression::Constant(scalar) => self.add_constant(scalar),
- Expression::Selector(_selector) => unreachable!(),
- Expression::Fixed(query) => {
- let rot_idx = self.add_rotation(&query.rotation);
- self.add_calculation(Calculation::Store(ValueSource::Fixed(
- query.column_index,
- rot_idx,
- )))
- }
- Expression::Advice(query) => {
+ ExpressionBack::Constant(scalar) => self.add_constant(scalar),
+ ExpressionBack::Var(VarBack::Query(query)) => {
let rot_idx = self.add_rotation(&query.rotation);
- self.add_calculation(Calculation::Store(ValueSource::Advice(
- query.column_index,
- rot_idx,
- )))
- }
- Expression::Instance(query) => {
- let rot_idx = self.add_rotation(&query.rotation);
- self.add_calculation(Calculation::Store(ValueSource::Instance(
- query.column_index,
- rot_idx,
- )))
+ match query.column_type {
+ Any::Fixed => self.add_calculation(Calculation::Store(ValueSource::Fixed(
+ query.column_index,
+ rot_idx,
+ ))),
+ Any::Advice => self.add_calculation(Calculation::Store(ValueSource::Advice(
+ query.column_index,
+ rot_idx,
+ ))),
+ Any::Instance => self.add_calculation(Calculation::Store(
+ ValueSource::Instance(query.column_index, rot_idx),
+ )),
+ }
}
- Expression::Challenge(challenge) => self.add_calculation(Calculation::Store(
- ValueSource::Challenge(challenge.index()),
- )),
- Expression::Negated(a) => match **a {
- Expression::Constant(scalar) => self.add_constant(&-scalar),
+ ExpressionBack::Var(VarBack::Challenge(challenge)) => self.add_calculation(
+ Calculation::Store(ValueSource::Challenge(challenge.index())),
+ ),
+ ExpressionBack::Negated(a) => match **a {
+ ExpressionBack::Constant(scalar) => self.add_constant(&-scalar),
_ => {
let result_a = self.add_expression(a);
match result_a {
@@ -708,10 +728,10 @@ impl GraphEvaluator {
}
}
},
- Expression::Sum(a, b) => {
+ ExpressionBack::Sum(a, b) => {
// Undo subtraction stored as a + (-b) in expressions
match &**b {
- Expression::Negated(b_int) => {
+ ExpressionBack::Negated(b_int) => {
let result_a = self.add_expression(a);
let result_b = self.add_expression(b_int);
if result_a == ValueSource::Constant(0) {
@@ -737,7 +757,7 @@ impl GraphEvaluator {
}
}
}
- Expression::Product(a, b) => {
+ ExpressionBack::Product(a, b) => {
let result_a = self.add_expression(a);
let result_b = self.add_expression(b);
if result_a == ValueSource::Constant(0) || result_b == ValueSource::Constant(0) {
@@ -758,22 +778,11 @@ impl GraphEvaluator {
self.add_calculation(Calculation::Mul(result_b, result_a))
}
}
- Expression::Scaled(a, f) => {
- if *f == C::ScalarExt::ZERO {
- ValueSource::Constant(0)
- } else if *f == C::ScalarExt::ONE {
- self.add_expression(a)
- } else {
- let cst = self.add_constant(f);
- let result_a = self.add_expression(a);
- self.add_calculation(Calculation::Mul(result_a, cst))
- }
- }
}
}
/// Creates a new evaluation structure
- pub fn instance(&self) -> EvaluationData {
+ fn instance(&self) -> EvaluationData {
EvaluationData {
intermediates: vec![C::ScalarExt::ZERO; self.num_intermediates],
rotations: vec![0usize; self.rotations.len()],
@@ -781,7 +790,11 @@ impl GraphEvaluator {
}
#[allow(clippy::too_many_arguments)]
- pub fn evaluate(
+ /// Fills the EvaluationData:
+ /// .intermediates with the evaluation of each calculation,
+ /// .rotations with the indexes of the polynomials after rotation,
+ /// and returns the value of the last evaluation done.
+ fn evaluate(
&self,
data: &mut EvaluationData,
fixed: &[Polynomial],
@@ -829,9 +842,9 @@ impl GraphEvaluator {
}
}
-/// Simple evaluation of an expression
-pub fn evaluate(
- expression: &Expression,
+/// Simple evaluation of an [`ExpressionBack`] over the provided lagrange polynomials
+pub(crate) fn evaluate(
+ expression: &ExpressionBack,
size: usize,
rot_scale: i32,
fixed: &[Polynomial],
@@ -846,26 +859,192 @@ pub fn evaluate(
let idx = start + i;
*value = expression.evaluate(
&|scalar| scalar,
- &|_| panic!("virtual selectors are removed during optimization"),
- &|query| {
- fixed[query.column_index]
- [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)]
- },
- &|query| {
- advice[query.column_index]
- [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)]
- },
- &|query| {
- instance[query.column_index]
- [get_rotation_idx(idx, query.rotation.0, rot_scale, isize)]
+ &|var| match var {
+ VarBack::Challenge(challenge) => challenges[challenge.index()],
+ VarBack::Query(query) => {
+ let rot_idx = get_rotation_idx(idx, query.rotation.0, rot_scale, isize);
+ match query.column_type {
+ Any::Fixed => fixed[query.column_index][rot_idx],
+ Any::Advice => advice[query.column_index][rot_idx],
+ Any::Instance => instance[query.column_index][rot_idx],
+ }
+ }
},
- &|challenge| challenges[challenge.index()],
&|a| -a,
- &|a, b| a + &b,
+ &|a, b| a + b,
&|a, b| a * b,
- &|a, scalar| a * scalar,
);
}
});
values
}
+
+#[cfg(test)]
+mod test {
+ use crate::plonk::circuit::{ExpressionBack, QueryBack, VarBack};
+ use crate::poly::LagrangeCoeff;
+ use halo2_middleware::circuit::{Any, ChallengeMid};
+ use halo2_middleware::poly::Rotation;
+ use halo2curves::pasta::pallas::{Affine, Scalar};
+
+ use super::*;
+
+ fn check(calc: Option, expr: Option>, expected: i64) {
+ let lagranges = |v: &[&[u64]]| -> Vec> {
+ v.iter()
+ .map(|vv| {
+ Polynomial::new_lagrange_from_vec(
+ vv.iter().map(|v| Scalar::from(*v)).collect::>(),
+ )
+ })
+ .collect()
+ };
+
+ let mut gv = GraphEvaluator::::default();
+ if let Some(expression) = expr {
+ gv.add_expression(&expression);
+ } else if let Some(calculation) = calc {
+ gv.add_rotation(&Rotation::cur());
+ gv.add_rotation(&Rotation::next());
+ gv.add_calculation(calculation);
+ } else {
+ unreachable!()
+ }
+
+ let mut evaluation_data = gv.instance();
+ let result = gv.evaluate(
+ &mut evaluation_data,
+ &lagranges(&[&[2, 3], &[1002, 1003]]), // fixed
+ &lagranges(&[&[4, 5], &[1004, 1005]]), // advice
+ &lagranges(&[&[6, 7], &[1006, 1007]]), // instance
+ &[8u64.into(), 9u64.into()], // challenges
+ &Scalar::from_raw([10, 0, 0, 0]), // beta
+ &Scalar::from_raw([11, 0, 0, 0]), // gamma
+ &Scalar::from_raw([12, 0, 0, 0]), // theta
+ &Scalar::from_raw([13, 0, 0, 0]), // y
+ &Scalar::from_raw([14, 0, 0, 0]), // previous value
+ 0, // idx
+ 1, // rot_scale
+ 32, // isize
+ );
+ let fq_expected = if expected < 0 {
+ -Scalar::from(-expected as u64)
+ } else {
+ Scalar::from(expected as u64)
+ };
+
+ assert_eq!(
+ result, fq_expected,
+ "Expected {} was {:?}",
+ expected, result
+ );
+ }
+ fn check_expr(expr: ExpressionBack, expected: i64) {
+ check(None, Some(expr), expected);
+ }
+ fn check_calc(calc: Calculation, expected: i64) {
+ check(Some(calc), None, expected);
+ }
+
+ #[test]
+ fn graphevaluator_values() {
+ use VarBack::*;
+ // Check values
+ for (col, rot, expected) in [(0, 0, 2), (0, 1, 3), (1, 0, 1002), (1, 1, 1003)] {
+ check_expr(
+ ExpressionBack::Var(Query(QueryBack {
+ index: 0,
+ column_index: col,
+ column_type: Any::Fixed,
+ rotation: Rotation(rot),
+ })),
+ expected,
+ );
+ }
+ for (col, rot, expected) in [(0, 0, 4), (0, 1, 5), (1, 0, 1004), (1, 1, 1005)] {
+ check_expr(
+ ExpressionBack::Var(Query(QueryBack {
+ index: 0,
+ column_index: col,
+ column_type: Any::Advice,
+ rotation: Rotation(rot),
+ })),
+ expected,
+ );
+ }
+ for (col, rot, expected) in [(0, 0, 6), (0, 1, 7), (1, 0, 1006), (1, 1, 1007)] {
+ check_expr(
+ ExpressionBack::Var(Query(QueryBack {
+ index: 0,
+ column_index: col,
+ column_type: Any::Instance,
+ rotation: Rotation(rot),
+ })),
+ expected,
+ );
+ }
+ for (ch, expected) in [(0, 8), (1, 9)] {
+ check_expr(
+ ExpressionBack::Var(Challenge(ChallengeMid {
+ index: ch,
+ phase: 0,
+ })),
+ expected,
+ );
+ }
+
+ check_calc(Calculation::Store(ValueSource::Beta()), 10);
+ check_calc(Calculation::Store(ValueSource::Gamma()), 11);
+ check_calc(Calculation::Store(ValueSource::Theta()), 12);
+ check_calc(Calculation::Store(ValueSource::Y()), 13);
+ check_calc(Calculation::Store(ValueSource::PreviousValue()), 14);
+ }
+
+ #[test]
+ fn graphevaluator_expr_operations() {
+ use VarBack::*;
+ // Check expression operations
+ let two = || {
+ Box::new(ExpressionBack::::Var(Query(QueryBack {
+ index: 0,
+ column_index: 0,
+ column_type: Any::Fixed,
+ rotation: Rotation(0),
+ })))
+ };
+
+ let three = || {
+ Box::new(ExpressionBack::::Var(Query(QueryBack {
+ index: 0,
+ column_index: 0,
+ column_type: Any::Fixed,
+ rotation: Rotation(1),
+ })))
+ };
+
+ check_expr(ExpressionBack::Sum(two(), three()), 5);
+ check_expr(ExpressionBack::Product(two(), three()), 6);
+ check_expr(
+ ExpressionBack::Sum(ExpressionBack::Negated(two()).into(), three()),
+ 1,
+ );
+ }
+
+ #[test]
+ fn graphevaluator_calc_operations() {
+ // Check calculation operations
+ let two = || ValueSource::Fixed(0, 0);
+ let three = || ValueSource::Fixed(0, 1);
+
+ check_calc(Calculation::Add(two(), three()), 5);
+ check_calc(Calculation::Double(two()), 4);
+ check_calc(Calculation::Mul(two(), three()), 6);
+ check_calc(Calculation::Square(three()), 9);
+ check_calc(Calculation::Negate(two()), -2);
+ check_calc(Calculation::Sub(three(), two()), 1);
+ check_calc(
+ Calculation::Horner(two(), vec![three(), two()], three()),
+ 2 + 3 * 3 + 2 * 9,
+ );
+ }
+}
diff --git a/halo2_backend/src/plonk/keygen.rs b/halo2_backend/src/plonk/keygen.rs
new file mode 100644
index 0000000000..636e3ee4b0
--- /dev/null
+++ b/halo2_backend/src/plonk/keygen.rs
@@ -0,0 +1,394 @@
+//! This module
+//! - creates the proving and verifying keys for a circuit
+//! - creates a domain, constraint system, and configuration for a circuit
+
+#![allow(clippy::int_plus_one)]
+
+use group::Curve;
+use halo2_middleware::ff::{Field, FromUniformBytes};
+use halo2_middleware::zal::impls::H2cEngine;
+
+use super::{evaluation::Evaluator, permutation, Polynomial, ProvingKey, VerifyingKey};
+use crate::{
+ arithmetic::{parallelize, CurveAffine},
+ plonk::circuit::{
+ ConstraintSystemBack, ExpressionBack, GateBack, LookupArgumentBack, QueryBack,
+ ShuffleArgumentBack, VarBack,
+ },
+ plonk::Error,
+ poly::{
+ commitment::{Blind, Params},
+ EvaluationDomain,
+ },
+};
+use halo2_middleware::circuit::{
+ Any, ColumnMid, CompiledCircuit, ConstraintSystemMid, ExpressionMid, VarMid,
+};
+use halo2_middleware::multicore::ParallelIterator;
+use halo2_middleware::{lookup, poly::Rotation, shuffle};
+use rayon::iter::IntoParallelRefIterator;
+use std::collections::HashMap;
+
+/// Creates a domain, constraint system, and configuration for a circuit.
+pub(crate) fn create_domain(
+ cs: &ConstraintSystemBack,
+ k: u32,
+) -> EvaluationDomain
+where
+ C: CurveAffine,
+{
+ let degree = cs.degree();
+ EvaluationDomain::new(degree as u32, k)
+}
+
+/// Generate a `VerifyingKey` from an instance of `CompiledCircuit`.
+pub fn keygen_vk(
+ params: &P,
+ circuit: &CompiledCircuit,
+) -> Result, Error>
+where
+ C: CurveAffine,
+ P: Params,
+ C::Scalar: FromUniformBytes<64>,
+{
+ let cs_mid = &circuit.cs;
+ let cs: ConstraintSystemBack = cs_mid.clone().into();
+ let domain = EvaluationDomain::new(cs.degree() as u32, params.k());
+
+ if (params.n() as usize) < cs.minimum_rows() {
+ return Err(Error::not_enough_rows_available(params.k()));
+ }
+
+ let permutation_vk = permutation::keygen::Assembly::new_from_assembly_mid(
+ params.n() as usize,
+ &cs_mid.permutation,
+ &circuit.preprocessing.permutation,
+ )?
+ .build_vk(params, &domain, &cs.permutation);
+
+ let fixed_commitments = {
+ let fixed_commitments_projective: Vec = circuit
+ .preprocessing
+ .fixed
+ .iter()
+ .map(|poly| {
+ params.commit_lagrange(
+ &H2cEngine::new(),
+ &Polynomial::new_lagrange_from_vec(poly.clone()),
+ Blind::default(),
+ )
+ })
+ .collect();
+ let mut fixed_commitments = vec![C::identity(); fixed_commitments_projective.len()];
+ C::CurveExt::batch_normalize(&fixed_commitments_projective, &mut fixed_commitments);
+ fixed_commitments
+ };
+
+ Ok(VerifyingKey::from_parts(
+ domain,
+ fixed_commitments,
+ permutation_vk,
+ cs,
+ ))
+}
+
+/// Generate a `ProvingKey` from a `VerifyingKey` and an instance of `CompiledCircuit`.
+pub fn keygen_pk