Compatible transcripts between dalek and arkworks!!
mmaker committed Jan 26, 2024
1 parent 7d4fcf3 commit a6ff0d0
Showing 8 changed files with 116 additions and 41 deletions.
12 changes: 7 additions & 5 deletions examples/schnorr.rs
@@ -21,8 +21,8 @@ where
IOPattern<H>: GroupIOPattern<G>,
{
fn add_schnorr_io(self) -> Self {
self.add_points(1, "P")
.add_points(1, "X")
self.add_points(1, "generator (P)")
.add_points(1, "public key (X)")
.ratchet()
.add_points(1, "commitment (K)")
.challenge_scalars(1, "challenge (c)")
@@ -41,11 +41,13 @@ fn keygen<G: CurveGroup>() -> (G::ScalarField, G) {

/// The prove algorithm takes as input
/// - the prover state `Arthur`, that has access to a random oracle `H` and can absorb/squeeze elements from the group `G`.
/// - The generator `P` in the group.
/// - the secret key $x \in \mathbb{Z}_p$
/// It returns a zero-knowledge proof of knowledge of `x` as a sequence of bytes.
#[allow(non_snake_case)]
fn prove<H, G>(
// the hash function `H` works over bytes, unless otherwise denoted with an additional type argument implementing `nimue::Unit`.
// the hash function `H` works over bytes.
// Algebraic hashes over a particular domain can be denoted with an additional type argument implementing `nimue::Unit`.
arthur: &mut Arthur<H>,
// the generator
P: G,
@@ -55,7 +57,7 @@ fn prove<H, G>(
where
H: DuplexHash,
G: CurveGroup,
Arthur<H>: FieldChallenges<G::ScalarField>,
Arthur<H>: GroupWriter<G>,
{
// `Arthur` types implement a cryptographically-secure random number generator that is tied to the protocol transcript
// and that can be accessed via the `rng()` function.
@@ -109,7 +111,7 @@ where
if P * r == K + X * c {
Ok(())
} else {
Err(nimue::ProofError::InvalidProof)
Err(ProofError::InvalidProof)
}

// from here, another proof can be verified using the same merlin instance
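The body of `prove` is collapsed in this hunk. For orientation, a minimal sketch of a Schnorr prover consistent with the signature above, using only the traits this commit touches; the `transcript()` accessor and the `UniformRand` draw are assumptions, not quotes from the file:

use ark_ec::CurveGroup;
use ark_std::UniformRand;
use nimue::plugins::ark::{FieldChallenges, FieldWriter, GroupWriter};
use nimue::{Arthur, DuplexHash, ProofResult};

#[allow(non_snake_case)]
fn prove_sketch<'a, H, G>(arthur: &'a mut Arthur<H>, P: G, x: G::ScalarField) -> ProofResult<&'a [u8]>
where
    H: DuplexHash,
    G: CurveGroup,
    Arthur<H>: GroupWriter<G> + FieldChallenges<G::ScalarField>,
{
    // Commitment: K = P * k, with k drawn from the transcript-bound RNG.
    let k = G::ScalarField::rand(arthur.rng());
    let K = P * k;
    arthur.add_points(&[K])?;
    // Challenge: c is squeezed from the same transcript.
    let [c]: [G::ScalarField; 1] = arthur.challenge_scalars()?;
    // Response: r = k + c * x; absorbing it completes the proof string.
    let r = k + c * x;
    arthur.add_scalars(&[r])?;
    Ok(arthur.transcript())
}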
4 changes: 2 additions & 2 deletions src/hash/legacy.rs
@@ -80,7 +80,7 @@ impl<D: BlockSizeUser + Digest + Clone + Reset> DigestBridge<D> {
let mut squeeze_hasher = D::new();
Digest::update(&mut squeeze_hasher, &Self::mask_squeeze_end());
Digest::update(&mut squeeze_hasher, &self.cv);
Digest::update(&mut squeeze_hasher, &byte_count.to_be_bytes());
Digest::update(&mut squeeze_hasher, byte_count.to_be_bytes());
self.cv = Digest::finalize(squeeze_hasher);

// set the sponge state in absorb mode
@@ -171,7 +171,7 @@ impl<D: BlockSizeUser + Digest + Clone + FixedOutputReset> DuplexHash<u8> for Di
} else if let Mode::Squeeze(i) = self.mode {
// Add the squeeze mask, current digest, and index
let mut output_hasher_prefix = self.hasher.clone();
Digest::update(&mut output_hasher_prefix, &i.to_be_bytes());
Digest::update(&mut output_hasher_prefix, i.to_be_bytes());
let digest = output_hasher_prefix.finalize();
// Copy the digest into the output, and store the rest for later
let chunk_len = usize::min(output.len(), Self::DIGEST_SIZE);
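Both hunks in this file make the same small fix: `Digest::update` accepts `impl AsRef<[u8]>`, and the `[u8; 8]` returned by `to_be_bytes()` already implements it, so the extra borrow is redundant. A standalone illustration with `sha2` (not `DigestBridge` itself):

use sha2::{Digest, Sha256};

fn main() {
    let byte_count: u64 = 7;
    let mut hasher = Sha256::new();
    // `to_be_bytes()` yields a `[u8; 8]`, which implements `AsRef<[u8]>`,
    // so it can be passed by value; `&byte_count.to_be_bytes()` also
    // compiles, but clippy flags the needless borrow.
    Digest::update(&mut hasher, byte_count.to_be_bytes());
    println!("{:x}", hasher.finalize());
}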
1 change: 1 addition & 0 deletions src/lib.rs
@@ -133,6 +133,7 @@ mod iopattern;
/// Verifier state and transcript deserialization.
mod merlin;
/// APIs for common zkp libraries.
#[cfg(any(feature = "ark", feature = "group"))]
pub mod plugins;
/// SAFE API.
mod safe;
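Since `plugins` is now compiled only under these features, downstream code that reaches into it needs a matching gate (or must enable the feature unconditionally in Cargo.toml). A hedged sketch of a gated caller, assuming the downstream crate forwards an `ark` feature of its own:

// Compiled out entirely unless the (caller-defined) `ark` feature is on,
// mirroring the gate that nimue now puts on `plugins`.
#[cfg(feature = "ark")]
fn schnorr_iopattern<G, H>() -> nimue::IOPattern<H>
where
    G: ark_ec::CurveGroup,
    H: nimue::DuplexHash,
    nimue::IOPattern<H>: nimue::plugins::ark::GroupIOPattern<G>,
{
    use nimue::plugins::ark::GroupIOPattern;
    nimue::IOPattern::new("example").add_points(1, "public key")
}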
15 changes: 5 additions & 10 deletions src/plugins/group/common.rs
@@ -5,18 +5,13 @@ use super::{FieldChallenges, FieldPublic};
use crate::plugins::bytes_uniform_modp;

fn from_bytes_mod_order<F: PrimeField>(bytes: &[u8]) -> F {
let two = F::ONE + F::ONE;
let basis = two.pow(&[64]);
let mut iterator = bytes.chunks_exact(8);
let mut acc = F::ZERO;
let basis = F::from(256);
let bytes = bytes.to_vec();

while let Some(chunk) = iterator.next() {
let chunk = u64::from_be_bytes(chunk.try_into().unwrap());
acc = acc * basis + F::from(chunk);
let mut acc = F::ZERO;
for byte in bytes {
acc = acc * basis + F::from(byte as u64);
}
let reminder = iterator.remainder();
let reminder = u64::from_be_bytes(reminder.try_into().unwrap());
acc = acc * basis + F::from(reminder);

acc
}
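The rewrite replaces 64-bit limb accumulation, whose trailing-chunk handling could panic (`try_into()` to `[u8; 8]` fails on a remainder shorter than eight bytes), with a byte-wise Horner loop: folding `acc = acc * 256 + byte` over big-endian bytes computes the same integer. A self-contained sketch of the same reduction over a toy modulus instead of a `PrimeField`:

/// Interpret `bytes` as a big-endian integer and reduce it mod `p`,
/// one byte at a time (Horner's rule in base 256).
fn from_bytes_mod_order_u128(bytes: &[u8], p: u128) -> u128 {
    let mut acc: u128 = 0;
    for &byte in bytes {
        acc = (acc * 256 + u128::from(byte)) % p;
    }
    acc
}

fn main() {
    let p = 251; // toy prime
    // [0x01, 0x02] is the big-endian encoding of 258, and 258 mod 251 = 7.
    assert_eq!(from_bytes_mod_order_u128(&[0x01, 0x02], p), 7);
}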
44 changes: 39 additions & 5 deletions src/plugins/group/writer.rs
@@ -1,14 +1,48 @@
use crate::DuplexHash;
use group::ff::PrimeField;
use group::{ff::PrimeField, Group, GroupEncoding};
use rand::{CryptoRng, RngCore};

use super::{FieldPublic, FieldWriter};
use crate::{Arthur, ProofResult};
use super::{FieldPublic, FieldWriter, GroupPublic, GroupWriter};
use crate::{Arthur, ByteTranscriptWriter, DuplexHash, ProofResult};

impl<F: PrimeField, H: DuplexHash, R: RngCore + CryptoRng> FieldWriter<F> for Arthur<H, R> {
impl<F, H, R> FieldWriter<F> for Arthur<H, R>
where
F: PrimeField,
H: DuplexHash,
R: RngCore + CryptoRng,
{
fn add_scalars(&mut self, input: &[F]) -> ProofResult<()> {
let serialized = self.public_scalars(input);
self.transcript.extend(serialized?);
Ok(())
}
}

impl<G, H, R, const N: usize> GroupPublic<G> for Arthur<H, R>
where
G: Group + GroupEncoding<Repr = [u8; N]>,
H: DuplexHash,
R: RngCore + CryptoRng,
{
type Repr = Vec<u8>;
fn public_points(&mut self, input: &[G]) -> crate::ProofResult<Self::Repr> {
let mut buf = Vec::new();
for p in input.iter() {
buf.extend_from_slice(&<G as GroupEncoding>::to_bytes(p));
}
self.add_bytes(&buf)?;
Ok(buf)
}
}

impl<G, H, R, const N: usize> GroupWriter<G> for Arthur<H, R>
where
G: Group + GroupEncoding<Repr = [u8; N]>,
H: DuplexHash,
R: RngCore + CryptoRng,
{
fn add_points(&mut self, input: &[G]) -> crate::ProofResult<()> {
let serialized = self.public_points(input);
self.transcript.extend(serialized?);
Ok(())
}
}
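A usage sketch of the new impls, assuming curve25519-dalek's `EdwardsPoint` (which, with dalek's `group` feature, implements `Group + GroupEncoding` with a 32-byte representation, as the test below relies on):

use group::Group;
use nimue::hash::Keccak;
use nimue::plugins::group::{GroupIOPattern, GroupWriter};
use nimue::{ByteIOPattern, ByteTranscript, IOPattern};

type G = curve25519_dalek::edwards::EdwardsPoint;

fn main() -> nimue::ProofResult<()> {
    let io = IOPattern::<Keccak>::new("doc-example")
        .add_points(1, "commitment")
        .challenge_bytes(16, "challenge");
    let mut arthur = io.to_arthur();
    // `add_points` compresses each point via `GroupEncoding::to_bytes`,
    // absorbs the encoding, and appends it to the transcript.
    arthur.add_points(&[G::generator()])?;
    let mut challenge = [0u8; 16];
    arthur.fill_challenge_bytes(&mut challenge)?;
    Ok(())
}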
66 changes: 54 additions & 12 deletions src/plugins/tests.rs
@@ -1,5 +1,9 @@
use ark_ec::PrimeGroup;
use ark_serialize::CanonicalSerialize;
use group::{Group, GroupEncoding};

use crate::hash::Keccak;
use crate::plugins;
use crate::{plugins, ByteIOPattern};
use crate::{ByteTranscript, DuplexHash, IOPattern};

fn group_iopattern<G, H>() -> IOPattern<H>
@@ -11,7 +15,10 @@ where
use plugins::group::{FieldIOPattern, GroupIOPattern};

IOPattern::new("github.com/mmaker/nimue")
.add_scalars(1, "com")
.challenge_bytes(16, "chal")
.add_points(1, "com")
.challenge_bytes(16, "chal")
.challenge_scalars(1, "chal")
}

@@ -24,12 +31,14 @@
use plugins::ark::{FieldIOPattern, GroupIOPattern};

IOPattern::new("github.com/mmaker/nimue")
.add_scalars(1, "com")
.challenge_bytes(16, "chal")
.add_points(1, "com")
.challenge_bytes(16, "chal")
.challenge_scalars(1, "chal")
}

/// Compatibility between arkworks and dalek can only be tested when handling scalars.
/// In fact, arkworks does not yet implement ristretto points as per `curve25519_dalek::ristretto::Ristretto`
// Check that the transcripts generated using the Group trait can be compatible with transcripts generated using dalek.
#[test]
fn test_compatible_ark_dalek() {
type ArkG = ark_curve25519::EdwardsProjective;
@@ -39,23 +48,56 @@ fn test_compatible_ark_dalek() {
type GroupF = curve25519_dalek::scalar::Scalar;
let ark_scalar = ArkF::from(0x42);
let dalek_scalar = GroupF::from(0x42u64);
// ***IMPORTANT***
// dalek and arkworks use different generator points,
// hence the negation of dalek's generator below.
let ark_generator = ArkG::generator();
let dalek_generator = -GroupG::generator();

// **basic checks**
// Check point encoding is the same in both libraries.
let mut ark_generator_bytes = Vec::new();
ark_generator.serialize_compressed(&mut ark_generator_bytes).unwrap();
let dalek_generator_bytes = <GroupG as GroupEncoding>::to_bytes(&dalek_generator);
assert_eq!(&ark_generator_bytes, &dalek_generator_bytes);
// Check scalar encoding is the same in both libraries.
let mut ark_scalar_bytes = Vec::new();
ark_scalar.serialize_compressed(&mut ark_scalar_bytes).unwrap();
let dalek_scalar_bytes = dalek_scalar.to_bytes();
assert_eq!(&ark_scalar_bytes, &dalek_scalar_bytes);

let ark_point = ark_generator * ark_scalar;
let dalek_point = dalek_generator * dalek_scalar;

let ark_io = ark_iopattern::<ArkG, Keccak>();
let dalek_io = group_iopattern::<GroupG, Keccak>();
let mut ark_chal = [0u8; 16];
let mut dalek_chal = [0u8; 16];

// Check that the IO Patterns are the same.
let mut ark_prover = ark_io.to_arthur();
let mut dalek_prover = dalek_io.to_arthur();
assert_eq!(ark_io.as_bytes(), dalek_io.as_bytes());

let mut ark_challenges = [0u8; 16];
let mut ark_prover = ark_io.to_arthur();
// Check that scalars absorption leads to the same transcript.
plugins::ark::FieldWriter::add_scalars(&mut ark_prover, &[ark_scalar]).unwrap();
ark_prover
.fill_challenge_bytes(&mut ark_challenges)
.unwrap();

let mut dalek_chal = [0u8; 16];
let mut dalek_prover = dalek_io.to_arthur();
ark_prover.fill_challenge_bytes(&mut ark_chal).unwrap();
plugins::group::FieldWriter::add_scalars(&mut dalek_prover, &[dalek_scalar]).unwrap();
dalek_prover.fill_challenge_bytes(&mut dalek_chal).unwrap();
assert_eq!(ark_chal, dalek_chal);

// Check that points absorption leads to the same transcript.
plugins::ark::GroupWriter::add_points(&mut ark_prover, &[ark_point]).unwrap();
ark_prover.fill_challenge_bytes(&mut ark_chal).unwrap();
plugins::group::GroupWriter::add_points(&mut dalek_prover, &[dalek_point]).unwrap();
dalek_prover.fill_challenge_bytes(&mut dalek_chal).unwrap();
assert_eq!(ark_chal, dalek_chal);

// Check that scalars challenges are interpreted in the same way from bytes.
let [ark_chal_scalar]: [ArkF; 1] = plugins::ark::FieldChallenges::challenge_scalars(&mut ark_prover).unwrap();
let [dalek_chal_scalar]: [GroupF; 1] = plugins::group::FieldChallenges::challenge_scalars(&mut dalek_prover).unwrap();
let mut ark_scalar_bytes = Vec::new();
ark_chal_scalar.serialize_compressed(&mut ark_scalar_bytes).unwrap();
let dalek_scalar_bytes = dalek_chal_scalar.to_bytes();
assert_eq!(&ark_scalar_bytes, &dalek_scalar_bytes);

assert_eq!(ark_challenges, dalek_chal);
}
10 changes: 5 additions & 5 deletions src/plugins/traits.rs
@@ -8,7 +8,7 @@ macro_rules! field_traits {
}

pub trait FieldChallenges<F: $Field> {
fn fill_challenge_scalars(&mut self, output: &mut [F]) -> crate::ProofResult<()>;
fn fill_challenge_scalars(&mut self, output: &mut [F]) -> $crate::ProofResult<()>;

fn challenge_scalars<const N: usize>(&mut self) -> crate::ProofResult<[F; N]> {
let mut output = [F::default(); N];
@@ -44,21 +44,21 @@ macro_rules! group_traits {
}

pub trait GroupWriter<G: $Group>: FieldWriter<$ScalarField> {
fn add_points(&mut self, input: &[G]) -> crate::ProofResult<()>;
fn add_points(&mut self, input: &[G]) -> $crate::ProofResult<()>;
}

pub trait GroupReader<G: $Group + Default>: FieldReader<$ScalarField> {
fn fill_next_points(&mut self, output: &mut [G]) -> crate::ProofResult<()>;
fn fill_next_points(&mut self, output: &mut [G]) -> $crate::ProofResult<()>;

fn next_points<const N: usize>(&mut self) -> crate::ProofResult<[G; N]> {
fn next_points<const N: usize>(&mut self) -> $crate::ProofResult<[G; N]> {
let mut output = [G::default(); N];
self.fill_next_points(&mut output).map(|()| output)
}
}

pub trait GroupPublic<G: $Group> {
type Repr;
fn public_points(&mut self, input: &[G]) -> crate::ProofResult<Self::Repr>;
fn public_points(&mut self, input: &[G]) -> $crate::ProofResult<Self::Repr>;
}
};
}
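The repeated `crate::` → `$crate::` change is a macro-hygiene fix: inside `macro_rules!`, a plain `crate::` path resolves in whichever crate expands the macro, while the special `$crate` metavariable always resolves to the crate that defines it — future-proofing if these macros are ever expanded outside nimue. A minimal illustration, unrelated to nimue's actual items:

pub struct MyError;

#[macro_export]
macro_rules! make_error {
    () => {
        // `$crate::MyError` names the defining crate's type even when the
        // macro is expanded inside a downstream crate; `crate::MyError`
        // would be looked up in the caller's crate and fail to resolve.
        $crate::MyError
    };
}

fn main() {
    let _e = make_error!();
}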
5 changes: 3 additions & 2 deletions src/safe.rs
@@ -103,9 +103,10 @@ impl<U: Unit, H: DuplexHash<U>> Safe<H, U> {
Some(op) => {
self.stack.clear();
Err(format!(
"Invalid tag. Got {:?}, expected {:?}",
"Invalid tag. Got {:?}, expected {:?}. The stack remaining is: {:?}",
Op::Squeeze(output.len()),
op
op,
self.stack
)
.into())
}
