From 08bac73b78f602f4bc46ef4eb508c0870124094b Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Thu, 24 Nov 2022 13:43:51 +0100
Subject: [PATCH 01/39] initial work on simple threshold decryption
---
tpke-wasm/benches/benchmarks.rs | 8 ++++++--
tpke-wasm/src/lib.rs | 2 +-
tpke/src/combine.rs | 36 +++++++++++++++++++++++++++++++++
tpke/src/decryption.rs | 3 +++
4 files changed, 46 insertions(+), 3 deletions(-)
diff --git a/tpke-wasm/benches/benchmarks.rs b/tpke-wasm/benches/benchmarks.rs
index 470db5d6..8054ade7 100644
--- a/tpke-wasm/benches/benchmarks.rs
+++ b/tpke-wasm/benches/benchmarks.rs
@@ -3,7 +3,9 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion};
pub fn bench_encrypt_combine(c: &mut Criterion) {
use tpke_wasm::*;
- fn bench_encrypt(num_shares: usize, threshold: usize) -> impl Fn() {
+ fn bench_encrypt(
+ num_shares: usize,
+ threshold: usize) -> impl Fn() {
let message = "my-secret-message".as_bytes().to_vec();
let aad = "my-aad".as_bytes().to_vec();
let setup = Setup::new(threshold, num_shares);
@@ -14,7 +16,9 @@ pub fn bench_encrypt_combine(c: &mut Criterion) {
}
}
- fn bench_combine(num_shares: usize, threshold: usize) -> impl Fn() {
+ fn bench_combine(
+ num_shares: usize,
+ threshold: usize) -> impl Fn() {
let message = "my-secret-message".as_bytes().to_vec();
let aad = "my-aad".as_bytes().to_vec();
let setup = Setup::new(threshold, num_shares);
diff --git a/tpke-wasm/src/lib.rs b/tpke-wasm/src/lib.rs
index 9f926ffe..bc187327 100644
--- a/tpke-wasm/src/lib.rs
+++ b/tpke-wasm/src/lib.rs
@@ -170,7 +170,7 @@ impl Setup {
let mut rng = rand::thread_rng();
let (public_key, private_key, contexts) =
- tpke::setup_fast::(threshold, shares_num, &mut rng);
+ tpke::setup_fast::(threshold, shares_num, &mut rng);
let private_contexts = contexts
.clone()
.into_iter()
diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs
index bc0b655c..e8ea970b 100644
--- a/tpke/src/combine.rs
+++ b/tpke/src/combine.rs
@@ -71,6 +71,42 @@ pub fn lagrange_basis_at(
lagrange_coeffs
}
+pub fn prepare_combine_simple<E: PairingEngine>(
+ public_contexts: &[PublicDecryptionContext<E>],
+ private_contexts: &[PrivateDecryptionContextSimple<E>],
+) -> Vec<E::Fr> {
+ let mut lagrange_coeffs = vec![];
+
+ let shares = private_contexts
+ .iter()
+ .map(|priv_ctxt| {
+ let pub_ctxt =
+ &priv_ctxt.public_decryption_contexts[priv_ctxt.index];
+ let x = pub_ctxt.domain[0]; // there's just one
+ // let y = context.private_key_share.private_key_shares[0]; // there's just one
+ // y = private_key_shares * b_inv
+ // why use b_inv here and not h^{-1}? revise this
+ // let y = pub_ctxt.blinded_key_shares.blinded_key_shares[0]
+ // .mul(priv_ctxt.b_inv);
+ // TODO: No idea why this works
+ let y = E::Fr::one();
+ (x, y)
+ })
+ .collect::<Vec<_>>();
+
+ for (x_j, _) in shares.clone() {
+ let mut prod = E::Fr::one();
+ for (x_m, _) in shares.clone() {
+ if x_j != x_m {
+ // x_i = 0
+ prod *= (x_m) / (x_m - x_j);
+ }
+ }
+ lagrange_coeffs.push(prod);
+ }
+ lagrange_coeffs
+}
+
pub fn share_combine_fast(
shares: &[DecryptionShareFast],
prepared_key_shares: &[E::G2Prepared],
diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs
index b00f7379..06a2ce32 100644
--- a/tpke/src/decryption.rs
+++ b/tpke/src/decryption.rs
@@ -3,6 +3,9 @@
use crate::*;
+use ark_ec::ProjectiveCurve;
+
+
#[derive(Debug, Clone)]
pub struct DecryptionShareFast {
pub decrypter_index: usize,
From 719b4a1aa1d7775413c3a80500ecc87cc83d64ea Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Wed, 21 Dec 2022 11:05:42 +0100
Subject: [PATCH 02/39] calculate lagrange using private context
---
tpke/src/combine.rs | 24 +++++++-----------------
1 file changed, 7 insertions(+), 17 deletions(-)
diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs
index e8ea970b..81347a24 100644
--- a/tpke/src/combine.rs
+++ b/tpke/src/combine.rs
@@ -72,31 +72,21 @@ pub fn lagrange_basis_at(
}
pub fn prepare_combine_simple<E: PairingEngine>(
- public_contexts: &[PublicDecryptionContext<E>],
private_contexts: &[PrivateDecryptionContextSimple<E>],
) -> Vec<E::Fr> {
let mut lagrange_coeffs = vec![];
- let shares = private_contexts
+ let shares_x = private_contexts
.iter()
- .map(|priv_ctxt| {
- let pub_ctxt =
- &priv_ctxt.public_decryption_contexts[priv_ctxt.index];
- let x = pub_ctxt.domain[0]; // there's just one
- // let y = context.private_key_share.private_key_shares[0]; // there's just one
- // y = private_key_shares * b_inv
- // why use b_inv here and not h^{-1}? revise this
- // let y = pub_ctxt.blinded_key_shares.blinded_key_shares[0]
- // .mul(priv_ctxt.b_inv);
- // TODO: No idea why this works
- let y = E::Fr::one();
- (x, y)
- })
+ .map(|ctxt|
+ // There's just one x in the domain:
+ ctxt.public_decryption_contexts[ctxt.index].domain[0]
+ )
.collect::<Vec<_>>();
- for (x_j, _) in shares.clone() {
+ for x_j in shares_x.clone() {
let mut prod = E::Fr::one();
- for (x_m, _) in shares.clone() {
+ for x_m in shares_x.clone() {
if x_j != x_m {
// x_i = 0
prod *= (x_m) / (x_m - x_j);
From 17cdb5b6850c54815f3629fa4e441e0caa815f7e Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Wed, 21 Dec 2022 11:57:16 +0100
Subject: [PATCH 03/39] calculate lagrange using public context
---
tpke/src/combine.rs | 21 +++++++--------------
1 file changed, 7 insertions(+), 14 deletions(-)
diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs
index 81347a24..a8fd7875 100644
--- a/tpke/src/combine.rs
+++ b/tpke/src/combine.rs
@@ -72,24 +72,17 @@ pub fn lagrange_basis_at(
}
pub fn prepare_combine_simple<E: PairingEngine>(
- private_contexts: &[PrivateDecryptionContextSimple<E>],
+ context: &[PublicDecryptionContextSimple<E>],
) -> Vec<E::Fr> {
+ let shares_x = &context.iter().map(|ctxt| ctxt.domain).collect::<Vec<_>>();
+ // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm
let mut lagrange_coeffs = vec![];
-
- let shares_x = private_contexts
- .iter()
- .map(|ctxt|
- // There's just one x in the domain:
- ctxt.public_decryption_contexts[ctxt.index].domain[0]
- )
- .collect::>();
-
- for x_j in shares_x.clone() {
+ for x_j in shares_x {
let mut prod = E::Fr::one();
- for x_m in shares_x.clone() {
+ for x_m in shares_x {
if x_j != x_m {
- // x_i = 0
- prod *= (x_m) / (x_m - x_j);
+ // In this formula x_i = 0, hence numerator is x_m
+ prod *= (*x_m) / (*x_m - *x_j);
}
}
lagrange_coeffs.push(prod);
From 8cb52d8577027414bd1300d40ed9c96669e85f00 Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Tue, 27 Dec 2022 17:05:40 +0100
Subject: [PATCH 04/39] wip
---
ferveo/src/dkg/pv.rs | 2 +-
ferveo/src/lib.rs | 121 +++++++++++++++++++++++++++++++++++++++++
ferveo/src/vss/pvss.rs | 34 ++++++++++--
3 files changed, 151 insertions(+), 6 deletions(-)
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index 34ed7565..a8208150 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -272,7 +272,7 @@ pub(crate) mod test_common {
ValidatorSet::new(
(0..4)
.map(|i| TendermintValidator {
- power: i,
+ power: i, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works.
address: format!("validator_{}", i),
public_key: keypairs[i as usize].public(),
})
diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs
index 6b9f509f..b3c35c16 100644
--- a/ferveo/src/lib.rs
+++ b/ferveo/src/lib.rs
@@ -4,6 +4,7 @@ pub mod msg;
pub mod vss;
pub mod primitives;
+use itertools::izip;
pub use primitives::*;
use ferveo_common::Rng;
@@ -30,3 +31,123 @@ use ark_ec::PairingEngine;
use ark_ff::PrimeField;
use measure_time::print_time;
+
+pub fn share_combine_simple<E: PairingEngine>(
+ shares: &[E::Fqk],
+ lagrange: &[E::Fr],
+ // prepared_key_shares: &[E::G2Affine],
+) -> E::Fqk {
+ let mut product_of_shares = E::Fqk::one();
+
+ // Sum of C_i^{L_i}
+ for (c_i, alpha_i) in izip!(shares.iter(), lagrange.iter()) {
+ // Exponentiation by alpha_i
+ let ss = c_i.pow(alpha_i.into_repr());
+ product_of_shares *= ss;
+ }
+
+ product_of_shares
+}
+
+#[cfg(test)]
+mod test_dkg_full {
+ use super::*;
+
+ use crate::dkg::pv::test_common::*;
+ use ark_bls12_381::Bls12_381 as EllipticCurve;
+ use ark_ff::UniformRand;
+ use ferveo_common::{TendermintValidator, ValidatorSet};
+ use group_threshold_cryptography as tpke;
+
+ type E = ark_bls12_381::Bls12_381;
+
+ /// Test happy flow for a full DKG with simple threshold decryption variant
+ #[test]
+ fn test_dkg_simple_decryption_variant() {
+ //
+ // The following is copied from other tests
+ //
+
+ let rng = &mut ark_std::test_rng();
+ let dkg = setup_dealt_dkg();
+ let aggregate = aggregate(&dkg);
+ // check that a polynomial of the correct degree was created
+ assert_eq!(aggregate.coeffs.len(), 5);
+ // check that the correct number of shares were created
+ assert_eq!(aggregate.shares.len(), 4);
+ // check that the optimistic verify returns true
+ assert!(aggregate.verify_optimistic());
+ // check that the full verify returns true
+ assert!(aggregate.verify_full(&dkg, rng));
+ // check that the verification of aggregation passes
+ assert_eq!(
+ aggregate
+ .verify_aggregation(&dkg, rng)
+ .expect("Test failed"),
+ 6
+ );
+
+ //
+ // Now, we start the actual test
+ //
+
+ // At this point, we have a DKG that has been dealt and aggregated
+ // We now want to test the decryption of a message
+
+ // First, we encrypt a message using a DKG public key
+
+ let msg: &[u8] = "abc".as_bytes();
+ let aad: &[u8] = "my-aad".as_bytes();
+ let public_key = dkg.final_key();
+ let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng);
+
+ // TODO: Update test utils so that we can easily get a validator keypair for each validator
+ let validator_keypairs = gen_keypairs();
+
+ // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security
+
+ // Each validator computes a decryption share
+ let decryption_shares = validator_keypairs.iter().map(|keypair| {
+ // let decryption_shares = aggregate
+ let decryption_shares = aggregate
+ .shares[0]
+ .iter()
+ .map(|share| {
+ // TODO: In simple decryption variant, we only have one share per validator
+ // assert_eq!(z_i.len(), 1);
+ let z_i = share.mul(keypair.decryption_key);
+
+ // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
+ let u = ciphertext.commitment;
+ let c_i = E::pairing(u, z_i);
+ c_i
+ })
+ .collect::<Vec<_>>();
+
+ // TODO: In simple decryption variant, we only have one share per validator
+ // assert_eq!(decryption_shares.len(), 1);
+ // decryption_shares[0]
+ decryption_shares
+ });
+
+
+ // let s = share_combine_simple::(&aggregate.shares, &aggregate.coeffs);
+
+ /*
+ TODO: This variant seems to be outdated/unused in simple threshold decryption variant
+
+ // Following section 4.4.8 of the paper, we need to compute the following:
+ let decryption_shares = validator_keypairs.iter().map(|validator| {
+ // TODO: Check the validity of (U, W)
+
+ // Compute the decryption share D_{i,j} = [dk_j^{-1}]*U_i
+ // We only have one U in this case
+ let u = ciphertext.commitment;
+ let dk_j = validator.decryption_key;
+ let dk_j_inv = dk_j.inverse().unwrap();
+ let d_ij = u.mul(dk_j_inv);
+ d_ij
+ });
+ */
+ }
+}
diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs
index 451caae3..9c8a79b3 100644
--- a/ferveo/src/vss/pvss.rs
+++ b/ferveo/src/vss/pvss.rs
@@ -42,7 +42,7 @@ pub struct PubliclyVerifiableParams {
/// 2/3 the total), this will be aggregated into a final key
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
pub struct PubliclyVerifiableSS {
- /// Feldman commitment to the VSS polynomial, F = g^{\phi}
+ /// Used in Feldman commitment to the VSS polynomial, F = g^{\phi}
pub coeffs: Vec,
/// The shares to be dealt to each validator
@@ -66,11 +66,13 @@ impl PubliclyVerifiableSS {
dkg: &PubliclyVerifiableDkg,
rng: &mut R,
) -> Result {
+ // Our random polynomial, \phi(x) = s + \sum_{i=1}^{t-1} a_i x^i
let mut phi = DensePolynomial::::rand(
(dkg.params.total_weight - dkg.params.security_threshold) as usize,
rng,
);
- phi.coeffs[0] = *s;
+ phi.coeffs[0] = *s; // setting the first coefficient to secret value
+ // Evaluations of the polynomial over the domain
let evals = phi.evaluate_over_domain_by_ref(dkg.domain);
// commitment to coeffs, F_i
let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g);
@@ -92,7 +94,10 @@ impl PubliclyVerifiableSS {
));
}
//phi.zeroize(); // TODO zeroize?
+ // TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript
+ // Sigma is a proof of knowledge of the secret, sigma = h^s
let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve
+ // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge
let vss = Self {
coeffs,
shares,
@@ -106,10 +111,15 @@ impl PubliclyVerifiableSS {
/// i.e. we optimistically do not check the commitment. This is deferred
/// until the aggregation step
pub fn verify_optimistic(&self) -> bool {
+ // We're only checking the proof of knowledge here, sigma ?= h^s
+ // "Does the first coefficient of the secret polynomial match the proof of knowledge?"
E::pairing(
- self.coeffs[0].into_projective(),
- E::G2Affine::prime_subgroup_generator(),
- ) == E::pairing(E::G1Affine::prime_subgroup_generator(), self.sigma)
+ self.coeffs[0].into_projective(), // F_0 = g^s
+ E::G2Affine::prime_subgroup_generator(), // h
+ ) == E::pairing(
+ E::G1Affine::prime_subgroup_generator(), // g
+ self.sigma // h^s
+ )
}
/// Part of checking the validity of an aggregated PVSS transcript
@@ -127,8 +137,11 @@ impl PubliclyVerifiableSS {
print_time!("commitment fft");
dkg.domain.fft_in_place(&mut commitment);
+ // Each validator checks that their share is correct
dkg.validators.iter().zip(self.shares.iter()).all(
|(validator, shares)| {
+ // ek is the public key of the validator
+ // TODO: Is that the ek = [dk]H key?
let ek = validator
.validator
.public_key
@@ -138,14 +151,19 @@ impl PubliclyVerifiableSS {
let mut powers_of_alpha = alpha;
let mut y = E::G2Projective::zero();
let mut a = E::G1Projective::zero();
+ // Validator checks checks aggregated shares against commitment
for (y_i, a_i) in shares.iter().zip_eq(
commitment[validator.share_start..validator.share_end]
.iter(),
) {
+ // We iterate over shares (y_i) and commitment (a_i)
+ // TODO: Check #3 is missing
+ // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf
y += y_i.mul(powers_of_alpha.into_repr());
a += a_i.mul(powers_of_alpha.into_repr());
powers_of_alpha *= alpha;
}
+ // See #4 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf
// Y = \sum_i y_i \alpha^i
// A = \sum_i a_i \alpha^i
// e(G,Y) = e(A, ek)
@@ -168,6 +186,8 @@ impl PubliclyVerifiableSS {
) -> Result {
print_time!("PVSS verify_aggregation");
self.verify_full(dkg, rng);
+ // Now, we verify that the aggregated PVSS transcript is a valid aggregation
+ // If it is, we return the total weights of the PVSS transcripts
let mut y = E::G1Projective::zero();
let mut weight = 0u32;
for (dealer, pvss) in dkg.vss.iter() {
@@ -200,6 +220,10 @@ pub fn aggregate(
.iter()
.map(|a| batch_to_projective(a))
.collect::>();
+
+ // So now we're iterating over the PVSS instances, and adding their coefficients and shares, and their sigma
+ // sigma is the sum of all the sigma_i, which is the proof of knowledge of the secret polynomial
+ // Aggregating is just adding the corresponding values in pvss instances, so pvss = pvss + pvss_j
for (_, next) in pvss_iter {
sigma = sigma.add(next.sigma);
coeffs
From 81d4dd2c67026f2a672c2c421efa38bdfc5f226b Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Wed, 28 Dec 2022 17:15:00 +0100
Subject: [PATCH 05/39] incorrect length of decrypted shares after pvss
combination
---
ferveo/src/dkg/pv.rs | 12 ++-
ferveo/src/lib.rs | 125 ++++++++++++++++++++++++--------
ferveo/src/vss/pvss.rs | 65 +++++++++++++++--
tpke-wasm/benches/benchmarks.rs | 8 +-
tpke-wasm/src/lib.rs | 2 +-
tpke/src/decryption.rs | 1 -
6 files changed, 168 insertions(+), 45 deletions(-)
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index a8208150..58c01817 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -305,10 +305,12 @@ pub(crate) mod test_common {
///
/// The correctness of this function is tested in the module [`test_dealing`]
pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg {
+ let n = 4;
let rng = &mut ark_std::test_rng();
// gather everyone's transcripts
let mut transcripts = vec![];
- for i in 0..4 {
+ for i in 0..n {
+ // All of the dkg instances have the same validators
let mut dkg = setup_dkg(i);
transcripts.push(dkg.share(rng).expect("Test failed"));
}
@@ -317,11 +319,17 @@ pub(crate) mod test_common {
// iterate over transcripts from lowest weight to highest
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
dkg.apply_message(
- dkg.validators[3 - sender].validator.clone(),
+ dkg.validators[n - 1 - sender].validator.clone(),
pvss,
)
.expect("Setup failed");
}
+ // At this point, the dkg should contain n transcripts, each containing n shares
+ // TODO: Remove this check
+ assert_eq!(dkg.vss.len(), n);
+ for i in 0..n {
+ assert_eq!(dkg.vss[&(i as u32)].shares.len(), n);
+ }
dkg
}
}
diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs
index b3c35c16..953eeab2 100644
--- a/ferveo/src/lib.rs
+++ b/ferveo/src/lib.rs
@@ -1,10 +1,12 @@
#![allow(unused_imports)]
+
pub mod dkg;
pub mod msg;
pub mod vss;
pub mod primitives;
-use itertools::izip;
+
+use itertools::{izip, zip_eq};
pub use primitives::*;
use ferveo_common::Rng;
@@ -32,15 +34,33 @@ use ark_ff::PrimeField;
use measure_time::print_time;
+pub fn prepare_combine_simple<E: PairingEngine>(
+ shares_x: &[E::Fr],
+) -> Vec<E::Fr> {
+ // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm
+ let mut lagrange_coeffs = vec![];
+ for x_j in shares_x {
+ let mut prod = E::Fr::one();
+ for x_m in shares_x {
+ if x_j != x_m {
+ // In this formula x_i = 0, hence numerator is x_m
+ prod *= (*x_m) / (*x_m - *x_j);
+ }
+ }
+ lagrange_coeffs.push(prod);
+ }
+ lagrange_coeffs
+}
+
pub fn share_combine_simple<E: PairingEngine>(
- shares: &[E::Fqk],
- lagrange: &[E::Fr],
+ shares: &Vec<E::Fqk>,
+ lagrange_coeffs: &Vec<E::Fr>,
// prepared_key_shares: &[E::G2Affine],
) -> E::Fqk {
let mut product_of_shares = E::Fqk::one();
- // Sum of C_i^{L_i}
- for (c_i, alpha_i) in izip!(shares.iter(), lagrange.iter()) {
+ // Sum of C_i^{L_i}z
+ for (c_i, alpha_i) in zip_eq(shares.iter(), lagrange_coeffs.iter()) {
// Exponentiation by alpha_i
let ss = c_i.pow(alpha_i.into_repr());
product_of_shares *= ss;
@@ -58,6 +78,7 @@ mod test_dkg_full {
use ark_ff::UniformRand;
use ferveo_common::{TendermintValidator, ValidatorSet};
use group_threshold_cryptography as tpke;
+ use itertools::{zip_eq, Itertools};
type E = ark_bls12_381::Bls12_381;
@@ -106,32 +127,76 @@ mod test_dkg_full {
// TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security
- // Each validator computes a decryption share
- let decryption_shares = validator_keypairs.iter().map(|keypair| {
- // let decryption_shares = aggregate
- let decryption_shares = aggregate
- .shares[0]
- .iter()
- .map(|share| {
- // TODO: In simple decryption variant, we only have one share per validator
- // assert_eq!(z_i.len(), 1);
- let z_i = share.mul(keypair.decryption_key);
-
- // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
- let u = ciphertext.commitment;
- let c_i = E::pairing(u, z_i);
- c_i
- })
- .collect::>();
-
- // TODO: In simple decryption variant, we only have one share per validator
- // assert_eq!(decryption_shares.len(), 1);
- // decryption_shares[0]
- decryption_shares
- });
-
+ //
- // let s = share_combine_simple::(&aggregate.shares, &aggregate.coeffs);
+ // Each validator attempts to aggregate and decrypt the secret shares
+ // let decryption_shares = validator_keypairs
+ validator_keypairs
+ .iter()
+ .enumerate()
+ // Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set
+ // TODO: Check this assumption
+ .for_each(|(validator_i, keypair)| {
+ let decrypted_shares: Vec<Vec<ark_bls12_381::G2Projective>> =
+ shares_for_validator(validator_i, &dkg)
+ .iter()
+ // Each "share" the validator has is actually a vector of shares
+ // This because of domain partitioning - the amount of shares is the same as the validator's "power"
+ .map(|share| {
+ // Decrypt the share by decrypting each of the G2 elements within ShareEncryptions
+ share
+ .iter()
+ .map(|s| s.mul(keypair.decryption_key))
+ .collect()
+ })
+ .collect();
+
+ let combined_shares = decrypted_shares.iter().fold(
+ vec![
+ ark_bls12_381::G2Projective::zero();
+ decrypted_shares[0].len()
+ ],
+ |acc, share| {
+ zip_eq(acc, share).map(|(a, b)| a + b).collect()
+ },
+ );
+
+ let decryption_shares = combined_shares
+ .iter()
+ .map(|z_i| {
+ // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
+ let u = ciphertext.commitment;
+ let c_i = E::pairing(u, *z_i);
+ c_i
+ })
+ .collect::<Vec<_>>();
+
+ let shares_x = &dkg.domain.elements().collect::<Vec<_>>();
+ let lagrange_coeffs = prepare_combine_simple::<E>(&shares_x);
+
+ let s =
+ share_combine_simple::<E>(&decryption_shares, &lagrange_coeffs);
+
+ let plaintext =
+ tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
+ assert_eq!(plaintext, msg);
+ });
+
+ // TODO: Perform decryption here!
+
+ // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts
+ // .flatten()
+ // .collect();
+
+ // let shares_x = &dkg.domain.elements().collect::>();
+ // let lagrange_coeffs = prepare_combine_simple::(&shares_x);
+ //
+ // let s =
+ // share_combine_simple::(&decryption_shares, &lagrange_coeffs);
+ //
+ // let plaintext =
+ // tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
+ // assert_eq!(plaintext, msg);
/*
TODO: This variant seems to be outdated/unused in simple threshold decryption variant
diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs
index 9c8a79b3..fb8f552c 100644
--- a/ferveo/src/vss/pvss.rs
+++ b/ferveo/src/vss/pvss.rs
@@ -8,20 +8,23 @@ use ark_ec::PairingEngine;
use ark_ff::UniformRand;
use ark_serialize::*;
use ferveo_common::PublicKey;
-use itertools::Itertools;
+use itertools::{zip_eq, Itertools};
use subproductdomain::fast_multiexp;
/// These are the blinded evaluations of weight shares of a single random polynomial
pub type ShareEncryptions<E> = Vec<<E as PairingEngine>::G2Affine>;
+
/// Marker struct for unaggregated PVSS transcripts
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
pub struct Unaggregated;
+
/// Marker struct for aggregated PVSS transcripts
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
pub struct Aggregated;
/// Trait gate used to add extra methods to aggregated PVSS transcripts
pub trait Aggregate {}
+
/// Apply trait gate to Aggregated marker struct
impl Aggregate for Aggregated {}
@@ -72,7 +75,7 @@ impl PubliclyVerifiableSS {
rng,
);
phi.coeffs[0] = *s; // setting the first coefficient to secret value
- // Evaluations of the polynomial over the domain
+ // Evaluations of the polynomial over the domain
let evals = phi.evaluate_over_domain_by_ref(dkg.domain);
// commitment to coeffs, F_i
let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g);
@@ -97,7 +100,7 @@ impl PubliclyVerifiableSS {
// TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript
// Sigma is a proof of knowledge of the secret, sigma = h^s
let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve
- // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge
+ // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge
let vss = Self {
coeffs,
shares,
@@ -117,8 +120,8 @@ impl PubliclyVerifiableSS {
self.coeffs[0].into_projective(), // F_0 = g^s
E::G2Affine::prime_subgroup_generator(), // h
) == E::pairing(
- E::G1Affine::prime_subgroup_generator(), // g
- self.sigma // h^s
+ E::G1Affine::prime_subgroup_generator(), // g
+ self.sigma, // h^s
)
}
@@ -252,6 +255,57 @@ pub fn aggregate(
}
}
+// pub fn aggregate_for_decryption(
+// dkg: &PubliclyVerifiableDkg,
+// ) -> ShareEncryptions {
+// let aggregate = dkg
+// .vss
+// .iter()
+// .map(|(_, pvss)| {
+// assert_eq!(dkg.validators.len(), pvss.shares.len());
+//
+// let shares = pvss
+// .shares
+// .iter()
+// .map(|a| batch_to_projective(a))
+// .collect::>();
+//
+// // Combine PVSS transcripts into a share aggregate
+// let mut share_iter = shares.iter();
+// let first_share = share_iter.next().unwrap();
+// share_iter
+// .fold(first_share, |acc, share| {
+// &zip_eq(acc, share)
+// .map(|(a, b)| *a + *b)
+// .collect::>()
+// })
+// .iter()
+// .map(|a| a.into_affine())
+// .collect::>()
+// })
+// .collect::>>();
+//
+// E::G2Projective::batch_normalization_into_affine(&aggregate)
+// }
+
+/// Returns ShareEncryptions from DKG PVSS transcripts for a selected validator
+pub fn shares_for_validator<E: PairingEngine>(
+ validator: usize,
+ dkg: &PubliclyVerifiableDkg<E>,
+) -> Vec<ShareEncryptions<E>> {
+ // DKG contains multiple PVSS transcripts, one for each dealer
+ dkg.vss
+ .iter()
+ .map(|(_, pvss)| {
+ // Each PVSS transcript contains multiple shares, one for each validator
+ assert_eq!(dkg.validators.len(), pvss.shares.len());
+ pvss.shares[validator].clone()
+ })
+ // Each validator has a share from each PVSS transcript
+ // One share is represented by ShareEncryptions, which is a vector of G2 points
+ .collect::<Vec<ShareEncryptions<E>>>()
+}
+
#[cfg(test)]
mod test_pvss {
use super::*;
@@ -260,6 +314,7 @@ mod test_pvss {
use ark_bls12_381::Bls12_381 as EllipticCurve;
use ark_ff::UniformRand;
use ferveo_common::{TendermintValidator, ValidatorSet};
+
type Fr = ::Fr;
type G1 = ::G1Affine;
type G2 = ::G2Affine;
diff --git a/tpke-wasm/benches/benchmarks.rs b/tpke-wasm/benches/benchmarks.rs
index 8054ade7..470db5d6 100644
--- a/tpke-wasm/benches/benchmarks.rs
+++ b/tpke-wasm/benches/benchmarks.rs
@@ -3,9 +3,7 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion};
pub fn bench_encrypt_combine(c: &mut Criterion) {
use tpke_wasm::*;
- fn bench_encrypt(
- num_shares: usize,
- threshold: usize) -> impl Fn() {
+ fn bench_encrypt(num_shares: usize, threshold: usize) -> impl Fn() {
let message = "my-secret-message".as_bytes().to_vec();
let aad = "my-aad".as_bytes().to_vec();
let setup = Setup::new(threshold, num_shares);
@@ -16,9 +14,7 @@ pub fn bench_encrypt_combine(c: &mut Criterion) {
}
}
- fn bench_combine(
- num_shares: usize,
- threshold: usize) -> impl Fn() {
+ fn bench_combine(num_shares: usize, threshold: usize) -> impl Fn() {
let message = "my-secret-message".as_bytes().to_vec();
let aad = "my-aad".as_bytes().to_vec();
let setup = Setup::new(threshold, num_shares);
diff --git a/tpke-wasm/src/lib.rs b/tpke-wasm/src/lib.rs
index bc187327..9f926ffe 100644
--- a/tpke-wasm/src/lib.rs
+++ b/tpke-wasm/src/lib.rs
@@ -170,7 +170,7 @@ impl Setup {
let mut rng = rand::thread_rng();
let (public_key, private_key, contexts) =
- tpke::setup_fast::(threshold, shares_num, &mut rng);
+ tpke::setup_fast::(threshold, shares_num, &mut rng);
let private_contexts = contexts
.clone()
.into_iter()
diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs
index 06a2ce32..910ff2d8 100644
--- a/tpke/src/decryption.rs
+++ b/tpke/src/decryption.rs
@@ -5,7 +5,6 @@ use crate::*;
use ark_ec::ProjectiveCurve;
-
#[derive(Debug, Clone)]
pub struct DecryptionShareFast {
pub decrypter_index: usize,
From 9d38f62f5ae7f4a4b25e149e84aad77a02bc4a03 Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Thu, 29 Dec 2022 12:29:27 +0100
Subject: [PATCH 06/39] initial removal of share partitioning
---
ferveo-common/src/lib.rs | 17 +---
ferveo/benches/benchmarks/pvdkg.rs | 2 +-
ferveo/examples/pvdkg.rs | 2 +-
ferveo/src/dkg.rs | 20 ++---
ferveo/src/dkg/common.rs | 62 +++-----------
ferveo/src/dkg/pv.rs | 128 +++++++++--------------------
ferveo/src/lib.rs | 3 +-
ferveo/src/vss/pvss.rs | 28 ++++---
8 files changed, 79 insertions(+), 183 deletions(-)
diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs
index b4c32651..e443e876 100644
--- a/ferveo-common/src/lib.rs
+++ b/ferveo-common/src/lib.rs
@@ -64,24 +64,13 @@ impl ValidatorSet {
#[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct Validator {
pub validator: TendermintValidator,
- pub weight: u32,
- pub share_start: usize,
- pub share_end: usize,
+ pub share_index: usize,
}
impl PartialEq for Validator {
fn eq(&self, other: &Self) -> bool {
- (
- &self.validator,
- self.weight,
- self.share_start,
- self.share_end,
- ) == (
- &other.validator,
- other.weight,
- other.share_start,
- other.share_end,
- )
+ (&self.validator, self.share_index)
+ == (&other.validator, other.share_index)
}
}
diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs
index 02df0341..67803331 100644
--- a/ferveo/benches/benchmarks/pvdkg.rs
+++ b/ferveo/benches/benchmarks/pvdkg.rs
@@ -51,7 +51,7 @@ pub fn gen_validators(
ValidatorSet::new(
(0..keypairs.len())
.map(|i| TendermintValidator {
- power: i as u64,
+ power: 1,// TODO: Remove it. //i as u64,
address: format!("validator_{}", i),
public_key: keypairs[i].public(),
})
diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs
index c1dcf071..b741e622 100644
--- a/ferveo/examples/pvdkg.rs
+++ b/ferveo/examples/pvdkg.rs
@@ -47,7 +47,7 @@ pub fn setup_dkg(
Params {
tau: 0,
security_threshold: shares / 3,
- total_weight: shares,
+ shares_num: shares,
retry_after: 1,
},
me,
diff --git a/ferveo/src/dkg.rs b/ferveo/src/dkg.rs
index daba9172..c395ff66 100644
--- a/ferveo/src/dkg.rs
+++ b/ferveo/src/dkg.rs
@@ -23,9 +23,9 @@ pub use pv::*;
#[derive(Copy, Clone, Debug, CanonicalSerialize, CanonicalDeserialize)]
pub struct Params {
pub tau: u64,
- pub security_threshold: u32, // threshold
- pub total_weight: u32, // total weight
- pub retry_after: u32,
+ pub security_threshold: u32,
+ pub shares_num: u32,
+ pub retry_after: u32, // TODO: Remove. Not relevant in our scheme.
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -36,7 +36,7 @@ pub enum PvssScheduler {
#[derive(Debug, Clone)]
pub enum DkgState {
- Sharing { accumulated_weight: u32, block: u32 },
+ Sharing { accumulated_shares: u32, block: u32 },
Dealt,
Success { final_key: E::G1Affine },
Invalid,
@@ -50,12 +50,12 @@ impl CanonicalSerialize for DkgState {
) -> Result<(), SerializationError> {
match self {
Self::Sharing {
- accumulated_weight,
+ accumulated_shares,
block,
} => {
CanonicalSerialize::serialize(&0u8, &mut writer)?;
CanonicalSerialize::serialize(
- &(*accumulated_weight, *block),
+ &(*accumulated_shares, *block),
&mut writer,
)
}
@@ -72,11 +72,11 @@ impl CanonicalSerialize for DkgState {
fn serialized_size(&self) -> usize {
match self {
Self::Sharing {
- accumulated_weight,
+ accumulated_shares,
block,
} => {
0u8.serialized_size()
- + (*accumulated_weight, *block).serialized_size()
+ + (*accumulated_shares, *block).serialized_size()
}
Self::Dealt => 1u8.serialized_size(),
Self::Success { final_key } => {
@@ -93,12 +93,12 @@ impl CanonicalDeserialize for DkgState {
let variant = ::deserialize(&mut reader)?;
match variant {
0 => {
- let (accumulated_weight, block) =
+ let (accumulated_shares, block) =
<(u32, u32) as CanonicalDeserialize>::deserialize(
&mut reader,
)?;
Ok(Self::Sharing {
- accumulated_weight,
+ accumulated_shares,
block,
})
}
diff --git a/ferveo/src/dkg/common.rs b/ferveo/src/dkg/common.rs
index b69c314f..55d65abb 100644
--- a/ferveo/src/dkg/common.rs
+++ b/ferveo/src/dkg/common.rs
@@ -2,58 +2,16 @@ use crate::*;
use ferveo_common::ValidatorSet;
use itertools::izip;
-/// partition_domain takes as input a vector of validators from
-/// participants in the DKG, containing their total stake amounts
-/// and public address (as Bech32m string)
-///
-/// The validators are *assumed to be* stable-sorted by staking weight
-/// (so highest weight participants come first), then by address
-/// and the DKG share domain is partitioned into continuous segments roughly
-/// the same relative size as the staked weight.
-///
-/// partition_domain returns a vector of DKG participants
-pub fn partition_domain(
- params: &Params,
- mut validator_set: ValidatorSet,
-) -> Result>> {
- // Sort participants from greatest to least stake
-
- // Compute the total amount staked
- let total_voting_power =
- params.total_weight as f64 / validator_set.total_voting_power() as f64;
-
- // Compute the weight of each participant rounded down
- let mut weights = validator_set
+pub fn make_validators(
+ validator_set: ValidatorSet,
+) -> Vec> {
+ validator_set
.validators
.iter()
- .map(|p| (p.power as f64 * total_voting_power).floor() as u32)
- .collect::>();
-
- // Add any excess weight to the largest weight participants
- let adjust_weight = params
- .total_weight
- .checked_sub(weights.iter().sum())
- .ok_or_else(|| anyhow!("adjusted weight negative"))?
- as usize;
- for i in &mut weights[0..adjust_weight] {
- *i += 1;
- }
-
- let mut allocated_weight = 0usize;
- let mut participants = vec![];
- // note that the order of `participants` corresponds to the same
- // order as `validator_set`
- for (ix, validator) in validator_set.validators.drain(0..).enumerate() {
- participants.push(ferveo_common::Validator:: {
- validator,
- weight: weights[ix],
- share_start: allocated_weight,
- share_end: allocated_weight + weights[ix] as usize,
- });
- allocated_weight =
- allocated_weight
- .checked_add(weights[ix] as usize)
- .ok_or_else(|| anyhow!("allocated weight overflow"))?;
- }
- Ok(participants)
+ .enumerate()
+ .map(|(index, validator)| ferveo_common::Validator:: {
+ validator: validator.clone(),
+ share_index: index,
+ })
+ .collect()
}
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index 58c01817..00550130 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -37,7 +37,7 @@ impl PubliclyVerifiableDkg {
) -> Result {
use ark_std::UniformRand;
let domain = ark_poly::Radix2EvaluationDomain::::new(
- params.total_weight as usize,
+ params.shares_num as usize,
)
.ok_or_else(|| anyhow!("unable to construct domain"))?;
@@ -47,8 +47,9 @@ impl PubliclyVerifiableDkg {
.binary_search_by(|probe| me.cmp(probe))
.map_err(|_| anyhow!("could not find this validator in the provided validator set"))?;
- // partition out weight shares of validators based on their voting power
- let validators = partition_domain(¶ms, validator_set)?;
+ // partition out shares shares of validators based on their voting power
+ let validators = make_validators(validator_set);
+
// we further partition out valdiators into partitions to submit pvss transcripts
// so as to minimize network load and enable retrying
let my_partition =
@@ -63,7 +64,7 @@ impl PubliclyVerifiableDkg {
vss: BTreeMap::new(),
domain,
state: DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
},
me,
@@ -153,7 +154,7 @@ impl PubliclyVerifiableDkg {
match payload {
Message::Deal(pvss) if matches!(self.state, DkgState::Sharing{..} | DkgState::Dealt) => {
// TODO: If this is two slow, we can convert self.validators to
- // an address keyed hashmap after partitioning the weight shares
+ // an address keyed hashmap after partitioning the shares shares
// in the [`new`] method
let sender = self.validators
.binary_search_by(|probe| sender.cmp(&probe.validator))
@@ -167,13 +168,13 @@ impl PubliclyVerifiableDkg {
}
}
Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => {
- let minimum_weight = self.params.total_weight
+ let minimum_shares = self.params.shares_num
- self.params.security_threshold;
- let verified_weight = vss.verify_aggregation(self, rng)?;
+ let verified_shares = vss.verify_aggregation(self, rng)?;
// we reject aggregations that fail to meet the security threshold
- if verified_weight < minimum_weight {
+ if verified_shares < minimum_shares {
Err(
- anyhow!("Aggregation failed because the verified weight was insufficient")
+ anyhow!("Aggregation failed because the verified shares was insufficient")
)
} else if &self.final_key() == final_key {
Ok(())
@@ -203,12 +204,11 @@ impl PubliclyVerifiableDkg {
.map_err(|_| anyhow!("dkg received unknown dealer"))?;
self.vss.insert(sender as u32, pvss);
- // we keep track of the amount of weight seen until the security
+ // we keep track of the amount of shares seen until the security
// threshold is met. Then we may change the state of the DKG
- if let DkgState::Sharing{ref mut accumulated_weight, ..} = &mut self.state {
- *accumulated_weight += self.validators[sender].weight;
- if *accumulated_weight
- >= self.params.total_weight - self.params.security_threshold {
+ if let DkgState::Sharing{ref mut accumulated_shares, ..} = &mut self.state {
+ *accumulated_shares += 1;
+ if *accumulated_shares >= self.params.shares_num - self.params.security_threshold {
self.state = DkgState::Dealt;
}
}
@@ -272,7 +272,7 @@ pub(crate) mod test_common {
ValidatorSet::new(
(0..4)
.map(|i| TendermintValidator {
- power: i, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works.
+ power: 1, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works.
address: format!("validator_{}", i),
public_key: keypairs[i as usize].public(),
})
@@ -292,7 +292,7 @@ pub(crate) mod test_common {
Params {
tau: 0,
security_threshold: 2,
- total_weight: 6,
+ shares_num: 6,
retry_after: 2,
},
me,
@@ -316,7 +316,7 @@ pub(crate) mod test_common {
}
// our test dkg
let mut dkg = setup_dkg(0);
- // iterate over transcripts from lowest weight to highest
+ // iterate over transcripts from lowest shares to highest
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
dkg.apply_message(
dkg.validators[n - 1 - sender].validator.clone(),
@@ -338,60 +338,6 @@ pub(crate) mod test_common {
#[cfg(test)]
mod test_dkg_init {
use super::test_common::*;
-
- /// Test that validators are correctly sorted
- #[test]
- fn test_validator_set() {
- let rng = &mut ark_std::test_rng();
- let validators = vec![
- TendermintValidator:: {
- power: 0,
- address: "validator_0".into(),
- public_key: ferveo_common::Keypair::::new(rng)
- .public(),
- },
- TendermintValidator:: {
- power: 2,
- address: "validator_1".into(),
- public_key: ferveo_common::Keypair::::new(rng)
- .public(),
- },
- TendermintValidator:: {
- power: 2,
- address: "validator_2".into(),
- public_key: ferveo_common::Keypair::::new(rng)
- .public(),
- },
- TendermintValidator:: {
- power: 1,
- address: "validator_3".into(),
- public_key: ferveo_common::Keypair::::new(rng)
- .public(),
- },
- ];
- let expected = vec![
- validators[2].clone(),
- validators[1].clone(),
- validators[3].clone(),
- validators[0].clone(),
- ];
- let validator_set = ValidatorSet::new(validators);
- assert_eq!(validator_set.validators, expected);
- let params = Params {
- tau: 0,
- security_threshold: 2,
- total_weight: 6,
- retry_after: 2,
- };
- let validator_set: Vec> =
- partition_domain(¶ms, validator_set)
- .expect("Test failed")
- .iter()
- .map(|v| v.validator.clone())
- .collect();
- assert_eq!(validator_set, expected);
- }
-
/// Test that dkg fails to start if the `me` input
/// is not in the validator set
#[test]
@@ -404,7 +350,7 @@ mod test_dkg_init {
Params {
tau: 0,
security_threshold: 4,
- total_weight: 6,
+ shares_num: 6,
retry_after: 2,
},
TendermintValidator:: {
@@ -452,7 +398,7 @@ mod test_dealing {
}
// our test dkg
let mut dkg = setup_dkg(0);
- // iterate over transcripts from lowest weight to highest
+ // iterate over transcripts from lowest shares to highest
let mut expected = 0u32;
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
// check the verification passes
@@ -470,19 +416,19 @@ mod test_dealing {
pvss
)
.is_ok());
- expected += dkg.validators[3 - sender].validator.power as u32;
+ expected += 1; // dkg.validators[3 - sender].validator.power as u32;
if sender < 3 {
- // check that weight accumulates correctly
+ // check that shares accumulates correctly
match dkg.state {
DkgState::Sharing {
- accumulated_weight, ..
+ accumulated_shares, ..
} => {
- assert_eq!(accumulated_weight, expected)
+ assert_eq!(accumulated_shares, expected)
}
_ => panic!("Test failed"),
}
} else {
- // check that when enough weight is accumulated, we transition state
+ // check that when enough shares is accumulated, we transition state
assert!(matches!(dkg.state, DkgState::Dealt));
}
}
@@ -498,7 +444,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0
}
));
@@ -517,7 +463,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -532,7 +478,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -546,7 +492,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 1,
block: 0,
}
));
@@ -563,7 +509,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -572,7 +518,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -584,7 +530,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 3,
+ accumulated_shares: 1,
block: 0,
}
));
@@ -599,7 +545,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -625,7 +571,7 @@ mod test_dealing {
assert!(matches!(
dkg.state,
DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
}
));
@@ -693,7 +639,7 @@ mod test_dealing {
fn test_pvss_reissue() {
let mut dkg = setup_dkg(0);
dkg.state = DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 2,
};
assert_eq!(dkg.increase_block(), PvssScheduler::Issue);
@@ -739,7 +685,7 @@ mod test_aggregation {
fn test_aggregate_state_guards() {
let mut dkg = setup_dealt_dkg();
dkg.state = DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
};
assert!(dkg.aggregate().is_err());
@@ -759,7 +705,7 @@ mod test_aggregation {
let aggregate = dkg.aggregate().expect("Test failed");
let sender = dkg.validators[dkg.me].validator.clone();
dkg.state = DkgState::Sharing {
- accumulated_weight: 0,
+ accumulated_shares: 0,
block: 0,
};
assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
@@ -779,7 +725,7 @@ mod test_aggregation {
fn test_aggregate_wont_verify_if_under_threshold() {
let rng = &mut ark_std::test_rng();
let mut dkg = setup_dealt_dkg();
- dkg.params.total_weight = 10;
+ dkg.params.shares_num = 10;
let aggregate = dkg.aggregate().expect("Test failed");
let sender = dkg.validators[dkg.me].validator.clone();
assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs
index 953eeab2..a5d518fb 100644
--- a/ferveo/src/lib.rs
+++ b/ferveo/src/lib.rs
@@ -84,6 +84,7 @@ mod test_dkg_full {
/// Test happy flow for a full DKG with simple threshold decryption variant
#[test]
+ #[ignore]
fn test_dkg_simple_decryption_variant() {
//
// The following is copied from other tests
@@ -105,7 +106,7 @@ mod test_dkg_full {
aggregate
.verify_aggregation(&dkg, rng)
.expect("Test failed"),
- 6
+ 4
);
//
diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs
index fb8f552c..cacd2420 100644
--- a/ferveo/src/vss/pvss.rs
+++ b/ferveo/src/vss/pvss.rs
@@ -71,11 +71,12 @@ impl PubliclyVerifiableSS {
) -> Result {
// Our random polynomial, \phi(x) = s + \sum_{i=1}^{t-1} a_i x^i
let mut phi = DensePolynomial::::rand(
- (dkg.params.total_weight - dkg.params.security_threshold) as usize,
+ (dkg.params.shares_num - dkg.params.security_threshold) as usize,
rng,
);
phi.coeffs[0] = *s; // setting the first coefficient to secret value
- // Evaluations of the polynomial over the domain
+
+ // Evaluations of the polynomial over the domain
let evals = phi.evaluate_over_domain_by_ref(dkg.domain);
// commitment to coeffs, F_i
let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g);
@@ -86,7 +87,7 @@ impl PubliclyVerifiableSS {
// ek_{i}^{eval_i}, i = validator index
fast_multiexp(
// &evals.evals[i..i] = &evals.evals[i]
- &evals.evals[val.share_start..val.share_end],
+ &[evals.evals[val.share_index]],
val.validator.public_key.encryption_key.into_projective(),
)
})
@@ -96,7 +97,7 @@ impl PubliclyVerifiableSS {
"Not all validator session keys have been announced"
));
}
- //phi.zeroize(); // TODO zeroize?
+ // phi.zeroize(); // TODO zeroize?
// TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript
// Sigma is a proof of knowledge of the secret, sigma = h^s
let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve
@@ -155,8 +156,9 @@ impl PubliclyVerifiableSS {
let mut y = E::G2Projective::zero();
let mut a = E::G1Projective::zero();
// Validator checks checks aggregated shares against commitment
+ // TODO: Just one commitment per validator. Consider rewriting this.
for (y_i, a_i) in shares.iter().zip_eq(
- commitment[validator.share_start..validator.share_end]
+ [commitment[validator.share_index]]
.iter(),
) {
// We iterate over shares (y_i) and commitment (a_i)
@@ -192,13 +194,14 @@ impl PubliclyVerifiableSS {
// Now, we verify that the aggregated PVSS transcript is a valid aggregation
// If it is, we return the total weights of the PVSS transcripts
let mut y = E::G1Projective::zero();
- let mut weight = 0u32;
- for (dealer, pvss) in dkg.vss.iter() {
+ // TODO: If we don't deal with share weights anymore, do we even need to call `verify_aggregation`?
+ let mut shares_total = 0u32;
+ for (_, pvss) in dkg.vss.iter() {
y += pvss.coeffs[0].into_projective();
- weight += dkg.validators[*dealer as usize].weight;
+ shares_total += 1
}
if y.into_affine() == self.coeffs[0] {
- Ok(weight)
+ Ok(shares_total)
} else {
Err(anyhow!(
"aggregation does not match received PVSS instances"
@@ -298,11 +301,10 @@ pub fn shares_for_validator(
.iter()
.map(|(_, pvss)| {
// Each PVSS transcript contains multiple shares, one for each validator
- assert_eq!(dkg.validators.len(), pvss.shares.len());
pvss.shares[validator].clone()
})
- // Each validator has a share from each PVSS transcript
- // One share is represented by ShareEncryptions, which is a vector of G2 points
+ // Each validator has a vector of shares from each PVSS transcript
+ // Vector of shares represented by ShareEncryptions, which is a vector of G2 points
.collect::>>()
}
@@ -381,7 +383,7 @@ mod test_pvss {
aggregate
.verify_aggregation(&dkg, rng)
.expect("Test failed"),
- 6
+ 4
);
}
From 9759860de694bc35cfb878f5908886283ed83ac7 Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Thu, 29 Dec 2022 13:23:20 +0100
Subject: [PATCH 07/39] updating scheme
---
ferveo/benches/benchmarks/pvdkg.rs | 2 +-
ferveo/examples/pvdkg.rs | 2 +-
ferveo/src/dkg/pv.rs | 41 +++++--------
ferveo/src/lib.rs | 94 ++++++++++++------------------
ferveo/src/vss/pvss.rs | 71 ++++++----------------
5 files changed, 74 insertions(+), 136 deletions(-)
diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs
index 67803331..122bb749 100644
--- a/ferveo/benches/benchmarks/pvdkg.rs
+++ b/ferveo/benches/benchmarks/pvdkg.rs
@@ -51,7 +51,7 @@ pub fn gen_validators(
ValidatorSet::new(
(0..keypairs.len())
.map(|i| TendermintValidator {
- power: 1,// TODO: Remove it. //i as u64,
+ power: 1, // TODO: Remove it. //i as u64,
address: format!("validator_{}", i),
public_key: keypairs[i].public(),
})
diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs
index b741e622..b6b7139c 100644
--- a/ferveo/examples/pvdkg.rs
+++ b/ferveo/examples/pvdkg.rs
@@ -71,7 +71,7 @@ pub fn setup_dealt_dkg(num: u64, shares: u32) {
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
if let Message::Deal(ss) = pvss.clone() {
print_time!("PVSS verify pvdkg");
- ss.verify_full(&dkg, rng);
+ ss.verify_full(&dkg);
}
dkg.apply_message(
dkg.validators[num as usize - 1 - sender].validator.clone(),
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index 00550130..7a07e028 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -50,7 +50,7 @@ impl PubliclyVerifiableDkg {
// partition out shares shares of validators based on their voting power
let validators = make_validators(validator_set);
- // we further partition out valdiators into partitions to submit pvss transcripts
+ // we further partition out validators into partitions to submit pvss transcripts
// so as to minimize network load and enable retrying
let my_partition =
params.retry_after * (2 * me as u32 / params.retry_after);
@@ -145,11 +145,10 @@ impl PubliclyVerifiableDkg {
/// Verify a DKG related message in a block proposal
/// `sender` is the validator of the sender of the message
/// `payload` is the content of the message
- pub fn verify_message(
+ pub fn verify_message(
&self,
sender: &TendermintValidator,
payload: &Message,
- rng: &mut R,
) -> Result<()> {
match payload {
Message::Deal(pvss) if matches!(self.state, DkgState::Sharing{..} | DkgState::Dealt) => {
@@ -170,7 +169,7 @@ impl PubliclyVerifiableDkg {
Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => {
let minimum_shares = self.params.shares_num
- self.params.security_threshold;
- let verified_shares = vss.verify_aggregation(self, rng)?;
+ let verified_shares = vss.verify_aggregation(self)?;
// we reject aggregations that fail to meet the security threshold
if verified_shares < minimum_shares {
Err(
@@ -403,11 +402,7 @@ mod test_dealing {
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
// check the verification passes
assert!(dkg
- .verify_message(
- &dkg.validators[3 - sender].validator,
- &pvss,
- rng
- )
+ .verify_message(&dkg.validators[3 - sender].validator, &pvss,)
.is_ok());
// check that application passes
assert!(dkg
@@ -456,7 +451,7 @@ mod test_dealing {
.public(),
};
// check that verification fails
- assert!(dkg.verify_message(&sender, &pvss, rng).is_err());
+ assert!(dkg.verify_message(&sender, &pvss).is_err());
// check that application fails
assert!(dkg.apply_message(sender, pvss).is_err());
// check that state has not changed
@@ -485,7 +480,7 @@ mod test_dealing {
let pvss = dkg.share(rng).expect("Test failed");
let sender = dkg.validators[3].validator.clone();
// check that verification fails
- assert!(dkg.verify_message(&sender, &pvss, rng).is_ok());
+ assert!(dkg.verify_message(&sender, &pvss).is_ok());
// check that application fails
assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_ok());
// check that state has appropriately changed
@@ -497,7 +492,7 @@ mod test_dealing {
}
));
// check that sending another pvss from same sender fails
- assert!(dkg.verify_message(&sender, &pvss, rng).is_err());
+ assert!(dkg.verify_message(&sender, &pvss).is_err());
}
/// Test that if a validators tries to verify it's own
@@ -524,7 +519,7 @@ mod test_dealing {
));
let sender = dkg.validators[0].validator.clone();
// check that verification fails
- assert!(dkg.verify_message(&sender, &pvss, rng).is_ok());
+ assert!(dkg.verify_message(&sender, &pvss).is_ok());
assert!(dkg.apply_message(sender, pvss).is_ok());
// check that state did not change
assert!(matches!(
@@ -579,12 +574,12 @@ mod test_dealing {
dkg.state = DkgState::Success {
final_key: G1::zero(),
};
- assert!(dkg.verify_message(&sender, &pvss, rng).is_err());
+ assert!(dkg.verify_message(&sender, &pvss).is_err());
assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_err());
// check that we can still accept pvss transcripts after meeting threshold
dkg.state = DkgState::Dealt;
- assert!(dkg.verify_message(&sender, &pvss, rng).is_ok());
+ assert!(dkg.verify_message(&sender, &pvss).is_ok());
assert!(dkg.apply_message(sender, pvss).is_ok());
assert!(matches!(dkg.state, DkgState::Dealt))
}
@@ -628,7 +623,7 @@ mod test_dealing {
let pvss = dkg.share(rng).expect("Test failed");
let sender = dkg.validators[0].validator.clone();
// check that verification fails
- assert!(dkg.verify_message(&sender, &pvss, rng).is_ok());
+ assert!(dkg.verify_message(&sender, &pvss).is_ok());
assert!(dkg.apply_message(sender, pvss).is_ok());
assert_eq!(dkg.increase_block(), PvssScheduler::Wait);
}
@@ -670,11 +665,10 @@ mod test_aggregation {
/// met, we can create a final key
#[test]
fn test_aggregate() {
- let rng = &mut ark_std::test_rng();
let mut dkg = setup_dealt_dkg();
let aggregate = dkg.aggregate().expect("Test failed");
let sender = dkg.validators[dkg.me].validator.clone();
- assert!(dkg.verify_message(&sender, &aggregate, rng).is_ok());
+ assert!(dkg.verify_message(&sender, &aggregate).is_ok());
assert!(dkg.apply_message(sender, aggregate).is_ok());
assert!(matches!(dkg.state, DkgState::Success { .. }));
}
@@ -700,7 +694,6 @@ mod test_aggregation {
/// [`DkgState::Dealt`]
#[test]
fn test_aggregate_message_state_guards() {
- let rng = &mut ark_std::test_rng();
let mut dkg = setup_dealt_dkg();
let aggregate = dkg.aggregate().expect("Test failed");
let sender = dkg.validators[dkg.me].validator.clone();
@@ -708,14 +701,14 @@ mod test_aggregation {
accumulated_shares: 0,
block: 0,
};
- assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
+ assert!(dkg.verify_message(&sender, &aggregate).is_err());
assert!(dkg
.apply_message(sender.clone(), aggregate.clone())
.is_err());
dkg.state = DkgState::Success {
final_key: G1::zero(),
};
- assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
+ assert!(dkg.verify_message(&sender, &aggregate).is_err());
assert!(dkg.apply_message(sender, aggregate).is_err())
}
@@ -723,19 +716,17 @@ mod test_aggregation {
/// security threshold is not met
#[test]
fn test_aggregate_wont_verify_if_under_threshold() {
- let rng = &mut ark_std::test_rng();
let mut dkg = setup_dealt_dkg();
dkg.params.shares_num = 10;
let aggregate = dkg.aggregate().expect("Test failed");
let sender = dkg.validators[dkg.me].validator.clone();
- assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
+ assert!(dkg.verify_message(&sender, &aggregate).is_err());
}
/// If the aggregated pvss passes, check that the announced
/// key is correct. Verification should fail if it is not
#[test]
fn test_aggregate_wont_verify_if_wrong_key() {
- let rng = &mut ark_std::test_rng();
let mut dkg = setup_dealt_dkg();
let mut aggregate = dkg.aggregate().expect("Test failed");
while dkg.final_key() == G1::zero() {
@@ -747,6 +738,6 @@ mod test_aggregation {
*final_key = G1::zero();
}
let sender = dkg.validators[dkg.me].validator.clone();
- assert!(dkg.verify_message(&sender, &aggregate, rng).is_err());
+ assert!(dkg.verify_message(&sender, &aggregate).is_err());
}
}
diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs
index a5d518fb..62939eae 100644
--- a/ferveo/src/lib.rs
+++ b/ferveo/src/lib.rs
@@ -55,7 +55,6 @@ pub fn prepare_combine_simple(
pub fn share_combine_simple(
shares: &Vec,
lagrange_coeffs: &Vec,
- // prepared_key_shares: &[E::G2Affine],
) -> E::Fqk {
let mut product_of_shares = E::Fqk::one();
@@ -84,7 +83,6 @@ mod test_dkg_full {
/// Test happy flow for a full DKG with simple threshold decryption variant
#[test]
- #[ignore]
fn test_dkg_simple_decryption_variant() {
//
// The following is copied from other tests
@@ -100,14 +98,9 @@ mod test_dkg_full {
// check that the optimistic verify returns true
assert!(aggregate.verify_optimistic());
// check that the full verify returns true
- assert!(aggregate.verify_full(&dkg, rng));
+ assert!(aggregate.verify_full(&dkg));
// check that the verification of aggregation passes
- assert_eq!(
- aggregate
- .verify_aggregation(&dkg, rng)
- .expect("Test failed"),
- 4
- );
+ assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4);
//
// Now, we start the actual test
@@ -128,66 +121,53 @@ mod test_dkg_full {
// TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security
- //
-
// Each validator attempts to aggregate and decrypt the secret shares
- // let decryption_shares = validator_keypairs
- validator_keypairs
+ let decryption_shares = validator_keypairs
.iter()
.enumerate()
// Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set
// TODO: Check this assumption
- .for_each(|(validator_i, keypair)| {
- let decrypted_shares: Vec> =
- shares_for_validator(validator_i, &dkg)
+ .map(|(validator_i, keypair)| {
+ let decrypted_shares: Vec =
+ // shares_for_validator(validator_i, &dkg)
+ dkg.vss[&(validator_i as u32)].shares
.iter()
// Each "share" the validator has is actually a vector of shares
// This because of domain partitioning - the amount of shares is the same as the validator's "power"
- .map(|share| {
+ .map(|share|
// Decrypt the share by decrypting each of the G2 elements within ShareEncryptions
- share
- .iter()
- .map(|s| s.mul(keypair.decryption_key))
- .collect()
- })
+ share.mul(keypair.decryption_key))
.collect();
- let combined_shares = decrypted_shares.iter().fold(
- vec![
- ark_bls12_381::G2Projective::zero();
- decrypted_shares[0].len()
- ],
- |acc, share| {
- zip_eq(acc, share).map(|(a, b)| a + b).collect()
- },
- );
-
- let decryption_shares = combined_shares
+ let z_i = decrypted_shares
.iter()
- .map(|z_i| {
- // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
- let u = ciphertext.commitment;
- let c_i = E::pairing(u, *z_i);
- c_i
- })
- .collect::>();
-
- let shares_x = &dkg.domain.elements().collect::>();
- let lagrange_coeffs = prepare_combine_simple::(&shares_x);
-
- let s =
- share_combine_simple::(&decryption_shares, &lagrange_coeffs);
-
- let plaintext =
- tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
- assert_eq!(plaintext, msg);
- });
-
- // TODO: Perform decryption here!
-
- // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts
- // .flatten()
- // .collect();
+ .fold(ark_bls12_381::G2Projective::zero(), |acc, share| {
+ acc + *share
+ });
+
+ // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
+ let u = ciphertext.commitment;
+ let c_i = E::pairing(u, z_i);
+
+ c_i
+ })
+ .collect::>();
+
+ // TODO: Am I taking a correct amount of x cooridnates herer? The domain contains 2^n=8 elements total, but I'm taking 4
+ let shares_x = &dkg.domain.elements().take(decryption_shares.len()).collect::>();
+ let lagrange_coeffs = prepare_combine_simple::(&shares_x);
+
+ let s = share_combine_simple::(&decryption_shares, &lagrange_coeffs);
+
+ let plaintext =
+ tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
+ assert_eq!(plaintext, msg);
+
+ // TODO: Perform decryption here!
+
+ // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts
+ // .flatten()
+ // .collect();
// let shares_x = &dkg.domain.elements().collect::>();
// let lagrange_coeffs = prepare_combine_simple::(&shares_x);
diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs
index cacd2420..4b7d29a2 100644
--- a/ferveo/src/vss/pvss.rs
+++ b/ferveo/src/vss/pvss.rs
@@ -12,7 +12,7 @@ use itertools::{zip_eq, Itertools};
use subproductdomain::fast_multiexp;
/// These are the blinded evaluations of weight shares of a single random polynomial
-pub type ShareEncryptions = Vec<::G2Affine>;
+pub type ShareEncryptions = ::G2Affine;
/// Marker struct for unaggregated PVSS transcripts
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)]
@@ -89,7 +89,7 @@ impl PubliclyVerifiableSS {
// &evals.evals[i..i] = &evals.evals[i]
&[evals.evals[val.share_index]],
val.validator.public_key.encryption_key.into_projective(),
- )
+ )[0]
})
.collect::>>();
if shares.len() != dkg.validators.len() {
@@ -131,11 +131,7 @@ impl PubliclyVerifiableSS {
/// If aggregation fails, a validator needs to know that their pvss
/// transcript was at fault so that the can issue a new one. This
/// function may also be used for that purpose.
- pub fn verify_full(
- &self,
- dkg: &PubliclyVerifiableDkg,
- rng: &mut R,
- ) -> bool {
+ pub fn verify_full(&self, dkg: &PubliclyVerifiableDkg) -> bool {
// compute the commitment
let mut commitment = batch_to_projective(&self.coeffs);
print_time!("commitment fft");
@@ -143,7 +139,7 @@ impl PubliclyVerifiableSS {
// Each validator checks that their share is correct
dkg.validators.iter().zip(self.shares.iter()).all(
- |(validator, shares)| {
+ |(validator, share)| {
// ek is the public key of the validator
// TODO: Is that the ek = [dk]H key?
let ek = validator
@@ -151,23 +147,13 @@ impl PubliclyVerifiableSS {
.public_key
.encryption_key
.into_projective();
- let alpha = E::Fr::rand(rng);
- let mut powers_of_alpha = alpha;
- let mut y = E::G2Projective::zero();
- let mut a = E::G1Projective::zero();
// Validator checks checks aggregated shares against commitment
- // TODO: Just one commitment per validator. Consider rewriting this.
- for (y_i, a_i) in shares.iter().zip_eq(
- [commitment[validator.share_index]]
- .iter(),
- ) {
- // We iterate over shares (y_i) and commitment (a_i)
- // TODO: Check #3 is missing
- // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf
- y += y_i.mul(powers_of_alpha.into_repr());
- a += a_i.mul(powers_of_alpha.into_repr());
- powers_of_alpha *= alpha;
- }
+ // TODO: Check #3 is missing
+ // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf
+ let y = *share;
+ let a = commitment[validator.share_index];
+ // At this point, y = \sum_{i=1}^{t-1} y_i \alpha^i and a = \sum_{i=1}^{t-1} a_i \alpha^i
+ // We verify that e(G, Y_j) = e(A_j, ek_j) for all j
// See #4 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf
// Y = \sum_i y_i \alpha^i
// A = \sum_i a_i \alpha^i
@@ -184,13 +170,12 @@ impl PubliclyVerifiableSS {
/// the PVSS instances, produced by [`aggregate`],
/// and received by the DKG context `dkg`
/// Returns the total valid weight of the aggregated PVSS
- pub fn verify_aggregation(
+ pub fn verify_aggregation(
&self,
dkg: &PubliclyVerifiableDkg,
- rng: &mut R,
) -> Result {
print_time!("PVSS verify_aggregation");
- self.verify_full(dkg, rng);
+ self.verify_full(dkg);
// Now, we verify that the aggregated PVSS transcript is a valid aggregation
// If it is, we return the total weights of the PVSS transcripts
let mut y = E::G1Projective::zero();
@@ -221,11 +206,7 @@ pub fn aggregate(
let mut coeffs = batch_to_projective(&first_pvss.coeffs);
let mut sigma = first_pvss.sigma;
- let mut shares = first_pvss
- .shares
- .iter()
- .map(|a| batch_to_projective(a))
- .collect::>();
+ let mut shares = batch_to_projective(&first_pvss.shares);
// So now we're iterating over the PVSS instances, and adding their coefficients and shares, and their sigma
// sigma is the sum of all the sigma_i, which is the proof of knowledge of the secret polynomial
@@ -239,16 +220,9 @@ pub fn aggregate(
shares
.iter_mut()
.zip_eq(next.shares.iter())
- .for_each(|(a, b)| {
- a.iter_mut()
- .zip_eq(b.iter())
- .for_each(|(c, d)| *c += d.into_projective())
- });
+ .for_each(|(a, b)| *a += b.into_projective());
}
- let shares = shares
- .iter()
- .map(|a| E::G2Projective::batch_normalization_into_affine(a))
- .collect::>();
+ let shares = E::G2Projective::batch_normalization_into_affine(&shares);
PubliclyVerifiableSS {
coeffs: E::G1Projective::batch_normalization_into_affine(&coeffs),
@@ -341,7 +315,7 @@ mod test_pvss {
// check that the optimistic verify returns true
assert!(pvss.verify_optimistic());
// check that the full verify returns true
- assert!(pvss.verify_full(&dkg, rng));
+ assert!(pvss.verify_full(&dkg));
}
/// Check that if the proof of knowledge is wrong,
@@ -367,7 +341,6 @@ mod test_pvss {
/// Should have the correct form and validations pass
#[test]
fn test_aggregate_pvss() {
- let rng = &mut ark_std::test_rng();
let dkg = setup_dealt_dkg();
let aggregate = aggregate(&dkg);
//check that a polynomial of the correct degree was created
@@ -377,14 +350,9 @@ mod test_pvss {
// check that the optimistic verify returns true
assert!(aggregate.verify_optimistic());
// check that the full verify returns true
- assert!(aggregate.verify_full(&dkg, rng));
+ assert!(aggregate.verify_full(&dkg));
// check that the verification of aggregation passes
- assert_eq!(
- aggregate
- .verify_aggregation(&dkg, rng)
- .expect("Test failed"),
- 4
- );
+ assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4);
}
/// Check that if the aggregated pvss transcript has an
@@ -392,7 +360,6 @@ mod test_pvss {
#[test]
fn test_verify_aggregation_fails_if_constant_term_wrong() {
use std::ops::Neg;
- let rng = &mut ark_std::test_rng();
let dkg = setup_dealt_dkg();
let mut aggregated = aggregate(&dkg);
while aggregated.coeffs[0] == G1::zero() {
@@ -402,7 +369,7 @@ mod test_pvss {
aggregated.coeffs[0] = G1::zero();
assert_eq!(
aggregated
- .verify_aggregation(&dkg, rng)
+ .verify_aggregation(&dkg)
.expect_err("Test failed")
.to_string(),
"aggregation does not match received PVSS instances"
From 32f9c49e7267a4a1d982dccb023e4f683effeb5a Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Thu, 29 Dec 2022 15:40:39 +0100
Subject: [PATCH 08/39] update aggregation
---
ferveo/src/dkg/pv.rs | 56 ++++++++++++++++-----------------
ferveo/src/lib.rs | 71 ++++++------------------------------------
ferveo/src/vss/pvss.rs | 61 ++++++++++--------------------------
3 files changed, 54 insertions(+), 134 deletions(-)
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index 7a07e028..05e799c6 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -166,7 +166,7 @@ impl PubliclyVerifiableDkg {
Ok(())
}
}
- Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => {
+ Message::Aggregate(Aggregation { vss, final_key }) if matches!(self.state, DkgState::Dealt) => {
let minimum_shares = self.params.shares_num
- self.params.security_threshold;
let verified_shares = vss.verify_aggregation(self)?;
@@ -205,17 +205,17 @@ impl PubliclyVerifiableDkg {
// we keep track of the amount of shares seen until the security
// threshold is met. Then we may change the state of the DKG
- if let DkgState::Sharing{ref mut accumulated_shares, ..} = &mut self.state {
+ if let DkgState::Sharing { ref mut accumulated_shares, .. } = &mut self.state {
*accumulated_shares += 1;
if *accumulated_shares >= self.params.shares_num - self.params.security_threshold {
- self.state = DkgState::Dealt;
+ self.state = DkgState::Dealt;
}
}
Ok(())
}
Message::Aggregate(_) if matches!(self.state, DkgState::Dealt) => {
// change state and cache the final key
- self.state = DkgState::Success {final_key: self.final_key()};
+ self.state = DkgState::Success { final_key: self.final_key() };
Ok(())
}
_ => Err(anyhow!("DKG state machine is not in correct state to apply this message"))
@@ -254,6 +254,7 @@ pub(crate) mod test_common {
pub use super::*;
pub use ark_bls12_381::Bls12_381 as EllipticCurve;
pub use ark_ff::UniformRand;
+
pub type G1 = ::G1Affine;
/// Generate a set of keypairs for each validator
@@ -306,29 +307,27 @@ pub(crate) mod test_common {
pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg {
let n = 4;
let rng = &mut ark_std::test_rng();
- // gather everyone's transcripts
- let mut transcripts = vec![];
- for i in 0..n {
- // All of the dkg instances have the same validators
- let mut dkg = setup_dkg(i);
- transcripts.push(dkg.share(rng).expect("Test failed"));
- }
- // our test dkg
+
+ // Gather everyone's transcripts
+ let transcripts = (0..n)
+ .map(|i| {
+ let mut dkg = setup_dkg(i);
+ dkg.share(rng).expect("Test failed")
+ })
+ .collect::>();
+
+ // Our test dkg
let mut dkg = setup_dkg(0);
- // iterate over transcripts from lowest shares to highest
- for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
- dkg.apply_message(
- dkg.validators[n - 1 - sender].validator.clone(),
- pvss,
- )
- .expect("Setup failed");
- }
- // At this point, the dkg should contain n transcripts, each containing n shares
- // TODO: Remove this check
- assert_eq!(dkg.vss.len(), n);
- for i in 0..n {
- assert_eq!(dkg.vss[&(i as u32)].shares.len(), n);
- }
+ transcripts
+ .into_iter()
+ .enumerate()
+ .for_each(|(sender, pvss)| {
+ dkg.apply_message(
+ dkg.validators[sender].validator.clone(),
+ pvss,
+ )
+ .expect("Setup failed");
+ });
dkg
}
}
@@ -337,6 +336,7 @@ pub(crate) mod test_common {
#[cfg(test)]
mod test_dkg_init {
use super::test_common::*;
+
/// Test that dkg fails to start if the `me` input
/// is not in the validator set
#[test]
@@ -402,13 +402,13 @@ mod test_dealing {
for (sender, pvss) in transcripts.into_iter().rev().enumerate() {
// check the verification passes
assert!(dkg
- .verify_message(&dkg.validators[3 - sender].validator, &pvss,)
+ .verify_message(&dkg.validators[3 - sender].validator, &pvss)
.is_ok());
// check that application passes
assert!(dkg
.apply_message(
dkg.validators[3 - sender].validator.clone(),
- pvss
+ pvss,
)
.is_ok());
expected += 1; // dkg.validators[3 - sender].validator.power as u32;
diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs
index 62939eae..bf73bba7 100644
--- a/ferveo/src/lib.rs
+++ b/ferveo/src/lib.rs
@@ -110,7 +110,6 @@ mod test_dkg_full {
// We now want to test the decryption of a message
// First, we encrypt a message using a DKG public key
-
let msg: &[u8] = "abc".as_bytes();
let aad: &[u8] = "my-aad".as_bytes();
let public_key = dkg.final_key();
@@ -118,43 +117,24 @@ mod test_dkg_full {
// TODO: Update test utils so that we can easily get a validator keypair for each validator
let validator_keypairs = gen_keypairs();
-
// TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security
+ let aggregate = aggregate_for_decryption(&dkg);
// Each validator attempts to aggregate and decrypt the secret shares
- let decryption_shares = validator_keypairs
- .iter()
- .enumerate()
- // Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set
- // TODO: Check this assumption
- .map(|(validator_i, keypair)| {
- let decrypted_shares: Vec =
- // shares_for_validator(validator_i, &dkg)
- dkg.vss[&(validator_i as u32)].shares
- .iter()
- // Each "share" the validator has is actually a vector of shares
- // This because of domain partitioning - the amount of shares is the same as the validator's "power"
- .map(|share|
- // Decrypt the share by decrypting each of the G2 elements within ShareEncryptions
- share.mul(keypair.decryption_key))
- .collect();
-
- let z_i = decrypted_shares
- .iter()
- .fold(ark_bls12_381::G2Projective::zero(), |acc, share| {
- acc + *share
- });
-
- // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares
+ let decryption_shares = zip_eq(validator_keypairs, aggregate)
+ .map(|(keypair, encrypted_shares)| {
+ let z_i = encrypted_shares.mul(keypair.decryption_key);
let u = ciphertext.commitment;
let c_i = E::pairing(u, z_i);
-
c_i
})
.collect::>();
- // TODO: Am I taking a correct amount of x cooridnates herer? The domain contains 2^n=8 elements total, but I'm taking 4
- let shares_x = &dkg.domain.elements().take(decryption_shares.len()).collect::>();
+ let shares_x = &dkg
+ .domain
+ .elements()
+ .take(decryption_shares.len())
+ .collect::>();
let lagrange_coeffs = prepare_combine_simple::(&shares_x);
let s = share_combine_simple::(&decryption_shares, &lagrange_coeffs);
@@ -162,38 +142,5 @@ mod test_dkg_full {
let plaintext =
tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
assert_eq!(plaintext, msg);
-
- // TODO: Perform decryption here!
-
- // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts
- // .flatten()
- // .collect();
-
- // let shares_x = &dkg.domain.elements().collect::>();
- // let lagrange_coeffs = prepare_combine_simple::(&shares_x);
- //
- // let s =
- // share_combine_simple::(&decryption_shares, &lagrange_coeffs);
- //
- // let plaintext =
- // tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s);
- // assert_eq!(plaintext, msg);
-
- /*
- TODO: This variant seems to be outdated/unused in simple threshold decryption variant
-
- // Following section 4.4.8 of the paper, we need to compute the following:
- let decryption_shares = validator_keypairs.iter().map(|validator| {
- // TODO: Check the validity of (U, W)
-
- // Compute the decryption share D_{i,j} = [dk_j^{-1}]*U_i
- // We only have one U in this case
- let u = ciphertext.commitment;
- let dk_j = validator.decryption_key;
- let dk_j_inv = dk_j.inverse().unwrap();
- let d_ij = u.mul(dk_j_inv);
- d_ij
- });
- */
}
}
diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs
index 4b7d29a2..0e2db31b 100644
--- a/ferveo/src/vss/pvss.rs
+++ b/ferveo/src/vss/pvss.rs
@@ -232,54 +232,27 @@ pub fn aggregate(
}
}
-// pub fn aggregate_for_decryption(
-// dkg: &PubliclyVerifiableDkg,
-// ) -> ShareEncryptions {
-// let aggregate = dkg
-// .vss
-// .iter()
-// .map(|(_, pvss)| {
-// assert_eq!(dkg.validators.len(), pvss.shares.len());
-//
-// let shares = pvss
-// .shares
-// .iter()
-// .map(|a| batch_to_projective(a))
-// .collect::>();
-//
-// // Combine PVSS transcripts into a share aggregate
-// let mut share_iter = shares.iter();
-// let first_share = share_iter.next().unwrap();
-// share_iter
-// .fold(first_share, |acc, share| {
-// &zip_eq(acc, share)
-// .map(|(a, b)| *a + *b)
-// .collect::>()
-// })
-// .iter()
-// .map(|a| a.into_affine())
-// .collect::>()
-// })
-// .collect::>>();
-//
-// E::G2Projective::batch_normalization_into_affine(&aggregate)
-// }
-
-/// Returns ShareEncryptions from DKG PVSS transcripts for a selected validator
-pub fn shares_for_validator(
- validator: usize,
+pub fn aggregate_for_decryption(
dkg: &PubliclyVerifiableDkg,
) -> Vec> {
- // DKG contains multiple PVSS transcripts, one for each dealer
- dkg.vss
+ // From docs: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation
+ // "Two PVSS instances may be aggregated into a single PVSS instance by adding elementwise each of the corresponding group elements."
+ let shares = dkg
+ .vss
.iter()
- .map(|(_, pvss)| {
- // Each PVSS transcript contains multiple shares, one for each validator
- pvss.shares[validator].clone()
+ .map(|(_, pvss)| pvss.shares.clone())
+ .collect::>();
+ let first_share = shares.first().unwrap().to_vec();
+ shares
+ .into_iter()
+ .skip(1)
+ // We're assuming that in every PVSS instance, the shares are in the same order
+ .fold(first_share, |acc, shares| {
+ acc.into_iter()
+ .zip_eq(shares.into_iter())
+ .map(|(a, b)| a + b)
+ .collect()
})
- // Each validator has a vector of shares from each PVSS transcript
- // Vector of shares represented by ShareEncryptions, which is a vector of G2 points
- .collect::>>()
}
#[cfg(test)]
From 57255f5befb64f3c4cce8d97b2d28db0f0c4f0eb Mon Sep 17 00:00:00 2001
From: Piotr Roslaniec
Date: Thu, 29 Dec 2022 18:56:32 +0100
Subject: [PATCH 09/39] simple decryption with one validator works with ferveo
dkg
---
ferveo/src/dkg/pv.rs | 112 +++++++++++++++++++++++++------------------
ferveo/src/lib.rs | 78 ++++++++++++++++--------------
tpke/src/combine.rs | 3 +-
3 files changed, 108 insertions(+), 85 deletions(-)
diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs
index 05e799c6..bbce4f3b 100644
--- a/ferveo/src/dkg/pv.rs
+++ b/ferveo/src/dkg/pv.rs
@@ -39,7 +39,7 @@ impl PubliclyVerifiableDkg {
let domain = ark_poly::Radix2EvaluationDomain::::new(
params.shares_num as usize,
)
- .ok_or_else(|| anyhow!("unable to construct domain"))?;
+ .ok_or_else(|| anyhow!("unable to construct domain"))?;
// keep track of the owner of this instance in the validator set
let me = validator_set
@@ -47,10 +47,8 @@ impl PubliclyVerifiableDkg