From 4d09b6f2e58f08c3f9989c2990cd96e6b428d953 Mon Sep 17 00:00:00 2001 From: Georgio Nicolas Date: Wed, 4 Jun 2025 23:21:49 +0200 Subject: [PATCH 1/4] bte/proof_chunking.rs: Check for potential arithmentic overflows --- common/dkg/src/bte/proof_chunking.rs | 103 ++++++++++++++++++++++----- common/dkg/src/error.rs | 6 ++ 2 files changed, 92 insertions(+), 17 deletions(-) diff --git a/common/dkg/src/bte/proof_chunking.rs b/common/dkg/src/bte/proof_chunking.rs index 1365aecbd7..d756f08733 100644 --- a/common/dkg/src/bte/proof_chunking.rs +++ b/common/dkg/src/bte/proof_chunking.rs @@ -28,6 +28,7 @@ const SECURITY_PARAMETER: usize = 256; /// ceil(SECURITY_PARAMETER / PARALLEL_RUNS) in the paper const NUM_CHALLENGE_BITS: usize = SECURITY_PARAMETER.div_ceil(PARALLEL_RUNS); +const EE: usize = 1 << NUM_CHALLENGE_BITS; // type alias for ease of use type FirstChallenge = Vec>>; @@ -110,21 +111,20 @@ impl ProofOfChunking { // define bounds for the blinding factors let n = instance.public_keys.len(); let m = NUM_CHUNKS; - let ee = 1 << NUM_CHALLENGE_BITS; - // CHUNK_MAX corresponds to paper's B - let ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) as u64; - let zz = (2 * (PARALLEL_RUNS as u64)) - .checked_mul(ss) - .expect("overflow in Z = 2 * l * S"); + // ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) + // Z = 2 * l * S + let (ss, zz): (u64, u64) = compute_ss_zz(n, m)?; let ss_scalar = Scalar::from(ss); // rather than generating blinding factors in [-S, Z-1] directly, // do it via [0, Z - 1 + S + 1] and deal with the shift later. 
- let combined_upper_range = (zz - 1) - .checked_add(ss + 1) - .expect("overflow in Z - 1 + S + 1"); + // combined_upper_range = Z - 1 + S + 1 + + let combined_upper_range = zz.checked_add(ss).ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::construct | Z - 1 + S + 1", + })?; let mut betas = Vec::with_capacity(PARALLEL_RUNS); let mut bs = Vec::with_capacity(PARALLEL_RUNS); @@ -178,12 +178,23 @@ impl ProofOfChunking { // I think this part is more readable with a range loop #[allow(clippy::needless_range_loop)] for l in 0..PARALLEL_RUNS { - let mut sum = 0; + let mut sum: u64 = 0; for (i, witness_i) in witnesses_s.iter().enumerate() { for (j, witness_ij) in witness_i.to_chunks().chunks.iter().enumerate() { debug_assert!(std::mem::size_of::() <= std::mem::size_of::()); - sum += first_challenge[i][j][l] * (*witness_ij as u64) + // sum += first_challenge[i][j][l] * (*witness_ij as u64) + sum = sum + .checked_add( + first_challenge[i][j][l] + .checked_mul(*witness_ij as u64) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::construct | first_challenge[i][j][l] * witness_ij", + })?, + ) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::construct | sum + (first_challenge[i][j][l] * witness_ij)", + })?; } } @@ -191,7 +202,18 @@ impl ProofOfChunking { continue 'retry_loop; } // shifted_blinding_factors[l] - ss restores it to "proper" [-S, Z - 1] range - let response = sum + shifted_blinding_factors[l] - ss; + // let response = sum + shifted_blinding_factors[l] - ss; + let response = sum + .checked_add(shifted_blinding_factors[l]) + .ok_or(DkgError::ArithmeticOverflow { + info: + "ProofOfChunking::construct | sum + (shifted_blinding_factors[l] - ss)", + })? 
+ .checked_sub(ss) + .ok_or(DkgError::ArithmeticUnderflow { + info: "ProofOfChunking::construct | shifted_blinding_factors[l] - ss", + })?; + if response < zz { responses_chunks.push(response) } else { @@ -276,11 +298,14 @@ impl ProofOfChunking { ensure_len!(&self.responses_r, n); ensure_len!(&self.responses_chunks, PARALLEL_RUNS); - let ee = 1 << NUM_CHALLENGE_BITS; + // ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) + // Z = 2 * l * S - // CHUNK_MAX corresponds to paper's B - let ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) as u64; - let zz = 2 * (PARALLEL_RUNS as u64) * ss; + let zz: u64; + match compute_ss_zz(n, m) { + Ok((_, zz_res)) => zz = zz_res, + _ => return false, + }; for response_chunk in &self.responses_chunks { if response_chunk >= &zz { @@ -411,7 +436,7 @@ impl ProofOfChunking { random_oracle_builder.update(lambda_e.to_be_bytes()); let mut oracle = rand_chacha::ChaCha20Rng::from_seed(random_oracle_builder.finalize()); - let range_max_excl = 1 << NUM_CHALLENGE_BITS; + let range_max_excl = EE as u64; (0..n) .map(|_| { @@ -637,6 +662,50 @@ impl ProofOfChunking { } } +fn compute_ss_zz(n: usize, m: usize) -> Result<(u64, u64), DkgError> { + // let ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) as u64; + // CHUNK_MAX corresponds to paper's B + + let ee = EE; + + let ss = n + .checked_mul(m) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::compute_ss_zz | n * m", + })? + .checked_mul( + CHUNK_SIZE + .checked_sub(1) + .ok_or(DkgError::ArithmeticUnderflow { + info: "ProofOfChunking::compute_ss_zz | (CHUNK_SIZE - 1)", + })? + .checked_mul(ee.checked_sub(1).ok_or(DkgError::ArithmeticUnderflow { + info: "ProofOfChunking::compute_ss_zz | (ee - 1)", + })?) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::compute_ss_zz | (CHUNK_SIZE - 1) * (ee - 1)", + })?, + ) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::compute_ss_zz | ss_lhs * ss_rhs", + })? 
as u64; + + // let zz = 2 * PARALLEL_RUNS as u64 * ss; + // Z = 2 * l * S + + let zz = 2u64 + .checked_mul(PARALLEL_RUNS as u64) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::compute_ss_zz | 2 * l", + })? + .checked_mul(ss) + .ok_or(DkgError::ArithmeticOverflow { + info: "ProofOfChunking::compute_ss_zz | (2 * l) * S", + })?; + + Ok((ss, zz)) +} + #[cfg(test)] mod tests { use super::*; diff --git a/common/dkg/src/error.rs b/common/dkg/src/error.rs index 9329f32794..5bea0fe3a3 100644 --- a/common/dkg/src/error.rs +++ b/common/dkg/src/error.rs @@ -99,6 +99,12 @@ pub enum DkgError { "The reshared dealing has different public constant coefficient than its prior variant" )] InvalidResharing, + + #[error("Arithmetic Overflow: {info}")] + ArithmeticOverflow { info: &'static str }, + + #[error("Arithmetic Underflow: {info}")] + ArithmeticUnderflow { info: &'static str }, } impl DkgError { From e2f2ab89ecc1bb4e496fbf171415de8b36c686de Mon Sep 17 00:00:00 2001 From: Georgio Nicolas Date: Wed, 4 Jun 2025 23:33:45 +0200 Subject: [PATCH 2/4] dkg: add CryptoRng trait requirement --- common/dkg/benches/benchmarks.rs | 5 +++-- common/dkg/src/bte/encryption.rs | 8 ++++++-- common/dkg/src/bte/keys.rs | 8 ++++++-- common/dkg/src/bte/proof_chunking.rs | 8 +++++--- common/dkg/src/bte/proof_discrete_log.rs | 7 ++++++- common/dkg/src/bte/proof_sharing.rs | 6 ++++-- common/dkg/src/dealing.rs | 3 ++- common/dkg/src/interpolation/polynomial.rs | 3 ++- 8 files changed, 34 insertions(+), 14 deletions(-) diff --git a/common/dkg/benches/benchmarks.rs b/common/dkg/benches/benchmarks.rs index 2951a57a7c..37217bf45d 100644 --- a/common/dkg/benches/benchmarks.rs +++ b/common/dkg/benches/benchmarks.rs @@ -14,6 +14,7 @@ use nym_dkg::bte::{ }; use nym_dkg::interpolation::polynomial::Polynomial; use nym_dkg::{combine_shares, Dealing, NodeIndex, Share, Threshold}; +use rand::CryptoRng; use rand_core::{RngCore, SeedableRng}; use std::collections::BTreeMap; @@ -31,7 +32,7 @@ pub fn 
precomputing_g2_generator_for_miller_loop(c: &mut Criterion) { } fn prepare_keys( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, nodes: usize, ) -> (BTreeMap, Vec) { let params = setup(); @@ -50,7 +51,7 @@ fn prepare_keys( } fn prepare_resharing( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, params: &Params, nodes: usize, threshold: Threshold, diff --git a/common/dkg/src/bte/encryption.rs b/common/dkg/src/bte/encryption.rs index 8cd6a3e774..56fdd148cd 100644 --- a/common/dkg/src/bte/encryption.rs +++ b/common/dkg/src/bte/encryption.rs @@ -9,6 +9,7 @@ use crate::{Chunk, ChunkedShare, Share}; use bls12_381::{G1Affine, G1Projective, G2Prepared, G2Projective, Gt, Scalar}; use ff::Field; use group::{Curve, Group, GroupEncoding}; +use rand::CryptoRng; use rand_core::RngCore; use std::collections::HashMap; use std::ops::Neg; @@ -191,7 +192,7 @@ impl HazmatRandomness { pub fn encrypt_shares( shares: &[(&Share, &PublicKey)], params: &Params, - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, ) -> (Ciphertexts, HazmatRandomness) { let g1 = G1Projective::generator(); @@ -574,7 +575,10 @@ mod tests { #[test] fn ciphertexts_roundtrip() { - fn random_ciphertexts(mut rng: impl RngCore, num_receivers: usize) -> Ciphertexts { + fn random_ciphertexts( + mut rng: impl RngCore + CryptoRng, + num_receivers: usize, + ) -> Ciphertexts { Ciphertexts { rr: (0..NUM_CHUNKS) .map(|_| G1Projective::random(&mut rng)) diff --git a/common/dkg/src/bte/keys.rs b/common/dkg/src/bte/keys.rs index 132f161f5e..20d896a547 100644 --- a/common/dkg/src/bte/keys.rs +++ b/common/dkg/src/bte/keys.rs @@ -9,11 +9,15 @@ use bls12_381::{G1Projective, G2Projective, Scalar}; use ff::Field; use group::GroupEncoding; use nym_pemstore::traits::{PemStorableKey, PemStorableKeyPair}; +use rand::CryptoRng; use rand_core::RngCore; use zeroize::Zeroize; // produces public key and a decryption key for the root of the tree -pub fn keygen(params: &Params, mut rng: impl RngCore) -> 
(DecryptionKey, PublicKeyWithProof) { +pub fn keygen( + params: &Params, + mut rng: impl RngCore + CryptoRng, +) -> (DecryptionKey, PublicKeyWithProof) { let g1 = G1Projective::generator(); let g2 = G2Projective::generator(); @@ -244,7 +248,7 @@ pub struct KeyPair { } impl KeyPair { - pub fn new(params: &Params, rng: impl RngCore) -> Self { + pub fn new(params: &Params, rng: impl RngCore + CryptoRng) -> Self { let (dk, pk) = keygen(params, rng); Self { private_key: dk, diff --git a/common/dkg/src/bte/proof_chunking.rs b/common/dkg/src/bte/proof_chunking.rs index d756f08733..1021ac9fb4 100644 --- a/common/dkg/src/bte/proof_chunking.rs +++ b/common/dkg/src/bte/proof_chunking.rs @@ -10,7 +10,7 @@ use crate::utils::{deserialize_scalar, RandomOracleBuilder}; use bls12_381::{G1Projective, Scalar}; use ff::Field; use group::{Group, GroupEncoding}; -use rand::Rng; +use rand::{CryptoRng, Rng}; use rand_core::{RngCore, SeedableRng}; const CHUNKING_ORACLE_DOMAIN: &[u8] = @@ -95,7 +95,7 @@ impl ProofOfChunking { // Scalar(-1) would in reality be Scalar(q - 1), which is greater than Scalar(1) and opposite to // what we wanted. 
pub fn construct( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, instance: Instance, witness_r: &[Scalar; NUM_CHUNKS], witnesses_s: &[Share], @@ -721,7 +721,9 @@ mod tests { ciphertext_chunks: Vec<[G1Projective; NUM_CHUNKS]>, } - fn setup(mut rng: impl RngCore) -> (OwnedInstance, [Scalar; NUM_CHUNKS], Vec) { + fn setup( + mut rng: impl RngCore + CryptoRng, + ) -> (OwnedInstance, [Scalar; NUM_CHUNKS], Vec) { let g1 = G1Projective::generator(); let mut pks = Vec::with_capacity(NODES); diff --git a/common/dkg/src/bte/proof_discrete_log.rs b/common/dkg/src/bte/proof_discrete_log.rs index 95eef6eb42..3956e3837d 100644 --- a/common/dkg/src/bte/proof_discrete_log.rs +++ b/common/dkg/src/bte/proof_discrete_log.rs @@ -5,6 +5,7 @@ use crate::utils::hash_to_scalar; use bls12_381::{G1Projective, Scalar}; use ff::Field; use group::GroupEncoding; +use rand::CryptoRng; use rand_core::RngCore; use zeroize::Zeroize; @@ -20,7 +21,11 @@ pub struct ProofOfDiscreteLog { } impl ProofOfDiscreteLog { - pub fn construct(mut rng: impl RngCore, public: &G1Projective, witness: &Scalar) -> Self { + pub fn construct( + mut rng: impl RngCore + CryptoRng, + public: &G1Projective, + witness: &Scalar, + ) -> Self { let mut rand_x = Scalar::random(&mut rng); let rand_commitment = G1Projective::generator() * rand_x; let challenge = Self::compute_challenge(public, &rand_commitment); diff --git a/common/dkg/src/bte/proof_sharing.rs b/common/dkg/src/bte/proof_sharing.rs index 4b843bb8f4..fe088c772f 100644 --- a/common/dkg/src/bte/proof_sharing.rs +++ b/common/dkg/src/bte/proof_sharing.rs @@ -9,6 +9,7 @@ use crate::{NodeIndex, Share}; use bls12_381::{G1Projective, G2Projective, Scalar}; use ff::Field; use group::GroupEncoding; +use rand::CryptoRng; use rand_core::RngCore; use std::collections::BTreeMap; @@ -87,7 +88,7 @@ pub struct ProofOfSecretSharing { impl ProofOfSecretSharing { pub fn construct( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, instance: Instance, 
witness_r: &Scalar, witnesses_s: &[Share], @@ -309,13 +310,14 @@ mod tests { use super::*; use crate::interpolation::polynomial::Polynomial; use group::Group; + use rand::CryptoRng; use rand_core::SeedableRng; const NODES: u64 = 50; const THRESHOLD: u64 = 40; fn setup( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, ) -> ( BTreeMap, PublicCoefficients, diff --git a/common/dkg/src/dealing.rs b/common/dkg/src/dealing.rs index a051f9f88c..fac5f2a9e6 100644 --- a/common/dkg/src/dealing.rs +++ b/common/dkg/src/dealing.rs @@ -13,6 +13,7 @@ use crate::utils::deserialize_g2; use crate::{NodeIndex, Share, Threshold}; use bls12_381::{G2Projective, Scalar}; use group::GroupEncoding; +use rand::CryptoRng; use rand_core::RngCore; use std::collections::BTreeMap; use zeroize::Zeroize; @@ -94,7 +95,7 @@ impl Dealing { // I'm not a big fan of this function signature, but I'm not clear on how to improve it while // allowing the dealer to skip decryption of its own share if it was also one of the receivers pub fn create( - mut rng: impl RngCore, + mut rng: impl RngCore + CryptoRng, params: &Params, dealer_index: NodeIndex, threshold: Threshold, diff --git a/common/dkg/src/interpolation/polynomial.rs b/common/dkg/src/interpolation/polynomial.rs index 671b8e6832..f3874806e1 100644 --- a/common/dkg/src/interpolation/polynomial.rs +++ b/common/dkg/src/interpolation/polynomial.rs @@ -6,6 +6,7 @@ use crate::utils::deserialize_g2; use bls12_381::{G2Projective, Scalar}; use ff::Field; use group::GroupEncoding; +use rand::CryptoRng; use rand_core::RngCore; use std::ops::{Add, Index, IndexMut}; use zeroize::Zeroize; @@ -120,7 +121,7 @@ impl Polynomial { // for polynomial of degree n, we generate n+1 values // (for example for degree 1, like y = x + 2, we need [2,1]) /// Creates new pseudorandom polynomial of specified degree.
- pub fn new_random(mut rng: impl RngCore, degree: u64) -> Self { + pub fn new_random(mut rng: impl RngCore + CryptoRng, degree: u64) -> Self { Polynomial { coefficients: (0..=degree).map(|_| Scalar::random(&mut rng)).collect(), } } From 56aad75220c674bd5ad5c9f68659c247c46eb210 Mon Sep 17 00:00:00 2001 From: Georgio Nicolas Date: Wed, 4 Jun 2025 23:59:20 +0200 Subject: [PATCH 3/4] dkg: verify integrity of ciphertexts during decryption --- common/dkg/benches/benchmarks.rs | 16 ++++++--- common/dkg/src/bte/encryption.rs | 45 ++++++++++++++++++++----- common/dkg/src/dealing.rs | 4 +-- common/dkg/tests/integration.rs | 15 +++++---- nym-api/src/ecash/dkg/key_derivation.rs | 8 ++++- 5 files changed, 65 insertions(+), 23 deletions(-) diff --git a/common/dkg/benches/benchmarks.rs b/common/dkg/benches/benchmarks.rs index 37217bf45d..d12305bba7 100644 --- a/common/dkg/benches/benchmarks.rs +++ b/common/dkg/benches/benchmarks.rs @@ -69,7 +69,7 @@ fn prepare_resharing( for (i, ref mut dk) in dks.iter_mut().enumerate() { let shares = first_dealings .iter() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = @@ -155,7 +155,9 @@ pub fn verifying_dealing_made_for_3_parties_and_recovering_share(c: &mut Criteri |b| { b.iter(|| { assert!(dealing.verify(&params, threshold, &receivers, None).is_ok()); - black_box(decrypt_share(first_key, 0, &dealing.ciphertexts, None).unwrap()); + black_box( + decrypt_share(&params, first_key, 0, &dealing.ciphertexts, None).unwrap(), + ); }) }, ); @@ -238,7 +240,9 @@ pub fn verifying_dealing_made_for_20_parties_and_recovering_share(c: &mut Criter |b| { b.iter(|| { assert!(dealing.verify(&params, threshold, &receivers, None).is_ok()); - black_box(decrypt_share(first_key, 0, &dealing.ciphertexts, None).unwrap()); + black_box( + decrypt_share(&params, first_key, 0, &dealing.ciphertexts, None).unwrap(), + ); }) }, ); @@ 
-321,7 +325,9 @@ pub fn verifying_dealing_made_for_100_parties_and_recovering_share(c: &mut Crite |b| { b.iter(|| { assert!(dealing.verify(&params, threshold, &receivers, None).is_ok()); - black_box(decrypt_share(first_key, 0, &dealing.ciphertexts, None).unwrap()); + black_box( + decrypt_share(&params, first_key, 0, &dealing.ciphertexts, None).unwrap(), + ); }) }, ); @@ -548,7 +554,7 @@ pub fn share_decryption(c: &mut Criterion) { let (ciphertexts, _) = encrypt_shares(&[(&share, pk.public_key())], &params, &mut rng); c.bench_function("single share decryption", |b| { - b.iter(|| black_box(decrypt_share(&dk, 0, &ciphertexts, None))) + b.iter(|| black_box(decrypt_share(&params, &dk, 0, &ciphertexts, None))) }); } diff --git a/common/dkg/src/bte/encryption.rs b/common/dkg/src/bte/encryption.rs index 56fdd148cd..300a72441f 100644 --- a/common/dkg/src/bte/encryption.rs +++ b/common/dkg/src/bte/encryption.rs @@ -263,6 +263,7 @@ pub fn encrypt_shares( } pub fn decrypt_share( + params: &Params, dk: &DecryptionKey, // in the case of multiple receivers, specifies which index of ciphertext chunks should be used i: usize, @@ -271,6 +272,10 @@ ) -> Result { let mut plaintext = ChunkedShare::default(); + if !ciphertext.verify_integrity(&params) { + return Err(DkgError::FailedCiphertextIntegrityCheck); + } + if i >= ciphertext.ciphertext_chunks.len() { return Err(DkgError::UnavailableCiphertext(i)); } @@ -462,10 +467,22 @@ mod tests { let (ciphertext, hazmat) = encrypt_shares(shares, &params, &mut rng); verify_hazmat_rand(&ciphertext, &hazmat); - let recovered1 = - decrypt_share(&decryption_key1, 0, &ciphertext, Some(lookup_table)).unwrap(); - let recovered2 = - decrypt_share(&decryption_key2, 1, &ciphertext, Some(lookup_table)).unwrap(); + let recovered1 = decrypt_share( + &params, + &decryption_key1, + 0, + &ciphertext, + Some(lookup_table), + ) + .unwrap(); + let recovered2 = decrypt_share( + &params, + &decryption_key2, + 1, + &ciphertext, + Some(lookup_table), + ) + .unwrap(); 
assert_eq!(m1, recovered1); assert_eq!(m2, recovered2); } @@ -491,10 +508,22 @@ let (ciphertext, hazmat) = encrypt_shares(shares, &params, &mut rng); verify_hazmat_rand(&ciphertext, &hazmat); - let recovered1 = - decrypt_share(&decryption_key1, 0, &ciphertext, Some(lookup_table)).unwrap(); - let recovered2 = - decrypt_share(&decryption_key2, 1, &ciphertext, Some(lookup_table)).unwrap(); + let recovered1 = decrypt_share( + &params, + &decryption_key1, + 0, + &ciphertext, + Some(lookup_table), + ) + .unwrap(); + let recovered2 = decrypt_share( + &params, + &decryption_key2, + 1, + &ciphertext, + Some(lookup_table), + ) + .unwrap(); assert_eq!(m1, recovered1); assert_eq!(m2, recovered2); } } diff --git a/common/dkg/src/dealing.rs b/common/dkg/src/dealing.rs index fac5f2a9e6..e9a96886c2 100644 --- a/common/dkg/src/dealing.rs +++ b/common/dkg/src/dealing.rs @@ -485,7 +485,7 @@ mod tests { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); derived_secrets.push( combine_shares(shares, &receivers.keys().copied().collect::>()).unwrap(), ); @@ -594,7 +594,7 @@ for (i, (dk, _)) in full_keys.iter().enumerate() { let shares = dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = combine_shares(shares, &dealer_indices).unwrap(); diff --git a/common/dkg/tests/integration.rs b/common/dkg/tests/integration.rs index 81c861425d..e34195488e 100644 --- a/common/dkg/tests/integration.rs +++ b/common/dkg/tests/integration.rs @@ -53,11 +53,12 @@ fn single_sender() { // make sure each share is actually decryptable (even though proofs say they must be, perform this sanity check) for (i, (ref dk, _)) in 
full_keys.iter().enumerate() { - let _recovered = decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap(); + let _recovered = decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap(); } // and for good measure, check that the dealer's share matches decryption result - let recovered_dealer = decrypt_share(&full_keys[0].0, 0, &dealing.ciphertexts, None).unwrap(); + let recovered_dealer = + decrypt_share(&params, &full_keys[0].0, 0, &dealing.ciphertexts, None).unwrap(); assert_eq!(recovered_dealer, dealer_share.unwrap()); } @@ -115,7 +116,7 @@ fn full_threshold_secret_sharing() { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); // we know dealer_share matches, but it would be inconvenient to try to put them in here, @@ -189,7 +190,7 @@ fn full_threshold_secret_resharing() { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = first_dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = @@ -240,7 +241,7 @@ fn full_threshold_secret_resharing() { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = resharing_dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = @@ -305,7 +306,7 @@ fn full_threshold_secret_resharing_left_party() { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = first_dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = @@ -369,7 +370,7 
@@ fn full_threshold_secret_resharing_left_party() { for (i, (ref dk, _)) in full_keys.iter().enumerate() { let shares = resharing_dealings .values() - .map(|dealing| decrypt_share(dk, i, &dealing.ciphertexts, None).unwrap()) + .map(|dealing| decrypt_share(&params, dk, i, &dealing.ciphertexts, None).unwrap()) .collect(); let recovered_secret = combine_shares(shares, &node_indices).unwrap(); diff --git a/nym-api/src/ecash/dkg/key_derivation.rs b/nym-api/src/ecash/dkg/key_derivation.rs index 41cfd11d36..1dc0b0908e 100644 --- a/nym-api/src/ecash/dkg/key_derivation.rs +++ b/nym-api/src/ecash/dkg/key_derivation.rs @@ -399,7 +399,13 @@ impl DkgController { for (dealer_index, dealing) in dealings.into_iter() { // attempt to decrypt our portion let dk = self.state.dkg_keypair().private_key(); - let share = match decrypt_share(dk, receiver_index, &dealing.ciphertexts, None) { + let share = match decrypt_share( + dkg::params(), + dk, + receiver_index, + &dealing.ciphertexts, + None, + ) { Ok(share) => share, Err(err) => { error!("failed to decrypt share {human_index}/{total} generated from dealer {dealer_index}: {err} - can't generate the full key"); From a7cd8efc04f371b9dd66843a3a6299757ae72b36 Mon Sep 17 00:00:00 2001 From: Georgio Nicolas Date: Tue, 17 Jun 2025 16:37:50 +0200 Subject: [PATCH 4/4] dkg: fix clippy suggestions --- common/dkg/src/bte/encryption.rs | 2 +- common/dkg/src/bte/proof_chunking.rs | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/common/dkg/src/bte/encryption.rs b/common/dkg/src/bte/encryption.rs index 300a72441f..81b304bc7b 100644 --- a/common/dkg/src/bte/encryption.rs +++ b/common/dkg/src/bte/encryption.rs @@ -272,7 +272,7 @@ pub fn decrypt_share( ) -> Result { let mut plaintext = ChunkedShare::default(); - if !ciphertext.verify_integrity(&params) { + if !ciphertext.verify_integrity(params) { return Err(DkgError::FailedCiphertextIntegrityCheck); } diff --git a/common/dkg/src/bte/proof_chunking.rs 
b/common/dkg/src/bte/proof_chunking.rs index 1021ac9fb4..2da6e83b33 100644 --- a/common/dkg/src/bte/proof_chunking.rs +++ b/common/dkg/src/bte/proof_chunking.rs @@ -301,9 +301,8 @@ impl ProofOfChunking { // ss = (n * m * (CHUNK_SIZE - 1) * (ee - 1)) // Z = 2 * l * S - let zz: u64; - match compute_ss_zz(n, m) { - Ok((_, zz_res)) => zz = zz_res, + let zz: u64 = match compute_ss_zz(n, m) { + Ok((_, zz_res)) => zz_res, _ => return false, };