1 change: 0 additions & 1 deletion common/dkg/Cargo.toml
@@ -17,7 +17,6 @@ nym-contracts-common = { path = "../cosmwasm-smart-contracts/contracts-common",
bs58 = { workspace = true }


lazy_static = { workspace = true }
rand = { workspace = true }
rand_chacha = { workspace = true }
rand_core = { workspace = true }
57 changes: 16 additions & 41 deletions common/dkg/src/bte/encryption.rs
@@ -13,9 +13,9 @@ use rand::CryptoRng;
use rand_core::RngCore;
use std::collections::HashMap;
use std::ops::Neg;
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Ciphertexts {
pub rr: [G1Projective; NUM_CHUNKS],
pub ss: [G1Projective; NUM_CHUNKS],
@@ -164,8 +164,7 @@ impl Ciphertexts {
}
}

#[derive(Zeroize)]
#[zeroize(drop)]
#[derive(Zeroize, ZeroizeOnDrop)]
/// Randomness generated during ciphertext generation that is required for proofs of knowledge.
///
/// It must be handled with extreme care as its misuse might help malicious parties to recover
@@ -399,7 +398,7 @@ pub fn baby_step_giant_step(
#[cfg(test)]
mod tests {
use super::*;
use crate::bte::{keygen, setup, DEFAULT_BSGS_TABLE};
use crate::bte::{keygen, setup, BSGS_TABLE};
use rand_core::SeedableRng;

fn verify_hazmat_rand(ciphertext: &Ciphertexts, randomness: &HazmatRandomness) {
@@ -457,8 +456,6 @@ mod tests {
let (decryption_key1, public_key1) = keygen(&params, &mut rng);
let (decryption_key2, public_key2) = keygen(&params, &mut rng);

let lookup_table = &DEFAULT_BSGS_TABLE;

for _ in 0..10 {
let m1 = Share::random(&mut rng);
let m2 = Share::random(&mut rng);
@@ -467,22 +464,12 @@
let (ciphertext, hazmat) = encrypt_shares(shares, &params, &mut rng);
verify_hazmat_rand(&ciphertext, &hazmat);

let recovered1 = decrypt_share(
&params,
&decryption_key1,
0,
&ciphertext,
Some(lookup_table),
)
.unwrap();
let recovered2 = decrypt_share(
&params,
&decryption_key2,
1,
&ciphertext,
Some(lookup_table),
)
.unwrap();
let recovered1 =
decrypt_share(&params, &decryption_key1, 0, &ciphertext, Some(&BSGS_TABLE))
.unwrap();
let recovered2 =
decrypt_share(&params, &decryption_key2, 1, &ciphertext, Some(&BSGS_TABLE))
.unwrap();
assert_eq!(m1, recovered1);
assert_eq!(m2, recovered2);
}
@@ -498,8 +485,6 @@ mod tests {
let (decryption_key1, public_key1) = keygen(&params, &mut rng);
let (decryption_key2, public_key2) = keygen(&params, &mut rng);

let lookup_table = &DEFAULT_BSGS_TABLE;

for _ in 0..10 {
let m1 = Share::random(&mut rng);
let m2 = Share::random(&mut rng);
@@ -508,22 +493,12 @@
let (ciphertext, hazmat) = encrypt_shares(shares, &params, &mut rng);
verify_hazmat_rand(&ciphertext, &hazmat);

let recovered1 = decrypt_share(
&params,
&decryption_key1,
0,
&ciphertext,
Some(lookup_table),
)
.unwrap();
let recovered2 = decrypt_share(
&params,
&decryption_key2,
1,
&ciphertext,
Some(lookup_table),
)
.unwrap();
let recovered1 =
decrypt_share(&params, &decryption_key1, 0, &ciphertext, Some(&BSGS_TABLE))
.unwrap();
let recovered2 =
decrypt_share(&params, &decryption_key2, 1, &ciphertext, Some(&BSGS_TABLE))
.unwrap();
assert_eq!(m1, recovered1);
assert_eq!(m2, recovered2);
}
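For reference, the zeroize(drop) attribute removed above and the ZeroizeOnDrop derive that replaces it are equivalent; a minimal sketch of the new pattern, using a hypothetical type that does not appear in this crate:

use zeroize::{Zeroize, ZeroizeOnDrop};

// Hypothetical secret container; ZeroizeOnDrop hooks zeroize() into Drop,
// so the field is overwritten with zeroes when the value is dropped.
#[derive(Zeroize, ZeroizeOnDrop)]
struct SecretBytes {
    bytes: [u8; 32],
}

fn main() {
    let secret = SecretBytes { bytes: [42u8; 32] };
    drop(secret); // bytes is wiped here, before the memory is released
}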
10 changes: 5 additions & 5 deletions common/dkg/src/bte/keys.rs
@@ -11,7 +11,7 @@ use group::GroupEncoding;
use nym_pemstore::traits::{PemStorableKey, PemStorableKeyPair};
use rand::CryptoRng;
use rand_core::RngCore;
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

// produces public key and a decryption key for the root of the tree
pub fn keygen(
@@ -48,7 +48,7 @@ pub fn keygen(
(dk, key_with_proof)
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Zeroize)]
pub struct PublicKey(pub(crate) G1Projective);

impl PublicKey {
@@ -57,7 +57,7 @@ impl PublicKey {
}
}

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct PublicKeyWithProof {
pub(crate) key: PublicKey,
pub(crate) proof: ProofOfDiscreteLog,
@@ -136,8 +136,7 @@ impl PublicKeyWithProof {
}
}

#[derive(Debug, Zeroize)]
#[zeroize(drop)]
#[derive(Debug, Zeroize, ZeroizeOnDrop)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct DecryptionKey {
// g1^rho
@@ -242,6 +241,7 @@ impl DecryptionKey {
}
}

#[derive(Zeroize, ZeroizeOnDrop)]
pub struct KeyPair {
pub(crate) private_key: DecryptionKey,
pub(crate) public_key: PublicKeyWithProof,
18 changes: 9 additions & 9 deletions common/dkg/src/bte/mod.rs
@@ -1,11 +1,13 @@
// Copyright 2022 - Nym Technologies SA <[email protected]>
// SPDX-License-Identifier: Apache-2.0

use std::sync::LazyLock;

use crate::bte::encryption::BabyStepGiantStepLookup;
use crate::utils::hash_g2;
use crate::{Chunk, Share};
use bls12_381::{G1Affine, G2Affine, G2Prepared, G2Projective, Gt};
use group::Curve;
use lazy_static::lazy_static;

pub mod encryption;
pub mod keys;
@@ -16,14 +18,12 @@ pub mod proof_sharing;
pub use encryption::{decrypt_share, encrypt_shares, Ciphertexts};
pub use keys::{keygen, DecryptionKey, PublicKey, PublicKeyWithProof};

lazy_static! {
pub(crate) static ref PAIRING_BASE: Gt =
bls12_381::pairing(&G1Affine::generator(), &G2Affine::generator());
pub(crate) static ref G2_GENERATOR_PREPARED: G2Prepared =
G2Prepared::from(G2Affine::generator());
pub(crate) static ref DEFAULT_BSGS_TABLE: encryption::BabyStepGiantStepLookup =
encryption::BabyStepGiantStepLookup::default();
}
pub(crate) static PAIRING_BASE: LazyLock<Gt> =
LazyLock::new(|| bls12_381::pairing(&G1Affine::generator(), &G2Affine::generator()));
pub(crate) static G2_GENERATOR_PREPARED: LazyLock<G2Prepared> =
LazyLock::new(|| G2Prepared::from(G2Affine::generator()));
pub static BSGS_TABLE: LazyLock<BabyStepGiantStepLookup> =
LazyLock::new(BabyStepGiantStepLookup::default);

// Domain tries to follow guidelines specified by:
// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-3.1
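For reference, the statics above move from the lazy_static! macro to std::sync::LazyLock (stable since Rust 1.80). A minimal sketch of the LazyLock pattern, using illustrative names rather than the crate's own:

use std::sync::LazyLock;

// The initializer closure runs once, on first access; the result is cached
// for the rest of the program and later accesses simply dereference it.
static SQUARES: LazyLock<Vec<u64>> =
    LazyLock::new(|| (0..1024u64).map(|i| i * i).collect());

fn main() {
    // Deref coercion exposes the cached Vec directly.
    assert_eq!(SQUARES[3], 9);
}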
3 changes: 2 additions & 1 deletion common/dkg/src/bte/proof_chunking.rs
@@ -12,6 +12,7 @@ use ff::Field;
use group::{Group, GroupEncoding};
use rand::{CryptoRng, Rng};
use rand_core::{RngCore, SeedableRng};
use zeroize::{Zeroize, ZeroizeOnDrop};

const CHUNKING_ORACLE_DOMAIN: &[u8] =
b"NYM_COCONUT_NIDKG_V01_CS01_SHA-256_CHACHA20_CHUNKING_ORACLE";
@@ -67,7 +68,7 @@ impl<'a> Instance<'a> {
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct ProofOfChunking {
y0: G1Projective,
bb: Vec<G1Projective>,
10 changes: 5 additions & 5 deletions common/dkg/src/bte/proof_discrete_log.rs
@@ -7,14 +7,14 @@ use ff::Field;
use group::GroupEncoding;
use rand::CryptoRng;
use rand_core::RngCore;
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

// Domain tries to follow guidelines specified by:
// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-3.1
const DISCRETE_LOG_DOMAIN: &[u8] =
b"NYM_COCONUT_NIDKG_V01_CS01_WITH_BLS12381_XMD:SHA-256_SSWU_RO_PROOF_DISCRETE_LOG";

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct ProofOfDiscreteLog {
pub(crate) rand_commitment: G1Projective,
pub(crate) response: Scalar,
@@ -52,9 +52,9 @@ impl ProofOfDiscreteLog {
let public_bytes = public.to_bytes();
let rand_commit_bytes = rand_commit.to_bytes();

let mut bytes = Vec::with_capacity(96);
bytes.extend_from_slice(public_bytes.as_ref());
bytes.extend_from_slice(rand_commit_bytes.as_ref());
let mut bytes = [0u8; 96];
bytes[0..48].copy_from_slice(public_bytes.as_ref());
bytes[48..96].copy_from_slice(rand_commit_bytes.as_ref());

hash_to_scalar(bytes, DISCRETE_LOG_DOMAIN)
}
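For reference, the challenge hashing above now fills a fixed 96-byte stack array instead of growing a heap Vec; 96 bytes holds two 48-byte compressed G1 encodings. A standalone sketch of the same buffer construction, with hypothetical argument names:

// Concatenate two 48-byte compressed G1 encodings into one fixed-size buffer.
fn challenge_input(public_bytes: &[u8; 48], rand_commit_bytes: &[u8; 48]) -> [u8; 96] {
    let mut bytes = [0u8; 96];
    bytes[0..48].copy_from_slice(public_bytes);
    bytes[48..96].copy_from_slice(rand_commit_bytes);
    bytes
}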
3 changes: 2 additions & 1 deletion common/dkg/src/bte/proof_sharing.rs
@@ -12,6 +12,7 @@ use group::GroupEncoding;
use rand::CryptoRng;
use rand_core::RngCore;
use std::collections::BTreeMap;
use zeroize::{Zeroize, ZeroizeOnDrop};

// Domain tries to follow guidelines specified by:
// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-3.1
@@ -77,7 +78,7 @@ impl<'a> Instance<'a> {
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct ProofOfSecretSharing {
ff: G1Projective,
aa: G2Projective,
4 changes: 2 additions & 2 deletions common/dkg/src/dealing.rs
@@ -18,7 +18,7 @@ use rand_core::RngCore;
use std::collections::BTreeMap;
use zeroize::Zeroize;

#[derive(Clone, Debug)]
#[derive(Clone, Debug, Zeroize)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct RecoveredVerificationKeys {
pub recovered_master: G2Projective,
@@ -83,7 +83,7 @@ impl RecoveredVerificationKeys {
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Zeroize)]
pub struct Dealing {
pub public_coefficients: PublicCoefficients,
pub ciphertexts: Ciphertexts,
7 changes: 3 additions & 4 deletions common/dkg/src/interpolation/polynomial.rs
@@ -9,9 +9,9 @@ use group::GroupEncoding;
use rand::CryptoRng;
use rand_core::RngCore;
use std::ops::{Add, Index, IndexMut};
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct PublicCoefficients {
pub(crate) coefficients: Vec<G2Projective>,
}
@@ -111,8 +111,7 @@ impl IndexMut<usize> for PublicCoefficients {
}
}

#[derive(Clone, Debug, PartialEq, Eq, Zeroize)]
#[zeroize(drop)]
#[derive(Clone, Debug, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Polynomial {
coefficients: Vec<Scalar>,
}
8 changes: 3 additions & 5 deletions common/dkg/src/share.rs
@@ -6,14 +6,13 @@ use crate::error::DkgError;
use crate::interpolation::perform_lagrangian_interpolation_at_origin;
use crate::NodeIndex;
use bls12_381::Scalar;
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

// if this type is changed, one must ensure all values can fit in it
pub type Chunk = u16;

#[derive(PartialEq, Eq, Debug, Zeroize)]
#[derive(PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[cfg_attr(test, derive(Clone))]
#[zeroize(drop)]
pub struct Share(pub(crate) Scalar);

pub fn combine_shares(shares: Vec<Share>, node_indices: &[NodeIndex]) -> Result<Scalar, DkgError> {
@@ -66,9 +65,8 @@ impl From<Scalar> for Share {
}
}

#[derive(Default, Zeroize)]
#[derive(Default, Zeroize, ZeroizeOnDrop)]
#[cfg_attr(test, derive(Clone))]
#[zeroize(drop)]
pub(crate) struct ChunkedShare {
pub(crate) chunks: [Chunk; NUM_CHUNKS],
}