Refactor of Refresh & Recovery procedures #158

Merged: 17 commits, Sep 15, 2023
Changes from 16 commits
ferveo/Cargo.toml: 2 changes (1 addition, 1 deletion)
@@ -22,7 +22,7 @@ ark-serialize = "0.4"
ark-std = "0.4"
bincode = "1.3"
ferveo-common = { package = "ferveo-common-pre-release", path = "../ferveo-common", version = "^0.1.1" }
group-threshold-cryptography = { package = "group-threshold-cryptography-pre-release", path = "../tpke", features = ["api"], version = "^0.2.0" }
group-threshold-cryptography = { package = "group-threshold-cryptography-pre-release", path = "../tpke", features = ["api", "test-common"], version = "^0.2.0" }
hex = "0.4.3"
itertools = "0.10.5"
measure_time = "0.8"
ferveo/src/dkg.rs: 14 changes (7 additions, 7 deletions)
@@ -10,7 +10,7 @@ use serde_with::serde_as;

use crate::{
aggregate, utils::is_sorted, AggregatedPvss, Error, EthereumAddress,
PubliclyVerifiableParams, PubliclyVerifiableSS, Pvss, Result, Validator,
PubliclyVerifiableParams, PubliclyVerifiableSS, Result, Validator,
};

#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
@@ -156,7 +156,7 @@ impl<E: Pairing> PubliclyVerifiableDkg<E> {
rng: &mut R,
) -> Result<PubliclyVerifiableSS<E>> {
use ark_std::UniformRand;
Pvss::<E>::new(&E::ScalarField::rand(rng), self, rng)
PubliclyVerifiableSS::<E>::new(&E::ScalarField::rand(rng), self, rng)
}

/// Aggregate all received PVSS messages into a single message, prepared to post on-chain
@@ -279,7 +279,7 @@ impl<E: Pairing> PubliclyVerifiableDkg<E> {
pub fn deal(
&mut self,
sender: &Validator<E>,
pvss: &Pvss<E>,
pvss: &PubliclyVerifiableSS<E>,
) -> Result<()> {
// Add the ephemeral public key and pvss transcript
let (sender_address, _) = self
@@ -306,11 +306,11 @@ pub struct Aggregation<E: Pairing> {

#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound(
serialize = "AggregatedPvss<E>: Serialize, Pvss<E>: Serialize",
deserialize = "AggregatedPvss<E>: DeserializeOwned, Pvss<E>: DeserializeOwned"
serialize = "AggregatedPvss<E>: Serialize, PubliclyVerifiableSS<E>: Serialize",
deserialize = "AggregatedPvss<E>: DeserializeOwned, PubliclyVerifiableSS<E>: DeserializeOwned"
))]
pub enum Message<E: Pairing> {
Deal(Pvss<E>),
Deal(PubliclyVerifiableSS<E>),
Aggregate(Aggregation<E>),
}

@@ -355,7 +355,7 @@ pub(crate) mod test_common {
my_index: usize,
) -> TestSetup {
let keypairs = gen_keypairs(shares_num);
let mut validators = gen_validators(&keypairs);
let mut validators = gen_validators(&keypairs.as_slice());
validators.sort();
let me = validators[my_index].clone();
let dkg = PubliclyVerifiableDkg::new(
ferveo/src/lib.rs: 187 changes (127 additions, 60 deletions)
@@ -17,13 +17,15 @@ pub mod api;
pub mod dkg;
pub mod primitives;
pub mod pvss;
pub mod refresh;
pub mod validator;

mod utils;

pub use dkg::*;
pub use primitives::*;
pub use pvss::*;
pub use refresh::*;
pub use validator::*;

#[derive(Debug, thiserror::Error)]
@@ -212,7 +214,7 @@ mod test_dkg_full {
&dkg,
aad,
&ciphertext.header().unwrap(),
&validator_keypairs,
validator_keypairs.as_slice(),
);

let plaintext = tpke::decrypt_with_shared_secret(
@@ -308,7 +310,7 @@ mod test_dkg_full {
&dkg,
aad,
&ciphertext.header().unwrap(),
&validator_keypairs,
validator_keypairs.as_slice(),
);

izip!(
@@ -355,7 +357,10 @@ mod test_dkg_full {
fn test_dkg_simple_tdec_share_recovery() {
let rng = &mut test_rng();

let (dkg, validator_keypairs) = setup_dealt_dkg_with_n_validators(3, 4);
let security_threshold = 3;
let shares_num = 4;
let (dkg, validator_keypairs) =
setup_dealt_dkg_with_n_validators(security_threshold, shares_num);
let msg = "my-msg".as_bytes().to_vec();
let aad: &[u8] = "my-aad".as_bytes();
let public_key = &dkg.public_key();
@@ -368,29 +373,33 @@ mod test_dkg_full {
&dkg,
aad,
&ciphertext.header().unwrap(),
&validator_keypairs,
validator_keypairs.as_slice(),
);

// Now, we're going to recover a new share at a random point and check that the shared secret is still the same

// Our random point
let x_r = Fr::rand(rng);

// Remove one participant from the contexts and all nested structure
let removed_validator_addr =
dkg.validators.keys().last().unwrap().clone();
let mut remaining_validators = dkg.validators.clone();
remaining_validators.remove(&removed_validator_addr);
remaining_validators
.remove(&removed_validator_addr)
.unwrap();
// dkg.vss.remove(&removed_validator_addr); // TODO: Test whether it makes any difference

// Remember to remove one domain point too
let mut domain_points = dkg.domain.elements().collect::<Vec<_>>();
domain_points.pop().unwrap();

// Now, we're going to recover a new share at a random point,
// and check that the shared secret is still the same.

// Our random point:
let x_r = Fr::rand(rng);

// Each participant prepares an update for each other participant
let share_updates = remaining_validators
.keys()
.map(|v_addr| {
let deltas_i = tpke::prepare_share_updates_for_recovery::<E>(
let deltas_i = prepare_share_updates_for_recovery::<E>(
&domain_points,
&dkg.pvss_params.h.into_affine(),
&x_r,
@@ -402,15 +411,17 @@
.collect::<HashMap<_, _>>();

// Participants share updates and update their shares
let pvss_aggregated = aggregate(&dkg.vss);

// Now, every participant separately:
// TODO: Move this logic outside tests (see #162, #163)
let updated_shares: Vec<_> = remaining_validators
.iter()
.map(|(validator_address, validator)| {
// Receives updates from other participants
let updates_for_participant =
share_updates.get(validator_address).unwrap();
.map(|(_validator_address, validator)| {
// Current participant receives updates from other participants
let updates_for_participant: Vec<_> = share_updates
.values()
.map(|updates| *updates.get(validator.share_index).unwrap())
.collect();

// Each validator uses their decryption key to update their share
let decryption_key = validator_keypairs
@@ -419,28 +430,37 @@
.decryption_key;

// Creates updated private key shares
// TODO: Why not using dkg.aggregate()?
@piotr-roslaniec commented on Sep 14, 2023:
IIRC dkg.aggregate would throw if we didn't feed enough transcripts into it.

dkg has an internal state machine that tracks the progress of the DKG ritual, and we would have to strictly adhere to its protocol in order to successfully aggregate here. Since refreshing/recovery is not implemented in the said state machine, it's easier to skip different DKG states and just call aggregate directly here.

let pvss_aggregated = aggregate(&dkg.vss);
pvss_aggregated.update_private_key_share_for_recovery(
&decryption_key,
validator.share_index,
updates_for_participant,
updates_for_participant.as_slice(),
)
})
.collect();
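
To make the review thread above concrete, here is a standalone toy model of the difference between a state-machine-guarded Dkg::aggregate method and a free aggregate function that simply works on whatever transcripts are stored. Every name, state, and the u64 stand-in for a transcript is invented for illustration; only the general shape mirrors ferveo, not its actual API.

// Toy model only -- not the ferveo API.
enum DkgState {
    Sharing,
    Dealt,
}

struct Dkg {
    state: DkgState,
    transcripts: Vec<u64>, // stand-in for verified PVSS transcripts
}

// Free function: aggregates whatever transcripts exist (what the tests call).
fn aggregate(transcripts: &[u64]) -> u64 {
    transcripts.iter().sum()
}

impl Dkg {
    // Guarded path: refuses to aggregate unless the ritual reached `Dealt`.
    fn aggregate(&self) -> Result<u64, &'static str> {
        match self.state {
            DkgState::Dealt => Ok(aggregate(&self.transcripts)),
            DkgState::Sharing => Err("not enough transcripts have been dealt"),
        }
    }
}

fn main() {
    // Refresh/recovery flows never drive the state machine to `Dealt`...
    let mid_ritual = Dkg { state: DkgState::Sharing, transcripts: vec![1, 2, 3] };
    assert!(mid_ritual.aggregate().is_err());
    // ...so the tests bypass it and aggregate the stored transcripts directly.
    assert_eq!(aggregate(&mid_ritual.transcripts), 6);

    // A DKG that actually completed the dealing phase can use the guarded path.
    let dealt = Dkg { state: DkgState::Dealt, transcripts: vec![1, 2, 3] };
    assert_eq!(dealt.aggregate().unwrap(), 6);
}
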

// TODO: Rename updated_private_shares to something that doesn't imply mutation (see #162, #163)

// Now, we have to combine new share fragments into a new share
let new_private_key_share =
tpke::recover_share_from_updated_private_shares(
&x_r,
&domain_points,
&updated_shares,
);
let new_private_key_share = recover_share_from_updated_private_shares(
&x_r,
&domain_points,
&updated_shares,
);

// Get decryption shares from remaining participants
let mut remaining_validator_keypairs = validator_keypairs;
remaining_validator_keypairs
.pop()
.expect("Should have a keypair");
let mut decryption_shares: Vec<DecryptionShareSimple<E>> =
validator_keypairs
remaining_validator_keypairs
.iter()
.enumerate()
.map(|(share_index, validator_keypair)| {
// TODO: Why not using dkg.aggregate()?
let pvss_aggregated = aggregate(&dkg.vss);
pvss_aggregated
.make_decryption_share_simple(
&ciphertext.header().unwrap(),
Expand All @@ -466,73 +486,120 @@ mod test_dkg_full {
.unwrap(),
);

domain_points.push(x_r);
assert_eq!(domain_points.len(), shares_num as usize);
assert_eq!(decryption_shares.len(), shares_num as usize);

// Maybe parametrize this test with [1..] and [..threshold]
let domain_points = &domain_points[1..];
let decryption_shares = &decryption_shares[1..];
assert_eq!(domain_points.len(), security_threshold as usize);
assert_eq!(decryption_shares.len(), security_threshold as usize);

let lagrange = tpke::prepare_combine_simple::<E>(&domain_points);
let new_shared_secret =
tpke::share_combine_simple::<E>(&decryption_shares, &lagrange);

assert_eq!(old_shared_secret, new_shared_secret);
assert_eq!(
old_shared_secret, new_shared_secret,
"Shared secret reconstruction failed"
);
}
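
The key step in the test above is recover_share_from_updated_private_shares, which combines threshold-many updated share fragments into a share at the fresh point x_r. That combination is plain Lagrange interpolation evaluated at x_r. The following self-contained sketch shows only that step over a toy prime field; the small modulus, the sample polynomial and the domain points are made up for illustration, the zero-masking share updates that precede this step in ferveo are omitted, and ferveo applies the same formula to group elements over BLS12-381 rather than to raw field values.

// Toy field arithmetic -- illustrative only, not ferveo code.
const P: u64 = 1_000_000_007;

fn mul_mod(a: u64, b: u64) -> u64 {
    ((a as u128 * b as u128) % P as u128) as u64
}

fn sub_mod(a: u64, b: u64) -> u64 {
    (a + P - (b % P)) % P
}

fn pow_mod(mut base: u64, mut exp: u64) -> u64 {
    let mut acc = 1u64;
    base %= P;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = mul_mod(acc, base);
        }
        base = mul_mod(base, base);
        exp >>= 1;
    }
    acc
}

fn inv_mod(x: u64) -> u64 {
    pow_mod(x, P - 2) // Fermat's little theorem: x^(P-2) = x^-1 mod P
}

// Evaluate a polynomial given low-to-high coefficients (Horner's rule).
fn eval(coeffs: &[u64], x: u64) -> u64 {
    coeffs.iter().rev().fold(0, |acc, &c| (mul_mod(acc, x) + c) % P)
}

fn main() {
    // Degree-2 secret polynomial, i.e. security threshold 3 (as in the test).
    let f = [42u64, 7, 13];
    // Remaining participants' domain points and their (already updated) shares.
    let xs = [1u64, 2, 3];
    let shares: Vec<u64> = xs.iter().map(|&x| eval(&f, x)).collect();

    // Recover the share at a fresh point x_r:
    //   f(x_r) = sum_i shares[i] * prod_{j != i} (x_r - x_j) / (x_i - x_j)
    let x_r = 12_345u64;
    let mut recovered = 0u64;
    for (i, &x_i) in xs.iter().enumerate() {
        let mut lambda = 1u64;
        for (j, &x_j) in xs.iter().enumerate() {
            if i != j {
                lambda = mul_mod(
                    lambda,
                    mul_mod(sub_mod(x_r, x_j), inv_mod(sub_mod(x_i, x_j))),
                );
            }
        }
        recovered = (recovered + mul_mod(shares[i], lambda)) % P;
    }

    // Interpolating threshold-many points of a degree-(threshold-1) polynomial
    // reproduces it exactly, so the recovered share matches f(x_r).
    assert_eq!(recovered, eval(&f, x_r));
}
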

#[test]
fn simple_tdec_share_refreshing() {
fn test_dkg_simple_tdec_share_refreshing() {
let rng = &mut test_rng();
let (dkg, validator_keypairs) = setup_dealt_dkg_with_n_validators(3, 4);

let security_threshold = 3;
let shares_num = 4;
let (dkg, validator_keypairs) =
setup_dealt_dkg_with_n_validators(security_threshold, shares_num);
let msg = "my-msg".as_bytes().to_vec();
let aad: &[u8] = "my-aad".as_bytes();
let public_key = dkg.public_key();
let public_key = &dkg.public_key();
let ciphertext =
tpke::encrypt::<E>(SecretBox::new(msg), aad, &public_key, rng)
tpke::encrypt::<E>(SecretBox::new(msg), aad, public_key, rng)
.unwrap();

let pvss_aggregated = aggregate(&dkg.vss);

// Create an initial shared secret
let (_, _, old_shared_secret) = make_shared_secret_simple_tdec(
&dkg,
aad,
&ciphertext.header().unwrap(),
&validator_keypairs,
validator_keypairs.as_slice(),
);

// Now, we're going to refresh the shares and check that the shared secret is the same
let domain_points = dkg.domain.elements().collect::<Vec<_>>();

// Dealer computes a new random polynomial with constant term x_r = 0
let polynomial = tpke::make_random_polynomial_at::<E>(
dkg.dkg_params.security_threshold as usize,
&Fr::zero(),
rng,
);
// Each participant prepares an update for each other participant
let share_updates = dkg
.validators
.keys()
.map(|v_addr| {
let deltas_i = prepare_share_updates_for_refresh::<E>(
&domain_points,
&dkg.pvss_params.h.into_affine(),
dkg.dkg_params.security_threshold as usize,
rng,
);
Review comment on lines +539 to +544:

Just a comment on the subject of the future refactor: We can see how this could be rewritten as let deltas_i = dkg.prepare_share_updates_for_refresh(rng);

The PR author replied:
Absolutely

(v_addr.clone(), deltas_i)
})
.collect::<HashMap<_, _>>();
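
For reference against the exchange above, here is the call-site shape in this diff versus the reviewer's suggested future shape. The dkg.prepare_share_updates_for_refresh method does not exist in this PR; the second form is copied from the suggestion and is purely hypothetical.

// Today (as in this diff): the caller assembles every argument from `dkg`.
let deltas_i = prepare_share_updates_for_refresh::<E>(
    &domain_points,
    &dkg.pvss_params.h.into_affine(),
    dkg.dkg_params.security_threshold as usize,
    rng,
);

// Suggested future refactor (hypothetical, not implemented in this PR):
// let deltas_i = dkg.prepare_share_updates_for_refresh(rng);
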

// Dealer shares the polynomial with participants
// Participants share updates and update their shares

// Now, every participant separately:
// TODO: Move this logic outside tests (see #162, #163)
let updated_shares: Vec<_> = dkg
.validators
.iter()
.map(|(_validator_address, validator)| {
// Current participant receives updates from other participants
let updates_for_participant: Vec<_> = share_updates
.values()
.map(|updates| *updates.get(validator.share_index).unwrap())
.collect();

// Each validator uses their decryption key to update their share
let decryption_key = validator_keypairs
.get(validator.share_index)
.unwrap()
.decryption_key;

// Creates updated private key shares
// TODO: Why not using dkg.aggregate()?
let pvss_aggregated = aggregate(&dkg.vss);
pvss_aggregated.update_private_key_share_for_recovery(
&decryption_key,
validator.share_index,
updates_for_participant.as_slice(),
)
})
.collect();

// Participants computes new decryption shares
let new_decryption_shares: Vec<DecryptionShareSimple<E>> =
// Get decryption shares, now with refreshed private shares:
let decryption_shares: Vec<DecryptionShareSimple<E>> =
validator_keypairs
.iter()
.enumerate()
.map(|(validator_address, validator_keypair)| {
pvss_aggregated
.refresh_decryption_share(
&ciphertext.header().unwrap(),
aad,
&validator_keypair.decryption_key,
validator_address,
&polynomial,
&dkg,
)
.unwrap()
.map(|(share_index, validator_keypair)| {
DecryptionShareSimple::create(
&validator_keypair.decryption_key,
&updated_shares.get(share_index).unwrap(),
&ciphertext.header().unwrap(),
aad,
&dkg.pvss_params.g_inv(),
)
.unwrap()

A reviewer commented:
Not specific to this PR but to the future refactor:
Perhaps we could avoid this section by creating a DecryptionShareSimple in pvss_aggregated.update_private_key_share_for_recovery? I think there is no other reason for moving updated_shares around other than eventually creating a DecryptionShareSimple, so we could collapse this logic into one step. Naming TBD.

The PR author replied:
Not really, updating private shares and creating decryption shares are absolutely independent. We create decryption shares here since it's an acceptance test.

The reviewer responded:
You're right, this is completely the other way around - The nucypher usage of ferveo API confirms that it happens at two different time steps. Ursulas would never update their shares & create a decryption share at the same time, these are two completely different user flows.

})
.collect();
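
Following up on the thread above, here are the two independent flows regrouped from calls that already appear in this diff; when each flow runs, and which private share the node holds at that moment, is assumed orchestration for illustration only.

// Flow 1 -- refresh/recovery ritual (rare, coordinated): apply the updates
// received from other participants and store the resulting private key share.
let updated_share = pvss_aggregated.update_private_key_share_for_recovery(
    &decryption_key,
    validator.share_index,
    updates_for_participant.as_slice(),
);

// Flow 2 -- threshold decryption (per user request, possibly much later):
// derive a decryption share from whatever private share the node holds now.
let decryption_share = DecryptionShareSimple::create(
    &decryption_key,
    &updated_share,
    &ciphertext.header().unwrap(),
    aad,
    &dkg.pvss_params.g_inv(),
)
.unwrap();
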

// Create a new shared secret
let domain = &dkg.domain.elements().collect::<Vec<_>>();
// TODO: Combine `tpke::prepare_combine_simple` and `tpke::share_combine_simple` into
// one function and expose it in the tpke::api?
let lagrange_coeffs = tpke::prepare_combine_simple::<E>(domain);
let lagrange = tpke::prepare_combine_simple::<E>(
&domain_points[..security_threshold as usize],
);
let new_shared_secret = tpke::share_combine_simple::<E>(
&new_decryption_shares,
&lagrange_coeffs,
&decryption_shares[..security_threshold as usize],
&lagrange,
);

assert_eq!(old_shared_secret, new_shared_secret);