Skip to content

Commit

Permalink
Merge pull request paritytech#1304 from subspace/erasure-coding-commitments
Browse files Browse the repository at this point in the history

Erasure coding commitments
  • Loading branch information
nazar-pc authored Mar 28, 2023
2 parents 9a55130 + 6e14833 commit 8ec9eb9
Show file tree
Hide file tree
Showing 9 changed files with 276 additions and 62 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

36 changes: 6 additions & 30 deletions crates/subspace-archiving/src/archiver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -726,36 +726,12 @@ impl Archiver {

// Collect hashes to commitments from all records
let record_commitments = self
.incremental_record_commitments
.drain()
.zip(
// TODO: Replace with erasure coding of incrementally created record commitments
pieces.parity().map(|piece| {
let record_chunks = piece.record().full_scalar_arrays();
let number_of_chunks = record_chunks.len();
let mut scalars = Vec::with_capacity(number_of_chunks.next_power_of_two());

record_chunks
.map(|bytes| {
Scalar::try_from(bytes).expect(
"Bytes correspond to scalars we have just erasure coded; qed",
)
})
.collect_into(&mut scalars);

// Number of scalars for KZG must be a power of two elements
scalars.resize(scalars.capacity(), Scalar::default());

let polynomial = self.kzg.poly(&scalars).expect(
"KZG instance must be configured to support this many scalars; qed",
);
self.kzg
.commit(&polynomial)
.expect("KZG instance must be configured to support this many scalars; qed")
}),
)
.flat_map(|(a, b)| [a, b])
.collect::<Vec<_>>();
.erasure_coding
.extend_commitments(&self.incremental_record_commitments)
.expect(
"Erasure coding instance is deliberately configured to support this input; qed",
);
self.incremental_record_commitments.clear();

let polynomial = self
.kzg
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
extern crate alloc;

use crate::archiver::Segment;
use alloc::collections::VecDeque;
use alloc::vec::Vec;
use core::ops::Deref;
use parity_scale_codec::{Encode, Output};
use subspace_core_primitives::crypto::kzg::{Commitment, Kzg};
use subspace_core_primitives::crypto::Scalar;
Expand All @@ -16,19 +16,28 @@ pub(super) struct IncrementalRecordCommitmentsState {
///
/// NOTE: Until full segment is processed, this will not contain commitment to the first record
/// since it is not ready yet. This in turn means all commitments will be at `-1` offset.
state: VecDeque<Commitment>,
state: Vec<Commitment>,
}

/// Exposes the accumulated record commitments as a read-only slice.
impl Deref for IncrementalRecordCommitmentsState {
    type Target = [Commitment];

    fn deref(&self) -> &Self::Target {
        self.state.as_slice()
    }
}

impl IncrementalRecordCommitmentsState {
/// Creates an empty state with space for at least capacity records.
pub(super) fn with_capacity(capacity: usize) -> Self {
Self {
state: VecDeque::with_capacity(capacity),
state: Vec::with_capacity(capacity),
}
}

pub(super) fn drain(&mut self) -> impl Iterator<Item = Commitment> + '_ {
self.state.drain(..)
/// Clears internal state before start of the next segment
///
/// Note: `clear()` retains the underlying allocation, so capacity reserved via
/// `with_capacity` is reused across segments.
pub(super) fn clear(&mut self) {
self.state.clear();
}
}

Expand Down Expand Up @@ -178,9 +187,7 @@ impl<'a> IncrementalRecordCommitmentsProcessor<'a> {
.commit(&polynomial)
.expect("KZG instance must be configured to support this many scalars; qed");

self.incremental_record_commitments
.state
.push_back(commitment);
self.incremental_record_commitments.state.push(commitment);
}
}
}
59 changes: 39 additions & 20 deletions crates/subspace-archiving/src/piece_reconstructor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,11 @@ extern crate alloc;
use alloc::string::String;
use alloc::vec::Vec;
use core::num::NonZeroUsize;
use subspace_core_primitives::crypto::kzg::{Kzg, Polynomial};
use subspace_core_primitives::crypto::kzg::{Commitment, Kzg, Polynomial};
use subspace_core_primitives::crypto::{blake2b_256_254_hash_to_scalar, Scalar};
use subspace_core_primitives::{FlatPieces, Piece, RawRecord, PIECES_IN_SEGMENT};
use subspace_core_primitives::{
FlatPieces, Piece, RawRecord, RecordedHistorySegment, PIECES_IN_SEGMENT,
};
use subspace_erasure_coding::ErasureCoding;

/// Reconstructor-related instantiation error.
Expand All @@ -31,6 +33,10 @@ pub enum ReconstructorError {
)]
DataShardsReconstruction(String),

/// Commitment of input piece is invalid.
#[cfg_attr(feature = "thiserror", error("Commitment of input piece is invalid."))]
InvalidInputPieceCommitment,

/// Incorrect piece position provided.
#[cfg_attr(feature = "thiserror", error("Incorrect piece position provided."))]
IncorrectPiecePosition,
Expand Down Expand Up @@ -66,11 +72,11 @@ impl PiecesReconstructor {
/// necessary witnesses later.
fn reconstruct_shards(
&self,
segment_pieces: &[Option<Piece>],
input_pieces: &[Option<Piece>],
) -> Result<(FlatPieces, Polynomial), ReconstructorError> {
let mut reconstructed_pieces = FlatPieces::new(PIECES_IN_SEGMENT as usize);

if !segment_pieces
if !input_pieces
.iter()
// Take each source shards here
.step_by(2)
Expand Down Expand Up @@ -103,7 +109,7 @@ impl PiecesReconstructor {
// Iterate over the chunks of `Scalar::SAFE_BYTES` bytes of all records
for record_offset in 0..RawRecord::SIZE / Scalar::SAFE_BYTES {
// Collect chunks of each record at the same offset
for maybe_piece in segment_pieces.iter() {
for maybe_piece in input_pieces.iter() {
let maybe_scalar = maybe_piece
.as_ref()
.map(|piece| {
Expand Down Expand Up @@ -139,16 +145,15 @@ impl PiecesReconstructor {
}
}

let mut record_commitment_hashes = Vec::with_capacity(PIECES_IN_SEGMENT as usize);
// TODO: Parity hashes will be erasure coded instead in the future
for (piece, maybe_input_piece) in reconstructed_pieces.iter_mut().zip(segment_pieces) {
let mut source_record_commitments = Vec::with_capacity(RecordedHistorySegment::RAW_RECORDS);
for (piece, maybe_input_piece) in
reconstructed_pieces.iter_mut().zip(input_pieces).step_by(2)
{
if let Some(input_piece) = maybe_input_piece {
piece
.commitment_mut()
.copy_from_slice(input_piece.commitment().as_ref());
record_commitment_hashes.push(blake2b_256_254_hash_to_scalar(
input_piece.commitment().as_ref(),
));
source_record_commitments.push(
Commitment::try_from_bytes(input_piece.commitment())
.map_err(|_error| ReconstructorError::InvalidInputPieceCommitment)?,
);
} else {
let scalars = {
let record_chunks = piece.record().full_scalar_arrays();
Expand Down Expand Up @@ -176,14 +181,28 @@ impl PiecesReconstructor {
.kzg
.commit(&polynomial)
.expect("KZG instance must be configured to support this many scalars; qed");

piece
.commitment_mut()
.copy_from_slice(&commitment.to_bytes());
record_commitment_hashes
.push(blake2b_256_254_hash_to_scalar(&commitment.to_bytes()));
source_record_commitments.push(commitment);
}
}
let record_commitments = self
.erasure_coding
.extend_commitments(&source_record_commitments)
.expect(
"Erasure coding instance is deliberately configured to support this input; qed",
);
drop(source_record_commitments);

let record_commitment_hashes = reconstructed_pieces
.iter_mut()
.zip(record_commitments)
.map(|(reconstructed_piece, commitment)| {
let commitment_bytes = commitment.to_bytes();
reconstructed_piece
.commitment_mut()
.copy_from_slice(&commitment_bytes);
blake2b_256_254_hash_to_scalar(&commitment_bytes)
})
.collect::<Vec<_>>();

let polynomial = self
.kzg
Expand Down
132 changes: 130 additions & 2 deletions crates/subspace-core-primitives/src/crypto/kzg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ use blst_from_scratch::types::g1::FsG1;
use blst_from_scratch::types::kzg_settings::FsKZGSettings;
use blst_from_scratch::types::poly::FsPoly;
use core::hash::{Hash, Hasher};
use core::mem;
use derive_more::{AsMut, AsRef, Deref, DerefMut, From, Into};
use kzg::{FFTFr, FFTSettings, KZGSettings};
use parity_scale_codec::{Decode, Encode, EncodeLike, Input, MaxEncodedLen};
#[cfg(feature = "std")]
Expand Down Expand Up @@ -94,7 +96,8 @@ pub fn embedded_kzg_settings() -> FsKZGSettings {
pub struct Polynomial(FsPoly);

/// Commitment to polynomial
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, From, Into, AsRef, AsMut, Deref, DerefMut)]
#[repr(transparent)]
pub struct Commitment(FsG1);

impl Commitment {
Expand All @@ -110,6 +113,130 @@ impl Commitment {
/// Deserializes a commitment from its raw byte representation.
///
/// # Errors
///
/// Returns a descriptive `String` error when `bytes` do not encode a valid G1 point
/// (as determined by `bytes_to_g1_rust`).
pub fn try_from_bytes(bytes: &[u8; Self::SIZE]) -> Result<Self, String> {
Ok(Commitment(bytes_to_g1_rust(bytes)?))
}

/// Convenient conversion from slice of commitment to underlying representation for efficiency
/// purposes.
///
/// Zero-copy: only reinterprets the slice; no allocation or per-element work.
pub fn slice_to_repr(value: &[Self]) -> &[FsG1] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout as `FsG1`, so `&[Self]` and `&[FsG1]` are interchangeable
unsafe { mem::transmute(value) }
}

/// Convenient conversion from slice of underlying representation to commitment for efficiency
/// purposes.
///
/// Zero-copy: only reinterprets the slice; no allocation or per-element work.
pub fn slice_from_repr(value: &[FsG1]) -> &[Self] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout as `FsG1`, so `&[FsG1]` and `&[Self]` are interchangeable
unsafe { mem::transmute(value) }
}

/// Convenient conversion from slice of optional commitment to underlying representation for
/// efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; no allocation or per-element work.
pub fn slice_option_to_repr(value: &[Option<Self>]) -> &[Option<FsG1>] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout
// NOTE(review): this additionally assumes `Option<Commitment>` and `Option<FsG1>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe { mem::transmute(value) }
}

/// Convenient conversion from slice of optional underlying representation to commitment for
/// efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; no allocation or per-element work.
pub fn slice_option_from_repr(value: &[Option<FsG1>]) -> &[Option<Self>] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout
// NOTE(review): this additionally assumes `Option<FsG1>` and `Option<Commitment>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe { mem::transmute(value) }
}

/// Convenient conversion from mutable slice of commitment to underlying representation for
/// efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; writes through the result are visible in the
/// original slice.
pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [FsG1] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout as `FsG1`; exclusivity of the borrow is preserved by the signature
unsafe { mem::transmute(value) }
}

/// Convenient conversion from mutable slice of underlying representation to commitment for
/// efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; writes through the result are visible in the
/// original slice.
pub fn slice_mut_from_repr(value: &mut [FsG1]) -> &mut [Self] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout as `FsG1`; exclusivity of the borrow is preserved by the signature
unsafe { mem::transmute(value) }
}

/// Convenient conversion from optional mutable slice of commitment to underlying representation
/// for efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; writes through the result are visible in the
/// original slice.
pub fn slice_option_mut_to_repr(value: &mut [Option<Self>]) -> &mut [Option<FsG1>] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout
// NOTE(review): this additionally assumes `Option<Commitment>` and `Option<FsG1>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe { mem::transmute(value) }
}

/// Convenient conversion from optional mutable slice of underlying representation to commitment
/// for efficiency purposes.
///
/// Zero-copy: only reinterprets the slice; writes through the result are visible in the
/// original slice.
pub fn slice_option_mut_from_repr(value: &mut [Option<FsG1>]) -> &mut [Option<Self>] {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout
// NOTE(review): this additionally assumes `Option<FsG1>` and `Option<Commitment>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe { mem::transmute(value) }
}

/// Convenient conversion from vector of commitment to underlying representation for efficiency
/// purposes.
///
/// Zero-copy: the heap buffer is reused rather than reallocated or copied.
pub fn vec_to_repr(value: Vec<Self>) -> Vec<FsG1> {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout (hence identical size and alignment), original vector is not dropped:
// `ManuallyDrop` stops the source `Vec` from freeing the buffer, whose ownership is
// transferred to the new `Vec` via `from_raw_parts` with the same len and capacity
unsafe {
let mut value = mem::ManuallyDrop::new(value);
Vec::from_raw_parts(
value.as_mut_ptr() as *mut FsG1,
value.len(),
value.capacity(),
)
}
}

/// Convenient conversion from vector of underlying representation to commitment for efficiency
/// purposes.
///
/// Zero-copy: the heap buffer is reused rather than reallocated or copied.
pub fn vec_from_repr(value: Vec<FsG1>) -> Vec<Self> {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout (hence identical size and alignment), original vector is not dropped:
// `ManuallyDrop` stops the source `Vec` from freeing the buffer, whose ownership is
// transferred to the new `Vec` via `from_raw_parts` with the same len and capacity
unsafe {
let mut value = mem::ManuallyDrop::new(value);
Vec::from_raw_parts(
value.as_mut_ptr() as *mut Self,
value.len(),
value.capacity(),
)
}
}

/// Convenient conversion from vector of optional commitment to underlying representation for
/// efficiency purposes.
///
/// Zero-copy: the heap buffer is reused rather than reallocated or copied.
pub fn vec_option_to_repr(value: Vec<Option<Self>>) -> Vec<Option<FsG1>> {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout, original vector is not dropped: `ManuallyDrop` stops the source `Vec` from
// freeing the buffer, whose ownership is transferred via `from_raw_parts`
// NOTE(review): this additionally assumes `Option<Commitment>` and `Option<FsG1>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe {
let mut value = mem::ManuallyDrop::new(value);
Vec::from_raw_parts(
value.as_mut_ptr() as *mut Option<FsG1>,
value.len(),
value.capacity(),
)
}
}

/// Convenient conversion from vector of optional underlying representation to commitment for
/// efficiency purposes.
///
/// Zero-copy: the heap buffer is reused rather than reallocated or copied.
pub fn vec_option_from_repr(value: Vec<Option<FsG1>>) -> Vec<Option<Self>> {
// SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory
// layout, original vector is not dropped: `ManuallyDrop` stops the source `Vec` from
// freeing the buffer, whose ownership is transferred via `from_raw_parts`
// NOTE(review): this additionally assumes `Option<FsG1>` and `Option<Commitment>` share a
// layout; `repr(transparent)` alone does not formally guarantee this for the `Option`
// wrappers (it relies on both computing the same niche) — confirm soundness
unsafe {
let mut value = mem::ManuallyDrop::new(value);
Vec::from_raw_parts(
value.as_mut_ptr() as *mut Option<Self>,
value.len(),
value.capacity(),
)
}
}
}

impl Hash for Commitment {
Expand Down Expand Up @@ -200,7 +327,8 @@ impl TypeInfo for Commitment {
}

/// Witness for polynomial evaluation
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, From, Into, AsRef, AsMut, Deref, DerefMut)]
#[repr(transparent)]
pub struct Witness(FsG1);

impl Witness {
Expand Down
6 changes: 6 additions & 0 deletions crates/subspace-erasure-coding/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ kzg = { git = "https://github.com/subspace/rust-kzg", rev = "49e7b60ea51d918f047
subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives", default-features = false }

[dev-dependencies]
# TODO: Switch to upstream `main` once https://github.com/sifraitech/rust-kzg/pull/204 is merged and blst has upstream no_std support
blst_from_scratch = { git = "https://github.com/subspace/rust-kzg", rev = "49e7b60ea51d918f04779dd83191ae0e01afcb30" }
criterion = "0.4.0"
rand = "0.8.5"

Expand All @@ -28,3 +30,7 @@ std = [
"kzg/std",
"subspace-core-primitives/std",
]

[[bench]]
name = "commitments"
harness = false
Loading

0 comments on commit 8ec9eb9

Please sign in to comment.