18 changes: 17 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -94,7 +94,7 @@ commonware-storage = { version = "0.0.64", path = "storage", default-features =
commonware-stream = { version = "0.0.64", path = "stream" }
commonware-utils = { version = "0.0.64", path = "utils", default-features = false }
console-subscriber = "0.5.0"
crc32fast = "1.5.0"
crc-fast = "1.10.0"
criterion = "0.7.0"
crossterm = "0.29.0"
ecdsa = { version = "0.16.9", default-features = false }
4 changes: 2 additions & 2 deletions storage/Cargo.toml
@@ -23,7 +23,7 @@ commonware-cryptography = { workspace = true, default-features = false }
commonware-macros.workspace = true
commonware-runtime = { workspace = true, optional = true }
commonware-utils = { workspace = true, default-features = false }
crc32fast = { workspace = true, optional = true }
crc-fast = { workspace = true, optional = true }
futures = { workspace = true, optional = true }
futures-util = { workspace = true, optional = true }
prometheus-client = { workspace = true, optional = true }
@@ -50,7 +50,7 @@ std = [
"commonware-cryptography/std",
"commonware-runtime",
"commonware-utils/std",
"crc32fast/std",
"crc-fast/std",
"futures",
"futures-util",
"prometheus-client",
6 changes: 3 additions & 3 deletions storage/conformance.toml
@@ -40,19 +40,19 @@ hash = "b4f2eb51a158d964317fb1714dbe708ffbe6673453dc648eabdd72409fb30440"

["commonware_storage::journal::conformance::FixedJournal"]
n_cases = 512
hash = "9cd764e31b5dbc0bd78cd0908851ba1d645f083884beacd2c8a63f66de0fb9db"
hash = "cd54a3c986fd26f2692e344b80b2e83a13d55b1569536493c74863f09e3bc0b7"

["commonware_storage::journal::conformance::VariableJournal"]
n_cases = 512
hash = "c0af6899248693a3262f31b9a8554cd64c014d9b59f2514840c8828ad74ddf85"
hash = "923a5a5b32ffc608fa625e0176c2bea9955e212bf823640df7860e47b1a63ecd"

["commonware_storage::mmr::proof::tests::conformance::CodecConformance<Proof<Sha256Digest>>"]
n_cases = 65536
hash = "e3e6735a810f1002164333013fbff442c91a690483e75fe7a78618a96d5afd62"

["commonware_storage::ordinal::storage::conformance::CodecConformance<Record<u32>>"]
n_cases = 65536
hash = "07a88b442e9f86b5395a73584211cb6abbb58e51c6f3954b29095c56d77d370c"
hash = "f93b1dda40f6d9f1ccb3f05994be56189205dcb558551e056c2f3db03a79182d"

["commonware_storage::qmdb::any::operation::tests::conformance::CodecConformance<OrderedOperation<U64,FixedEncoding<U64>>>"]
n_cases = 65536
110 changes: 110 additions & 0 deletions storage/src/crc32.rs
@@ -0,0 +1,110 @@
//! CRC32 checksum utilities.
//!
//! This module provides CRC32C checksum computation using the iSCSI polynomial
//! (0x1EDC6F41) as specified in RFC 3720.

/// Size of a CRC32 checksum in bytes.
pub const SIZE: usize = 4;

/// The CRC32 algorithm used (CRC32C/iSCSI/Castagnoli).
const ALGORITHM: crc_fast::CrcAlgorithm = crc_fast::CrcAlgorithm::Crc32Iscsi;

/// Incremental CRC32 hasher for computing checksums over multiple data chunks.
pub struct Crc32 {
inner: crc_fast::Digest,
}

impl Default for Crc32 {
fn default() -> Self {
Self::new()
}
}

impl Crc32 {
/// Create a new incremental hasher.
#[inline]
pub fn new() -> Self {
Self {
inner: crc_fast::Digest::new(ALGORITHM),
}
}

/// Add data to the checksum computation.
#[inline]
pub fn update(&mut self, data: &[u8]) {
self.inner.update(data);
}

/// Finalize and return the checksum.
#[inline]
pub fn finalize(self) -> u32 {
self.inner.finalize() as u32
}

/// Compute a CRC32 checksum of the given data.
#[inline]
pub fn checksum(data: &[u8]) -> u32 {
crc_fast::checksum(ALGORITHM, data) as u32
}
}

#[cfg(test)]
mod tests {
use super::*;

/// Test vectors from RFC 3720 Appendix B.4 "CRC Examples".
/// https://datatracker.ietf.org/doc/html/rfc3720#appendix-B.4
#[test]
fn rfc3720_test_vectors() {
// 32 bytes of zeros -> CRC = aa 36 91 8a
assert_eq!(Crc32::checksum(&[0x00; 32]), 0x8A9136AA);

// 32 bytes of 0xFF -> CRC = 43 ab a8 62
assert_eq!(Crc32::checksum(&[0xFF; 32]), 0x62A8AB43);

// 32 bytes ascending (0x00..0x1F) -> CRC = 4e 79 dd 46
let ascending: Vec<u8> = (0x00..0x20).collect();
assert_eq!(Crc32::checksum(&ascending), 0x46DD794E);

// 32 bytes descending (0x1F..0x00) -> CRC = 5c db 3f 11
let descending: Vec<u8> = (0x00..0x20).rev().collect();
assert_eq!(Crc32::checksum(&descending), 0x113FDB5C);

// iSCSI SCSI Read (10) Command PDU -> CRC = 56 3a 96 d9
let iscsi_read_pdu: [u8; 48] = [
0x01, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x14,
0x00, 0x00, 0x00, 0x18, 0x28, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
];
assert_eq!(Crc32::checksum(&iscsi_read_pdu), 0xD9963A56);
}

/// Check value from the CRC catalogue.
/// https://reveng.sourceforge.io/crc-catalogue/17plus.htm#crc.cat.crc-32c
#[test]
fn crc_catalogue_check_value() {
assert_eq!(Crc32::checksum(b"123456789"), 0xE3069283);
}

#[test]
fn incremental_matches_oneshot() {
let data = b"The quick brown fox jumps over the lazy dog";

let oneshot = Crc32::checksum(data);

// Chunked
let mut hasher = Crc32::new();
hasher.update(&data[..10]);
hasher.update(&data[10..25]);
hasher.update(&data[25..]);
assert_eq!(hasher.finalize(), oneshot);

// Byte-by-byte
let mut hasher = Crc32::new();
for byte in data {
hasher.update(&[*byte]);
}
assert_eq!(hasher.finalize(), oneshot);
}
}
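For orientation, a minimal usage sketch of the `Crc32` wrapper introduced above. The input bytes are hypothetical, and the import path assumes the crate-root re-export (`use crate::{..., Crc32, ...}` in storage/src/freezer/storage.rs) is publicly visible:

```rust
// Assumption: `Crc32` is re-exported at the crate root of commonware-storage.
use commonware_storage::Crc32;

fn main() {
    let data = b"example payload"; // hypothetical input

    // One-shot checksum over the whole buffer.
    let oneshot = Crc32::checksum(data);

    // Incremental checksum over chunks, e.g. when a record is written in pieces.
    let mut hasher = Crc32::new();
    hasher.update(&data[..7]);
    hasher.update(&data[7..]);
    assert_eq!(hasher.finalize(), oneshot);
}
```

Both paths go through `crc_fast` with the `Crc32Iscsi` algorithm, so they agree with the RFC 3720 vectors exercised in the module's tests.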
22 changes: 15 additions & 7 deletions storage/src/freezer/mod.rs
@@ -961,19 +961,22 @@ mod tests {
.unwrap();

// Insert keys to trigger resize
// key0 -> entry 0, key2 -> entry 1
freezer.put(test_key("key0"), 0).await.unwrap();
freezer.put(test_key("key1"), 1).await.unwrap();
freezer.put(test_key("key2"), 1).await.unwrap();
freezer.sync().await.unwrap(); // should start resize

// Verify resize started
assert!(freezer.resizing().is_some());

// Insert during resize (to first entry)
freezer.put(test_key("key2"), 2).await.unwrap();
// key6 -> entry 0
freezer.put(test_key("key6"), 2).await.unwrap();
assert!(context.encode().contains("unnecessary_writes_total 1"));
assert_eq!(freezer.resizable(), 3);

// Insert another key (to unmodified entry)
// key3 -> entry 1
freezer.put(test_key("key3"), 3).await.unwrap();
assert!(context.encode().contains("unnecessary_writes_total 1"));
assert_eq!(freezer.resizable(), 3);
@@ -984,17 +987,21 @@ mod tests {
assert_eq!(freezer.resizable(), 2);

// More inserts
// key4 -> entry 1, key7 -> entry 0
freezer.put(test_key("key4"), 4).await.unwrap();
freezer.put(test_key("key5"), 5).await.unwrap();
freezer.put(test_key("key7"), 5).await.unwrap();
freezer.sync().await.unwrap();

// Another resize should've started
assert!(freezer.resizing().is_some());

// Verify all can be retrieved during resize
for i in 0..6 {
let key = test_key(&format!("key{i}"));
assert_eq!(freezer.get(Identifier::Key(&key)).await.unwrap(), Some(i));
let keys = ["key0", "key2", "key6", "key3", "key4", "key7"];
for (i, k) in keys.iter().enumerate() {
assert_eq!(
freezer.get(Identifier::Key(&test_key(k))).await.unwrap(),
Some(i as i32)
);
}

// Sync until resize completes
@@ -1033,8 +1040,9 @@ mod tests {
.unwrap();

// Insert keys to trigger resize
// key0 -> entry 0, key2 -> entry 1
freezer.put(test_key("key0"), 0).await.unwrap();
freezer.put(test_key("key1"), 1).await.unwrap();
freezer.put(test_key("key2"), 1).await.unwrap();
let checkpoint = freezer.sync().await.unwrap();

// Verify resize started
9 changes: 5 additions & 4 deletions storage/src/freezer/storage.rs
@@ -1,7 +1,8 @@
use super::{Config, Error, Identifier};
use crate::{
crc32,
journal::segmented::variable::{Config as JournalConfig, Journal},
kv, Persistable,
kv, Crc32, Persistable,
};
use bytes::{Buf, BufMut};
use commonware_codec::{Codec, Encode, EncodeSize, FixedSize, Read, ReadExt, Write as CodecWrite};
@@ -187,7 +188,7 @@ impl Entry {

/// Compute a checksum for [Entry].
fn compute_crc(epoch: u64, section: u64, offset: u32, added: u8) -> u32 {
let mut hasher = crc32fast::Hasher::new();
let mut hasher = Crc32::new();
hasher.update(&epoch.to_be_bytes());
hasher.update(&section.to_be_bytes());
hasher.update(&offset.to_be_bytes());
@@ -218,7 +219,7 @@ impl Entry {
}

impl FixedSize for Entry {
const SIZE: usize = u64::SIZE + u64::SIZE + u32::SIZE + u8::SIZE + u32::SIZE;
const SIZE: usize = u64::SIZE + u64::SIZE + u32::SIZE + u8::SIZE + crc32::SIZE;
}

impl CodecWrite for Entry {
@@ -721,7 +722,7 @@ impl<E: Storage + Metrics + Clock, K: Array, V: Codec> Freezer<E, K, V> {
///
/// To determine the appropriate entry, we AND the key's hash with the current table size minus one.
fn table_index(&self, key: &K) -> u32 {
let hash = crc32fast::hash(key.as_ref());
let hash = Crc32::checksum(key.as_ref());
hash & (self.table_size - 1)
}

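As a side note on `table_index` above, a standalone sketch (hypothetical values, not code from this PR) of why masking with `table_size - 1` is equivalent to a modulo when the table size is a power of two:

```rust
fn table_index(hash: u32, table_size: u32) -> u32 {
    // Only valid for power-of-two table sizes; the mask keeps the low bits.
    debug_assert!(table_size.is_power_of_two());
    hash & (table_size - 1)
}

fn main() {
    let table_size = 8u32; // hypothetical table size
    for hash in [0u32, 7, 8, 0xD996_3A56] {
        // For a power-of-two size, the mask and the modulo agree,
        // but the mask avoids an integer division.
        assert_eq!(table_index(hash, table_size), hash % table_size);
    }
}
```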
20 changes: 11 additions & 9 deletions storage/src/journal/contiguous/fixed.rs
@@ -56,11 +56,12 @@
//! The `replay` method supports fast reading of all unpruned items into memory.

use crate::{
crc32,
journal::{contiguous::MutableContiguous, Error},
Persistable,
Crc32, Persistable,
};
use bytes::BufMut;
use commonware_codec::{CodecFixed, DecodeExt as _, FixedSize};
use commonware_codec::{CodecFixed, DecodeExt as _};
use commonware_runtime::{
buffer::{Append, PoolRef, Read},
telemetry::metrics::status::GaugeExt,
@@ -137,7 +138,7 @@ pub struct Journal<E: Storage + Metrics, A: CodecFixed> {
}

impl<E: Storage + Metrics, A: CodecFixed<Cfg = ()>> Journal<E, A> {
pub(crate) const CHUNK_SIZE: usize = u32::SIZE + A::SIZE;
pub(crate) const CHUNK_SIZE: usize = crc32::SIZE + A::SIZE;
pub(crate) const CHUNK_SIZE_U64: u64 = Self::CHUNK_SIZE as u64;

/// Initialize a new `Journal` instance.
@@ -334,7 +335,7 @@ impl<E: Storage + Metrics, A: CodecFixed<Cfg = ()>> Journal<E, A> {
assert_eq!(size % Self::CHUNK_SIZE_U64, 0);
let mut buf: Vec<u8> = Vec::with_capacity(Self::CHUNK_SIZE);
let item = item.encode();
let checksum = crc32fast::hash(&item);
let checksum = Crc32::checksum(&item);
buf.extend_from_slice(&item);
buf.put_u32(checksum);

@@ -469,7 +470,7 @@ impl<E: Storage + Metrics, A: CodecFixed<Cfg = ()>> Journal<E, A> {
/// Error::Codec likely indicates a logic error rather than a corruption issue.
fn verify_integrity(buf: &[u8]) -> Result<A, Error> {
let stored_checksum = u32::from_be_bytes(buf[A::SIZE..].try_into().unwrap());
let checksum = crc32fast::hash(&buf[..A::SIZE]);
let checksum = Crc32::checksum(&buf[..A::SIZE]);
if checksum != stored_checksum {
return Err(Error::ChecksumMismatch(stored_checksum, checksum));
}
@@ -695,6 +696,7 @@ impl<E: Storage + Metrics, A: CodecFixed<Cfg = ()>> Persistable for Journal<E, A
#[cfg(test)]
mod tests {
use super::*;
use commonware_codec::FixedSize;
use commonware_cryptography::{sha256::Digest, Hasher as _, Sha256};
use commonware_macros::test_traced;
use commonware_runtime::{
@@ -970,7 +972,7 @@ mod tests {

// Corrupt one of the checksums and make sure it's detected.
let checksum_offset = Digest::SIZE as u64
+ (ITEMS_PER_BLOB.get() / 2) * (Digest::SIZE + u32::SIZE) as u64;
+ (ITEMS_PER_BLOB.get() / 2) * (Digest::SIZE + crc32::SIZE) as u64;
let (blob, _) = context
.open(&cfg.partition, &40u64.to_be_bytes())
.await
@@ -1108,7 +1110,7 @@ mod tests {

// Write incorrect checksum into the second item in the blob, which should result in the
// second item being trimmed.
let checksum_offset = Digest::SIZE + u32::SIZE + Digest::SIZE;
let checksum_offset = Digest::SIZE + crc32::SIZE + Digest::SIZE;

let bad_checksum = 123456789u32;
blob.write_at(bad_checksum.to_be_bytes().to_vec(), checksum_offset as u64)
@@ -1531,7 +1533,7 @@ mod tests {
let digest = Sha256::hash(buf.as_ref());
assert_eq!(
hex(&digest),
"ed2ea67208cde2ee8c16cca5aa4f369f55b1402258c6b7760e5baf134e38944a",
"0d6e882dae02b8df8d7dd8396fa0305450ffdf3061a8781bffa791315dd30ec9",
);
blob.sync().await.expect("Failed to sync blob");
let (blob, size) = context
@@ -1546,7 +1548,7 @@
let digest = Sha256::hash(buf.as_ref());
assert_eq!(
hex(&digest),
"cc7efd4fc999aff36b9fd4213ba8da5810dc1849f92ae2ddf7c6dc40545f9aff",
"e0f4c82f12a7635a3bc6af7fa37f98a56f73a33c906202b07d66dc70003120e5",
);
blob.sync().await.expect("Failed to sync blob");

Expand Down
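To make the chunk framing in `append` and `verify_integrity` concrete, a standalone sketch assuming a hypothetical 8-byte fixed item (`A::SIZE`); only the `crc_fast` calls mirror code shown in this PR, the rest is illustrative:

```rust
// Sketch only: each journal chunk is the encoded item followed by a
// big-endian CRC32C, so a chunk occupies A::SIZE + crc32::SIZE bytes.
const ITEM_SIZE: usize = 8; // hypothetical A::SIZE
const CRC32_SIZE: usize = 4; // crc32::SIZE
const CHUNK_SIZE: usize = ITEM_SIZE + CRC32_SIZE;

fn checksum(data: &[u8]) -> u32 {
    // Same call and algorithm as storage/src/crc32.rs.
    crc_fast::checksum(crc_fast::CrcAlgorithm::Crc32Iscsi, data) as u32
}

fn frame(item: [u8; ITEM_SIZE]) -> [u8; CHUNK_SIZE] {
    // Item bytes first, then the checksum of those bytes.
    let mut chunk = [0u8; CHUNK_SIZE];
    chunk[..ITEM_SIZE].copy_from_slice(&item);
    chunk[ITEM_SIZE..].copy_from_slice(&checksum(&item).to_be_bytes());
    chunk
}

fn verify(chunk: &[u8; CHUNK_SIZE]) -> Result<[u8; ITEM_SIZE], (u32, u32)> {
    // Recompute the checksum and compare against the stored trailer,
    // analogous to Error::ChecksumMismatch(stored, computed).
    let stored = u32::from_be_bytes(chunk[ITEM_SIZE..].try_into().unwrap());
    let computed = checksum(&chunk[..ITEM_SIZE]);
    if stored != computed {
        return Err((stored, computed));
    }
    Ok(chunk[..ITEM_SIZE].try_into().unwrap())
}

fn main() {
    let item = *b"journal!"; // hypothetical 8-byte item
    let chunk = frame(item);
    assert_eq!(verify(&chunk).unwrap(), item);
}
```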