chore: add back tests with poseidon #17
@@ -0,0 +1,82 @@
use crate::{
    MESSAGE_LENGTH,
    symmetric::message_hash::{MessageHash, bytes_to_chunks},
};

use super::IncomparableEncoding;

/// Incomparable Encoding Scheme based on the basic Winternitz scheme, implemented from a given message hash.
/// CHUNK_SIZE must be 1, 2, 4, or 8 and MH::BASE must be 2^CHUNK_SIZE.
/// NUM_CHUNKS_CHECKSUM is the precomputed number of checksum chunks (see original Winternitz description).
pub struct WinternitzEncoding<
    MH: MessageHash,
    const CHUNK_SIZE: usize,
    const NUM_CHUNKS_CHECKSUM: usize,
> {
    _marker_mh: std::marker::PhantomData<MH>,
}

impl<MH: MessageHash, const CHUNK_SIZE: usize, const NUM_CHUNKS_CHECKSUM: usize>
    IncomparableEncoding for WinternitzEncoding<MH, CHUNK_SIZE, NUM_CHUNKS_CHECKSUM>
{
    type Parameter = MH::Parameter;

    type Randomness = MH::Randomness;

    type Error = ();

    const DIMENSION: usize = MH::DIMENSION + NUM_CHUNKS_CHECKSUM;

    const MAX_TRIES: usize = 1;

    const BASE: usize = MH::BASE;

    fn rand<R: rand::Rng>(rng: &mut R) -> Self::Randomness {
        MH::rand(rng)
    }

    fn encode(
        parameter: &Self::Parameter,
        message: &[u8; MESSAGE_LENGTH],
        randomness: &Self::Randomness,
        epoch: u32,
    ) -> Result<Vec<u8>, Self::Error> {
        // apply the message hash to get chunks
        let mut chunks_message = MH::apply(parameter, epoch, randomness, message);

        // compute checksum and split into chunks in little endian
        let checksum: u64 = chunks_message
            .iter()
            .map(|&x| Self::BASE as u64 - 1 - x as u64)
            .sum();
        let checksum_bytes = checksum.to_le_bytes();
        let chunks_checksum = bytes_to_chunks(&checksum_bytes, CHUNK_SIZE);

        // append checksum chunks (truncate to the expected number)
        chunks_message.extend_from_slice(&chunks_checksum[..NUM_CHUNKS_CHECKSUM]);

        Ok(chunks_message)
    }

    #[cfg(test)]
    fn internal_consistency_check() {
        assert!(
            [1, 2, 4, 8].contains(&CHUNK_SIZE),
            "Winternitz Encoding: Chunk Size must be 1, 2, 4, or 8"
        );
        assert!(
            CHUNK_SIZE <= 8,
            "Winternitz Encoding: Base must be at most 2^8"
        );
        assert!(
            Self::DIMENSION <= 1 << 8,
            "Winternitz Encoding: Dimension must be at most 2^8"
        );
        assert!(
            MH::BASE == Self::BASE && MH::BASE == 1 << CHUNK_SIZE,
            "Winternitz Encoding: Base and chunk size not consistent with message hash"
        );

        MH::internal_consistency_check();
    }
}
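As an aside, the checksum step in encode above can be traced on a toy example. The sketch below is not part of the diff; CHUNK_SIZE = 2 (so BASE = 4) and the chunk values are illustrative assumptions.

// Toy illustration of the Winternitz checksum computed in `encode`,
// assuming CHUNK_SIZE = 2 and hence BASE = 2^CHUNK_SIZE = 4 (illustrative values).
fn main() {
    const BASE: u64 = 4;
    // three hypothetical message chunks, each in 0..BASE
    let chunks_message: [u8; 3] = [3, 0, 2];

    // checksum = sum over message chunks of (BASE - 1 - chunk)
    let checksum: u64 = chunks_message.iter().map(|&x| BASE - 1 - u64::from(x)).sum();
    assert_eq!(checksum, 4); // (3 - 3) + (3 - 0) + (3 - 2)

    // little-endian base-4 digits of the checksum, mirroring
    // `to_le_bytes` followed by `bytes_to_chunks(.., CHUNK_SIZE)`
    let digits_le = [(checksum % 4) as u8, ((checksum / 4) % 4) as u8];
    assert_eq!(digits_le, [0, 1]); // 4 = 0 + 1 * 4
}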
@@ -584,10 +584,13 @@ pub mod instantiations_poseidon_top_level;
 mod tests {
     use crate::{
         array::FieldArray,
-        inc_encoding::target_sum::TargetSumEncoding,
+        inc_encoding::{basic_winternitz::WinternitzEncoding, target_sum::TargetSumEncoding},
         signature::test_templates::test_signature_scheme_correctness,
         symmetric::{
-            message_hash::{MessageHash, poseidon::PoseidonMessageHashW1},
+            message_hash::{
+                MessageHash,
+                poseidon::{PoseidonMessageHash, PoseidonMessageHashW1},
+            },
             prf::shake_to_field::ShakePRFtoF,
             tweak_hash::poseidon::PoseidonTweakW1L5,
         },
@@ -602,6 +605,32 @@ mod tests {
 
     type TestTH = PoseidonTweakHash<5, 7, 2, 9, 155>;
 
+    #[test]
+    pub fn test_winternitz_poseidon() {
+        // Note: do not use these parameters, they are just for testing
+        type PRF = ShakePRFtoF<7, 5>;
+        type MH = PoseidonMessageHashW1;
+        const CHUNK_SIZE: usize = 1;
+        const NUM_CHUNKS_CHECKSUM: usize = 8;
+        const NUM_CHAINS: usize = MH::DIMENSION + NUM_CHUNKS_CHECKSUM;
+        type IE = WinternitzEncoding<MH, CHUNK_SIZE, NUM_CHUNKS_CHECKSUM>;
+        type TH = PoseidonTweakHash<5, 7, 2, 9, NUM_CHAINS>;
+        const LOG_LIFETIME: usize = 6;
+        type Sig = GeneralizedXMSSSignatureScheme<PRF, IE, TH, LOG_LIFETIME>;
+
+        Sig::internal_consistency_check();
+
+        test_signature_scheme_correctness::<Sig>(2, 0, Sig::LIFETIME as usize);
+        test_signature_scheme_correctness::<Sig>(19, 0, Sig::LIFETIME as usize);
+        test_signature_scheme_correctness::<Sig>(0, 0, Sig::LIFETIME as usize);
+        test_signature_scheme_correctness::<Sig>(11, 0, Sig::LIFETIME as usize);
+
+        test_signature_scheme_correctness::<Sig>(12, 10, (1 << 5) - 10);
+        test_signature_scheme_correctness::<Sig>(19, 4, 20);
+        test_signature_scheme_correctness::<Sig>(16, 16, 4);
+        test_signature_scheme_correctness::<Sig>(11, 1, 29);
+    }
+
     #[test]
     pub fn test_target_sum_poseidon() {
         // Note: do not use these parameters, they are just for testing
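The choice NUM_CHUNKS_CHECKSUM = 8 with CHUNK_SIZE = 1 in the test above can be sanity-checked against the usual Winternitz bound: the checksum is at most DIMENSION * (BASE - 1), so it needs roughly log_BASE of that many chunks. The helper below is only an illustration; checksum_chunks and the dimension value 155 are assumptions, not code from this PR.

// Hypothetical helper: number of base-`base` chunks needed to represent the
// maximum possible Winternitz checksum for `num_message_chunks` message chunks.
fn checksum_chunks(num_message_chunks: usize, base: usize) -> usize {
    let mut max = num_message_chunks * (base - 1); // largest possible checksum
    let mut chunks = 0;
    while max > 0 {
        chunks += 1;
        max /= base;
    }
    chunks.max(1)
}

fn main() {
    // With CHUNK_SIZE = 1 (base 2) and an assumed message-hash dimension of 155,
    // 8 checksum chunks suffice, matching NUM_CHUNKS_CHECKSUM = 8 above.
    assert_eq!(checksum_chunks(155, 2), 8);
}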
@@ -666,17 +695,15 @@ mod tests {
         assert_eq!(rho1, rho2);
     }
 
-    /*#[test]
-    pub fn test_large_base_sha() {
+    #[test]
+    pub fn test_large_base_poseidon() {
         // Note: do not use these parameters, they are just for testing
-        type PRF = ShaPRF<24, 8>;
-        type TH = ShaTweak192192;
-
-        // use chunk size 8
-        type MH = ShaMessageHash<24, 8, 32, 8>;
-        const TARGET_SUM: usize = 1 << 12;
+        type PRF = ShakePRFtoF<4, 4>;
Suggested change:
-type PRF = ShakePRFtoF<4, 4>;
+type PRF = ShakePRFtoF<24, 8>;
Understood, will revert them back.
Why can't we have this?
Suggested change:
-const TARGET_SUM: usize = 8 * (256 - 1) / 2;
+const TARGET_SUM: usize = 1 << 12;
Why can't we use this?
Suggested change:
-const LOG_LIFETIME: usize = 6;
+const LOG_LIFETIME: usize = 10;
Why can't we use this as before?
Suggested change:
-const LOG_LIFETIME: usize = 6;
+const LOG_LIFETIME: usize = 10;
@@ -43,3 +43,111 @@ pub trait MessageHash {

pub mod poseidon;
pub mod top_level_poseidon;

#[cfg(test)]
/// Splits a list of bytes into smaller fixed-size bit chunks.
///
/// Each byte in the input slice is divided into `chunk_size`-bit chunks,
/// starting from the least significant bits. The `chunk_size` must divide 8 exactly
/// (i.e., valid values are 1, 2, 4, or 8), since each byte contains 8 bits.
#[must_use]
#[inline]
pub fn bytes_to_chunks(bytes: &[u8], chunk_size: usize) -> Vec<u8> {
    // Only the chunk sizes 1, 2, 4, or 8 are valid.
    assert!(
        matches!(chunk_size, 1 | 2 | 4 | 8),
        "chunk_size must be 1, 2, 4, or 8"
    );

    // Calculate how many chunks each byte will produce and preallocate exactly.
    let chunks_per_byte = 8 / chunk_size;
    let mut out = Vec::with_capacity(bytes.len() * chunks_per_byte);

    match chunk_size {
        8 => {
            out.extend_from_slice(bytes);
        }
        4 => {
            for &b in bytes {
                out.push(b & 0x0F);
                out.push(b >> 4);
            }
        }
        2 => {
            for &b in bytes {
                out.push(b & 0b11);
                out.push((b >> 2) & 0b11);
                out.push((b >> 4) & 0b11);
                out.push((b >> 6) & 0b11);
            }
        }
        1 => {
            for &b in bytes {
                out.push(b & 1);
                out.push((b >> 1) & 1);
                out.push((b >> 2) & 1);
                out.push((b >> 3) & 1);
                out.push((b >> 4) & 1);
                out.push((b >> 5) & 1);
                out.push((b >> 6) & 1);
                out.push((b >> 7) & 1);
            }
        }
        _ => unreachable!(),
    }

    out
}
#[cfg(test)]
mod tests {
    use super::bytes_to_chunks;
    use proptest::prelude::*;

    #[test]
    fn test_bytes_to_chunks() {
        let byte_a: u8 = 0b0110_1100;
        let byte_b: u8 = 0b1010_0110;

        let bytes = [byte_a, byte_b];
        let expected_chunks = [0b00, 0b11, 0b10, 0b01, 0b10, 0b01, 0b10, 0b10];

        let chunks = bytes_to_chunks(&bytes, 2);

        assert_eq!(chunks.len(), 8);

        for i in 0..chunks.len() {
            assert_eq!(chunks[i], expected_chunks[i]);
        }

        // now test chunk size 8
        let chunks = bytes_to_chunks(&bytes, 8);

        assert_eq!(chunks.len(), 2);
        assert_eq!(chunks[0], byte_a);
        assert_eq!(chunks[1], byte_b);
    }

    proptest! {
        #[test]
        fn prop_bytes_to_chunks_matches_manual_bit_extraction(
            bytes in proptest::collection::vec(any::<u8>(), 0..32),
            chunk_size in prop_oneof![Just(1usize), Just(2), Just(4), Just(8)],
        ) {
            let chunks = bytes_to_chunks(&bytes, chunk_size);

            let chunks_per_byte = 8 / chunk_size;
            let mut expected = Vec::with_capacity(bytes.len() * chunks_per_byte);

            for &b in &bytes {
                for i in 0..chunks_per_byte {
                    let shifted = b >> (i * chunk_size);
                    let mask = if chunk_size == 8 { 0xFF } else { (1u8 << chunk_size) - 1 };
                    expected.push(shifted & mask);
                }
            }

            prop_assert_eq!(chunks.as_slice(), expected.as_slice());
        }
    }
}
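For intuition, here is a small usage sketch of bytes_to_chunks with chunk_size = 4. The input bytes are illustrative, the snippet assumes bytes_to_chunks is in scope, and it is not part of the diff.

// 0xAB = 0b1010_1011 splits, least significant nibble first, into [0x0B, 0x0A],
// and 0x01 splits into [0x01, 0x00].
fn demo() {
    let chunks = bytes_to_chunks(&[0xAB, 0x01], 4);
    assert_eq!(chunks, vec![0x0B, 0x0A, 0x01, 0x00]);
}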
I think we removed this here: ea9fc1d, so I don't think it is a good idea to put it back.