diff --git a/.github/workflows/check-platforms.yml b/.github/workflows/check-platforms.yml
index 079e502..53bf14e 100644
--- a/.github/workflows/check-platforms.yml
+++ b/.github/workflows/check-platforms.yml
@@ -2,6 +2,7 @@ name: Check multiple platforms

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**.rs'
       - '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

 jobs:
   check-targets:
-    if: ${{ github.event.label.name == 'ready-to-merge' }}
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macOS-latest]
diff --git a/.github/workflows/check-targets.yml b/.github/workflows/check-targets.yml
index 9fea72c..7583439 100644
--- a/.github/workflows/check-targets.yml
+++ b/.github/workflows/check-targets.yml
@@ -2,6 +2,7 @@ name: Check multiple targets

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**.rs'
       - '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

 jobs:
   check-targets:
-    if: ${{ github.event.label.name == 'ready-to-merge' }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
diff --git a/.github/workflows/check-toolchains.yml b/.github/workflows/check-toolchains.yml
index 34de23c..ac8b25b 100644
--- a/.github/workflows/check-toolchains.yml
+++ b/.github/workflows/check-toolchains.yml
@@ -2,6 +2,7 @@ name: Check multiple toolchains

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**.rs'
       - '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

 jobs:
   check-toolchains:
-    if: ${{ github.event.label.name == 'ready-to-merge' }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
diff --git a/.github/workflows/lints.yml b/.github/workflows/lints.yml
index 9b59707..61b1597 100644
--- a/.github/workflows/lints.yml
+++ b/.github/workflows/lints.yml
@@ -2,6 +2,7 @@ name: Lints

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**.rs'
       - '**/Cargo.toml'
diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml
index 2d7a041..19c61b2 100644
--- a/.github/workflows/security.yml
+++ b/.github/workflows/security.yml
@@ -2,6 +2,7 @@ name: Security audit

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**/Cargo.toml'

@@ -10,7 +11,6 @@ env:

 jobs:
   security_audit:
-    if: ${{ github.event.label.name == 'ready-to-merge' }}
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
diff --git a/.github/workflows/test-features.yml b/.github/workflows/test-features.yml
index 1789dbf..d396320 100644
--- a/.github/workflows/test-features.yml
+++ b/.github/workflows/test-features.yml
@@ -2,6 +2,7 @@ name: Test features

 on:
   pull_request:
+    types: [ opened, edited ]
     paths:
       - '**.rs'
       - '**/Cargo.toml'
diff --git a/Cargo.toml b/Cargo.toml
index 2dcf88b..0b676e9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,20 +11,21 @@ readme = "README.md"
 description = "Fantastic serialization library with zero-overhead serialization and zero-copy deserialization"

 [features]
-alloc = [] # enables impls for types from `alloc` crate.
+alloc = []   # enables impls for types from `alloc` crate.
 std = ["alloc"]
 derive = ["alkahest-proc"]
 inline-more = []

 ## TODO: Control on value or type level?
 ## Keep features for defaults?
-fixed8 = [] # sets size of `FixedUsize` and `FixedIsize` to 8 bits.
+fixed8 = []  # sets size of `FixedUsize` and `FixedIsize` to 8 bits.
 fixed16 = [] # sets size of `FixedUsize` and `FixedIsize` to 16 bits.
 fixed32 = [] # sets size of `FixedUsize` and `FixedIsize` to 32 bits. Default.
 fixed64 = [] # sets size of `FixedUsize` and `FixedIsize` to 64 bits.
+
 default = ["alloc", "fixed32", "inline-more"]

-bincoded = ["bincode", "serde", "std"]
+bincoded = ["dep:bincode", "dep:serde", "std"]

 [dependencies]
 alkahest-proc = { version = "=0.3.0", path = "proc", optional = true }
diff --git a/benchmark/benches/benchmark.rs b/benchmark/benches/benchmark.rs
index fbfe73d..46260f5 100644
--- a/benchmark/benches/benchmark.rs
+++ b/benchmark/benches/benchmark.rs
@@ -24,7 +24,10 @@ use rand::{

 #[derive(Debug, Clone, Formula, Serialize, Deserialize)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
+#[cfg_attr(
+    feature = "rkyv",
+    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
+)]
 #[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
 #[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
 pub enum GameMessage {
@@ -42,7 +45,10 @@ pub enum GameMessageRead<'de> {

 #[derive(Debug, Clone, Formula, Serialize, Deserialize)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
+#[cfg_attr(
+    feature = "rkyv",
+    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
+)]
 #[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
 #[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
 pub enum ClientMessage {
@@ -60,7 +66,10 @@ pub enum ClientMessageRead<'de> {

 #[derive(Debug, Clone, Formula, Serialize, Deserialize)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
+#[cfg_attr(
+    feature = "rkyv",
+    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
+)]
 #[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
 #[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
 pub enum ServerMessage {
@@ -78,7 +87,10 @@ pub enum ServerMessageRead<'de> {

 #[derive(Debug, Formula, Serialize, Deserialize)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
+#[cfg_attr(
+    feature = "rkyv",
+    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
+)]
 #[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
 #[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
 pub struct NetPacket {
@@ -179,18 +191,15 @@ pub fn criterion_benchmark(c: &mut Criterion) {

     group.bench_function("deserialize", |b| {
         b.iter(|| {
-            let packet = alkahest::deserialize::<
-                NetPacket,
-                NetPacket,
-            >(&buffer[..size])
-            .unwrap();
+            let packet =
+                alkahest::deserialize::, NetPacket>(
+                    &buffer[..size],
+                )
+                .unwrap();

             for message in packet.game_messages.iter() {
                 match message {
-                    GameMessage::Client(ClientMessage::ClientData {
-                        nickname,
-                        clan,
-                    }) => {
+                    GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
                         black_box(nickname);
                         black_box(clan);
                     }
@@ -200,10 +209,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
                     GameMessage::Server(ServerMessage::ServerData(data)) => {
                         black_box(data);
                     }
-                    GameMessage::Server(ServerMessage::ClientChat {
-                        client_id,
-                        message,
-                    }) => {
+                    GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
                         black_box(client_id);
                         black_box(message);
                     }
@@ -307,15 +313,14 @@ pub fn criterion_benchmark(c: &mut Criterion) {
     group.bench_function("deserialize", |b| {
         b.iter(|| {
             use rkyv::Deserialize;
-            let archive = rkyv::check_archived_root::>(&vec[..]).unwrap();
-            let packet: NetPacket = archive.deserialize(&mut rkyv::Infallible).unwrap();
+            let archive =
+                rkyv::check_archived_root::>(&vec[..]).unwrap();
+            let packet: NetPacket =
+                archive.deserialize(&mut rkyv::Infallible).unwrap();

             for message in packet.game_messages.iter() {
                 match message {
-                    GameMessage::Client(ClientMessage::ClientData {
-                        nickname,
-                        clan,
-                    }) => {
+                    GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
                         black_box(nickname);
                         black_box(clan);
                     }
@@ -325,10 +330,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
                    GameMessage::Server(ServerMessage::ServerData(data)) => {
                         black_box(data);
                     }
-                    GameMessage::Server(ServerMessage::ClientChat {
-                        client_id,
-                        message,
-                    }) => {
+                    GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
                         black_box(client_id);
                         black_box(message);
                     }
@@ -389,7 +391,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
             }
         })
     });
-    
+
     group.bench_function("deserialize", |b| {
         b.iter(|| {
             let packet =
@@ -398,10 +400,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {

             for message in packet.game_messages.iter() {
                 match message {
-                    GameMessage::Client(ClientMessage::ClientData {
-                        nickname,
-                        clan,
-                    }) => {
+                    GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
                         black_box(nickname);
                         black_box(clan);
                     }
@@ -411,10 +410,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
                    GameMessage::Server(ServerMessage::ServerData(data)) => {
                         black_box(data);
                     }
-                    GameMessage::Server(ServerMessage::ClientChat {
-                        client_id,
-                        message,
-                    }) => {
+                    GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
                         black_box(client_id);
                         black_box(message);
                     }
diff --git a/proc/src/deserialize.rs b/proc/src/deserialize.rs
index 029b58a..8c5cdac 100644
--- a/proc/src/deserialize.rs
+++ b/proc/src/deserialize.rs
@@ -172,7 +172,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result {

             let field_checks = if cfg.check_fields {
-                struct_field_order_checks(&data, None, &input.ident, &cfg.formula)
+                struct_field_order_checks(data, None, &input.ident, &cfg.formula)
             } else {
                 TokenStream::new()
             };
@@ -185,9 +185,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result {

             let field_checks = if cfg.check_fields {
-                enum_field_order_checks(&data, &input.ident, &cfg.formula)
+                enum_field_order_checks(data, &input.ident, &cfg.formula)
             } else {
                 TokenStream::new()
             };
@@ -343,9 +341,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result {

             let field_checks = if cfg.check_fields {
-                struct_field_order_checks(&data, cfg.variant.as_ref(), &input.ident, &cfg.formula)
+                struct_field_order_checks(data, cfg.variant.as_ref(), &input.ident, &cfg.formula)
             } else {
                 TokenStream::new()
             };
@@ -240,7 +240,7 @@ pub fn derive(

             generics.lt_token = generics.lt_token.or(cfg.generics.lt_token);
             generics.gt_token = generics.gt_token.or(cfg.generics.gt_token);
-            generics.params.extend(cfg.generics.params.into_iter());
+            generics.params.extend(cfg.generics.params);

             if let Some(where_clause) = cfg.generics.where_clause {
                 generics
@@ -343,7 +343,7 @@ pub fn derive(
         }
         syn::Data::Enum(data) => {
             let field_checks = if cfg.check_fields {
-                enum_field_order_checks(&data, &input.ident, &cfg.formula)
+                enum_field_order_checks(data, &input.ident, &cfg.formula)
             } else {
                 TokenStream::new()
             };
@@ -452,7 +452,7 @@ pub fn derive(

             generics.lt_token = generics.lt_token.or(cfg.generics.lt_token);
             generics.gt_token = generics.gt_token.or(cfg.generics.gt_token);
-            generics.params.extend(cfg.generics.params.into_iter());
+            generics.params.extend(cfg.generics.params);

             if let Some(where_clause) = cfg.generics.where_clause {
                 generics
diff --git a/src/bincoded.rs b/src/bincoded.rs
index 1c644bc..16cb1e2 100644
--- a/src/bincoded.rs
+++ b/src/bincoded.rs
@@ -140,6 +140,24 @@ where
     }
 }

+impl Serialize> for &T
+where
+    T: serde::Serialize,
+{
+    #[inline(always)]
+    fn serialize(self, sizes: &mut Sizes, buffer: B) -> Result<(), B::Error>
+    where
+        B: Buffer,
+    {
+        <&T as Serialize>::serialize(self, sizes, buffer)
+    }
+
+    #[inline(always)]
+    fn size_hint(&self) -> Option {
+        <&T as Serialize>::size_hint(self)
+    }
+}
+
 impl<'de, T> Deserialize<'de, Bincoded> for T
 where
     T: serde::Deserialize<'de>,
diff --git a/src/deserialize.rs b/src/deserialize.rs
index ab87d32..f7154b3 100644
--- a/src/deserialize.rs
+++ b/src/deserialize.rs
@@ -762,11 +762,15 @@ where

     if F::EXACT_SIZE {
         let mut de = Deserializer::new(reference_size, &input[..reference_size]).unwrap();
-        let Ok(address) = de.read_value::(true) else { unreachable!(); };
+        let Ok(address) = de.read_value::(true) else {
+            unreachable!();
+        };
         (address, unwrap_size(F::MAX_STACK_SIZE).min(len))
     } else {
         let mut de = Deserializer::new(reference_size, &input[..reference_size]).unwrap();
-        let Ok([size, address]) = de.read_value::<[FixedUsize; 2], [usize; 2]>(true) else { unreachable!(); };
+        let Ok([size, address]) = de.read_value::<[FixedUsize; 2], [usize; 2]>(true) else {
+            unreachable!();
+        };
         (address, size)
     }
 }
diff --git a/src/packet.rs b/src/packet.rs
index c057d8a..2c5d25e 100644
--- a/src/packet.rs
+++ b/src/packet.rs
@@ -143,7 +143,6 @@ where
 /// # Errors
 ///
 /// Returns `DeserializeError` if deserialization fails.
-#[must_use]
 #[inline(always)]
 pub fn read_packet<'de, F, T>(input: &'de [u8]) -> Result<(T, usize), DeserializeError>
 where
@@ -179,7 +178,6 @@ where
 /// # Errors
 ///
 /// Returns `DeserializeError` if deserialization fails.
-#[must_use]
 #[inline(always)]
 pub fn read_packet_in_place<'de, F, T>(
     place: &mut T,
diff --git a/src/serialize.rs b/src/serialize.rs
index 1b7b1bc..4501891 100644
--- a/src/serialize.rs
+++ b/src/serialize.rs
@@ -496,7 +496,10 @@ where
     let size = FixedUsize::truncate_unchecked(size);

     if F::EXACT_SIZE {
-        debug_assert_eq!(size, FixedUsize::truncate_unchecked(F::MAX_STACK_SIZE.unwrap()));
+        debug_assert_eq!(
+            size,
+            FixedUsize::truncate_unchecked(F::MAX_STACK_SIZE.unwrap())
+        );
         buffer.write_stack(heap, stack, &address.to_le_bytes())?;
     } else {
         buffer.write_stack(heap, stack, &size.to_le_bytes())?;
@@ -625,7 +628,6 @@ where
 /// # Errors
 ///
 /// Returns error if buffer write fails.
-#[must_use]
 #[inline(always)]
 pub fn write_ref(value: T, sizes: &mut Sizes, mut buffer: B) -> Result
 where
diff --git a/src/tests/mod.rs b/src/tests/mod.rs
index 2d2a24f..291c5ce 100644
--- a/src/tests/mod.rs
+++ b/src/tests/mod.rs
@@ -466,7 +466,7 @@ fn test_bincoded() {

     let mut buffer = [0u8; 1024];
     let size = serialize::(Value(102414), &mut buffer).unwrap();
-    let (de, _) = deserialize::(&buffer[..size]).unwrap();
+    let de = deserialize::(&buffer[..size.0]).unwrap();

     assert_eq!(de.0, 102414);
 }
diff --git a/src/tests/net.rs b/src/tests/net.rs
index 6a85d62..de6424a 100644
--- a/src/tests/net.rs
+++ b/src/tests/net.rs
@@ -6,7 +6,8 @@ use rand::{
 };

 use crate::{
-    alkahest, read_packet, write_packet_to_vec, Deserialize, Formula, Lazy, SerIter, Serialize, SerializeRef, Ref,
+    alkahest, read_packet, write_packet_to_vec, Deserialize, Formula, Lazy, Ref, SerIter,
+    Serialize, SerializeRef,
 };

 #[alkahest(Formula)]
@@ -145,8 +146,11 @@ fn test_net_packet() {
     assert_eq!(size, size2);
     assert_eq!(buffer[..size], buffer2[..size]);

-    let (packet, _) =
-        read_packet::<NetPacketFormula, NetPacketRead>(&buffer[..]).unwrap();
+    let (packet, _) = read_packet::<
+        NetPacketFormula,
+        NetPacketRead,
+    >(&buffer[..])
+    .unwrap();

     for message in packet.game_messages.iter::() {
         match message.unwrap() {
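Note on the `#[must_use]` removals in src/packet.rs and src/serialize.rs above: these functions return `Result`, which is itself a `#[must_use]` type, so the attribute was redundant (clippy's `double_must_use` lint flags this pattern). A minimal standalone sketch of the same situation — the error type and function below are hypothetical and not taken from the crate:

// Standalone sketch, not code from alkahest: `DemoError` and `read_u32_le` are made up.
#[derive(Debug)]
struct DemoError;

// No `#[must_use]` needed here: `Result` is already a `#[must_use]` type, so
// ignoring this function's return value warns via `unused_must_use` anyway.
fn read_u32_le(input: &[u8]) -> Result<(u32, usize), DemoError> {
    // Take the first four bytes, or fail if the input is too short.
    let bytes: [u8; 4] = input.get(..4).ok_or(DemoError)?.try_into().unwrap();
    Ok((u32::from_le_bytes(bytes), 4))
}

fn main() {
    // A bare `read_u32_le(&[0; 4]);` would already trigger `unused_must_use`
    // without any attribute on the function; consuming the value is the norm.
    let (value, read) = read_u32_le(&[7, 0, 0, 0]).expect("valid input");
    assert_eq!((value, read), (7, 4));
}

The same reasoning applies to `read_packet`, `read_packet_in_place`, and `write_ref` in the hunks above: their `Result` return types keep the "unused result" warning for callers even after the attribute is dropped.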