Merge pull request #17 from zakarumych/bincoded-ref
Allow serializing &T as Bincoded<T>
zakarumych authored Sep 19, 2023
2 parents bc0e59c + 37e4364 commit a0e85ba
Showing 16 changed files with 86 additions and 65 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/check-platforms.yml
@@ -2,6 +2,7 @@ name: Check multiple platforms

on:
pull_request:
types: [ opened, edited ]
paths:
- '**.rs'
- '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

jobs:
check-targets:
if: ${{ github.event.label.name == 'ready-to-merge' }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
2 changes: 1 addition & 1 deletion .github/workflows/check-targets.yml
@@ -2,6 +2,7 @@ name: Check multiple targets

on:
pull_request:
types: [ opened, edited ]
paths:
- '**.rs'
- '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

jobs:
check-targets:
if: ${{ github.event.label.name == 'ready-to-merge' }}
runs-on: ubuntu-latest
strategy:
matrix:
2 changes: 1 addition & 1 deletion .github/workflows/check-toolchains.yml
@@ -2,6 +2,7 @@ name: Check multiple toolchains

on:
pull_request:
types: [ opened, edited ]
paths:
- '**.rs'
- '**/Cargo.toml'
@@ -11,7 +12,6 @@ env:

jobs:
check-toolchains:
if: ${{ github.event.label.name == 'ready-to-merge' }}
runs-on: ubuntu-latest
strategy:
matrix:
1 change: 1 addition & 0 deletions .github/workflows/lints.yml
@@ -2,6 +2,7 @@ name: Lints

on:
pull_request:
types: [ opened, edited ]
paths:
- '**.rs'
- '**/Cargo.toml'
2 changes: 1 addition & 1 deletion .github/workflows/security.yml
@@ -2,6 +2,7 @@ name: Security audit

on:
pull_request:
types: [ opened, edited ]
paths:
- '**/Cargo.toml'

@@ -10,7 +11,6 @@ env:

jobs:
security_audit:
if: ${{ github.event.label.name == 'ready-to-merge' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
1 change: 1 addition & 0 deletions .github/workflows/test-features.yml
@@ -2,6 +2,7 @@ name: Test features

on:
pull_request:
types: [ opened, edited ]
paths:
- '**.rs'
- '**/Cargo.toml'
7 changes: 4 additions & 3 deletions Cargo.toml
@@ -11,20 +11,21 @@ readme = "README.md"
description = "Fantastic serialization library with zero-overhead serialization and zero-copy deserialization"

[features]
alloc = [] # enables impls for types from `alloc` crate.
alloc = [] # enables impls for types from `alloc` crate.
std = ["alloc"]
derive = ["alkahest-proc"]
inline-more = []

## TODO: Control on value or type level?
## Keep features for defaults?
fixed8 = [] # sets size of `FixedUsize` and `FixedIsize` to 8 bits.
fixed8 = [] # sets size of `FixedUsize` and `FixedIsize` to 8 bits.
fixed16 = [] # sets size of `FixedUsize` and `FixedIsize` to 16 bits.
fixed32 = [] # sets size of `FixedUsize` and `FixedIsize` to 32 bits. Default.
fixed64 = [] # sets size of `FixedUsize` and `FixedIsize` to 64 bits.

default = ["alloc", "fixed32", "inline-more"]

bincoded = ["bincode", "serde", "std"]
bincoded = ["dep:bincode", "dep:serde", "std"]

[dependencies]
alkahest-proc = { version = "=0.3.0", path = "proc", optional = true }
68 changes: 32 additions & 36 deletions benchmark/benches/benchmark.rs
@@ -24,7 +24,10 @@ use rand::{

#[derive(Debug, Clone, Formula, Serialize, Deserialize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
#[cfg_attr(
feature = "rkyv",
derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
)]
#[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
#[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
pub enum GameMessage {
@@ -42,7 +45,10 @@ pub enum GameMessageRead<'de> {

#[derive(Debug, Clone, Formula, Serialize, Deserialize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
#[cfg_attr(
feature = "rkyv",
derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
)]
#[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
#[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
pub enum ClientMessage {
@@ -60,7 +66,10 @@ pub enum ClientMessageRead<'de> {

#[derive(Debug, Clone, Formula, Serialize, Deserialize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
#[cfg_attr(
feature = "rkyv",
derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
)]
#[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
#[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
pub enum ServerMessage {
@@ -78,7 +87,10 @@ pub enum ServerMessageRead<'de> {

#[derive(Debug, Formula, Serialize, Deserialize)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rkyv", derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize))]
#[cfg_attr(
feature = "rkyv",
derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)
)]
#[cfg_attr(feature = "rkyv", archive_attr(derive(CheckBytes)))]
#[cfg_attr(feature = "speedy", derive(speedy::Writable, speedy::Readable))]
pub struct NetPacket<G> {
@@ -179,18 +191,15 @@ pub fn criterion_benchmark(c: &mut Criterion) {

group.bench_function("deserialize", |b| {
b.iter(|| {
let packet = alkahest::deserialize::<
NetPacket<GameMessage>,
NetPacket<GameMessage>,
>(&buffer[..size])
.unwrap();
let packet =
alkahest::deserialize::<NetPacket<GameMessage>, NetPacket<GameMessage>>(
&buffer[..size],
)
.unwrap();

for message in packet.game_messages.iter() {
match message {
GameMessage::Client(ClientMessage::ClientData {
nickname,
clan,
}) => {
GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
black_box(nickname);
black_box(clan);
}
@@ -200,10 +209,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
GameMessage::Server(ServerMessage::ServerData(data)) => {
black_box(data);
}
GameMessage::Server(ServerMessage::ClientChat {
client_id,
message,
}) => {
GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
black_box(client_id);
black_box(message);
}
@@ -307,15 +313,14 @@ pub fn criterion_benchmark(c: &mut Criterion) {
group.bench_function("deserialize", |b| {
b.iter(|| {
use rkyv::Deserialize;
let archive = rkyv::check_archived_root::<NetPacket<GameMessage>>(&vec[..]).unwrap();
let packet: NetPacket<GameMessage> = archive.deserialize(&mut rkyv::Infallible).unwrap();
let archive =
rkyv::check_archived_root::<NetPacket<GameMessage>>(&vec[..]).unwrap();
let packet: NetPacket<GameMessage> =
archive.deserialize(&mut rkyv::Infallible).unwrap();

for message in packet.game_messages.iter() {
match message {
GameMessage::Client(ClientMessage::ClientData {
nickname,
clan,
}) => {
GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
black_box(nickname);
black_box(clan);
}
@@ -325,10 +330,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
GameMessage::Server(ServerMessage::ServerData(data)) => {
black_box(data);
}
GameMessage::Server(ServerMessage::ClientChat {
client_id,
message,
}) => {
GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
black_box(client_id);
black_box(message);
}
@@ -389,7 +391,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
}
})
});

group.bench_function("deserialize", |b| {
b.iter(|| {
let packet =
@@ -398,10 +400,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {

for message in packet.game_messages.iter() {
match message {
GameMessage::Client(ClientMessage::ClientData {
nickname,
clan,
}) => {
GameMessage::Client(ClientMessage::ClientData { nickname, clan }) => {
black_box(nickname);
black_box(clan);
}
@@ -411,10 +410,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
GameMessage::Server(ServerMessage::ServerData(data)) => {
black_box(data);
}
GameMessage::Server(ServerMessage::ClientChat {
client_id,
message,
}) => {
GameMessage::Server(ServerMessage::ClientChat { client_id, message }) => {
black_box(client_id);
black_box(message);
}
12 changes: 4 additions & 8 deletions proc/src/deserialize.rs
@@ -172,7 +172,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result<To
)),
syn::Data::Struct(data) => {
let field_checks = if cfg.check_fields {
struct_field_order_checks(&data, None, &input.ident, &cfg.formula)
struct_field_order_checks(data, None, &input.ident, &cfg.formula)
} else {
TokenStream::new()
};
@@ -185,9 +185,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result<To

deserialize_generics.lt_token = deserialize_generics.lt_token.or(cfg.generics.lt_token);
deserialize_generics.gt_token = deserialize_generics.gt_token.or(cfg.generics.gt_token);
deserialize_generics
.params
.extend(cfg.generics.params.into_iter());
deserialize_generics.params.extend(cfg.generics.params);

if let Some(where_clause) = cfg.generics.where_clause {
deserialize_generics
@@ -330,7 +328,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result<To
}
syn::Data::Enum(data) => {
let field_checks = if cfg.check_fields {
enum_field_order_checks(&data, &input.ident, &cfg.formula)
enum_field_order_checks(data, &input.ident, &cfg.formula)
} else {
TokenStream::new()
};
@@ -343,9 +341,7 @@ pub fn derive(args: DeserializeArgs, input: &syn::DeriveInput) -> syn::Result<To

deserialize_generics.lt_token = deserialize_generics.lt_token.or(cfg.generics.lt_token);
deserialize_generics.gt_token = deserialize_generics.gt_token.or(cfg.generics.gt_token);
deserialize_generics
.params
.extend(cfg.generics.params.into_iter());
deserialize_generics.params.extend(cfg.generics.params);

if let Some(where_clause) = cfg.generics.where_clause {
deserialize_generics
8 changes: 4 additions & 4 deletions proc/src/serialize.rs
@@ -146,7 +146,7 @@ pub fn derive(
)),
syn::Data::Struct(data) => {
let field_checks = if cfg.check_fields {
struct_field_order_checks(&data, cfg.variant.as_ref(), &input.ident, &cfg.formula)
struct_field_order_checks(data, cfg.variant.as_ref(), &input.ident, &cfg.formula)
} else {
TokenStream::new()
};
@@ -240,7 +240,7 @@ pub fn derive(

generics.lt_token = generics.lt_token.or(cfg.generics.lt_token);
generics.gt_token = generics.gt_token.or(cfg.generics.gt_token);
generics.params.extend(cfg.generics.params.into_iter());
generics.params.extend(cfg.generics.params);

if let Some(where_clause) = cfg.generics.where_clause {
generics
@@ -343,7 +343,7 @@ pub fn derive(
}
syn::Data::Enum(data) => {
let field_checks = if cfg.check_fields {
enum_field_order_checks(&data, &input.ident, &cfg.formula)
enum_field_order_checks(data, &input.ident, &cfg.formula)
} else {
TokenStream::new()
};
@@ -452,7 +452,7 @@ pub fn derive(

generics.lt_token = generics.lt_token.or(cfg.generics.lt_token);
generics.gt_token = generics.gt_token.or(cfg.generics.gt_token);
generics.params.extend(cfg.generics.params.into_iter());
generics.params.extend(cfg.generics.params);

if let Some(where_clause) = cfg.generics.where_clause {
generics
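The proc-macro edits above read as small lint cleanups: `Extend::extend` already accepts any `IntoIterator`, so the explicit `.into_iter()` calls were redundant, and passing `data` instead of `&data` drops an extra borrow (the pattern clippy's `needless_borrow` lint targets), since the match on the input already yields a reference. A standalone sketch of the `extend` point, using illustrative vectors rather than the crate's `syn` generics:

```rust
fn main() {
    let mut params = vec![1, 2];
    let extra = vec![3, 4];

    // `Vec::extend` takes `impl IntoIterator`, so `params.extend(extra.into_iter())`
    // and the call below do exactly the same thing; the shorter form is idiomatic.
    params.extend(extra);

    assert_eq!(params, [1, 2, 3, 4]);
}
```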
18 changes: 18 additions & 0 deletions src/bincoded.rs
@@ -140,6 +140,24 @@ where
}
}

impl<T> Serialize<Bincoded<T>> for &T
where
T: serde::Serialize,
{
#[inline(always)]
fn serialize<B>(self, sizes: &mut Sizes, buffer: B) -> Result<(), B::Error>
where
B: Buffer,
{
<&T as Serialize<Bincode>>::serialize(self, sizes, buffer)
}

#[inline(always)]
fn size_hint(&self) -> Option<Sizes> {
<&T as Serialize<Bincode>>::size_hint(self)
}
}

impl<'de, T> Deserialize<'de, Bincoded<T>> for T
where
T: serde::Deserialize<'de>,
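This is the functional change behind the PR title: a `&T` reference can now be serialized with the `Bincoded<T>` formula, forwarding to the existing `Serialize<Bincode>` impl for references, alongside the owned-value impl that precedes it in this file. A rough usage sketch, assuming the `bincoded` feature is enabled and that `Bincoded` and `Serialize` are available at the crate root; the `Config` type and `encode` helper are hypothetical and the actual serializer call is elided:

```rust
use alkahest::{Bincoded, Serialize};

// Any serde-serializable type can be carried by the `Bincoded<T>` formula.
#[derive(serde::Serialize, serde::Deserialize)]
struct Config {
    name: String,
    retries: u32,
}

// Generic over anything that serializes with the `Bincoded<Config>` formula.
fn encode<S: Serialize<Bincoded<Config>>>(value: S) {
    // Hand `value` to an alkahest serializer here; elided in this sketch.
    let _ = value;
}

fn main() {
    let cfg = Config { name: "demo".into(), retries: 3 };
    encode(&cfg); // new with this PR: a reference satisfies the bound
    encode(cfg);  // owned values already worked
}
```

Because the new impl forwards through the same `Bincode` path, serializing `&cfg` and `cfg` should produce identical bytes.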
8 changes: 6 additions & 2 deletions src/deserialize.rs
@@ -762,11 +762,15 @@ where

if F::EXACT_SIZE {
let mut de = Deserializer::new(reference_size, &input[..reference_size]).unwrap();
let Ok(address) = de.read_value::<FixedUsize, usize>(true) else { unreachable!(); };
let Ok(address) = de.read_value::<FixedUsize, usize>(true) else {
unreachable!();
};
(address, unwrap_size(F::MAX_STACK_SIZE).min(len))
} else {
let mut de = Deserializer::new(reference_size, &input[..reference_size]).unwrap();
let Ok([size, address]) = de.read_value::<[FixedUsize; 2], [usize; 2]>(true) else { unreachable!(); };
let Ok([size, address]) = de.read_value::<[FixedUsize; 2], [usize; 2]>(true) else {
unreachable!();
};
(address, size)
}
}
2 changes: 0 additions & 2 deletions src/packet.rs
@@ -143,7 +143,6 @@ where
/// # Errors
///
/// Returns `DeserializeError` if deserialization fails.
#[must_use]
#[inline(always)]
pub fn read_packet<'de, F, T>(input: &'de [u8]) -> Result<(T, usize), DeserializeError>
where
@@ -179,7 +178,6 @@ where
/// # Errors
///
/// Returns `DeserializeError` if deserialization fails.
#[must_use]
#[inline(always)]
pub fn read_packet_in_place<'de, F, T>(
place: &mut T,
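Both `read_packet` and `read_packet_in_place` return `Result`, which is already `#[must_use]`, so the dropped attribute was redundant; clippy's `double_must_use` lint warns about exactly this combination, which is the likely motivation. A tiny illustration with a made-up function:

```rust
// Redundant: the return type is already #[must_use], so this attribute adds nothing,
// and clippy's `double_must_use` lint fires on it. Removing it changes no behavior:
// ignoring the returned Result still triggers the `unused_must_use` warning.
#[must_use]
fn parse_len(input: &[u8]) -> Result<usize, &'static str> {
    input.first().map(|b| *b as usize).ok_or("empty input")
}

fn main() {
    match parse_len(b"\x05rest") {
        Ok(len) => println!("length prefix: {len}"),
        Err(err) => eprintln!("{err}"),
    }
}
```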
(Diffs for the remaining 3 changed files are not shown above.)
