Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Exposed the as_ptr api, better inlining #34

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 48 additions & 47 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ unsafe impl<T: Copy + Send> Sync for Atomic<T> {}
impl<T: Copy + RefUnwindSafe> RefUnwindSafe for Atomic<T> {}

impl<T: Copy + Default> Default for Atomic<T> {
#[inline]
#[inline(always)]
fn default() -> Self {
Self::new(Default::default())
}
Expand All @@ -90,9 +90,9 @@ impl<T: Copy + fmt::Debug> fmt::Debug for Atomic<T> {

impl<T> Atomic<T> {
/// Creates a new `Atomic`.
#[inline]
pub const fn new(v: T) -> Atomic<T> {
Atomic {
#[inline(always)]
pub const fn new(v: T) -> Self {
Self {
v: UnsafeCell::new(MaybeUninit::new(v)),
}
}
Expand All @@ -102,32 +102,35 @@ impl<T> Atomic<T> {
/// If an `Atomic` is not lock-free then it may be implemented using locks
/// internally, which makes it unsuitable for some situations (such as
/// communicating with a signal handler).
#[inline]
#[inline(always)]
pub const fn is_lock_free() -> bool {
ops::atomic_is_lock_free::<T>()
}
}

impl<T: Copy> Atomic<T> {
#[inline]
fn inner_ptr(&self) -> *mut T {
/// Returns a mutable pointer to the underlying type.
///

Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove the empty lines.

#[inline(always)]
pub fn as_ptr(&self) -> *mut T {
self.v.get() as *mut T
}

/// Returns a mutable reference to the underlying type.
///
/// This is safe because the mutable reference guarantees that no other threads are
/// concurrently accessing the atomic data.
#[inline]
#[inline(always)]
pub fn get_mut(&mut self) -> &mut T {
unsafe { &mut *self.inner_ptr() }
unsafe { &mut *self.as_ptr() }
}

/// Consumes the atomic and returns the contained value.
///
/// This is safe because passing `self` by value guarantees that no other threads are
/// concurrently accessing the atomic data.
#[inline]
#[inline(always)]
pub fn into_inner(self) -> T {
unsafe { self.v.into_inner().assume_init() }
}
Expand All @@ -140,9 +143,9 @@ impl<T: Copy> Atomic<T> {
/// # Panics
///
/// Panics if `order` is `Release` or `AcqRel`.
#[inline]
#[inline(always)]
pub fn load(&self, order: Ordering) -> T {
unsafe { ops::atomic_load(self.inner_ptr(), order) }
unsafe { ops::atomic_load(self.as_ptr(), order) }
}

/// Stores a value into the `Atomic`.
Expand All @@ -153,20 +156,20 @@ impl<T: Copy> Atomic<T> {
/// # Panics
///
/// Panics if `order` is `Acquire` or `AcqRel`.
#[inline]
#[inline(always)]
pub fn store(&self, val: T, order: Ordering) {
unsafe {
ops::atomic_store(self.inner_ptr(), val, order);
ops::atomic_store(self.as_ptr(), val, order);
}
}

/// Stores a value into the `Atomic`, returning the old value.
///
/// `swap` takes an `Ordering` argument which describes the memory ordering
/// of this operation.
#[inline]
#[inline(always)]
pub fn swap(&self, val: T, order: Ordering) -> T {
unsafe { ops::atomic_swap(self.inner_ptr(), val, order) }
unsafe { ops::atomic_swap(self.as_ptr(), val, order) }
}

/// Stores a value into the `Atomic` if the current value is the same as the
Expand All @@ -181,15 +184,15 @@ impl<T: Copy> Atomic<T> {
/// the operation succeeds while the second describes the required ordering
/// when the operation fails. The failure ordering can't be `Release` or
/// `AcqRel` and must be equivalent or weaker than the success ordering.
#[inline]
#[inline(always)]
pub fn compare_exchange(
&self,
current: T,
new: T,
success: Ordering,
failure: Ordering,
) -> Result<T, T> {
unsafe { ops::atomic_compare_exchange(self.inner_ptr(), current, new, success, failure) }
unsafe { ops::atomic_compare_exchange(self.as_ptr(), current, new, success, failure) }
}

/// Stores a value into the `Atomic` if the current value is the same as the
Expand All @@ -206,17 +209,15 @@ impl<T: Copy> Atomic<T> {
/// when the operation fails. The failure ordering can't be `Release` or
/// `AcqRel` and must be equivalent or weaker than the success ordering.
#[inline]
#[inline(always)]
pub fn compare_exchange_weak(
&self,
current: T,
new: T,
success: Ordering,
failure: Ordering,
) -> Result<T, T> {
unsafe {
ops::atomic_compare_exchange_weak(self.inner_ptr(), current, new, success, failure)
}
unsafe { ops::atomic_compare_exchange_weak(self.as_ptr(), current, new, success, failure) }
}

/// Fetches the value, and applies a function to it that returns an optional
Expand Down Expand Up @@ -255,7 +256,7 @@ impl<T: Copy> Atomic<T> {
/// assert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)), Ok(8));
/// assert_eq!(x.load(Ordering::SeqCst), 9);
/// ```
#[inline]
#[inline(always)]
pub fn fetch_update<F>(
&self,
set_order: Ordering,
Expand Down Expand Up @@ -283,9 +284,9 @@ impl Atomic<bool> {
/// `val`, and sets the new value to the result.
///
/// Returns the previous value.
#[inline]
#[inline(always)]
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
unsafe { ops::atomic_and(self.as_ptr(), val, order) }
}

/// Logical "or" with a boolean value.
Expand All @@ -294,9 +295,9 @@ impl Atomic<bool> {
/// `val`, and sets the new value to the result.
///
/// Returns the previous value.
#[inline]
#[inline(always)]
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
unsafe { ops::atomic_or(self.as_ptr(), val, order) }
}

/// Logical "xor" with a boolean value.
Expand All @@ -305,43 +306,43 @@ impl Atomic<bool> {
/// `val`, and sets the new value to the result.
///
/// Returns the previous value.
#[inline]
#[inline(always)]
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
unsafe { ops::atomic_xor(self.as_ptr(), val, order) }
}
}

macro_rules! atomic_ops_common {
($($t:ty)*) => ($(
impl Atomic<$t> {
/// Add to the current value, returning the previous value.
#[inline]
#[inline(always)]
pub fn fetch_add(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_add(self.inner_ptr(), val, order) }
unsafe { ops::atomic_add(self.as_ptr(), val, order) }
}

/// Subtract from the current value, returning the previous value.
#[inline]
#[inline(always)]
pub fn fetch_sub(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_sub(self.inner_ptr(), val, order) }
unsafe { ops::atomic_sub(self.as_ptr(), val, order) }
}

/// Bitwise and with the current value, returning the previous value.
#[inline]
#[inline(always)]
pub fn fetch_and(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_and(self.inner_ptr(), val, order) }
unsafe { ops::atomic_and(self.as_ptr(), val, order) }
}

/// Bitwise or with the current value, returning the previous value.
#[inline]
#[inline(always)]
pub fn fetch_or(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_or(self.inner_ptr(), val, order) }
unsafe { ops::atomic_or(self.as_ptr(), val, order) }
}

/// Bitwise xor with the current value, returning the previous value.
#[inline]
#[inline(always)]
pub fn fetch_xor(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_xor(self.inner_ptr(), val, order) }
unsafe { ops::atomic_xor(self.as_ptr(), val, order) }
}
}
)*);
Expand All @@ -352,15 +353,15 @@ macro_rules! atomic_ops_signed {
$(
impl Atomic<$t> {
/// Minimum with the current value.
#[inline]
#[inline(always)]
pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_min(self.inner_ptr(), val, order) }
unsafe { ops::atomic_min(self.as_ptr(), val, order) }
}

/// Maximum with the current value.
#[inline]
#[inline(always)]
pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_max(self.inner_ptr(), val, order) }
unsafe { ops::atomic_max(self.as_ptr(), val, order) }
}
}
)*
Expand All @@ -372,15 +373,15 @@ macro_rules! atomic_ops_unsigned {
$(
impl Atomic<$t> {
/// Minimum with the current value.
#[inline]
#[inline(always)]
pub fn fetch_min(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_umin(self.inner_ptr(), val, order) }
unsafe { ops::atomic_umin(self.as_ptr(), val, order) }
}

/// Maximum with the current value.
#[inline]
#[inline(always)]
pub fn fetch_max(&self, val: $t, order: Ordering) -> $t {
unsafe { ops::atomic_umax(self.inner_ptr(), val, order) }
unsafe { ops::atomic_umax(self.as_ptr(), val, order) }
}
}
)*
Expand Down