 #endif // !__APPLE__
 #include <pthread.h>
 
-#if !defined(OS_INLINE)
-#if __GNUC__
-#define OS_INLINE static __inline__
-#else
-#define OS_INLINE
-#endif // !__GNUC__
-#endif // !OS_INLINE
-
 #if !defined(OS_ALWAYS_INLINE)
 #if __GNUC__
 #define OS_ALWAYS_INLINE __attribute__((__always_inline__))
@@ -60,8 +52,8 @@ typedef struct {
     } __impl;
 } bnr_spinlock_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.setup(self:))
-void bnr_native_lock_create(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_init(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         address->__impl.modern = OS_UNFAIR_LOCK_INIT;
@@ -72,8 +64,8 @@ void bnr_native_lock_create(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_init(&address->__impl.legacy, NULL);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.invalidate(self:))
-void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         return;
@@ -83,8 +75,8 @@ void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_destroy(&address->__impl.legacy);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.lock(self:))
-void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_lock != NULL) {
         return os_unfair_lock_lock(&address->__impl.modern);
@@ -94,8 +86,8 @@ void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_lock(&address->__impl.legacy);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.try(self:))
-bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         return os_unfair_lock_trylock(&address->__impl.modern);
@@ -105,8 +97,8 @@ bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
     return pthread_mutex_trylock(&address->__impl.legacy) == 0;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.unlock(self:))
-void bnr_native_lock_unlock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_unlock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_unlock != NULL) {
         return os_unfair_lock_unlock(&address->__impl.modern);
@@ -121,13 +113,13 @@ typedef struct {
     _Atomic(void *_Nullable) value;
 } bnr_atomic_ptr_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicRawPointer.load(self:order:))
-void *_Nullable bnr_atomic_ptr_load(volatile bnr_atomic_ptr_t *_Nonnull target, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline void *_Nullable bnr_atomic_ptr_load(volatile bnr_atomic_ptr_t *_Nonnull target, bnr_atomic_memory_order_t order) {
     return __c11_atomic_load(&target->value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicRawPointer.compareAndSwap(self:from:to:order:))
-bool bnr_atomic_ptr_compare_and_swap(volatile bnr_atomic_ptr_t *_Nonnull target, void *_Nullable expected, void *_Nullable desired, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_ptr_compare_and_swap(volatile bnr_atomic_ptr_t *_Nonnull target, void *_Nullable expected, void *_Nullable desired, bnr_atomic_memory_order_t order) {
     return __c11_atomic_compare_exchange_strong(&target->value, &expected, desired, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
 }
 
@@ -136,13 +128,13 @@ typedef struct {
     _Atomic(bool) value;
 } bnr_atomic_flag_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBool.testAndSet(self:))
-bool bnr_atomic_flag_test_and_set(volatile bnr_atomic_flag_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_flag_test_and_set(volatile bnr_atomic_flag_t *_Nonnull target) {
     return __c11_atomic_exchange(&target->value, 1, __ATOMIC_RELAXED);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBool.test(self:))
-bool bnr_atomic_flag_test(volatile bnr_atomic_flag_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_flag_test(volatile bnr_atomic_flag_t *_Nonnull target) {
     return __c11_atomic_load(&target->value, __ATOMIC_RELAXED);
 }
 
@@ -151,23 +143,23 @@ typedef struct {
     _Atomic(uint_fast8_t) value;
 } bnr_atomic_bitmask_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.setInitialValue(self:_:))
-void bnr_atomic_bitmask_init(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
+OS_ALWAYS_INLINE
+static inline void bnr_atomic_bitmask_init(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
     __c11_atomic_init(&target->value, value);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.or(self:with:order:))
-uint_fast8_t bnr_atomic_bitmask_or(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline uint_fast8_t bnr_atomic_bitmask_or(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
     return __c11_atomic_fetch_or(&target->value, value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.and(self:with:order:))
-uint_fast8_t bnr_atomic_bitmask_and(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline uint_fast8_t bnr_atomic_bitmask_and(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
     return __c11_atomic_fetch_and(&target->value, value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.test(self:for:))
-bool bnr_atomic_bitmask_test(const bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_bitmask_test(const bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
     return (__c11_atomic_load((_Atomic(uint_fast8_t) *)&target->value, __ATOMIC_RELAXED) & value) != 0;
 }
 
@@ -176,18 +168,18 @@ typedef struct {
     _Atomic(int_fast32_t) value;
 } bnr_atomic_counter_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.increment(self:))
-int_fast32_t bnr_atomic_counter_increment(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_increment(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_fetch_add(&target->value, 1, __ATOMIC_SEQ_CST) + 1;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.decrement(self:))
-int_fast32_t bnr_atomic_counter_decrement(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_decrement(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_fetch_sub(&target->value, 1, __ATOMIC_SEQ_CST) - 1;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.load(self:))
-int_fast32_t bnr_atomic_counter_load(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_load(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_load(&target->value, __ATOMIC_SEQ_CST);
 }
 
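For context (not part of the commit), a minimal sketch of how the renamed lock shim and flag helper might be exercised from plain C. Only the bnr_* identifiers come from the header diffed above; the calling code and the #include path are assumptions.

/* Hypothetical caller: exercises the renamed bnr_native_lock_* shim and the
 * flag helper declared above. The header name "bnr_atomic_shims.h" is assumed. */
#include <stdbool.h>
#include <stdio.h>
#include "bnr_atomic_shims.h"

int main(void) {
    bnr_spinlock_t lock;
    bnr_native_lock_init(&lock);          /* renamed from bnr_native_lock_create */

    bnr_native_lock_lock(&lock);
    /* ... critical section ... */
    bnr_native_lock_unlock(&lock);

    if (bnr_native_lock_trylock(&lock)) { /* true when the lock was acquired */
        bnr_native_lock_unlock(&lock);
    }

    bnr_native_lock_destroy(&lock);

    bnr_atomic_flag_t flag = { false };
    bool was_set = bnr_atomic_flag_test_and_set(&flag);
    printf("flag was previously %s\n", was_set ? "set" : "clear");
    return 0;
}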