This repository was archived by the owner on Aug 29, 2022. It is now read-only.

Commit 2b77c02

Merge pull request #187 from bignerdranch/zwaldowski/swift-3_2-tsan
Avoid using Swift inout to guarantee C pointer semantics for atomics
2 parents: 79b6e9f + 8e4ad0d
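The change is easier to read with the calling convention spelled out. Swift models inout parameters as copy-in/copy-out, so a callee is not guaranteed to receive the address of the caller's actual storage; for atomics, where every thread must operate on one and the same memory location, that guarantee is the whole point (the branch name suggests Thread Sanitizer prompted the change). The sketch below uses hypothetical names (ExampleAtomicFlag, ExampleStorage) that are not part of Deferred; it only contrasts the closure shape the diff moves away from with the pointer-based shape it adopts. In Deferred itself the pointer comes from ManagedBuffer.withUnsafeMutablePointerToElements, as the Deferred.swift hunks below show.

// Hypothetical stand-ins for illustration only; Deferred's real storage is a
// C11 _Atomic value driven by the bnr_atomic_* functions in Atomics.h.
struct ExampleAtomicFlag {
    var value = false
}

final class ExampleStorage {
    private var flag = ExampleAtomicFlag()

    // Shape the diff moves away from: the closure sees the storage as `inout`.
    // Swift may satisfy an `inout` argument with a temporary copy, so an
    // atomic operation inside the closure is not guaranteed to touch the
    // location other threads see.
    func withFlag<Return>(_ body: (inout ExampleAtomicFlag) throws -> Return) rethrows -> Return {
        return try body(&flag)
    }

    // Shape the diff adopts: the closure receives an explicit pointer, which
    // is what the C atomics functions take. Deferred obtains this pointer from
    // ManagedBuffer's element storage rather than from a stored property.
    func withPointerToFlag<Return>(_ body: (UnsafeMutablePointer<ExampleAtomicFlag>) throws -> Return) rethrows -> Return {
        return try withUnsafeMutablePointer(to: &flag, body)
    }
}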

File tree: 9 files changed, +80 -124 lines

.fastlane/Fastfile

Lines changed: 0 additions & 4 deletions
@@ -65,8 +65,4 @@ platform :ios do
     test
     build_for_other_platforms
   end
-
-  error do |lane, exception|
-    sh "cat ~/Library/Logs/DiagnosticReports/xctest*.crash | true"
-  end
 end

.travis.yml

Lines changed: 3 additions & 0 deletions
@@ -15,6 +15,9 @@ matrix:
   include:
     - script: fastlane ios ci
       env: JOB=iOS
+      after_failure:
+        - cat ~/Library/Developer/Xcode/DerivedData/Deferred-*/Logs/Test/*/Session-MobileDeferredTests-*.log | true
+        - cat ~/Library/Logs/scan/Deferred-*.log | true
     - language: generic
       os: linux
       dist: trusty

Sources/Atomics/include/Atomics.h

Lines changed: 32 additions & 40 deletions
@@ -18,14 +18,6 @@
 #endif // !__APPLE__
 #include <pthread.h>
 
-#if !defined(OS_INLINE)
-#if __GNUC__
-#define OS_INLINE static __inline__
-#else
-#define OS_INLINE
-#endif // !__GNUC__
-#endif // !OS_INLINE
-
 #if !defined(OS_ALWAYS_INLINE)
 #if __GNUC__
 #define OS_ALWAYS_INLINE __attribute__((__always_inline__))
@@ -60,8 +52,8 @@ typedef struct {
     } __impl;
 } bnr_spinlock_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.setup(self:))
-void bnr_native_lock_create(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_init(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         address->__impl.modern = OS_UNFAIR_LOCK_INIT;
@@ -72,8 +64,8 @@ void bnr_native_lock_create(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_init(&address->__impl.legacy, NULL);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.invalidate(self:))
-void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         return;
@@ -83,8 +75,8 @@ void bnr_native_lock_destroy(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_destroy(&address->__impl.legacy);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.lock(self:))
-void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_lock != NULL) {
         return os_unfair_lock_lock(&address->__impl.modern);
@@ -94,8 +86,8 @@ void bnr_native_lock_lock(bnr_spinlock_t *_Nonnull address) {
     pthread_mutex_lock(&address->__impl.legacy);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.try(self:))
-bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_trylock != NULL) {
         return os_unfair_lock_trylock(&address->__impl.modern);
@@ -105,8 +97,8 @@ bool bnr_native_lock_trylock(bnr_spinlock_t *_Nonnull address) {
     return pthread_mutex_trylock(&address->__impl.legacy) == 0;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeNativeLock.unlock(self:))
-void bnr_native_lock_unlock(bnr_spinlock_t *_Nonnull address) {
+OS_ALWAYS_INLINE
+static inline void bnr_native_lock_unlock(bnr_spinlock_t *_Nonnull address) {
 #if defined(__APPLE__)
     if (&os_unfair_lock_unlock != NULL) {
         return os_unfair_lock_unlock(&address->__impl.modern);
@@ -121,13 +113,13 @@ typedef struct {
     _Atomic(void *_Nullable) value;
 } bnr_atomic_ptr_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicRawPointer.load(self:order:))
-void *_Nullable bnr_atomic_ptr_load(volatile bnr_atomic_ptr_t *_Nonnull target, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline void *_Nullable bnr_atomic_ptr_load(volatile bnr_atomic_ptr_t *_Nonnull target, bnr_atomic_memory_order_t order) {
     return __c11_atomic_load(&target->value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicRawPointer.compareAndSwap(self:from:to:order:))
-bool bnr_atomic_ptr_compare_and_swap(volatile bnr_atomic_ptr_t *_Nonnull target, void *_Nullable expected, void *_Nullable desired, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_ptr_compare_and_swap(volatile bnr_atomic_ptr_t *_Nonnull target, void *_Nullable expected, void *_Nullable desired, bnr_atomic_memory_order_t order) {
     return __c11_atomic_compare_exchange_strong(&target->value, &expected, desired, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
 }
 
@@ -136,13 +128,13 @@ typedef struct {
     _Atomic(bool) value;
 } bnr_atomic_flag_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBool.testAndSet(self:))
-bool bnr_atomic_flag_test_and_set(volatile bnr_atomic_flag_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_flag_test_and_set(volatile bnr_atomic_flag_t *_Nonnull target) {
     return __c11_atomic_exchange(&target->value, 1, __ATOMIC_RELAXED);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBool.test(self:))
-bool bnr_atomic_flag_test(volatile bnr_atomic_flag_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_flag_test(volatile bnr_atomic_flag_t *_Nonnull target) {
     return __c11_atomic_load(&target->value, __ATOMIC_RELAXED);
 }
 
@@ -151,23 +143,23 @@ typedef struct {
     _Atomic(uint_fast8_t) value;
 } bnr_atomic_bitmask_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.setInitialValue(self:_:))
-void bnr_atomic_bitmask_init(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
+OS_ALWAYS_INLINE
+static inline void bnr_atomic_bitmask_init(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
     __c11_atomic_init(&target->value, value);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.or(self:with:order:))
-uint_fast8_t bnr_atomic_bitmask_or(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline uint_fast8_t bnr_atomic_bitmask_or(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
     return __c11_atomic_fetch_or(&target->value, value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.and(self:with:order:))
-uint_fast8_t bnr_atomic_bitmask_and(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
+OS_ALWAYS_INLINE
+static inline uint_fast8_t bnr_atomic_bitmask_and(volatile bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value, bnr_atomic_memory_order_t order) {
     return __c11_atomic_fetch_and(&target->value, value, order);
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicBitmask.test(self:for:))
-bool bnr_atomic_bitmask_test(const bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
+OS_ALWAYS_INLINE
+static inline bool bnr_atomic_bitmask_test(const bnr_atomic_bitmask_t *_Nonnull target, uint_fast8_t value) {
     return (__c11_atomic_load((_Atomic(uint_fast8_t) *)&target->value, __ATOMIC_RELAXED) & value) != 0;
 }
 
@@ -176,18 +168,18 @@ typedef struct {
     _Atomic(int_fast32_t) value;
 } bnr_atomic_counter_t;
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.increment(self:))
-int_fast32_t bnr_atomic_counter_increment(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_increment(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_fetch_add(&target->value, 1, __ATOMIC_SEQ_CST) + 1;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.decrement(self:))
-int_fast32_t bnr_atomic_counter_decrement(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_decrement(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_fetch_sub(&target->value, 1, __ATOMIC_SEQ_CST) - 1;
 }
 
-OS_INLINE OS_ALWAYS_INLINE OS_SWIFT_NAME(UnsafeAtomicCounter.load(self:))
-int_fast32_t bnr_atomic_counter_load(volatile bnr_atomic_counter_t *_Nonnull target) {
+OS_ALWAYS_INLINE
+static inline int_fast32_t bnr_atomic_counter_load(volatile bnr_atomic_counter_t *_Nonnull target) {
     return __c11_atomic_load(&target->value, __ATOMIC_SEQ_CST);
 }

Sources/Deferred/Deferred.swift

Lines changed: 7 additions & 9 deletions
@@ -54,7 +54,7 @@ public final class Deferred<Value>: FutureProtocol, PromiseProtocol {
 
     private func notify(flags: DispatchWorkItemFlags, upon queue: DispatchQueue, execute body: @escaping(Value) -> Void) {
         group.notify(flags: flags, queue: queue) { [storage] in
-            guard let ptr = storage.withAtomicPointerToElement({ $0.load(order: .none) }) else { return }
+            guard let ptr = storage.withAtomicPointerToElement({ bnr_atomic_ptr_load($0, .none) }) else { return }
             body(Storage.unbox(from: ptr))
         }
     }
@@ -79,7 +79,7 @@ public final class Deferred<Value>: FutureProtocol, PromiseProtocol {
 
     public func wait(until time: DispatchTime) -> Value? {
         guard case .success = group.wait(timeout: time),
-            let ptr = storage.withAtomicPointerToElement({ $0.load(order: .none) }) else { return nil }
+            let ptr = storage.withAtomicPointerToElement({ bnr_atomic_ptr_load($0, .none) }) else { return nil }
 
         return Storage.unbox(from: ptr)
     }
@@ -88,7 +88,7 @@ public final class Deferred<Value>: FutureProtocol, PromiseProtocol {
 
     public var isFilled: Bool {
         return storage.withAtomicPointerToElement {
-            $0.load(order: .none) != nil
+            bnr_atomic_ptr_load($0, .none) != nil
         }
     }
 
@@ -97,7 +97,7 @@ public final class Deferred<Value>: FutureProtocol, PromiseProtocol {
         let box = Storage.box(value)
 
         let wonRace = storage.withAtomicPointerToElement {
-            $0.compareAndSwap(from: nil, to: box.toOpaque(), order: .thread)
+            bnr_atomic_ptr_compare_and_swap($0, nil, box.toOpaque(), .thread)
         }
 
         if wonRace {
@@ -135,16 +135,14 @@ private final class DeferredStorage<Value>: ManagedBuffer<Void, DeferredRaw<Valu
         return unsafeDowncast(super.create(minimumCapacity: 1, makingHeaderWith: { _ in }), to: _Self.self)
     }
 
-    func withAtomicPointerToElement<Return>(_ body: (inout UnsafeAtomicRawPointer) throws -> Return) rethrows -> Return {
+    func withAtomicPointerToElement<Return>(_ body: (UnsafeMutablePointer<UnsafeAtomicRawPointer>) throws -> Return) rethrows -> Return {
         return try withUnsafeMutablePointerToElements { target in
-            try target.withMemoryRebound(to: UnsafeAtomicRawPointer.self, capacity: 1) { (atomicPointertoElement) in
-                try body(&atomicPointertoElement.pointee)
-            }
+            try target.withMemoryRebound(to: UnsafeAtomicRawPointer.self, capacity: 1, body)
         }
     }
 
     deinit {
-        guard let ptr = withAtomicPointerToElement({ $0.load(order: .global) }) else { return }
+        guard let ptr = withAtomicPointerToElement({ bnr_atomic_ptr_load($0, .global) }) else { return }
         Element.fromOpaque(ptr).release()
     }
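The DeferredStorage hunk above is the core of the change: withAtomicPointerToElement used to rebind the element pointer and then hand the closure an inout projection of its pointee (body(&atomicPointertoElement.pointee)); now the rebound UnsafeMutablePointer is forwarded to the closure unchanged. A self-contained sketch of that plumbing, with hypothetical stand-ins (ExampleBuffer, a UInt8 element rebound to Int8) in place of Deferred's generics:

// Hypothetical miniature of the DeferredStorage pattern; none of these names
// exist in Deferred.
final class ExampleBuffer: ManagedBuffer<Void, UInt8> {
    static func create() -> ExampleBuffer {
        // ManagedBuffer.create allocates an instance of the class it is
        // invoked on, so the unconditional downcast is safe here.
        return unsafeDowncast(super.create(minimumCapacity: 1, makingHeaderWith: { _ in }), to: ExampleBuffer.self)
    }

    // Counterpart of withAtomicPointerToElement: rebind the element memory to
    // a layout-compatible type and pass the resulting pointer straight through
    // to `body` instead of projecting it into an `inout` argument.
    func withPointerToElement<Return>(_ body: (UnsafeMutablePointer<Int8>) throws -> Return) rethrows -> Return {
        return try withUnsafeMutablePointerToElements { target in
            try target.withMemoryRebound(to: Int8.self, capacity: 1, body)
        }
    }
}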

Sources/Deferred/Locking.swift

Lines changed: 6 additions & 6 deletions
@@ -70,22 +70,22 @@ public final class NativeLock: Locking, MaybeLocking {
 
     /// Creates a standard platform lock.
     public init() {
-        lock.setup()
+        bnr_native_lock_init(&lock)
     }
 
     deinit {
-        lock.invalidate()
+        bnr_native_lock_destroy(&lock)
     }
 
     public func withReadLock<Return>(_ body: () throws -> Return) rethrows -> Return {
-        lock.lock()
-        defer { lock.unlock() }
+        bnr_native_lock_lock(&lock)
+        defer { bnr_native_lock_unlock(&lock) }
         return try body()
     }
 
     public func withAttemptedReadLock<Return>(_ body: () -> Return) -> Return? {
-        guard lock.try() else { return nil }
-        defer { lock.unlock() }
+        guard bnr_native_lock_trylock(&lock) else { return nil }
+        defer { bnr_native_lock_unlock(&lock) }
         return body()
     }
 
Sources/Task/ExistentialTask.swift

Lines changed: 2 additions & 2 deletions
@@ -127,7 +127,7 @@ extension Task {
 
     #if !os(macOS) && !os(iOS) && !os(tvOS) && !os(watchOS)
     fileprivate func markCancelled() {
-        _ = rawIsCancelled.testAndSet()
+        _ = bnr_atomic_flag_test_and_set(&rawIsCancelled)
     }
     #endif
 
@@ -136,7 +136,7 @@
         #if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)
         return progress.isCancelled
         #else
-        return rawIsCancelled.test()
+        return bnr_atomic_flag_test(&rawIsCancelled)
         #endif
     }
 }

Sources/Task/TaskProgress.swift

Lines changed: 7 additions & 6 deletions
@@ -117,7 +117,7 @@ private final class ProxyProgress: Progress {
 
     init(observing observee: Progress, observer: ProxyProgress) {
         self.observer = observer
-        state.setInitialValue(State.ready.rawValue)
+        bnr_atomic_bitmask_init(&state, State.ready.rawValue)
         super.init()
 
         for key in Observation.attributes {
@@ -126,13 +126,13 @@ private final class ProxyProgress: Progress {
         observee.addObserver(self, forKeyPath: #keyPath(Progress.cancelled), options: Observation.options, context: &Observation.cancelledContext)
         observee.addObserver(self, forKeyPath: #keyPath(Progress.paused), options: Observation.options, context: &Observation.pausedContext)
 
-        state.or(with: State.observing.rawValue, order: .write)
+        bnr_atomic_bitmask_or(&state, State.observing.rawValue, .write)
     }
 
     func cancel(observing observee: Progress) {
-        let oldState = State(rawValue: state.and(with: ~State.ready.rawValue, order: .none))
+        let oldState = State(rawValue: bnr_atomic_bitmask_and(&state, ~State.ready.rawValue, .none))
         guard !oldState.isStrictSuperset(of: .cancellable) else { return }
-        state.or(with: State.cancelled.rawValue, order: .none)
+        bnr_atomic_bitmask_or(&state, State.cancelled.rawValue, .none)
 
         for key in Observation.attributes {
             observee.removeObserver(self, forKeyPath: key, context: &Observation.attributesContext)
@@ -142,7 +142,7 @@ private final class ProxyProgress: Progress {
     }
 
     override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
-        guard let keyPath = keyPath, object != nil, state.test(for: State.ready.rawValue), let observer = observer, let newValue = change?[.newKey] else { return }
+        guard let keyPath = keyPath, object != nil, bnr_atomic_bitmask_test(&state, State.ready.rawValue), let observer = observer, let newValue = change?[.newKey] else { return }
         switch context {
         case (&Observation.cancelledContext)?:
             // Gotta trust KVO a little
@@ -239,7 +239,7 @@ extension Progress {
             progress.totalUnitCount = 1
             progress.completedUnitCount = 1
         }
-        
+
         return progress
     }
 }
@@ -317,3 +317,4 @@ extension Task {
 }
 
 #endif
+

Tests/DeferredTests/FutureTests.swift

Lines changed: 2 additions & 2 deletions
@@ -112,7 +112,7 @@ class FutureTests: XCTestCase {
         var counter = UnsafeAtomicCounter()
 
         let mapped = d.every { (value) -> (Int) in
-            counter.increment()
+            bnr_atomic_counter_increment(&counter)
             return value * 2
         }
 
@@ -125,6 +125,6 @@
 
         XCTAssertEqual(mapped.waitShort(), 2)
 
-        XCTAssertEqual(counter.load(), 2)
+        XCTAssertEqual(bnr_atomic_counter_load(&counter), 2)
     }
 }
