|
| 1 | +// Implementation vendored from SwiftNIO: |
| 2 | +// https://github.com/apple/swift-nio |
| 3 | + |
| 4 | +//===----------------------------------------------------------------------===// |
| 5 | +// |
| 6 | +// This source file is part of the SwiftNIO open source project |
| 7 | +// |
| 8 | +// Copyright (c) 2017-2022 Apple Inc. and the SwiftNIO project authors |
| 9 | +// Licensed under Apache License v2.0 |
| 10 | +// |
| 11 | +// See LICENSE.txt for license information |
| 12 | +// See CONTRIBUTORS.txt for the list of SwiftNIO project authors |
| 13 | +// |
| 14 | +// SPDX-License-Identifier: Apache-2.0 |
| 15 | +// |
| 16 | +//===----------------------------------------------------------------------===// |
| 17 | + |
| 18 | +#if canImport(Darwin) |
| 19 | +import Darwin |
| 20 | +#elseif os(Windows) |
| 21 | +import ucrt |
| 22 | +import WinSDK |
| 23 | +#elseif canImport(Glibc) |
| 24 | +import Glibc |
| 25 | +#elseif canImport(Musl) |
| 26 | +import Musl |
| 27 | +#else |
| 28 | +#error("The concurrency NIOLock module was unable to identify your C library.") |
| 29 | +#endif |
| 30 | + |
// The platform's native lock primitive: on Windows an SRWLOCK (a slim
// reader/writer lock, used here exclusively in its "exclusive" mode so it
// behaves as a plain mutex), on every pthread-based platform a
// pthread_mutex_t. Callers interact with it only through `LockOperations`.
#if os(Windows)
@usableFromInline
typealias LockPrimitive = SRWLOCK
#else
@usableFromInline
typealias LockPrimitive = pthread_mutex_t
#endif
| 38 | + |
/// Namespace for the platform-specific operations on `LockPrimitive`.
///
/// Every function takes a pointer to the primitive; the pointer must stay
/// valid and stable for the lifetime of the lock. `LockStorage` guarantees
/// this by tail-allocating the primitive inside a `ManagedBuffer`.
@usableFromInline
enum LockOperations { }

extension LockOperations {
    /// Initializes the lock primitive at `mutex`. Must be called exactly once
    /// before any `lock`/`unlock`, and balanced by `destroy`.
    @inlinable
    static func create(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
        mutex.assertValidAlignment()

#if os(Windows)
        InitializeSRWLock(mutex)
#else
        var attr = pthread_mutexattr_t()
        pthread_mutexattr_init(&attr)
        // POSIX requires releasing the attribute object once the mutex has
        // been initialized; without this the attr is leaked on conforming
        // implementations (it happens to be a no-op on glibc).
        defer { pthread_mutexattr_destroy(&attr) }
        debugOnly {
            // Error-checking mutexes detect recursive locking and unlocking
            // from the wrong thread; only pay that cost in debug builds.
            pthread_mutexattr_settype(&attr, .init(PTHREAD_MUTEX_ERRORCHECK))
        }

        let err = pthread_mutex_init(mutex, &attr)
        precondition(err == 0, "\(#function) failed in pthread_mutex with error \(err)")
#endif
    }

    /// Tears down the lock primitive at `mutex`. The lock must not be held.
    @inlinable
    static func destroy(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
        mutex.assertValidAlignment()

#if os(Windows)
        // SRWLOCK does not need to be free'd
#else
        let err = pthread_mutex_destroy(mutex)
        precondition(err == 0, "\(#function) failed in pthread_mutex with error \(err)")
#endif
    }

    /// Acquires the lock, blocking until it becomes available.
    @inlinable
    static func lock(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
        mutex.assertValidAlignment()

#if os(Windows)
        AcquireSRWLockExclusive(mutex)
#else
        let err = pthread_mutex_lock(mutex)
        precondition(err == 0, "\(#function) failed in pthread_mutex with error \(err)")
#endif
    }

    /// Releases the lock. Must be called by the holder of the lock.
    @inlinable
    static func unlock(_ mutex: UnsafeMutablePointer<LockPrimitive>) {
        mutex.assertValidAlignment()

#if os(Windows)
        ReleaseSRWLockExclusive(mutex)
#else
        let err = pthread_mutex_unlock(mutex)
        precondition(err == 0, "\(#function) failed in pthread_mutex with error \(err)")
#endif
    }
}
| 97 | + |
// Tail-allocates the lock primitive next to a generic value via
// ManagedBuffer. Both the header pointer and the elements pointer are
// stable for the class's entire lifetime, so pthread can safely keep
// internal pointers into the primitive.
//
// The Value deliberately lives in the header and the LockPrimitive in the
// trailing elements, not the other way around. ManagedBuffer's `with`
// accessors reach the header via `Builtin.addressOf`-style magic that does
// not participate in Swift's exclusivity checking, whereas the `.header`
// property does — so a stray `.header` access could in principle overlap a
// `withUnsafeMutablePointerToHeader` access undetected. The elements can
// only ever be reached through the `with` APIs, making them the safer home
// for the lock. This implementation never touches `.header` anyway, but
// being cautious here costs nothing and spares future maintainers.
//
// See also: https://github.com/apple/swift/pull/40000
@usableFromInline
final class LockStorage<Value>: ManagedBuffer<Value, LockPrimitive> {

    /// Allocates storage holding `value` plus a freshly initialized lock.
    @inlinable
    static func create(value: Value) -> Self {
        let storage = unsafeDowncast(
            Self.create(minimumCapacity: 1) { _ in value },
            to: Self.self
        )
        storage.withUnsafeMutablePointers { _, lockPtr in
            LockOperations.create(lockPtr)
        }
        return storage
    }

    /// Acquires the lock, blocking until it becomes available.
    @inlinable
    func lock() {
        self.withUnsafeMutablePointerToElements(LockOperations.lock)
    }

    /// Releases the lock.
    @inlinable
    func unlock() {
        self.withUnsafeMutablePointerToElements(LockOperations.unlock)
    }

    @inlinable
    deinit {
        // Balance the `LockOperations.create` performed in `create(value:)`.
        self.withUnsafeMutablePointerToElements(LockOperations.destroy)
    }

    /// Exposes the raw lock primitive for the duration of `body`.
    @inlinable
    func withLockPrimitive<T>(_ body: (UnsafeMutablePointer<LockPrimitive>) throws -> T) rethrows -> T {
        try self.withUnsafeMutablePointerToElements { lockPtr in
            try body(lockPtr)
        }
    }

    /// Runs `mutate` on the stored value while holding the lock.
    @inlinable
    func withLockedValue<T>(_ mutate: (inout Value) throws -> T) rethrows -> T {
        try self.withUnsafeMutablePointers { valuePtr, lockPtr in
            LockOperations.lock(lockPtr)
            defer { LockOperations.unlock(lockPtr) }
            return try mutate(&valuePtr.pointee)
        }
    }
}

// Safe: all access to the stored value is mediated by the lock, and the
// lock primitive itself is designed for cross-thread use.
extension LockStorage: @unchecked Sendable { }
| 182 | + |
/// A threading lock based on `libpthread` instead of `libdispatch`.
///
/// - note: ``NIOLock`` has reference semantics.
///
/// This object provides a lock on top of a single `pthread_mutex_t`. This kind
/// of lock is safe to use with `libpthread`-based threading models, such as the
/// one used by NIO. On Windows, the lock is based on the substantially similar
/// `SRWLOCK` type.
@usableFromInline
struct NIOLock {
    // The heap-allocated storage carrying the actual lock primitive; copies
    // of this struct share it, which is what gives the lock its reference
    // semantics.
    @usableFromInline
    internal let _storage: LockStorage<Void>

    /// Create a new lock.
    @inlinable
    init() {
        self._storage = .create(value: ())
    }

    /// Acquire the lock.
    ///
    /// Whenever possible, consider using `withLock` instead of this method and
    /// `unlock`, to simplify lock handling.
    @inlinable
    func lock() {
        self._storage.lock()
    }

    /// Release the lock.
    ///
    /// Whenever possible, consider using `withLock` instead of this method and
    /// `lock`, to simplify lock handling.
    @inlinable
    func unlock() {
        self._storage.unlock()
    }

    /// Exposes the raw lock primitive for the duration of `body`.
    @inlinable
    internal func withLockPrimitive<T>(_ body: (UnsafeMutablePointer<LockPrimitive>) throws -> T) rethrows -> T {
        try self._storage.withLockPrimitive(body)
    }
}
| 225 | + |
extension NIOLock {
    /// Acquire the lock for the duration of the given block.
    ///
    /// This convenience method should be preferred to `lock` and `unlock` in
    /// most situations, as it ensures that the lock will be released regardless
    /// of how `body` exits.
    ///
    /// - Parameter body: The block to execute while holding the lock.
    /// - Returns: The value returned by the block.
    @inlinable
    func withLock<T>(_ body: () throws -> T) rethrows -> T {
        self.lock()
        defer { self.unlock() }
        return try body()
    }

    /// Variant of `withLock` for blocks that produce no value.
    @inlinable
    func withLockVoid(_ body: () throws -> Void) rethrows -> Void {
        try self.withLock(body)
    }
}

// Safe to share across tasks: the only stored property is the lock storage,
// which is itself `@unchecked Sendable`.
extension NIOLock: Sendable {}
| 251 | + |
extension UnsafeMutablePointer {
    /// Debug-build check that this pointer is correctly aligned for `Pointee`.
    /// A misaligned pointer here would indicate a broken allocation, so this
    /// fires before the pointer is handed to pthread / SRWLOCK routines.
    @inlinable
    func assertValidAlignment() {
        let address = UInt(bitPattern: self)
        let alignment = UInt(MemoryLayout<Pointee>.alignment)
        assert(address % alignment == 0)
    }
}
| 258 | + |
/// A utility function that runs the body code only in debug builds, without
/// emitting compiler warnings.
///
/// This is currently the only way to do this in Swift: see
/// https://forums.swift.org/t/support-debug-only-code/11037 for a discussion.
@inlinable
internal func debugOnly(_ body: () -> Void) {
    // FIXME: duplicated with NIO.
    // `assert` evaluates its condition only when assertions are enabled, so
    // the closure — and therefore `body` — runs exclusively in debug builds.
    assert(
        {
            body()
            return true
        }()
    )
}