//===--- RefCount.cpp -----------------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

#include <cstdio>

#include "swift/Runtime/HeapObject.h"

namespace swift {

// Return an object's side table, allocating it if necessary.
// Returns null if the object is deiniting.
// SideTableRefCountBits specialization intentionally does not exist.
template <>
HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
{
  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);

  // Preflight failures before allocating a new side table.
  if (oldbits.hasSideTable()) {
    // Already have a side table. Return it.
    return oldbits.getSideTable();
  }
  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
    // Already past the start of deinit. Do nothing.
    return nullptr;
  }

  // Preflight passed. Allocate a side table.

  // FIXME: custom side table allocator
  auto side = swift_cxx_newObject<HeapObjectSideTableEntry>(getHeapObject());

  auto newbits = InlineRefCountBits(side);
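
  // Note: on failure, compare_exchange_weak reloads `oldbits` with the value
  // it actually observed, so each iteration of the loop below re-checks
  // whether another thread already installed a side table (or deinit began)
  // and re-seeds the new side table's counts from the current inline bits.
  // The release ordering on success publishes the side table's contents to
  // threads that subsequently load the new refcount bits.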
  do {
    if (oldbits.hasSideTable()) {
      // Already have a side table. Return it and delete ours.
      // Read before delete to streamline barriers.
      auto result = oldbits.getSideTable();
      swift_cxx_deleteObject(side);
      return result;
    }
    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
      // Already past the start of deinit. Do nothing.
      return nullptr;
    }

    side->initRefCounts(oldbits);

  } while (! refCounts.compare_exchange_weak(oldbits, newbits,
                                             std::memory_order_release,
                                             std::memory_order_relaxed));
  return side;
}
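
// Slow path for retain: handles the cases the inline fast path cannot,
// namely immortal objects, objects that already use a side table, and
// inline strong-count overflow, which spills into a new side table.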
template <>
HeapObject *RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits,
                                                          uint32_t n) {
  if (oldbits.isImmortal(false)) {
    return getHeapObject();
  }
  else if (oldbits.hasSideTable()) {
    // Out-of-line slow path.
    auto side = oldbits.getSideTable();
    side->incrementStrong(n);
  }
  else {
    // Overflow into a new side table.
    auto side = allocateSideTable(false);
    side->incrementStrong(n);
  }
  return getHeapObject();
}

template <>
HeapObject *RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits,
                                                            uint32_t n) {
  if (oldbits.isImmortal(false)) {
    return getHeapObject();
  }
  else {
    // Retain count overflow.
    swift::swift_abortRetainOverflow();
  }
  return getHeapObject();
}
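
// Non-atomic variants of the retain slow path, for use when the caller knows
// the object is not being accessed concurrently. Note that the side-table
// increments below still go through the atomic entry points (see the FIXMEs).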
template <>
void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits,
                                                           uint32_t n) {
  if (oldbits.isImmortal(false)) {
    return;
  }
  else if (oldbits.hasSideTable()) {
    // Out-of-line slow path.
    auto side = oldbits.getSideTable();
    side->incrementStrong(n); // FIXME: can there be a nonatomic impl?
  } else {
    // Overflow into a new side table.
    auto side = allocateSideTable(false);
    side->incrementStrong(n); // FIXME: can there be a nonatomic impl?
  }
}

template <>
void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n) {
  if (oldbits.isImmortal(false)) {
    return;
  } else {
    swift::swift_abortRetainOverflow();
  }
}

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementSlow(RefCountBits oldbits) {
  if (oldbits.isImmortal(false)) {
    return true;
  }
  else if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrement();
  else
    swift::swift_abortRetainOverflow();
}

template bool RefCounts<InlineRefCountBits>::tryIncrementSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementSlow(SideTableRefCountBits oldbits);

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementNonAtomicSlow(RefCountBits oldbits) {
  if (oldbits.isImmortal(false)) {
    return true;
  }
  else if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrementNonAtomic();
  else
    swift::swift_abortRetainOverflow();
}

template bool RefCounts<InlineRefCountBits>::tryIncrementNonAtomicSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementNonAtomicSlow(SideTableRefCountBits oldbits);
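
// Weak references live entirely in the side table, so forming one forces
// side table allocation. Passing failIfDeiniting=true means no new weak
// reference can be formed once deinit has begun; the caller gets null instead.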
// SideTableRefCountBits specialization intentionally does not exist.
template <>
HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
{
  auto side = allocateSideTable(true);
  if (side)
    return side->incrementWeak();
  else
    return nullptr;
}
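
// Unowned-count overflow likewise spills into the side table. Once the
// counts already live in a side table there is nowhere left to spill, so
// the SideTableRefCountBits specialization below simply aborts.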
template <typename RefCountBits>
void RefCounts<RefCountBits>::incrementUnownedSlow(uint32_t n) {
  auto side = allocateSideTable(false);
  if (side)
    return side->incrementUnowned(n);
  // Overflow but side table allocation failed.
  swift_abortUnownedRetainOverflow();
}

template void RefCounts<InlineRefCountBits>::incrementUnownedSlow(uint32_t n);

template <>
void RefCounts<SideTableRefCountBits>::incrementUnownedSlow(uint32_t n) {
  // Overflow from side table to a new side table?!
  swift_abortUnownedRetainOverflow();
}
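
// Mark a live object as immortal; the slow paths above short-circuit on
// isImmortal() and leave its counts untouched.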
SWIFT_CC(swift) SWIFT_RUNTIME_STDLIB_API
void _swift_stdlib_immortalize(void *obj) {
  auto heapObj = reinterpret_cast<HeapObject *>(obj);
  heapObj->refCounts.setIsImmortal(true);
}

#ifndef NDEBUG
// SideTableRefCountBits specialization intentionally does not exist.
template <>
bool RefCounts<InlineRefCountBits>::isImmutableCOWBuffer() {
  if (!hasSideTable())
    return false;
  HeapObjectSideTableEntry *sideTable = allocateSideTable(false);
  assert(sideTable);
  return sideTable->isImmutableCOWBuffer();
}

template <>
bool RefCounts<InlineRefCountBits>::setIsImmutableCOWBuffer(bool immutable) {
  HeapObjectSideTableEntry *sideTable = allocateSideTable(false);
  assert(sideTable);
  bool oldValue = sideTable->isImmutableCOWBuffer();
  sideTable->setIsImmutableCOWBuffer(immutable);
  return oldValue;
}
#endif
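
// Debugging aid: print this object's strong, unowned, and weak counts, its
// side table pointer, and its deiniting/immortal state to stdout.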
template <typename RefCountBits>
void RefCounts<RefCountBits>::dump() const {
  printf("Location: %p\n", this);
  printf("Strong Ref Count: %d.\n", getCount());
  printf("Unowned Ref Count: %d.\n", getUnownedCount());
  printf("Weak Ref Count: %d.\n", getWeakCount());
  printf("RefCount Side Table: %p.\n", getSideTable());
  printf("Is Deiniting: %s.\n", isDeiniting() ? "true" : "false");
  printf("Is Immortal: %s.\n", refCounts.load().isImmortal(false) ? "true" : "false");
}
// namespace swift
} // namespace swift