/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "../private/SkTLogic.h"
#include "SkTypes.h"
#include <atomic>
#include <functional>
#include <memory>
#include <type_traits>
#include <utility>

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    owners. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase : SkNoncopyable {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
#ifdef SK_DEBUG
        SkASSERTF(getRefCnt() == 1, "fRefCnt was %d", getRefCnt());
        // illegal value, to catch us if we reuse after delete
        fRefCnt.store(0, std::memory_order_relaxed);
#endif
    }

    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }

#ifdef SK_DEBUG
    void validate() const {
        SkASSERT(getRefCnt() > 0);
    }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        if (1 == fRefCnt.load(std::memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true. It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
        SkASSERT(getRefCnt() > 0);
        // No barrier required.
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called or if they
     *  choose not to call the destructor (e.g. using a free list).
     */
    void internal_dispose_restore_refcnt_to_1() const {
        SkASSERT(0 == getRefCnt());
        fRefCnt.store(1, std::memory_order_relaxed);
    }

private:
    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    mutable std::atomic<int32_t> fRefCnt;

    typedef SkNoncopyable INHERITED;
};
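
// A minimal usage sketch (illustrative only; ExampleRefCntClient is a
// hypothetical client type, not part of Skia). It shows the ref()/unref()
// contract documented above; guarded with #if 0 so it is never compiled.
#if 0
class ExampleRefCntClient : public SkRefCntBase {};

void example_manual_ref_counting() {
    ExampleRefCntClient* obj = new ExampleRefCntClient;  // refcnt == 1; creator owns it
    obj->ref();    // a second owner shares it; refcnt == 2
    obj->unref();  // one owner releases; refcnt == 1, object survives
    obj->unref();  // last owner releases; refcnt hits 0, the virtual dtor deletes it
}
#endif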

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase {
    // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
#if defined(GOOGLE3)
public:
    void deref() const { this->unref(); }
#endif
};
#endif

///////////////////////////////////////////////////////////////////////////////

/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
*/

#if defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
// This version heuristically detects data races, since those otherwise result
// in redundant reference count decrements, which are exceedingly
// difficult to debug.

#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        typedef typename std::remove_reference<decltype(dst)>::type \
                SkRefCntPtrT;           \
        SkRefCntPtrT old_dst = *const_cast<SkRefCntPtrT volatile *>(&dst); \
        if (src) src->ref();            \
        if (old_dst) old_dst->unref();  \
        if (old_dst != *const_cast<SkRefCntPtrT volatile *>(&dst)) { \
            SkDebugf("Detected racing Skia calls at %s:%d\n", \
                     __FILE__, __LINE__); \
        }                               \
        dst = src;                      \
    } while (0)

#else /* !SK_BUILD_FOR_ANDROID_FRAMEWORK */

#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) src->ref();            \
        if (dst) dst->unref();          \
        dst = src;                      \
    } while (0)

#endif
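
// A hedged usage sketch (ExampleLayerHolder is hypothetical, not a Skia type):
// the macro refs the incoming pointer before unref'ing the old one, so
// assigning the currently-held object back to itself can never drop its count
// to zero mid-assignment. Guarded with #if 0; documentation only.
#if 0
class ExampleLayerHolder {
public:
    void setLayer(SkRefCnt* layer) {
        SkRefCnt_SafeAssign(fLayer, layer);  // safe even when layer == fLayer
    }
    ~ExampleLayerHolder() {
        if (fLayer) {
            fLayer->unref();
        }
    }
private:
    SkRefCnt* fLayer = nullptr;
};
#endif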

/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref().
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}

template <typename T> static inline void SkSafeSetNull(T*& obj) {
    if (obj) {
        obj->unref();
        obj = nullptr;
    }
}
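
// Small sketches of the null-safe helpers (function and parameter names are
// illustrative, not part of the API). Guarded with #if 0; documentation only.
#if 0
SkRefCnt* example_retain(SkRefCnt* src) {
    return SkSafeRef(src);  // ref()s only when src is non-null; caller owns the result
}

void example_release(SkRefCnt*& slot) {
    SkSafeSetNull(slot);    // unref()s if non-null, then sets the pointer to nullptr
}
#endif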

///////////////////////////////////////////////////////////////////////////////

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt : SkNoncopyable {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() { SkASSERTF(1 == getRefCnt(), "NVRefCnt was %d", getRefCnt()); }

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
    void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
    void unref() const {
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // restore the 1 for our destructor's assert
            SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable std::atomic<int32_t> fRefCnt;
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }
};
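
// A minimal sketch of deriving from SkNVRefCnt (ExamplePayload is
// hypothetical). The deriving class passes itself as Derived (CRTP) so that
// unref() can 'delete (const Derived*)this' without any vtable. Guarded with
// #if 0; documentation only.
#if 0
class ExamplePayload : public SkNVRefCnt<ExamplePayload> {
public:
    int fValue = 0;
};

void example_nv_ref_counting() {
    ExamplePayload* p = new ExamplePayload;  // refcnt == 1
    p->ref();                                // refcnt == 2
    p->unref();                              // refcnt == 1
    p->unref();                              // refcnt == 0: deleted, no virtual dispatch
}
#endif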

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface but make different internal choices: e.g. the hosted class
 *  may have thread-safe ref/unref, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
    /** Supports safe bool idiom. Obsolete with explicit operator bool. */
    using unspecified_bool_type = T* sk_sp::*;
public:
    using element_type = T;

    constexpr sk_sp() : fPtr(nullptr) {}
    constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    // MSVC 2013 does not work correctly with explicit operator bool.
    // https://chromium-cpp.appspot.com/#core-blacklist
    // When explicit operator bool can be used, remove operator! and operator unspecified_bool_type.
    //explicit operator bool() const { return this->get() != nullptr; }
    operator unspecified_bool_type() const { return this->get() ? &sk_sp::fPtr : nullptr; }
    bool operator!() const { return this->get() == nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T* fPtr;
};
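
// A hedged sketch of the three ways an sk_sp typically comes to reference an
// object (ExampleNode is hypothetical). Guarded with #if 0; documentation only.
#if 0
class ExampleNode : public SkRefCnt {};

void example_sk_sp_ownership() {
    sk_sp<ExampleNode> a(new ExampleNode);  // adopts: no ref(), refcnt stays 1
    sk_sp<ExampleNode> b = a;               // shares: the copy calls ref(), refcnt == 2
    sk_sp<ExampleNode> c = std::move(b);    // moves: no ref()/unref(), b is now null
}   // a and c each unref() on destruction; the node dies with the last one
#endif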

template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401
    return std::less<skstd::common_type_t<T*, U*>>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different from the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}
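
// A hedged sketch contrasting the two helpers above ('borrowed' stands for a
// pointer whose reference is owned by the caller). Guarded with #if 0;
// documentation only.
#if 0
void example_factories(SkRefCnt* borrowed) {
    // sk_make_sp news the object and adopts its initial reference (refcnt == 1).
    sk_sp<SkRefCnt> made = sk_make_sp<SkRefCnt>();

    // sk_ref_sp shares an existing object by calling ref(); the caller's own
    // reference to 'borrowed' remains valid and must still be unref'd separately.
    sk_sp<SkRefCnt> shared = sk_ref_sp(borrowed);
}
#endif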

#endif