Line data Source code
1 : /*
2 : * Copyright 2011 Google Inc.
3 : *
4 : * Use of this source code is governed by a BSD-style license that can be
5 : * found in the LICENSE file.
6 : */
7 :
8 : #include "SkBitmapCache.h"
9 : #include "SkMutex.h"
10 : #include "SkPixelRef.h"
11 : #include "SkTraceEvent.h"
12 :
13 : //#define SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
14 : //#define SK_TRACE_PIXELREF_LIFETIME
15 :
16 : #include "SkNextID.h"
17 :
18 211 : uint32_t SkNextID::ImageID() {
19 : static uint32_t gID = 0;
20 : uint32_t id;
21 : // Loop in case our global wraps around, as we never want to return a 0.
22 0 : do {
23 211 : id = sk_atomic_fetch_add(&gID, 2u) + 2; // Never set the low bit.
24 211 : } while (0 == id);
25 211 : return id;
26 : }
27 :
28 : ///////////////////////////////////////////////////////////////////////////////
29 :
30 : // just need a > 0 value, so pick a funny one to aid in debugging
31 : #define SKPIXELREF_PRELOCKED_LOCKCOUNT 123456789
32 :
33 270 : static SkImageInfo validate_info(const SkImageInfo& info) {
34 270 : SkAlphaType newAlphaType = info.alphaType();
35 270 : SkAssertResult(SkColorTypeValidateAlphaType(info.colorType(), info.alphaType(), &newAlphaType));
36 270 : return info.makeAlphaType(newAlphaType);
37 : }
38 :
39 921 : static void validate_pixels_ctable(const SkImageInfo& info, const SkColorTable* ctable) {
40 921 : if (info.isEmpty()) {
41 0 : return; // can't require ctable if the dimensions are empty
42 : }
43 921 : if (kIndex_8_SkColorType == info.colorType()) {
44 0 : SkASSERT(ctable);
45 : } else {
46 921 : SkASSERT(nullptr == ctable);
47 : }
48 : }
49 :
50 : #ifdef SK_TRACE_PIXELREF_LIFETIME
51 : static int32_t gInstCounter;
52 : #endif
53 :
#ifdef SK_SUPPORT_LEGACY_NO_ADDR_PIXELREF
// Legacy constructor: creates a pixelref with no pixel memory yet; pixels are
// supplied later (e.g. via setPreLocked() or the lockPixels() machinery).
SkPixelRef::SkPixelRef(const SkImageInfo& info)
    : fInfo(validate_info(info))
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    , fStableID(SkNextID::ImageID())
#endif

{
#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf(" pixelref %d\n", sk_atomic_inc(&gInstCounter));
#endif
    fRec.zero();                // no pixels locked yet
    fLockCount = 0;
    this->needsNewGenID();      // genID is minted lazily on first request
    fMutability = kMutable;
    fPreLocked = false;
    fAddedToCache.store(false);
}
#endif
73 :
// Constructs a pixelref that is born "pre-locked": the caller supplies the
// pixel memory up front, so lock/unlock become no-ops for this object.
SkPixelRef::SkPixelRef(const SkImageInfo& info, void* pixels, size_t rowBytes,
                       sk_sp<SkColorTable> ctable)
    : fInfo(validate_info(info))
    , fCTable(std::move(ctable))
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    , fStableID(SkNextID::ImageID())
#endif
{
    validate_pixels_ctable(fInfo, fCTable.get());
    SkASSERT(rowBytes >= info.minRowBytes());
#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf(" pixelref %d\n", sk_atomic_inc(&gInstCounter));
#endif
    fRec.fPixels = pixels;
    fRec.fRowBytes = rowBytes;
    fRec.fColorTable = fCTable.get();

    // Sentinel value marks this ref as permanently locked (see the #define above).
    fLockCount = SKPIXELREF_PRELOCKED_LOCKCOUNT;
    this->needsNewGenID();      // genID is minted lazily on first request
    fMutability = kMutable;
    fPreLocked = true;
    fAddedToCache.store(false);
}
97 :
SkPixelRef::~SkPixelRef() {
#ifndef SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
    // At destruction the ref must be either pre-locked (sentinel count) or fully unlocked.
    SkASSERT(SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount || 0 == fLockCount);
#endif

#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf("~pixelref %d\n", sk_atomic_dec(&gInstCounter) - 1);
#endif
    // Give genID-change listeners their one-shot notification before we disappear.
    this->callGenIDChangeListeners();
}
108 :
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
// Re-points this pixelref at new geometry/ctable without changing the pixel
// address. This is undefined if there are clients in-flight trying to use us.
void SkPixelRef::android_only_reset(const SkImageInfo& info, size_t rowBytes,
                                    sk_sp<SkColorTable> ctable) {
    validate_pixels_ctable(info, ctable.get());

    // fInfo is logically const; this entry point deliberately rewrites it.
    *const_cast<SkImageInfo*>(&fInfo) = info;
    fCTable = std::move(ctable);
    // note: we do not change fRec.fPixels
    fRec.fRowBytes = rowBytes;
    fRec.fColorTable = fCTable.get();

    // conservative, since it's possible the "new" settings are the same as the old.
    this->notifyPixelsChanged();
}
#endif
125 :
// Resets the tagged genID to 0 so the next getGenerationID() call mints a fresh one.
void SkPixelRef::needsNewGenID() {
    fTaggedGenID.store(0);
    SkASSERT(!this->genIDIsUnique()); // This method isn't threadsafe, so the assert should be fine.
}
130 :
// Makes this pixelref share `that`'s generation ID; afterwards neither ref's
// ID is considered unique (the low tag bit is cleared on both).
void SkPixelRef::cloneGenID(const SkPixelRef& that) {
    // This is subtle. We must call that.getGenerationID() to make sure its genID isn't 0.
    uint32_t genID = that.getGenerationID();

    // Neither ID is unique any more.
    // (These & ~1u are actually redundant. that.getGenerationID() just did it for us.)
    this->fTaggedGenID.store(genID & ~1u);
    that. fTaggedGenID.store(genID & ~1u);

    // This method isn't threadsafe, so these asserts should be fine.
    SkASSERT(!this->genIDIsUnique());
    SkASSERT(!that. genIDIsUnique());
}
144 :
#ifdef SK_SUPPORT_LEGACY_NO_ADDR_PIXELREF
// Legacy: installs caller-owned pixels and marks this ref permanently locked.
void SkPixelRef::setPreLocked(void* pixels, size_t rowBytes, SkColorTable* ctable) {
    SkASSERT(pixels);
    validate_pixels_ctable(fInfo, ctable);
    // only call me in your constructor, otherwise fLockCount tracking can get
    // out of sync.
    fRec.fPixels = pixels;
    fRec.fColorTable = ctable;
    fRec.fRowBytes = rowBytes;
    fLockCount = SKPIXELREF_PRELOCKED_LOCKCOUNT;
    fPreLocked = true;
}
#endif
158 :
// Increments fLockCount only on success
bool SkPixelRef::lockPixelsInsideMutex() {
    fMutex.assertHeld();

    // First locker asks the subclass to actually produce the pixels.
    if (1 == ++fLockCount) {
        SkASSERT(fRec.isZero());
        if (!this->onNewLockPixels(&fRec)) {
            fRec.zero();
            fLockCount -= 1;    // we return fLockCount unchanged if we fail.
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    // no pixels, so we failed (somehow)
    --fLockCount;
    return false;
}
179 :
// For historical reasons, we always inc fLockCount, even if we return false.
// It would be nice to change this (it seems), and only inc if we actually succeed...
bool SkPixelRef::lockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        // Trace the mutex acquisition separately so contention shows up in profiles.
        TRACE_EVENT_BEGIN0("skia", "SkPixelRef::lockPixelsMutex");
        SkAutoMutexAcquire ac(fMutex);
        TRACE_EVENT_END0("skia", "SkPixelRef::lockPixelsMutex");
        SkDEBUGCODE(int oldCount = fLockCount;)
        bool success = this->lockPixelsInsideMutex();
        // lockPixelsInsideMutex only increments the count if it succeeds.
        SkASSERT(oldCount + (int)success == fLockCount);

        if (!success) {
            // For compatibility with SkBitmap calling lockPixels, we still want to increment
            // fLockCount even if we failed. If we updated SkBitmap we could remove this oddity.
            fLockCount += 1;
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    return false;
}
207 :
208 0 : bool SkPixelRef::lockPixels(LockRec* rec) {
209 0 : if (this->lockPixels()) {
210 0 : *rec = fRec;
211 0 : return true;
212 : }
213 0 : return false;
214 : }
215 :
// Balances lockPixels(); the last unlock lets the subclass release its pixel storage.
void SkPixelRef::unlockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        SkAutoMutexAcquire ac(fMutex);

        SkASSERT(fLockCount > 0);
        if (0 == --fLockCount) {
            // don't call onUnlockPixels unless onLockPixels succeeded
            if (fRec.fPixels) {
                this->onUnlockPixels();
                fRec.zero();
            } else {
                SkASSERT(fRec.isZero());
            }
        }
    }
}
234 :
// Attempts to lock the pixels described by `request`, filling `result` on success.
// Fails for empty or mismatched dimensions (subsets are not supported yet), or
// when the subclass cannot produce pixels.
bool SkPixelRef::requestLock(const LockRequest& request, LockResult* result) {
    SkASSERT(result);
    if (request.fSize.isEmpty()) {
        return false;
    }
    // until we support subsets, we have to check this...
    if (request.fSize.width() != fInfo.width() || request.fSize.height() != fInfo.height()) {
        return false;
    }

    if (fPreLocked) {
        // Pre-locked refs need no unlock callback: the pixels live as long as we do.
        result->fUnlockProc = nullptr;
        result->fUnlockContext = nullptr;
        result->fCTable = fRec.fColorTable;
        result->fPixels = fRec.fPixels;
        result->fRowBytes = fRec.fRowBytes;
        result->fSize.set(fInfo.width(), fInfo.height());
    } else {
        SkAutoMutexAcquire ac(fMutex);
        if (!this->internalRequestLock(request, result)) {
            return false;
        }
    }
    if (result->fPixels) {
        validate_pixels_ctable(fInfo, result->fCTable);
        return true;
    }
    return false;
}
264 :
// Returns the generation ID (minting one on first use) with the low
// "uniqueness" tag bit masked off.
uint32_t SkPixelRef::getGenerationID() const {
    uint32_t id = fTaggedGenID.load();
    if (0 == id) {
        // Mint a fresh ID; the low bit set tags it as unique to this pixelref.
        uint32_t next = SkNextID::ImageID() | 1u;
        if (fTaggedGenID.compare_exchange(&id, next)) {
            id = next;  // There was no race or we won the race.  fTaggedGenID is next now.
        } else {
            // We lost a race to set fTaggedGenID. compare_exchange() filled id with the winner.
        }
        // We can't quite SkASSERT(this->genIDIsUnique()). It could be non-unique
        // if we got here via the else path (pretty unlikely, but possible).
    }
    return id & ~1u;  // Mask off bottom unique bit.
}
279 :
280 0 : void SkPixelRef::addGenIDChangeListener(GenIDChangeListener* listener) {
281 0 : if (nullptr == listener || !this->genIDIsUnique()) {
282 : // No point in tracking this if we're not going to call it.
283 0 : delete listener;
284 0 : return;
285 : }
286 0 : *fGenIDChangeListeners.append() = listener;
287 : }
288 :
// we need to be called *before* the genID gets changed or zeroed
void SkPixelRef::callGenIDChangeListeners() {
    // We don't invalidate ourselves if we think another SkPixelRef is sharing our genID.
    if (this->genIDIsUnique()) {
        for (int i = 0; i < fGenIDChangeListeners.count(); i++) {
            fGenIDChangeListeners[i]->onChange();
        }

        // TODO: SkAtomic could add "old_value = atomic.xchg(new_value)" to make this clearer.
        if (fAddedToCache.load()) {
            // Tell the bitmap cache this genID is about to become stale.
            SkNotifyBitmapGenIDIsStale(this->getGenerationID());
            fAddedToCache.store(false);
        }
    }
    // Listeners get at most one shot, so whether these triggered or not, blow them away.
    fGenIDChangeListeners.deleteAll();
}
306 :
// Call after the pixel contents change: fires genID listeners, invalidates the
// genID, and gives subclasses a hook via onNotifyPixelsChanged().
void SkPixelRef::notifyPixelsChanged() {
#ifdef SK_DEBUG
    if (this->isImmutable()) {
        SkDebugf("========== notifyPixelsChanged called on immutable pixelref");
    }
#endif
    this->callGenIDChangeListeners();
    this->needsNewGenID();
    this->onNotifyPixelsChanged();
}
317 :
// Rewrites the alpha type inside our logically-const fInfo in place.
void SkPixelRef::changeAlphaType(SkAlphaType at) {
    *const_cast<SkImageInfo*>(&fInfo) = fInfo.makeAlphaType(at);
}
321 :
// Permanently marks the pixels as immutable.
void SkPixelRef::setImmutable() {
    fMutability = kImmutable;
}
325 :
// Marks the pixels immutable and forces the genID to the supplied value.
void SkPixelRef::setImmutableWithID(uint32_t genID) {
    /*
     *  We are forcing the genID to match an external value. The caller must ensure that this
     *  value does not conflict with other content.
     *
     *  One use is to force this pixelref's id to match an SkImage's id
     */
    fMutability = kImmutable;
    fTaggedGenID.store(genID);
}
336 :
// Marks the pixels immutable until restoreMutability() is called; illegal on a
// permanently immutable ref.
void SkPixelRef::setTemporarilyImmutable() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kTemporarilyImmutable;
}
341 :
// Undoes setTemporarilyImmutable(); illegal on a permanently immutable ref.
void SkPixelRef::restoreMutability() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kMutable;
}
346 :
347 : ///////////////////////////////////////////////////////////////////////////////////////////////////
348 :
349 :
// Subclass hook invoked by notifyPixelsChanged(); default is a no-op.
void SkPixelRef::onNotifyPixelsChanged() { }
351 :
// Default: subclasses that track their allocation size override this; 0 here
// means the size is unknown/untracked.
size_t SkPixelRef::getAllocatedSizeInBytes() const {
    return 0;
}
355 :
356 0 : static void unlock_legacy_result(void* ctx) {
357 0 : SkPixelRef* pr = (SkPixelRef*)ctx;
358 0 : pr->unlockPixels();
359 0 : pr->unref(); // balancing the Ref in onRequestLoc
360 0 : }
361 :
// Slow path for requestLock() (caller already holds fMutex): takes a real lock
// and fills `result` with an unlock callback that balances it.
bool SkPixelRef::internalRequestLock(const LockRequest& request, LockResult* result) {
    if (!this->lockPixelsInsideMutex()) {
        return false;
    }

    result->fUnlockProc = unlock_legacy_result;
    result->fUnlockContext = SkRef(this);   // this is balanced in our fUnlockProc
    result->fCTable = fRec.fColorTable;
    result->fPixels = fRec.fPixels;
    result->fRowBytes = fRec.fRowBytes;
    result->fSize.set(fInfo.width(), fInfo.height());
    return true;
}
|