/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_StoreBuffer_h
#define gc_StoreBuffer_h

#include "mozilla/Attributes.h"
#include "mozilla/ReentrancyGuard.h"

#include <algorithm>

#include "jsalloc.h"

#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
#include "js/MemoryMetrics.h"

namespace js {
namespace gc {

class ArenaCellSet;

/*
 * BufferableRef represents an abstract reference for use in the generational
 * GC's remembered set. Entries in the store buffer that cannot be represented
 * with the simple pointer-to-a-pointer scheme must derive from this class and
 * use the generic store buffer interface.
 *
 * A single BufferableRef entry in the generic buffer can represent many
 * entries in the remembered set. For example, js::OrderedHashTableRef
 * represents all the incoming edges corresponding to keys in an ordered hash
 * table.
 */
class BufferableRef
{
  public:
    virtual void trace(JSTracer* trc) = 0;
    bool maybeInRememberedSet(const Nursery&) const { return true; }
};
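
/*
 * Illustrative sketch only (not part of this header): a subclass buffering a
 * reference to a container key. MyMap and rekeyIfMoved are hypothetical
 * stand-ins; TraceManuallyBarrieredEdge is the tracing entry point declared
 * in js/TracingAPI.h.
 *
 *   class MapKeyRef : public BufferableRef
 *   {
 *       MyMap* map;
 *       JSObject* key;
 *
 *     public:
 *       MapKeyRef(MyMap* map, JSObject* key) : map(map), key(key) {}
 *       void trace(JSTracer* trc) override {
 *           JSObject* prior = key;
 *           TraceManuallyBarrieredEdge(trc, &key, "hypothetical map key");
 *           if (key != prior)
 *               map->rekeyIfMoved(prior, key);
 *       }
 *   };
 */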

typedef HashSet<void*, PointerHasher<void*, 3>, SystemAllocPolicy> EdgeSet;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 1 << 13; /* 8KiB */

/*
 * The StoreBuffer observes all writes that occur in the system and performs
 * efficient filtering of them to derive a remembered set for nursery GC.
 */
class StoreBuffer
{
    friend class mozilla::ReentrancyGuard;

    /*
     * The amount of space remaining in a block below which we consider it
     * about to overflow.
     */
    static const size_t LowAvailableThreshold = LifoAllocBlockSize / 2;

    /*
     * This buffer holds only a single type of edge. Using this buffer is more
     * efficient than the generic buffer when many writes will be to the same
     * type of edge: e.g. Value or Cell*.
     */
    template<typename T>
    struct MonoTypeBuffer
    {
        /* The canonical set of stores. */
        typedef HashSet<T, typename T::Hasher, SystemAllocPolicy> StoreSet;
        StoreSet stores_;

        /*
         * A one-element cache in front of the canonical set, so that edges
         * which are added and removed in quick succession (e.g. by temporary
         * instances of HeapPtr) need not touch the hash set.
         */
        T last_;

        /* Maximum number of entries before we request a minor GC. */
        const static size_t MaxEntries = 48 * 1024 / sizeof(T);

        explicit MonoTypeBuffer() : last_(T()) {}
        ~MonoTypeBuffer() { stores_.finish(); }

        MOZ_MUST_USE bool init() {
            if (!stores_.initialized() && !stores_.init())
                return false;
            clear();
            return true;
        }

        void clear() {
            last_ = T();
            if (stores_.initialized())
                stores_.clear();
        }

        /* Add one item to the buffer. */
        void put(StoreBuffer* owner, const T& t) {
            MOZ_ASSERT(stores_.initialized());
            sinkStore(owner);
            last_ = t;
        }

        /* Remove an item from the store buffer. */
        void unput(StoreBuffer* owner, const T& v) {
            // Fast, hashless remove of last put.
            if (last_ == v) {
                last_ = T();
                return;
            }
            stores_.remove(v);
        }

        /* Move any buffered stores to the canonical store set. */
        void sinkStore(StoreBuffer* owner) {
            MOZ_ASSERT(stores_.initialized());
            if (last_) {
                AutoEnterOOMUnsafeRegion oomUnsafe;
                if (!stores_.put(last_))
                    oomUnsafe.crash("Failed to allocate for MonoTypeBuffer::put.");
            }
            last_ = T();

            if (MOZ_UNLIKELY(stores_.count() > MaxEntries))
                owner->setAboutToOverflow();
        }

        bool has(StoreBuffer* owner, const T& v) {
            sinkStore(owner);
            return stores_.has(v);
        }

        /* Trace the source of all edges in the store buffer. */
        void trace(StoreBuffer* owner, TenuringTracer& mover);

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return stores_.sizeOfExcludingThis(mallocSizeOf);
        }

      private:
        MonoTypeBuffer& operator=(const MonoTypeBuffer& other) = delete;
    };
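
    /*
     * Sketch of the last_ cache in action (assumed from the code above): a
     * temporary HeapPtr that is created and destroyed with no intervening
     * write to this buffer performs put() followed by unput() on the same
     * edge, so the unput() takes the hashless last_ fast path and the hash
     * set is never modified.
     */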

    struct GenericBuffer
    {
        LifoAlloc* storage_;

        explicit GenericBuffer() : storage_(nullptr) {}
        ~GenericBuffer() { js_delete(storage_); }

        MOZ_MUST_USE bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() &&
                   storage_->availableInCurrentChunk() < LowAvailableThreshold;
        }

        /* Trace all generic edges. */
        void trace(StoreBuffer* owner, JSTracer* trc);

        template <typename T>
        void put(StoreBuffer* owner, const T& t) {
            MOZ_ASSERT(storage_);

            /* Ensure T is derived from BufferableRef. */
            (void)static_cast<const BufferableRef*>(&t);

            AutoEnterOOMUnsafeRegion oomUnsafe;
            unsigned size = sizeof(T);
            unsigned* sizep = storage_->pod_malloc<unsigned>();
            if (!sizep)
                oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
            *sizep = size;

            T* tp = storage_->new_<T>(t);
            if (!tp)
                oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");

            if (isAboutToOverflow())
                owner->setAboutToOverflow();
        }
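
        /*
         * Note (inferred from put() above): each entry is laid out as a size
         * word followed immediately by the T payload, which lets trace(),
         * defined elsewhere, walk the LifoAlloc by reading a size, tracing
         * the BufferableRef that follows, and advancing past it.
         */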

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

        bool isEmpty() {
            return !storage_ || storage_->isEmpty();
        }

      private:
        GenericBuffer& operator=(const GenericBuffer& other) = delete;
    };

    template <typename Edge>
    struct PointerEdgeHasher
    {
        typedef Edge Lookup;
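        // Shift off the low-order bits of the address before hashing: for
        // aligned pointers they are (almost) always zero and carry no entropy.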
        static HashNumber hash(const Lookup& l) { return uintptr_t(l.edge) >> 3; }
        static bool match(const Edge& k, const Lookup& l) { return k == l; }
    };

    struct CellPtrEdge
    {
        Cell** edge;

        CellPtrEdge() : edge(nullptr) {}
        explicit CellPtrEdge(Cell** v) : edge(v) {}
        bool operator==(const CellPtrEdge& other) const { return edge == other.edge; }
        bool operator!=(const CellPtrEdge& other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery& nursery) const {
            MOZ_ASSERT(IsInsideNursery(*edge));
            return !nursery.isInside(edge);
        }

        void trace(TenuringTracer& mover) const;

        CellPtrEdge tagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) | 1)); }
        CellPtrEdge untagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        explicit operator bool() const { return edge != nullptr; }

        typedef PointerEdgeHasher<CellPtrEdge> Hasher;
    };

    struct ValueEdge
    {
        JS::Value* edge;

        ValueEdge() : edge(nullptr) {}
        explicit ValueEdge(JS::Value* v) : edge(v) {}
        bool operator==(const ValueEdge& other) const { return edge == other.edge; }
        bool operator!=(const ValueEdge& other) const { return edge != other.edge; }

        Cell* deref() const {
            return edge->isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr;
        }

        bool maybeInRememberedSet(const Nursery& nursery) const {
            MOZ_ASSERT(IsInsideNursery(deref()));
            return !nursery.isInside(edge);
        }

        void trace(TenuringTracer& mover) const;

        ValueEdge tagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) | 1)); }
        ValueEdge untagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        explicit operator bool() const { return edge != nullptr; }

        typedef PointerEdgeHasher<ValueEdge> Hasher;
    };

    struct SlotsEdge
    {
        // These definitions must match those in HeapSlot::Kind.
        const static int SlotKind = 0;
        const static int ElementKind = 1;

        uintptr_t objectAndKind_; // NativeObject* | Kind
        int32_t start_;
        int32_t count_;

        SlotsEdge() : objectAndKind_(0), start_(0), count_(0) {}
        SlotsEdge(NativeObject* object, int kind, int32_t start, int32_t count)
          : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
        {
            MOZ_ASSERT((uintptr_t(object) & 1) == 0);
            MOZ_ASSERT(kind <= 1);
            MOZ_ASSERT(start >= 0);
            MOZ_ASSERT(count > 0);
        }

        NativeObject* object() const { return reinterpret_cast<NativeObject*>(objectAndKind_ & ~1); }
        int kind() const { return (int)(objectAndKind_ & 1); }

        bool operator==(const SlotsEdge& other) const {
            return objectAndKind_ == other.objectAndKind_ &&
                   start_ == other.start_ &&
                   count_ == other.count_;
        }

        bool operator!=(const SlotsEdge& other) const {
            return !(*this == other);
        }

        // True if this SlotsEdge range overlaps with the other SlotsEdge
        // range, false if they do not overlap.
        bool overlaps(const SlotsEdge& other) const {
            if (objectAndKind_ != other.objectAndKind_)
                return false;

            // Widen our range by one on each side so that we consider
            // adjacent-but-not-actually-overlapping ranges as overlapping.
            // This is particularly useful for coalescing a series of
            // increasing or decreasing single index writes 0, 1, 2, ..., N
            // into a SlotsEdge range of elements [0, N].
            auto end = start_ + count_ + 1;
            auto start = start_ - 1;

            auto otherEnd = other.start_ + other.count_;
            return (start <= other.start_ && other.start_ <= end) ||
                   (start <= otherEnd && otherEnd <= end);
        }

        // Destructively make this SlotsEdge range the union of the other
        // SlotsEdge range and this one. A precondition is that the ranges
        // must overlap.
        void merge(const SlotsEdge& other) {
            MOZ_ASSERT(overlaps(other));
            auto end = Max(start_ + count_, other.start_ + other.count_);
            start_ = Min(start_, other.start_);
            count_ = end - start_;
        }
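
        // Worked example (checked against overlaps() and merge() above):
        // starting from the edge {start 0, count 1}, a write to index 1
        // falls inside the widened range [-1, 2] and merges to
        // {start 0, count 2}; a further write to index 2 falls inside
        // [-1, 3] and merges to {start 0, count 3}, so slots 0..2 are
        // covered by a single edge.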

        bool maybeInRememberedSet(const Nursery& n) const {
            return !IsInsideNursery(reinterpret_cast<Cell*>(object()));
        }

        void trace(TenuringTracer& mover) const;

        explicit operator bool() const { return objectAndKind_ != 0; }

        typedef struct {
            typedef SlotsEdge Lookup;
            static HashNumber hash(const Lookup& l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
            static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; }
        } Hasher;
    };

    template <typename Buffer, typename Edge>
    void unput(Buffer& buffer, const Edge& edge) {
        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
        MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
        if (!isEnabled())
            return;
        mozilla::ReentrancyGuard g(*this);
        buffer.unput(this, edge);
    }

    template <typename Buffer, typename Edge>
    void put(Buffer& buffer, const Edge& edge) {
        MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy());
        MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
        if (!isEnabled())
            return;
        mozilla::ReentrancyGuard g(*this);
        if (edge.maybeInRememberedSet(nursery_))
            buffer.put(this, edge);
    }
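
    // Note the filter in put() above: an edge whose location is itself in
    // the nursery is discarded, since it will be found when the nursery is
    // collected and needs no remembered-set entry.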

    MonoTypeBuffer<ValueEdge> bufferVal;
    MonoTypeBuffer<CellPtrEdge> bufferCell;
    MonoTypeBuffer<SlotsEdge> bufferSlot;
    ArenaCellSet* bufferWholeCell;
    GenericBuffer bufferGeneric;
    bool cancelIonCompilations_;

    JSRuntime* runtime_;
    const Nursery& nursery_;

    bool aboutToOverflow_;
    bool enabled_;
#ifdef DEBUG
    bool mEntered; /* For ReentrancyGuard. */
#endif

  public:
    explicit StoreBuffer(JSRuntime* rt, const Nursery& nursery)
      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(nullptr), bufferGeneric(),
        cancelIonCompilations_(false), runtime_(rt), nursery_(nursery), aboutToOverflow_(false),
        enabled_(false)
#ifdef DEBUG
      , mEntered(false)
#endif
    {
    }

    MOZ_MUST_USE bool enable();
    void disable();
    bool isEnabled() const { return enabled_; }

    void clear();

    /* Get the overflowed status. */
    bool isAboutToOverflow() const { return aboutToOverflow_; }

    bool cancelIonCompilations() const { return cancelIonCompilations_; }

    /* Insert a single edge into the buffer/remembered set. */
    void putValue(JS::Value* vp) { put(bufferVal, ValueEdge(vp)); }
    void unputValue(JS::Value* vp) { unput(bufferVal, ValueEdge(vp)); }
    void putCell(Cell** cellp) { put(bufferCell, CellPtrEdge(cellp)); }
    void unputCell(Cell** cellp) { unput(bufferCell, CellPtrEdge(cellp)); }
    void putSlot(NativeObject* obj, int kind, int32_t start, int32_t count) {
        SlotsEdge edge(obj, kind, start, count);
        if (bufferSlot.last_.overlaps(edge))
            bufferSlot.last_.merge(edge);
        else
            put(bufferSlot, edge);
    }
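
    // E.g. a run of putSlot(obj, ElementKind, i, 1) calls for i = 0..N
    // coalesces in bufferSlot.last_, so the hash set receives at most one
    // edge covering the whole range rather than N+1 separate entries.
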
    inline void putWholeCell(Cell* cell);

    /* Insert an entry into the generic buffer. */
    template <typename T>
    void putGeneric(const T& t) { put(bufferGeneric, t); }

    void setShouldCancelIonCompilations() {
        cancelIonCompilations_ = true;
    }

    /* Methods to trace the source of all edges in the store buffer. */
    void traceValues(TenuringTracer& mover) { bufferVal.trace(this, mover); }
    void traceCells(TenuringTracer& mover) { bufferCell.trace(this, mover); }
    void traceSlots(TenuringTracer& mover) { bufferSlot.trace(this, mover); }
    void traceGenericEntries(JSTracer* trc) { bufferGeneric.trace(this, trc); }

    void traceWholeCells(TenuringTracer& mover);
    void traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* cell);

    /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();

    void addToWholeCellBuffer(ArenaCellSet* set);

    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes* sizes);
};
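
/*
 * Usage sketch (illustrative only; real barrier call sites live elsewhere in
 * the engine, and the function below is hypothetical): after a write stores a
 * nursery-allocated thing into a tenured object's slot, a post write barrier
 * records the slot's address; put() drops the entry if the slot is itself in
 * the nursery.
 *
 *   void HypotheticalPostBarrier(StoreBuffer& sb, JS::Value* slot) {
 *       sb.putValue(slot);
 *   }
 */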

// A set of cells in an arena used to implement the whole cell store buffer.
class ArenaCellSet
{
    friend class StoreBuffer;

    // The arena this relates to.
    Arena* arena;

    // Pointer to next set forming a linked list.
    ArenaCellSet* next;

    // Bit vector for each possible cell start position.
    BitArray<MaxArenaCellIndex> bits;

  public:
    explicit ArenaCellSet(Arena* arena);

    bool hasCell(const TenuredCell* cell) const {
        return hasCell(getCellIndex(cell));
    }

    void putCell(const TenuredCell* cell) {
        putCell(getCellIndex(cell));
    }

    bool isEmpty() const {
        return this == &Empty;
    }

    bool hasCell(size_t cellIndex) const;

    void putCell(size_t cellIndex);

    void check() const;

    // Sentinel object used for all empty sets.
    static ArenaCellSet Empty;

    static size_t getCellIndex(const TenuredCell* cell);
    static void getWordIndexAndMask(size_t cellIndex, size_t* wordp, uint32_t* maskp);

    // Attempt to trigger a minor GC if free space in the nursery (where these
    // objects are allocated) falls below this threshold.
    static const size_t NurseryFreeThresholdBytes = 64 * 1024;

    static size_t offsetOfArena() {
        return offsetof(ArenaCellSet, arena);
    }
    static size_t offsetOfBits() {
        return offsetof(ArenaCellSet, bits);
    }
};

ArenaCellSet* AllocateWholeCellSet(Arena* arena);

} /* namespace gc */
} /* namespace js */

#endif /* gc_StoreBuffer_h */