Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : /* JS Garbage Collector. */
8 :
9 : #ifndef jsgc_h
10 : #define jsgc_h
11 :
12 : #include "mozilla/Atomics.h"
13 : #include "mozilla/EnumeratedArray.h"
14 : #include "mozilla/MemoryReporting.h"
15 : #include "mozilla/TimeStamp.h"
16 : #include "mozilla/TypeTraits.h"
17 :
18 : #include "js/GCAPI.h"
19 : #include "js/SliceBudget.h"
20 : #include "js/Vector.h"
21 : #include "threading/ConditionVariable.h"
22 : #include "threading/Thread.h"
23 : #include "vm/NativeObject.h"
24 :
25 : namespace js {
26 :
27 : class AutoLockHelperThreadState;
28 :
29 : namespace gcstats {
30 : struct Statistics;
31 : } // namespace gcstats
32 :
33 : class Nursery;
34 :
35 : namespace gc {
36 :
37 : struct FinalizePhase;
38 :
// The states of the incremental GC state machine, defined as an x-macro so
// other code can generate per-state code (e.g. names) without repeating the
// list.
#define GCSTATES(D) \
 D(NotActive) \
 D(MarkRoots) \
 D(Mark) \
 D(Sweep) \
 D(Finalize) \
 D(Compact) \
 D(Decommit)
enum class State {
#define MAKE_STATE(name) name,
    GCSTATES(MAKE_STATE)
#undef MAKE_STATE
};
52 :
// Reasons we reset an ongoing incremental GC or perform a non-incremental GC.
// Defined as an x-macro so callers can generate per-reason code without
// repeating the list.
#define GC_ABORT_REASONS(D) \
 D(None) \
 D(NonIncrementalRequested) \
 D(AbortRequested) \
 D(Unused1) \
 D(IncrementalDisabled) \
 D(ModeChange) \
 D(MallocBytesTrigger) \
 D(GCBytesTrigger) \
 D(ZoneChange) \
 D(CompartmentRevived)
enum class AbortReason {
#define MAKE_REASON(name) name,
    GC_ABORT_REASONS(MAKE_REASON)
#undef MAKE_REASON
};
70 :
/*
 * Map from C++ type to alloc kind for non-object types. JSObject does not have
 * a 1:1 mapping, so must use Arena::thingSize.
 *
 * The AllocKind is available as MapTypeToFinalizeKind<SomeType>::kind.
 */
template <typename T> struct MapTypeToFinalizeKind {};
// Specialize MapTypeToFinalizeKind for each non-object alloc kind.
#define EXPAND_MAPTYPETOFINALIZEKIND(allocKind, traceKind, type, sizedType) \
    template <> struct MapTypeToFinalizeKind<type> { \
        static const AllocKind kind = AllocKind::allocKind; \
    };
FOR_EACH_NONOBJECT_ALLOCKIND(EXPAND_MAPTYPETOFINALIZEKIND)
#undef EXPAND_MAPTYPETOFINALIZEKIND
84 :
// ParticipatesInCC<T>::value is true when trace kind T participates in cycle
// collection (CC), as given by the addToCCKind column of JS_FOR_EACH_TRACEKIND.
template <typename T> struct ParticipatesInCC {};
#define EXPAND_PARTICIPATES_IN_CC(_, type, addToCCKind) \
    template <> struct ParticipatesInCC<type> { static const bool value = addToCCKind; };
JS_FOR_EACH_TRACEKIND(EXPAND_PARTICIPATES_IN_CC)
#undef EXPAND_PARTICIPATES_IN_CC
90 :
// Returns whether GC things of the given kind may be allocated in the nursery.
// Per the table, only function kinds and the background-finalized object kinds
// are nursery-allocable; everything else must be tenured from the start.
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    MOZ_ASSERT(IsValidAllocKind(kind));
    // Lookup table indexed by AllocKind. Entries must stay in AllocKind
    // declaration order; the static assert below checks only the count.
    static const bool map[] = {
        true,      /* AllocKind::FUNCTION */
        true,      /* AllocKind::FUNCTION_EXTENDED */
        false,     /* AllocKind::OBJECT0 */
        true,      /* AllocKind::OBJECT0_BACKGROUND */
        false,     /* AllocKind::OBJECT2 */
        true,      /* AllocKind::OBJECT2_BACKGROUND */
        false,     /* AllocKind::OBJECT4 */
        true,      /* AllocKind::OBJECT4_BACKGROUND */
        false,     /* AllocKind::OBJECT8 */
        true,      /* AllocKind::OBJECT8_BACKGROUND */
        false,     /* AllocKind::OBJECT12 */
        true,      /* AllocKind::OBJECT12_BACKGROUND */
        false,     /* AllocKind::OBJECT16 */
        true,      /* AllocKind::OBJECT16_BACKGROUND */
        false,     /* AllocKind::SCRIPT */
        false,     /* AllocKind::LAZY_SCRIPT */
        false,     /* AllocKind::SHAPE */
        false,     /* AllocKind::ACCESSOR_SHAPE */
        false,     /* AllocKind::BASE_SHAPE */
        false,     /* AllocKind::OBJECT_GROUP */
        false,     /* AllocKind::FAT_INLINE_STRING */
        false,     /* AllocKind::STRING */
        false,     /* AllocKind::EXTERNAL_STRING */
        false,     /* AllocKind::FAT_INLINE_ATOM */
        false,     /* AllocKind::ATOM */
        false,     /* AllocKind::SYMBOL */
        false,     /* AllocKind::JITCODE */
        false,     /* AllocKind::SCOPE */
        false,     /* AllocKind::REGEXP_SHARED */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
    return map[size_t(kind)];
}
129 :
// Returns whether things of the given kind are finalized off the active
// thread (on the GC helper thread) rather than in the foreground.
static inline bool
IsBackgroundFinalized(AllocKind kind)
{
    MOZ_ASSERT(IsValidAllocKind(kind));
    // Lookup table indexed by AllocKind. Entries must stay in AllocKind
    // declaration order; the static assert below checks only the count.
    static const bool map[] = {
        true,      /* AllocKind::FUNCTION */
        true,      /* AllocKind::FUNCTION_EXTENDED */
        false,     /* AllocKind::OBJECT0 */
        true,      /* AllocKind::OBJECT0_BACKGROUND */
        false,     /* AllocKind::OBJECT2 */
        true,      /* AllocKind::OBJECT2_BACKGROUND */
        false,     /* AllocKind::OBJECT4 */
        true,      /* AllocKind::OBJECT4_BACKGROUND */
        false,     /* AllocKind::OBJECT8 */
        true,      /* AllocKind::OBJECT8_BACKGROUND */
        false,     /* AllocKind::OBJECT12 */
        true,      /* AllocKind::OBJECT12_BACKGROUND */
        false,     /* AllocKind::OBJECT16 */
        true,      /* AllocKind::OBJECT16_BACKGROUND */
        false,     /* AllocKind::SCRIPT */
        true,      /* AllocKind::LAZY_SCRIPT */
        true,      /* AllocKind::SHAPE */
        true,      /* AllocKind::ACCESSOR_SHAPE */
        true,      /* AllocKind::BASE_SHAPE */
        true,      /* AllocKind::OBJECT_GROUP */
        true,      /* AllocKind::FAT_INLINE_STRING */
        true,      /* AllocKind::STRING */
        true,      /* AllocKind::EXTERNAL_STRING */
        true,      /* AllocKind::FAT_INLINE_ATOM */
        true,      /* AllocKind::ATOM */
        true,      /* AllocKind::SYMBOL */
        false,     /* AllocKind::JITCODE */
        true,      /* AllocKind::SCOPE */
        true,      /* AllocKind::REGEXP_SHARED */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
    return map[size_t(kind)];
}
168 :
169 : static inline bool
170 166079 : CanBeFinalizedInBackground(AllocKind kind, const Class* clasp)
171 : {
172 166079 : MOZ_ASSERT(IsObjectAllocKind(kind));
173 : /* If the class has no finalizer or a finalizer that is safe to call on
174 : * a different thread, we change the alloc kind. For example,
175 : * AllocKind::OBJECT0 calls the finalizer on the active thread,
176 : * AllocKind::OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
177 : * IsBackgroundFinalized is called to prevent recursively incrementing
178 : * the alloc kind; kind may already be a background finalize kind.
179 : */
180 260893 : return (!IsBackgroundFinalized(kind) &&
181 236364 : (!clasp->hasFinalize() || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
182 : }
183 :
184 : /* Capacity for slotsToThingKind */
185 : const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
186 :
187 : extern const AllocKind slotsToThingKind[];
188 :
189 : /* Get the best kind to use when making an object with the given slot count. */
190 : static inline AllocKind
191 64788 : GetGCObjectKind(size_t numSlots)
192 : {
193 64788 : if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
194 486 : return AllocKind::OBJECT16;
195 64302 : return slotsToThingKind[numSlots];
196 : }
197 :
198 : /* As for GetGCObjectKind, but for dense array allocation. */
199 : static inline AllocKind
200 5740 : GetGCArrayKind(size_t numElements)
201 : {
202 : /*
203 : * Dense arrays can use their fixed slots to hold their elements array
204 : * (less two Values worth of ObjectElements header), but if more than the
205 : * maximum number of fixed slots is needed then the fixed slots will be
206 : * unused.
207 : */
208 : JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
209 11480 : if (numElements > NativeObject::MAX_DENSE_ELEMENTS_COUNT ||
210 5740 : numElements + ObjectElements::VALUES_PER_HEADER >= SLOTS_TO_THING_KIND_LIMIT)
211 : {
212 78 : return AllocKind::OBJECT2;
213 : }
214 5662 : return slotsToThingKind[numElements + ObjectElements::VALUES_PER_HEADER];
215 : }
216 :
// Get the alloc kind for an object with the given number of fixed slots.
// Unlike GetGCObjectKind, the caller must guarantee the count is within the
// lookup table's range.
static inline AllocKind
GetGCObjectFixedSlotsKind(size_t numFixedSlots)
{
    MOZ_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
    return slotsToThingKind[numFixedSlots];
}
223 :
224 : // Get the best kind to use when allocating an object that needs a specific
225 : // number of bytes.
226 : static inline AllocKind
227 107 : GetGCObjectKindForBytes(size_t nbytes)
228 : {
229 107 : MOZ_ASSERT(nbytes <= JSObject::MAX_BYTE_SIZE);
230 :
231 107 : if (nbytes <= sizeof(NativeObject))
232 101 : return AllocKind::OBJECT0;
233 6 : nbytes -= sizeof(NativeObject);
234 :
235 6 : size_t dataSlots = AlignBytes(nbytes, sizeof(Value)) / sizeof(Value);
236 6 : MOZ_ASSERT(nbytes <= dataSlots * sizeof(Value));
237 6 : return GetGCObjectKind(dataSlots);
238 : }
239 :
// Convert a foreground-finalized object alloc kind into its background-
// finalized counterpart. Relies on each *_BACKGROUND kind immediately
// following its foreground kind in the AllocKind enumeration.
static inline AllocKind
GetBackgroundAllocKind(AllocKind kind)
{
    MOZ_ASSERT(!IsBackgroundFinalized(kind));
    MOZ_ASSERT(IsObjectAllocKind(kind));
    return AllocKind(size_t(kind) + 1);
}
247 :
/* Get the number of fixed slots and initial capacity associated with a kind. */
static inline size_t
GetGCKindSlots(AllocKind thingKind)
{
    /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
    switch (thingKind) {
      case AllocKind::FUNCTION:
      case AllocKind::OBJECT0:
      case AllocKind::OBJECT0_BACKGROUND:
        return 0;
      case AllocKind::FUNCTION_EXTENDED:
      case AllocKind::OBJECT2:
      case AllocKind::OBJECT2_BACKGROUND:
        return 2;
      case AllocKind::OBJECT4:
      case AllocKind::OBJECT4_BACKGROUND:
        return 4;
      case AllocKind::OBJECT8:
      case AllocKind::OBJECT8_BACKGROUND:
        return 8;
      case AllocKind::OBJECT12:
      case AllocKind::OBJECT12_BACKGROUND:
        return 12;
      case AllocKind::OBJECT16:
      case AllocKind::OBJECT16_BACKGROUND:
        return 16;
      default:
        /* Non-object kinds have no fixed-slot count. */
        MOZ_CRASH("Bad object alloc kind");
    }
}
278 :
279 : static inline size_t
280 266654 : GetGCKindSlots(AllocKind thingKind, const Class* clasp)
281 : {
282 266654 : size_t nslots = GetGCKindSlots(thingKind);
283 :
284 : /* An object's private data uses the space taken by its last fixed slot. */
285 266654 : if (clasp->flags & JSCLASS_HAS_PRIVATE) {
286 20666 : MOZ_ASSERT(nslots > 0);
287 20666 : nslots--;
288 : }
289 :
290 : /*
291 : * Functions have a larger alloc kind than AllocKind::OBJECT to reserve
292 : * space for the extra fields in JSFunction, but have no fixed slots.
293 : */
294 266654 : if (clasp == FunctionClassPtr)
295 191420 : nslots = 0;
296 :
297 266654 : return nslots;
298 : }
299 :
// Total size in bytes of a thing of the given object kind: the base object
// plus space for its fixed slots.
static inline size_t
GetGCKindBytes(AllocKind thingKind)
{
    return sizeof(JSObject_Slots0) + GetGCKindSlots(thingKind) * sizeof(Value);
}
305 :
306 : // Class to assist in triggering background chunk allocation. This cannot be done
307 : // while holding the GC or worker thread state lock due to lock ordering issues.
308 : // As a result, the triggering is delayed using this class until neither of the
309 : // above locks is held.
310 : class AutoMaybeStartBackgroundAllocation;
311 :
/*
 * A single segment of a SortedArenaList. Each segment has a head and a tail,
 * which track the start and end of a segment for O(1) append and concatenation.
 */
struct SortedArenaListSegment
{
    Arena* head;
    Arena** tailp;   // Points at the |next| field written by the next append.

    // Reset to the empty state: no head, with the tail pointer aimed back at
    // |head| (the representation of emptiness checked by isEmpty()).
    void clear() {
        head = nullptr;
        tailp = &head;
    }

    bool isEmpty() const {
        return tailp == &head;
    }

    // Appends |arena| to this segment.
    void append(Arena* arena) {
        MOZ_ASSERT(arena);
        MOZ_ASSERT_IF(head, head->getAllocKind() == arena->getAllocKind());
        *tailp = arena;
        tailp = &arena->next;
    }

    // Points the tail of this segment at |arena|, which may be null. Note
    // that this does not change the tail itself, but merely which arena
    // follows it. This essentially turns the tail into a cursor (see also the
    // description of ArenaList), but from the perspective of a SortedArenaList
    // this makes no difference.
    void linkTo(Arena* arena) {
        *tailp = arena;
    }
};
347 :
/*
 * Arena lists have a head and a cursor. The cursor conceptually lies on arena
 * boundaries, i.e. before the first arena, between two arenas, or after the
 * last arena.
 *
 * Arenas are usually sorted in order of increasing free space, with the cursor
 * following the Arena currently being allocated from. This ordering should not
 * be treated as an invariant, however, as the free lists may be cleared,
 * leaving arenas previously used for allocation partially full. Sorting order
 * is restored during sweeping.
 *
 * Arenas following the cursor should not be full.
 */
class ArenaList {
    // The cursor is implemented via an indirect pointer, |cursorp_|, to allow
    // for efficient list insertion at the cursor point and other list
    // manipulations.
    //
    // - If the list is empty: |head| is null, |cursorp_| points to |head|, and
    //   therefore |*cursorp_| is null.
    //
    // - If the list is not empty: |head| is non-null, and...
    //
    //   - If the cursor is at the start of the list: |cursorp_| points to
    //     |head|, and therefore |*cursorp_| points to the first arena.
    //
    //   - If cursor is at the end of the list: |cursorp_| points to the |next|
    //     field of the last arena, and therefore |*cursorp_| is null.
    //
    //   - If the cursor is at neither the start nor the end of the list:
    //     |cursorp_| points to the |next| field of the arena preceding the
    //     cursor, and therefore |*cursorp_| points to the arena following the
    //     cursor.
    //
    // |cursorp_| is never null.
    //
    Arena* head_;
    Arena** cursorp_;

    // Copy |other|'s head and cursor. When |other|'s cursor is at its head we
    // must point at our own |head_|, not at |other|'s, or the cursor would
    // dangle into the source list.
    void copy(const ArenaList& other) {
        other.check();
        head_ = other.head_;
        cursorp_ = other.isCursorAtHead() ? &head_ : other.cursorp_;
        check();
    }

  public:
    ArenaList() {
        clear();
    }

    ArenaList(const ArenaList& other) {
        copy(other);
    }

    ArenaList& operator=(const ArenaList& other) {
        copy(other);
        return *this;
    }

    // Build a list from a segment's arenas, with the cursor at the tail (or at
    // the head if the segment is empty).
    explicit ArenaList(const SortedArenaListSegment& segment) {
        head_ = segment.head;
        cursorp_ = segment.isEmpty() ? &head_ : segment.tailp;
        check();
    }

    // This does checking just of |head_| and |cursorp_|.
    void check() const {
#ifdef DEBUG
        // If the list is empty, it must have this form.
        MOZ_ASSERT_IF(!head_, cursorp_ == &head_);

        // If there's an arena following the cursor, it must not be full.
        Arena* cursor = *cursorp_;
        MOZ_ASSERT_IF(cursor, cursor->hasFreeThings());
#endif
    }

    // Reset to the empty list (head null, cursor at head).
    void clear() {
        head_ = nullptr;
        cursorp_ = &head_;
        check();
    }

    // Return a copy of this list and leave this list empty.
    ArenaList copyAndClear() {
        ArenaList result = *this;
        clear();
        return result;
    }

    bool isEmpty() const {
        check();
        return !head_;
    }

    // This returns nullptr if the list is empty.
    Arena* head() const {
        check();
        return head_;
    }

    bool isCursorAtHead() const {
        check();
        return cursorp_ == &head_;
    }

    bool isCursorAtEnd() const {
        check();
        return !*cursorp_;
    }

    // Walk the cursor forward until it sits after the last arena.
    void moveCursorToEnd() {
        while (!isCursorAtEnd())
            cursorp_ = &(*cursorp_)->next;
    }

    // This can return nullptr.
    Arena* arenaAfterCursor() const {
        check();
        return *cursorp_;
    }

    // This returns the arena after the cursor and moves the cursor past it.
    Arena* takeNextArena() {
        check();
        Arena* arena = *cursorp_;
        if (!arena)
            return nullptr;
        cursorp_ = &arena->next;
        check();
        return arena;
    }

    // This does two things.
    // - Inserts |a| at the cursor.
    // - Leaves the cursor sitting just before |a|, if |a| is not full, or just
    //   after |a|, if |a| is full.
    void insertAtCursor(Arena* a) {
        check();
        a->next = *cursorp_;
        *cursorp_ = a;
        // At this point, the cursor is sitting before |a|. Move it after |a|
        // if necessary.
        if (!a->hasFreeThings())
            cursorp_ = &a->next;
        check();
    }

    // Inserts |a| at the cursor, then moves the cursor past it.
    void insertBeforeCursor(Arena* a) {
        check();
        a->next = *cursorp_;
        *cursorp_ = a;
        cursorp_ = &a->next;
        check();
    }

    // This inserts |other|, which must be full, at the cursor of |this|.
    ArenaList& insertListWithCursorAtEnd(const ArenaList& other) {
        check();
        other.check();
        MOZ_ASSERT(other.isCursorAtEnd());
        if (other.isCursorAtHead())
            return *this;
        // Insert the full arenas of |other| after those of |this|.
        *other.cursorp_ = *cursorp_;
        *cursorp_ = other.head_;
        cursorp_ = other.cursorp_;
        check();
        return *this;
    }

    Arena* removeRemainingArenas(Arena** arenap);
    Arena** pickArenasToRelocate(size_t& arenaTotalOut, size_t& relocTotalOut);
    Arena* relocateArenas(Arena* toRelocate, Arena* relocated,
                          SliceBudget& sliceBudget, gcstats::Statistics& stats);
};
525 :
/*
 * A class that holds arenas in sorted order by appending arenas to specific
 * segments. Each segment has a head and a tail, which can be linked up to
 * other segments to create a contiguous ArenaList.
 */
class SortedArenaList
{
  public:
    // The minimum size, in bytes, of a GC thing.
    static const size_t MinThingSize = 16;

    static_assert(ArenaSize <= 4096, "When increasing the Arena size, please consider how"\
                                     " this will affect the size of a SortedArenaList.");

    static_assert(MinThingSize >= 16, "When decreasing the minimum thing size, please consider"\
                                      " how this will affect the size of a SortedArenaList.");

  private:
    // The maximum number of GC things that an arena can hold.
    static const size_t MaxThingsPerArena = (ArenaSize - ArenaHeaderSize) / MinThingSize;

    size_t thingsPerArena_;
    // segments[n] holds the arenas with exactly n free things; the extra (+1)
    // slot, segments[thingsPerArena_], holds completely empty arenas.
    SortedArenaListSegment segments[MaxThingsPerArena + 1];

    // Convenience functions to get the nth head and tail.
    Arena* headAt(size_t n) { return segments[n].head; }
    Arena** tailAt(size_t n) { return segments[n].tailp; }

  public:
    explicit SortedArenaList(size_t thingsPerArena = MaxThingsPerArena) {
        reset(thingsPerArena);
    }

    void setThingsPerArena(size_t thingsPerArena) {
        MOZ_ASSERT(thingsPerArena && thingsPerArena <= MaxThingsPerArena);
        thingsPerArena_ = thingsPerArena;
    }

    // Resets the first |thingsPerArena| segments of this list for further use.
    void reset(size_t thingsPerArena = MaxThingsPerArena) {
        setThingsPerArena(thingsPerArena);
        // Initialize the segments.
        for (size_t i = 0; i <= thingsPerArena; ++i)
            segments[i].clear();
    }

    // Inserts an arena, which has room for |nfree| more things, in its segment.
    void insertAt(Arena* arena, size_t nfree) {
        MOZ_ASSERT(nfree <= thingsPerArena_);
        segments[nfree].append(arena);
    }

    // Remove all empty arenas, inserting them as a linked list.
    void extractEmpty(Arena** empty) {
        SortedArenaListSegment& segment = segments[thingsPerArena_];
        if (segment.head) {
            // Prepend this segment's arenas to the |*empty| list.
            *segment.tailp = *empty;
            *empty = segment.head;
            segment.clear();
        }
    }

    // Links up the tail of each non-empty segment to the head of the next
    // non-empty segment, creating a contiguous list that is returned as an
    // ArenaList. This is not a destructive operation: neither the head nor tail
    // of any segment is modified. However, note that the Arenas in the
    // resulting ArenaList should be treated as read-only unless the
    // SortedArenaList is no longer needed: inserting or removing arenas would
    // invalidate the SortedArenaList.
    ArenaList toArenaList() {
        // Link the non-empty segment tails up to the non-empty segment heads.
        size_t tailIndex = 0;
        for (size_t headIndex = 1; headIndex <= thingsPerArena_; ++headIndex) {
            if (headAt(headIndex)) {
                segments[tailIndex].linkTo(headAt(headIndex));
                tailIndex = headIndex;
            }
        }
        // Point the tail of the final non-empty segment at null. Note that if
        // the list is empty, this will just set segments[0].head to null.
        segments[tailIndex].linkTo(nullptr);
        // Create an ArenaList with head and cursor set to the head and tail of
        // the first segment (if that segment is empty, only the head is used).
        return ArenaList(segments[0]);
    }
};
612 :
// Passed to allocation paths to indicate whether GC trigger thresholds should
// be consulted for this allocation.
enum ShouldCheckThresholds
{
    DontCheckThresholds = 0,
    CheckThresholds = 1
};
618 :
class ArenaLists
{
    JSRuntime* const runtime_;

    /*
     * For each arena kind its free list is represented as the first span with
     * free things. Initially all the spans are initialized as empty. After we
     * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena after the initial allocation. When starting the
     * GC we only move the head of the list of spans back to the arena
     * only for the arena that was not fully allocated.
     */
    ZoneGroupData<AllAllocKindArray<FreeSpan*>> freeLists_;
    FreeSpan*& freeLists(AllocKind i) { return freeLists_.ref()[i]; }
    FreeSpan* freeLists(AllocKind i) const { return freeLists_.ref()[i]; }

    // Because the JITs can allocate from the free lists, they cannot be null.
    // We use a placeholder FreeSpan that is empty (and without an associated
    // Arena) so the JITs can fall back gracefully.
    static FreeSpan placeholder;

    ZoneGroupOrGCTaskData<AllAllocKindArray<ArenaList>> arenaLists_;
    ArenaList& arenaLists(AllocKind i) { return arenaLists_.ref()[i]; }
    const ArenaList& arenaLists(AllocKind i) const { return arenaLists_.ref()[i]; }

    enum BackgroundFinalizeStateEnum { BFS_DONE, BFS_RUN };

    typedef mozilla::Atomic<BackgroundFinalizeStateEnum, mozilla::SequentiallyConsistent>
        BackgroundFinalizeState;

    /* The current background finalization state, accessed atomically. */
    UnprotectedData<AllAllocKindArray<BackgroundFinalizeState>> backgroundFinalizeState_;
    BackgroundFinalizeState& backgroundFinalizeState(AllocKind i) { return backgroundFinalizeState_.ref()[i]; }
    const BackgroundFinalizeState& backgroundFinalizeState(AllocKind i) const { return backgroundFinalizeState_.ref()[i]; }

    /* For each arena kind, a list of arenas remaining to be swept. */
    ActiveThreadOrGCTaskData<AllAllocKindArray<Arena*>> arenaListsToSweep_;
    Arena*& arenaListsToSweep(AllocKind i) { return arenaListsToSweep_.ref()[i]; }
    Arena* arenaListsToSweep(AllocKind i) const { return arenaListsToSweep_.ref()[i]; }

    /* During incremental sweeping, a list of the arenas already swept. */
    ZoneGroupOrGCTaskData<AllocKind> incrementalSweptArenaKind;
    ZoneGroupOrGCTaskData<ArenaList> incrementalSweptArenas;

    // Arena lists which have yet to be swept, but need additional foreground
    // processing before they are swept.
    ZoneGroupData<Arena*> gcShapeArenasToUpdate;
    ZoneGroupData<Arena*> gcAccessorShapeArenasToUpdate;
    ZoneGroupData<Arena*> gcScriptArenasToUpdate;
    ZoneGroupData<Arena*> gcObjectGroupArenasToUpdate;

    // While sweeping type information, these lists save the arenas for the
    // objects which have already been finalized in the foreground (which must
    // happen at the beginning of the GC), so that type sweeping can determine
    // which of the object pointers are marked.
    ZoneGroupData<ObjectAllocKindArray<ArenaList>> savedObjectArenas_;
    ArenaList& savedObjectArenas(AllocKind i) { return savedObjectArenas_.ref()[i]; }
    ZoneGroupData<Arena*> savedEmptyObjectArenas;

  public:
    explicit ArenaLists(JSRuntime* rt, ZoneGroup* group);
    ~ArenaLists();

    // Address of the free-list head for |thingKind|; used by JIT-generated
    // allocation paths.
    const void* addressOfFreeList(AllocKind thingKind) const {
        return reinterpret_cast<const void*>(&freeLists_.refNoCheck()[thingKind]);
    }

    Arena* getFirstArena(AllocKind thingKind) const {
        return arenaLists(thingKind).head();
    }

    Arena* getFirstArenaToSweep(AllocKind thingKind) const {
        return arenaListsToSweep(thingKind);
    }

    // Returns the already-swept arenas for |thingKind|, or null if incremental
    // sweeping is currently working on a different kind.
    Arena* getFirstSweptArena(AllocKind thingKind) const {
        if (thingKind != incrementalSweptArenaKind.ref())
            return nullptr;
        return incrementalSweptArenas.ref().head();
    }

    Arena* getArenaAfterCursor(AllocKind thingKind) const {
        return arenaLists(thingKind).arenaAfterCursor();
    }

    bool arenaListsAreEmpty() const {
        for (auto i : AllAllocKinds()) {
            /*
             * The arena cannot be empty if the background finalization is not yet
             * done.
             */
            if (backgroundFinalizeState(i) != BFS_DONE)
                return false;
            if (!arenaLists(i).isEmpty())
                return false;
        }
        return true;
    }

    // Clear the mark bits of every arena in every list.
    void unmarkAll() {
        for (auto i : AllAllocKinds()) {
            /* The background finalization must have stopped at this point. */
            MOZ_ASSERT(backgroundFinalizeState(i) == BFS_DONE);
            for (Arena* arena = arenaLists(i).head(); arena; arena = arena->next)
                arena->unmarkAll();
        }
    }

    bool doneBackgroundFinalize(AllocKind kind) const {
        return backgroundFinalizeState(kind) == BFS_DONE;
    }

    bool needBackgroundFinalizeWait(AllocKind kind) const {
        return backgroundFinalizeState(kind) != BFS_DONE;
    }

    /*
     * Clear the free lists so we won't try to allocate from swept arenas.
     */
    void purge() {
        for (auto i : AllAllocKinds())
            freeLists(i) = &placeholder;
    }

    inline void prepareForIncrementalGC();

    /* Check if this arena is in use. */
    bool arenaIsInUse(Arena* arena, AllocKind kind) const {
        MOZ_ASSERT(arena);
        return arena == freeLists(kind)->getArenaUnchecked();
    }

    // Fast-path allocation from the per-kind free list; returns null when the
    // free list cannot satisfy the request.
    MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
        return freeLists(thingKind)->allocate(thingSize);
    }

    /*
     * Moves all arenas from |fromArenaLists| into |this|.
     */
    void adoptArenas(JSRuntime* runtime, ArenaLists* fromArenaLists);

    /* True if the Arena in question is found in this ArenaLists */
    bool containsArena(JSRuntime* runtime, Arena* arena);

    void checkEmptyFreeLists() {
#ifdef DEBUG
        for (auto i : AllAllocKinds())
            checkEmptyFreeList(i);
#endif
    }

    // Debug-only: returns true when every arena list is empty (always true in
    // non-DEBUG builds).
    bool checkEmptyArenaLists() {
        bool empty = true;
#ifdef DEBUG
        for (auto i : AllAllocKinds()) {
            if (!checkEmptyArenaList(i))
                empty = false;
        }
#endif
        return empty;
    }

    void checkEmptyFreeList(AllocKind kind) {
        MOZ_ASSERT(freeLists(kind)->isEmpty());
    }

    bool checkEmptyArenaList(AllocKind kind);

    bool relocateArenas(Zone* zone, Arena*& relocatedListOut, JS::gcreason::Reason reason,
                        SliceBudget& sliceBudget, gcstats::Statistics& stats);

    void queueForegroundObjectsForSweep(FreeOp* fop);
    void queueForegroundThingsForSweep(FreeOp* fop);

    void mergeForegroundSweptObjectArenas();

    bool foregroundFinalize(FreeOp* fop, AllocKind thingKind, SliceBudget& sliceBudget,
                            SortedArenaList& sweepList);
    static void backgroundFinalize(FreeOp* fop, Arena* listHead, Arena** empty);

    // When finalizing arenas, whether to keep empty arenas on the list or
    // release them immediately.
    enum KeepArenasEnum {
        RELEASE_ARENAS,
        KEEP_ARENAS
    };

  private:
    inline void queueForForegroundSweep(FreeOp* fop, const FinalizePhase& phase);
    inline void queueForBackgroundSweep(FreeOp* fop, const FinalizePhase& phase);
    inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
    inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
    inline void mergeSweptArenas(AllocKind thingKind);

    TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
                                   ShouldCheckThresholds checkThresholds,
                                   AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
    inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind);

    inline void normalizeBackgroundFinalizeState(AllocKind thingKind);

    friend class GCRuntime;
    friend class js::Nursery;
    friend class js::TenuringTracer;
};
825 :
/* The number of GC cycles an empty chunk can survive before being released. */
827 : const size_t MAX_EMPTY_CHUNK_AGE = 4;
828 :
829 : extern bool
830 : InitializeStaticData();
831 :
832 : } /* namespace gc */
833 :
834 : class InterpreterFrame;
835 :
836 : extern void
837 : TraceRuntime(JSTracer* trc);
838 :
839 : extern void
840 : ReleaseAllJITCode(FreeOp* op);
841 :
842 : extern void
843 : PrepareForDebugGC(JSRuntime* rt);
844 :
845 : /* Functions for managing cross compartment gray pointers. */
846 :
847 : extern void
848 : DelayCrossCompartmentGrayMarking(JSObject* src);
849 :
850 : extern void
851 : NotifyGCNukeWrapper(JSObject* o);
852 :
853 : extern unsigned
854 : NotifyGCPreSwap(JSObject* a, JSObject* b);
855 :
856 : extern void
857 : NotifyGCPostSwap(JSObject* a, JSObject* b, unsigned preResult);
858 :
/*
 * Helper state for use when JS helper threads sweep and allocate GC thing kinds
 * that can be swept and allocated off thread.
 *
 * In non-threadsafe builds, all actual sweeping and allocation is performed
 * on the active thread, but GCHelperState encapsulates this from clients as
 * much as possible.
 */
class GCHelperState
{
    enum State {
        IDLE,
        SWEEPING
    };

    // Associated runtime.
    JSRuntime* const rt;

    // Condvar for notifying the active thread when work has finished. This is
    // associated with the runtime's GC lock --- the worker thread state
    // condvars can't be used here due to lock ordering issues.
    js::ConditionVariable done;

    // Activity for the helper to do, protected by the GC lock.
    ActiveThreadOrGCTaskData<State> state_;

    // Whether work is being performed on some thread.
    GCLockData<bool> hasThread;

    void startBackgroundThread(State newState, const AutoLockGC& lock,
                               const AutoLockHelperThreadState& helperLock);
    void waitForBackgroundThread(js::AutoLockGC& lock);

    State state(const AutoLockGC&);
    void setState(State state, const AutoLockGC&);

    friend class js::gc::ArenaLists;

    // Free each element in [array, end), then free the array itself.
    static void freeElementsAndArray(void** array, void** end) {
        MOZ_ASSERT(array <= end);
        for (void** p = array; p != end; ++p)
            js_free(*p);
        js_free(array);
    }

    void doSweep(AutoLockGC& lock);

  public:
    explicit GCHelperState(JSRuntime* rt)
      : rt(rt),
        done(),
        state_(IDLE)
    { }

    JSRuntime* runtime() { return rt; }

    void finish();

    void work();

    void maybeStartBackgroundSweep(const AutoLockGC& lock,
                                   const AutoLockHelperThreadState& helperLock);
    void startBackgroundShrink(const AutoLockGC& lock);

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepEnd();

#ifdef DEBUG
    bool onBackgroundThread();
#endif

    /*
     * When called outside the GC lock, this may return true even though the
     * sweeping has in fact already finished.
     */
    bool isBackgroundSweeping() const {
        return state_ == SWEEPING;
    }
};
938 :
// A generic task used to dispatch work to the helper thread system.
// Users should derive from GCParallelTask, add whatever data they need, and
// override |run|.
942 : class GCParallelTask
943 : {
944 : JSRuntime* const runtime_;
945 :
946 : // The state of the parallel computation.
947 : enum TaskState {
948 : NotStarted,
949 : Dispatched,
950 : Finished,
951 : };
952 : UnprotectedData<TaskState> state;
953 :
954 : // Amount of time this task took to execute.
955 : ActiveThreadOrGCTaskData<mozilla::TimeDuration> duration_;
956 :
957 : explicit GCParallelTask(const GCParallelTask&) = delete;
958 :
959 : protected:
960 : // A flag to signal a request for early completion of the off-thread task.
961 : mozilla::Atomic<bool> cancel_;
962 :
963 : virtual void run() = 0;
964 :
965 : public:
966 14 : explicit GCParallelTask(JSRuntime* runtime) : runtime_(runtime), state(NotStarted), duration_(nullptr) {}
967 0 : GCParallelTask(GCParallelTask&& other)
968 0 : : runtime_(other.runtime_),
969 : state(other.state),
970 : duration_(nullptr),
971 0 : cancel_(false)
972 0 : {}
973 :
974 : // Derived classes must override this to ensure that join() gets called
975 : // before members get destructed.
976 : virtual ~GCParallelTask();
977 :
978 114 : JSRuntime* runtime() { return runtime_; }
979 :
980 : // Time spent in the most recent invocation of this task.
981 2 : mozilla::TimeDuration duration() const { return duration_; }
982 :
983 : // The simple interface to a parallel task works exactly like pthreads.
984 : bool start();
985 : void join();
986 :
987 : // If multiple tasks are to be started or joined at once, it is more
988 : // efficient to take the helper thread lock once and use these methods.
989 : bool startWithLockHeld(AutoLockHelperThreadState& locked);
990 : void joinWithLockHeld(AutoLockHelperThreadState& locked);
991 :
992 : // Instead of dispatching to a helper, run the task on the current thread.
993 : void runFromActiveCooperatingThread(JSRuntime* rt);
994 :
995 : // Dispatch a cancelation request.
996 : enum CancelMode { CancelNoWait, CancelAndWait};
997 3 : void cancel(CancelMode mode = CancelNoWait) {
998 3 : cancel_ = true;
999 3 : if (mode == CancelAndWait)
1000 3 : join();
1001 3 : }
1002 :
1003 : // Check if a task is actively running.
1004 : bool isRunningWithLockHeld(const AutoLockHelperThreadState& locked) const;
1005 : bool isRunning() const;
1006 :
1007 : // This should be friended to HelperThread, but cannot be because it
1008 : // would introduce several circular dependencies.
1009 : public:
1010 : void runFromHelperThread(AutoLockHelperThreadState& locked);
1011 : };
1012 :
1013 : typedef void (*IterateChunkCallback)(JSRuntime* rt, void* data, gc::Chunk* chunk);
1014 : typedef void (*IterateZoneCallback)(JSRuntime* rt, void* data, JS::Zone* zone);
1015 : typedef void (*IterateArenaCallback)(JSRuntime* rt, void* data, gc::Arena* arena,
1016 : JS::TraceKind traceKind, size_t thingSize);
1017 : typedef void (*IterateCellCallback)(JSRuntime* rt, void* data, void* thing,
1018 : JS::TraceKind traceKind, size_t thingSize);
1019 :
1020 : /*
1021 : * This function calls |zoneCallback| on every zone, |compartmentCallback| on
1022 : * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
1023 : * on every in-use cell in the GC heap.
1024 : *
 * Note that no read barrier is triggered on the cells passed to cellCallback,
 * so these pointers must not escape the callback.
1027 : */
1028 : extern void
1029 : IterateHeapUnbarriered(JSContext* cx, void* data,
1030 : IterateZoneCallback zoneCallback,
1031 : JSIterateCompartmentCallback compartmentCallback,
1032 : IterateArenaCallback arenaCallback,
1033 : IterateCellCallback cellCallback);
1034 :
1035 : /*
1036 : * This function is like IterateZonesCompartmentsArenasCells, but does it for a
1037 : * single zone.
1038 : */
1039 : extern void
1040 : IterateHeapUnbarrieredForZone(JSContext* cx, Zone* zone, void* data,
1041 : IterateZoneCallback zoneCallback,
1042 : JSIterateCompartmentCallback compartmentCallback,
1043 : IterateArenaCallback arenaCallback,
1044 : IterateCellCallback cellCallback);
1045 :
1046 : /*
1047 : * Invoke chunkCallback on every in-use chunk.
1048 : */
1049 : extern void
1050 : IterateChunks(JSContext* cx, void* data, IterateChunkCallback chunkCallback);
1051 :
1052 : typedef void (*IterateScriptCallback)(JSRuntime* rt, void* data, JSScript* script);
1053 :
1054 : /*
1055 : * Invoke scriptCallback on every in-use script for
1056 : * the given compartment or for all compartments if it is null.
1057 : */
1058 : extern void
1059 : IterateScripts(JSContext* cx, JSCompartment* compartment,
1060 : void* data, IterateScriptCallback scriptCallback);
1061 :
1062 : extern void
1063 : FinalizeStringRT(JSRuntime* rt, JSString* str);
1064 :
1065 : JSCompartment*
1066 : NewCompartment(JSContext* cx, JSPrincipals* principals,
1067 : const JS::CompartmentOptions& options);
1068 :
1069 : namespace gc {
1070 :
1071 : /*
1072 : * Merge all contents of source into target. This can only be used if source is
1073 : * the only compartment in its zone.
1074 : */
1075 : void
1076 : MergeCompartments(JSCompartment* source, JSCompartment* target);
1077 :
1078 : /*
1079 : * This structure overlays a Cell in the Nursery and re-purposes its memory
1080 : * for managing the Nursery collection process.
1081 : */
class RelocationOverlay
{
    /* The low bit is set so this should never equal a normal pointer. */
    static const uintptr_t Relocated = uintptr_t(0xbad0bad1);

    /* Set to Relocated when moved. */
    uintptr_t magic_;

    /* The location |this| was moved to. */
    Cell* newLocation_;

    /* A list entry to track all relocated things. */
    RelocationOverlay* next_;

  public:
    // Reinterpret a cell's storage as a relocation overlay. Only meaningful
    // while that cell is participating in a moving GC.
    static RelocationOverlay* fromCell(Cell* cell) {
        return reinterpret_cast<RelocationOverlay*>(cell);
    }

    // True if this cell's storage carries the Relocated marker, i.e. the cell
    // has been moved elsewhere.
    bool isForwarded() const {
        return magic_ == Relocated;
    }

    // The cell's new address. Only valid once the cell has been forwarded.
    Cell* forwardingAddress() const {
        MOZ_ASSERT(isForwarded());
        return newLocation_;
    }

    void forwardTo(Cell* cell);

    // Mutable reference to the intrusive-list link chaining relocated things.
    RelocationOverlay*& nextRef() {
        MOZ_ASSERT(isForwarded());
        return next_;
    }

    // Next entry in the list of relocated things.
    RelocationOverlay* next() const {
        MOZ_ASSERT(isForwarded());
        return next_;
    }

    // Convenience: check forwarding without first converting the cell.
    static bool isCellForwarded(Cell* cell) {
        return fromCell(cell)->isForwarded();
    }
};
1126 :
1127 : // Functions for checking and updating GC thing pointers that might have been
1128 : // moved by compacting GC. Overloads are also provided that work with Values.
1129 : //
1130 : // IsForwarded - check whether a pointer refers to an GC thing that has been
1131 : // moved.
1132 : //
1133 : // Forwarded - return a pointer to the new location of a GC thing given a
1134 : // pointer to old location.
1135 : //
1136 : // MaybeForwarded - used before dereferencing a pointer that may refer to a
1137 : // moved GC thing without updating it. For JSObjects this will
1138 : // also update the object's shape pointer if it has been moved
1139 : // to allow slots to be accessed.
1140 :
1141 : template <typename T>
1142 : inline bool IsForwarded(T* t);
1143 : inline bool IsForwarded(const JS::Value& value);
1144 :
1145 : template <typename T>
1146 : inline T* Forwarded(T* t);
1147 :
1148 : inline Value Forwarded(const JS::Value& value);
1149 :
1150 : template <typename T>
1151 : inline T MaybeForwarded(T t);
1152 :
1153 : #ifdef JSGC_HASH_TABLE_CHECKS
1154 :
1155 : template <typename T>
1156 : inline bool IsGCThingValidAfterMovingGC(T* t);
1157 :
1158 : template <typename T>
1159 : inline void CheckGCThingAfterMovingGC(T* t);
1160 :
1161 : template <typename T>
1162 : inline void CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t);
1163 :
1164 : inline void CheckValueAfterMovingGC(const JS::Value& value);
1165 :
1166 : #endif // JSGC_HASH_TABLE_CHECKS
1167 :
1168 : #define JS_FOR_EACH_ZEAL_MODE(D) \
1169 : D(RootsChange, 1) \
1170 : D(Alloc, 2) \
1171 : D(FrameGC, 3) \
1172 : D(VerifierPre, 4) \
1173 : D(FrameVerifierPre, 5) \
1174 : D(StackRooting, 6) \
1175 : D(GenerationalGC, 7) \
1176 : D(IncrementalRootsThenFinish, 8) \
1177 : D(IncrementalMarkAllThenFinish, 9) \
1178 : D(IncrementalMultipleSlices, 10) \
1179 : D(IncrementalMarkingValidator, 11) \
1180 : D(ElementsBarrier, 12) \
1181 : D(CheckHashTablesOnMinorGC, 13) \
1182 : D(Compact, 14) \
1183 : D(CheckHeapAfterGC, 15) \
1184 : D(CheckNursery, 16) \
1185 : D(IncrementalSweepThenFinish, 17)
1186 :
1187 : enum class ZealMode {
1188 : #define ZEAL_MODE(name, value) name = value,
1189 : JS_FOR_EACH_ZEAL_MODE(ZEAL_MODE)
1190 : #undef ZEAL_MODE
1191 : Limit = 17
1192 : };
1193 :
1194 : enum VerifierType {
1195 : PreBarrierVerifier
1196 : };
1197 :
1198 : #ifdef JS_GC_ZEAL
1199 :
1200 : extern const char* ZealModeHelpText;
1201 :
1202 : /* Check that write barriers have been used correctly. See jsgc.cpp. */
1203 : void
1204 : VerifyBarriers(JSRuntime* rt, VerifierType type);
1205 :
1206 : void
1207 : MaybeVerifyBarriers(JSContext* cx, bool always = false);
1208 :
1209 : void DumpArenaInfo();
1210 :
1211 : #else
1212 :
// No-op stub used when JS_GC_ZEAL is not defined.
static inline void
VerifyBarriers(JSRuntime* rt, VerifierType type)
{
}

// No-op stub used when JS_GC_ZEAL is not defined.
static inline void
MaybeVerifyBarriers(JSContext* cx, bool always = false)
{
}
1222 :
1223 : #endif
1224 :
1225 : /*
1226 : * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
1227 : * that they are live. Use of this class is highly discouraged. Please carefully
1228 : * read the comment in vm/Runtime.h above |suppressGC| and take all appropriate
1229 : * precautions before instantiating this class.
1230 : */
class MOZ_RAII JS_HAZ_GC_SUPPRESSED AutoSuppressGC
{
    // Reference to the counter the destructor decrements. The out-of-line
    // constructor presumably increments it — confirm against jsgc.cpp.
    int32_t& suppressGC_;

  public:
    explicit AutoSuppressGC(JSContext* cx);

    // Balance the increment performed by the constructor.
    ~AutoSuppressGC()
    {
        suppressGC_--;
    }
};
1243 :
1244 : // A singly linked list of zones.
// A singly linked list of zones.
class ZoneList
{
    // Sentinel value used to terminate the list (see definition elsewhere).
    static Zone * const End;

    Zone* head;
    Zone* tail;

  public:
    ZoneList();
    ~ZoneList();

    bool isEmpty() const;
    Zone* front() const;

    void append(Zone* zone);
    // Move the zones of |other| into this list; see definition for details.
    void transferFrom(ZoneList& other);
    void removeFront();
    void clear();

  private:
    // Construct a list containing a single zone.
    explicit ZoneList(Zone* singleZone);
    // Validate internal invariants.
    void check() const;

    // Not copyable: the list links are owned exclusively.
    ZoneList(const ZoneList& other) = delete;
    ZoneList& operator=(const ZoneList& other) = delete;
};
1271 :
1272 : JSObject*
1273 : NewMemoryStatisticsObject(JSContext* cx);
1274 :
// RAII token asserting that no nursery allocation happens while it is live.
// The checking is active only in DEBUG builds; otherwise it is a no-op.
struct MOZ_RAII AutoAssertNoNurseryAlloc
{
#ifdef DEBUG
    AutoAssertNoNurseryAlloc();
    ~AutoAssertNoNurseryAlloc();
#else
    AutoAssertNoNurseryAlloc() {}
#endif
};
1284 :
1285 : /*
1286 : * There are a couple of classes here that serve mostly as "tokens" indicating
1287 : * that a condition holds. Some functions force the caller to possess such a
1288 : * token because they would misbehave if the condition were false, and it is
1289 : * far more clear to make the condition visible at the point where it can be
1290 : * affected rather than just crashing in an assertion down in the place where
1291 : * it is relied upon.
1292 : */
1293 :
1294 : /*
1295 : * Token meaning that the heap is busy and no allocations will be made.
1296 : *
1297 : * This class may be instantiated directly if it is known that the condition is
1298 : * already true, or it can be used as a base class for another RAII class that
1299 : * causes the condition to become true. Such base classes will use the no-arg
1300 : * constructor, establish the condition, then call checkCondition() to assert
1301 : * it and possibly record data needed to re-check the condition during
1302 : * destruction.
1303 : *
1304 : * Ordinarily, you would do something like this with a Maybe<> member that is
1305 : * emplaced during the constructor, but token-requiring functions want to
1306 : * require a reference to a base class instance. That said, you can always pass
1307 : * in the Maybe<> field as the token.
1308 : */
class MOZ_RAII AutoAssertHeapBusy {
  protected:
    JSRuntime* rt;

    // Check that the heap really is busy, and record the rt for the check in
    // the destructor.
    void checkCondition(JSRuntime *rt);

    // For derived RAII classes that first establish the condition and then
    // call checkCondition() themselves.
    AutoAssertHeapBusy() : rt(nullptr) {
    }

  public:
    explicit AutoAssertHeapBusy(JSRuntime* rt) {
        checkCondition(rt);
    }

    // Re-verify on destruction; the condition must still hold.
    ~AutoAssertHeapBusy() {
        MOZ_ASSERT(rt); // checkCondition must always be called.
        checkCondition(rt);
    }
};
1330 :
1331 : /*
1332 : * A class that serves as a token that the nursery in the current thread's zone
1333 : * group is empty.
1334 : */
class MOZ_RAII AutoAssertEmptyNursery
{
  protected:
    // Context recorded by checkCondition(); starts out null. NOTE(review):
    // checkCondition presumably stores its argument here — confirm in
    // jsgc.cpp.
    JSContext* cx;

    mozilla::Maybe<AutoAssertNoNurseryAlloc> noAlloc;

    // Check that the nursery is empty.
    void checkCondition(JSContext* cx);

    // For subclasses that need to empty the nursery in their constructors.
    AutoAssertEmptyNursery() : cx(nullptr) {
    }

  public:
    explicit AutoAssertEmptyNursery(JSContext* cx) : cx(nullptr) {
        checkCondition(cx);
    }

    // Copying delegates to the checking constructor with the source's
    // context, so the condition is re-verified.
    AutoAssertEmptyNursery(const AutoAssertEmptyNursery& other) : AutoAssertEmptyNursery(other.cx)
    {
    }
};
1358 :
1359 : /*
1360 : * Evict the nursery upon construction. Serves as a token indicating that the
1361 : * nursery is empty. (See AutoAssertEmptyNursery, above.)
1362 : *
 * Note that this is a very improper subclass of AutoAssertHeapBusy, in that the
1364 : * heap is *not* busy within the scope of an AutoEmptyNursery. I will most
1365 : * likely fix this by removing AutoAssertHeapBusy, but that is currently
1366 : * waiting on jonco's review.
1367 : */
// Empties the nursery on construction (see the comment above) and thereafter
// serves as an AutoAssertEmptyNursery token.
class MOZ_RAII AutoEmptyNursery : public AutoAssertEmptyNursery
{
  public:
    explicit AutoEmptyNursery(JSContext* cx);
};
1373 :
1374 : const char*
1375 : StateName(State state);
1376 :
1377 : inline bool
1378 24 : IsOOMReason(JS::gcreason::Reason reason)
1379 : {
1380 24 : return reason == JS::gcreason::LAST_DITCH ||
1381 24 : reason == JS::gcreason::MEM_PRESSURE;
1382 : }
1383 :
1384 : } /* namespace gc */
1385 :
1386 : /* Use this to avoid assertions when manipulating the wrapper map. */
class MOZ_RAII AutoDisableProxyCheck
{
  public:
// Checking is active only in DEBUG builds; otherwise this is a no-op token.
#ifdef DEBUG
    AutoDisableProxyCheck();
    ~AutoDisableProxyCheck();
#else
    AutoDisableProxyCheck() {}
#endif
};
1397 :
// RAII guard that disables compacting GC for its lifetime.
struct MOZ_RAII AutoDisableCompactingGC
{
    explicit AutoDisableCompactingGC(JSContext* cx);
    ~AutoDisableCompactingGC();

  private:
    // Context recorded so the destructor can re-enable compacting GC.
    JSContext* cx;
};
1406 :
1407 : // This is the same as IsInsideNursery, but not inlined.
1408 : bool
1409 : UninlinedIsInsideNursery(const gc::Cell* cell);
1410 :
1411 : } /* namespace js */
1412 :
1413 : #endif /* jsgc_h */
|