LCOV - code coverage report
Current view: top level - js/src/gc - Marking.cpp (source / functions) Hit Total Coverage
Test: output.info Lines: 914 1545 59.2 %
Date: 2017-07-14 16:53:18 Functions: 358 1459 24.5 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
       2             :  * vim: set ts=8 sts=4 et sw=4 tw=99:
       3             :  * This Source Code Form is subject to the terms of the Mozilla Public
       4             :  * License, v. 2.0. If a copy of the MPL was not distributed with this
       5             :  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
       6             : 
       7             : #include "gc/Marking.h"
       8             : 
       9             : #include "mozilla/DebugOnly.h"
      10             : #include "mozilla/IntegerRange.h"
      11             : #include "mozilla/ReentrancyGuard.h"
      12             : #include "mozilla/ScopeExit.h"
      13             : #include "mozilla/TypeTraits.h"
      14             : 
      15             : #include "jsgc.h"
      16             : #include "jsprf.h"
      17             : 
      18             : #include "builtin/ModuleObject.h"
      19             : #include "gc/GCInternals.h"
      20             : #include "gc/Policy.h"
      21             : #include "jit/IonCode.h"
      22             : #include "js/SliceBudget.h"
      23             : #include "vm/ArgumentsObject.h"
      24             : #include "vm/ArrayObject.h"
      25             : #include "vm/Debugger.h"
      26             : #include "vm/EnvironmentObject.h"
      27             : #include "vm/RegExpObject.h"
      28             : #include "vm/RegExpShared.h"
      29             : #include "vm/Scope.h"
      30             : #include "vm/Shape.h"
      31             : #include "vm/Symbol.h"
      32             : #include "vm/TypedArrayObject.h"
      33             : #include "vm/UnboxedObject.h"
      34             : #include "wasm/WasmJS.h"
      35             : 
      36             : #include "jscompartmentinlines.h"
      37             : #include "jsgcinlines.h"
      38             : #include "jsobjinlines.h"
      39             : 
      40             : #include "gc/Nursery-inl.h"
      41             : #include "vm/String-inl.h"
      42             : #include "vm/UnboxedObject-inl.h"
      43             : 
      44             : using namespace js;
      45             : using namespace js::gc;
      46             : 
      47             : using JS::MapTypeToTraceKind;
      48             : 
      49             : using mozilla::ArrayLength;
      50             : using mozilla::DebugOnly;
      51             : using mozilla::IntegerRange;
      52             : using mozilla::IsBaseOf;
      53             : using mozilla::IsSame;
      54             : using mozilla::PodCopy;
      55             : 
      56             : // Tracing Overview
      57             : // ================
      58             : //
      59             : // Tracing, in this context, refers to an abstract visitation of some or all of
      60             : // the GC-controlled heap. The effect of tracing an edge of the graph depends
      61             : // on the subclass of the JSTracer on whose behalf we are tracing.
      62             : //
      63             : // Marking
      64             : // -------
      65             : //
      66             : // The primary JSTracer is the GCMarker. The marking tracer causes the target
      67             : // of each traversed edge to be marked black and the target edge's children to
      68             : // be marked either gray (in the gc algorithm sense) or immediately black.
      69             : //
      70             : // Callback
      71             : // --------
      72             : //
      73             : // The secondary JSTracer is the CallbackTracer. This simply invokes a callback
      74             : // on each edge in a child.
      75             : //
      76             : // The following is a rough outline of the general struture of the tracing
      77             : // internals.
      78             : //
      79             : //                                                                                              //
      80             : //   .---------.    .---------.    .--------------------------.       .----------.              //
      81             : //   |TraceEdge|    |TraceRoot|    |TraceManuallyBarrieredEdge|  ...  |TraceRange|   ... etc.   //
      82             : //   '---------'    '---------'    '--------------------------'       '----------'              //
      83             : //        \              \                        /                        /                    //
      84             : //         \              \  .----------------.  /                        /                     //
      85             : //          o------------->o-|DispatchToTracer|-o<-----------------------o                      //
      86             : //                           '----------------'                                                 //
      87             : //                              /          \                                                    //
      88             : //                             /            \                                                   //
      89             : //                       .---------.   .----------.         .-----------------.                 //
      90             : //                       |DoMarking|   |DoCallback|-------> |<JSTraceCallback>|----------->     //
      91             : //                       '---------'   '----------'         '-----------------'                 //
      92             : //                            |                                                                 //
      93             : //                            |                                                                 //
      94             : //                        .--------.                                                            //
      95             : //      o---------------->|traverse| .                                                          //
      96             : //     /_\                '--------'   ' .                                                      //
      97             : //      |                     .     .      ' .                                                  //
      98             : //      |                     .       .        ' .                                              //
      99             : //      |                     .         .          ' .                                          //
     100             : //      |             .-----------.    .-----------.   ' .     .--------------------.           //
     101             : //      |             |markAndScan|    |markAndPush|       ' - |markAndTraceChildren|---->      //
     102             : //      |             '-----------'    '-----------'           '--------------------'           //
     103             : //      |                   |                  \                                                //
     104             : //      |                   |                   \                                               //
     105             : //      |       .----------------------.     .----------------.                                 //
     106             : //      |       |T::eagerlyMarkChildren|     |pushMarkStackTop|<===Oo                           //
     107             : //      |       '----------------------'     '----------------'    ||                           //
     108             : //      |                  |                         ||            ||                           //
     109             : //      |                  |                         ||            ||                           //
     110             : //      |                  |                         ||            ||                           //
     111             : //      o<-----------------o<========================OO============Oo                           //
     112             : //                                                                                              //
     113             : //                                                                                              //
     114             : //   Legend:                                                                                    //
     115             : //     ------  Direct calls                                                                     //
     116             : //     . . .   Static dispatch                                                                  //
     117             : //     ======  Dispatch through a manual stack.                                                 //
     118             : //                                                                                              //
     119             : 
     120             : 
     121             : /*** Tracing Invariants **************************************************************************/
     122             : 
     123             : #if defined(DEBUG)
     124             : template<typename T>
     125             : static inline bool
     126       24490 : IsThingPoisoned(T* thing)
     127             : {
     128             :     const uint8_t poisonBytes[] = {
     129             :         JS_FRESH_NURSERY_PATTERN,
     130             :         JS_SWEPT_NURSERY_PATTERN,
     131             :         JS_ALLOCATED_NURSERY_PATTERN,
     132             :         JS_FRESH_TENURED_PATTERN,
     133             :         JS_MOVED_TENURED_PATTERN,
     134             :         JS_SWEPT_TENURED_PATTERN,
     135             :         JS_ALLOCATED_TENURED_PATTERN,
     136             :         JS_SWEPT_CODE_PATTERN
     137       24490 :     };
     138       24490 :     const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
     139       24490 :     uint32_t* p = reinterpret_cast<uint32_t*>(reinterpret_cast<FreeSpan*>(thing) + 1);
     140             :     // Note: all free patterns are odd to make the common, not-poisoned case a single test.
     141       24490 :     if ((*p & 1) == 0)
     142       10623 :         return false;
     143      124803 :     for (int i = 0; i < numPoisonBytes; ++i) {
     144      110936 :         const uint8_t pb = poisonBytes[i];
     145      110936 :         const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
     146      110936 :         if (*p == pw)
     147           0 :             return true;
     148             :     }
     149       13867 :     return false;
     150             : }
     151             : 
     152             : static bool
     153       93480 : IsMovingTracer(JSTracer *trc)
     154             : {
     155      103884 :     return trc->isCallbackTracer() &&
     156      103884 :            trc->asCallbackTracer()->getTracerKind() == JS::CallbackTracer::TracerKind::Moving;
     157             : }
     158             : #endif
     159             : 
     160       34702 : bool ThingIsPermanentAtomOrWellKnownSymbol(JSString* str) {
     161       34702 :     return str->isPermanentAtom();
     162             : }
     163           0 : bool ThingIsPermanentAtomOrWellKnownSymbol(JSFlatString* str) {
     164           0 :     return str->isPermanentAtom();
     165             : }
     166           0 : bool ThingIsPermanentAtomOrWellKnownSymbol(JSLinearString* str) {
     167           0 :     return str->isPermanentAtom();
     168             : }
     169        7498 : bool ThingIsPermanentAtomOrWellKnownSymbol(JSAtom* atom) {
     170        7498 :     return atom->isPermanent();
     171             : }
     172           0 : bool ThingIsPermanentAtomOrWellKnownSymbol(PropertyName* name) {
     173           0 :     return name->isPermanent();
     174             : }
     175         164 : bool ThingIsPermanentAtomOrWellKnownSymbol(JS::Symbol* sym) {
     176         164 :     return sym->isWellKnownSymbol();
     177             : }
     178             : 
     179             : template <typename T>
     180             : static inline bool
     181       80831 : IsOwnedByOtherRuntime(JSRuntime* rt, T thing)
     182             : {
     183       80831 :     bool other = thing->runtimeFromAnyThread() != rt;
     184       80831 :     MOZ_ASSERT_IF(other,
     185             :                   ThingIsPermanentAtomOrWellKnownSymbol(thing) ||
     186             :                   thing->zoneFromAnyThread()->isSelfHostingZone());
     187       80831 :     return other;
     188             : }
     189             : 
     190             : template<typename T>
     191             : void
     192       46740 : js::CheckTracedThing(JSTracer* trc, T* thing)
     193             : {
     194             : #ifdef DEBUG
     195       46740 :     MOZ_ASSERT(trc);
     196       46740 :     MOZ_ASSERT(thing);
     197             : 
     198       46740 :     if (!trc->checkEdges())
     199           0 :         return;
     200             : 
     201       46740 :     if (IsForwarded(thing))
     202           0 :         thing = Forwarded(thing);
     203             : 
     204             :     /* This function uses data that's not available in the nursery. */
     205       46740 :     if (IsInsideNursery(thing))
     206           0 :         return;
     207             : 
     208       46740 :     MOZ_ASSERT_IF(!IsMovingTracer(trc) && !trc->isTenuringTracer(), !IsForwarded(thing));
     209             : 
     210             :     /*
     211             :      * Permanent atoms and things in the self-hosting zone are not associated
     212             :      * with this runtime, but will be ignored during marking.
     213             :      */
     214       46740 :     if (IsOwnedByOtherRuntime(trc->runtime(), thing))
     215           0 :         return;
     216             : 
     217       46740 :     Zone* zone = thing->zoneFromAnyThread();
     218       46740 :     JSRuntime* rt = trc->runtime();
     219             : 
     220       46740 :     if (!IsMovingTracer(trc) && !IsBufferGrayRootsTracer(trc) && !IsClearEdgesTracer(trc)) {
     221       41638 :         MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
     222       41638 :         MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     223             :     }
     224             : 
     225       46740 :     MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
     226             : 
     227             :     // It shouldn't be possible to trace into zones used by helper threads.
     228       46740 :     MOZ_ASSERT(!zone->usedByHelperThread());
     229             : 
     230       46740 :     MOZ_ASSERT(thing->isAligned());
     231       46740 :     MOZ_ASSERT(MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind ==
     232             :                thing->getTraceKind());
     233             : 
     234             :     /*
     235             :      * Do not check IsMarkingTracer directly -- it should only be used in paths
     236             :      * where we cannot be the gray buffering tracer.
     237             :      */
     238       46740 :     bool isGcMarkingTracer = trc->isMarkingTracer();
     239             : 
     240       46740 :     MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferGrayRootsTracer(trc));
     241             : 
     242       46740 :     if (isGcMarkingTracer) {
     243       41538 :         GCMarker* gcMarker = GCMarker::fromTracer(trc);
     244       41538 :         MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
     245             :                       zone->isCollecting() || zone->isAtomsZone());
     246             : 
     247       41538 :         MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
     248             :                       !zone->isGCMarkingBlack() || zone->isAtomsZone());
     249             : 
     250       41538 :         MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() || zone->isGCCompacting()));
     251             :     }
     252             : 
     253             :     /*
     254             :      * Try to assert that the thing is allocated.
     255             :      *
     256             :      * We would like to assert that the thing is not in the free list, but this
     257             :      * check is very slow. Instead we check whether the thing has been poisoned:
     258             :      * if it has not then we assume it is allocated, but if it has then it is
     259             :      * either free or uninitialized in which case we check the free list.
     260             :      *
     261             :      * Further complications are that background sweeping may be running and
     262             :      * concurrently modifiying the free list and that tracing is done off
     263             :      * thread during compacting GC and reading the contents of the thing by
     264             :      * IsThingPoisoned would be racy in this case.
     265             :      */
     266       46740 :     MOZ_ASSERT_IF(JS::CurrentThreadIsHeapBusy() &&
     267             :                   !zone->isGCCompacting() &&
     268             :                   !rt->gc.isBackgroundSweeping(),
     269             :                   !IsThingPoisoned(thing) || !InFreeList(thing->asTenured().arena(), thing));
     270             : #endif
     271             : }
     272             : 
     273             : template <typename S>
     274             : struct CheckTracedFunctor : public VoidDefaultAdaptor<S> {
     275             :     template <typename T> void operator()(T* t, JSTracer* trc) { CheckTracedThing(trc, t); }
     276             : };
     277             : 
     278             : template<typename T>
     279             : void
     280             : js::CheckTracedThing(JSTracer* trc, T thing)
     281             : {
     282             :     DispatchTyped(CheckTracedFunctor<T>(), thing, trc);
     283             : }
     284             : 
     285             : namespace js {
     286             : #define IMPL_CHECK_TRACED_THING(_, type, __) \
     287             :     template void CheckTracedThing<type>(JSTracer*, type*);
     288             : JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
     289             : #undef IMPL_CHECK_TRACED_THING
     290             : } // namespace js
     291             : 
     292             : static bool
     293        2111 : ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src, Cell* cell)
     294             : {
     295        2111 :     if (!trc->isMarkingTracer())
     296        2103 :         return true;
     297             : 
     298           8 :     MarkColor color = GCMarker::fromTracer(trc)->markColor();
     299             : 
     300           8 :     if (!cell->isTenured()) {
     301           0 :         MOZ_ASSERT(color == MarkColor::Black);
     302           0 :         return false;
     303             :     }
     304           8 :     TenuredCell& tenured = cell->asTenured();
     305             : 
     306           8 :     JS::Zone* zone = tenured.zone();
     307           8 :     if (color == MarkColor::Black) {
     308             :         /*
     309             :          * Having black->gray edges violates our promise to the cycle
     310             :          * collector. This can happen if we're collecting a compartment and it
     311             :          * has an edge to an uncollected compartment: it's possible that the
     312             :          * source and destination of the cross-compartment edge should be gray,
     313             :          * but the source was marked black by the write barrier.
     314             :          */
     315           8 :         if (tenured.isMarkedGray()) {
     316           0 :             MOZ_ASSERT(!zone->isCollecting());
     317           0 :             trc->runtime()->gc.setFoundBlackGrayEdges(tenured);
     318             :         }
     319           8 :         return zone->isGCMarking();
     320             :     } else {
     321           0 :         if (zone->isGCMarkingBlack()) {
     322             :             /*
     323             :              * The destination compartment is being not being marked gray now,
     324             :              * but it will be later, so record the cell so it can be marked gray
     325             :              * at the appropriate time.
     326             :              */
     327           0 :             if (!tenured.isMarkedAny())
     328           0 :                 DelayCrossCompartmentGrayMarking(src);
     329           0 :             return false;
     330             :         }
     331           0 :         return zone->isGCMarkingGray();
     332             :     }
     333             : }
     334             : 
     335             : static bool
     336        2112 : ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src, const Value& val)
     337             : {
     338        2112 :     return val.isGCThing() && ShouldTraceCrossCompartment(trc, src, val.toGCThing());
     339             : }
     340             : 
     341             : static void
     342       40080 : AssertShouldMarkInZone(Cell* thing)
     343             : {
     344       40080 :     MOZ_ASSERT(thing->asTenured().zone()->shouldMarkInZone());
     345       40080 : }
     346             : 
     347             : static void
     348       22626 : AssertShouldMarkInZone(JSString* str)
     349             : {
     350             : #ifdef DEBUG
     351       22626 :     Zone* zone = str->asTenured().zone();
     352       22626 :     MOZ_ASSERT(zone->shouldMarkInZone() || zone->isAtomsZone());
     353             : #endif
     354       22626 : }
     355             : 
     356             : static void
     357           0 : AssertShouldMarkInZone(JS::Symbol* sym)
     358             : {
     359             : #ifdef DEBUG
     360           0 :     Zone* zone = sym->asTenured().zone();
     361           0 :     MOZ_ASSERT(zone->shouldMarkInZone() || zone->isAtomsZone());
     362             : #endif
     363           0 : }
     364             : 
     365             : static void
     366       20351 : AssertRootMarkingPhase(JSTracer* trc)
     367             : {
     368       20351 :     MOZ_ASSERT_IF(trc->isMarkingTracer(),
     369             :                   trc->runtime()->gc.state() == State::NotActive ||
     370             :                   trc->runtime()->gc.state() == State::MarkRoots);
     371       20351 : }
     372             : 
     373             : 
     374             : /*** Tracing Interface ***************************************************************************/
     375             : 
     376             : // The second parameter to BaseGCType is derived automatically based on T. The
     377             : // relation here is that for any T, the TraceKind will automatically,
     378             : // statically select the correct Cell layout for marking. Below, we instantiate
     379             : // each override with a declaration of the most derived layout type.
     380             : //
     381             : // The use of TraceKind::Null for the case where the type is not matched
     382             : // generates a compile error as no template instantiated for that kind.
     383             : //
     384             : // Usage:
     385             : //   BaseGCType<T>::type
     386             : //
     387             : // Examples:
     388             : //   BaseGCType<JSFunction>::type => JSObject
     389             : //   BaseGCType<UnownedBaseShape>::type => BaseShape
     390             : //   etc.
     391             : template <typename T, JS::TraceKind =
     392             : #define EXPAND_MATCH_TYPE(name, type, _) \
     393             :           IsBaseOf<type, T>::value ? JS::TraceKind::name :
     394             : JS_FOR_EACH_TRACEKIND(EXPAND_MATCH_TYPE)
     395             : #undef EXPAND_MATCH_TYPE
     396             :           JS::TraceKind::Null>
     397             : 
     398             : struct BaseGCType;
     399             : #define IMPL_BASE_GC_TYPE(name, type_, _) \
     400             :     template <typename T> struct BaseGCType<T, JS::TraceKind:: name> { typedef type_ type; };
     401             : JS_FOR_EACH_TRACEKIND(IMPL_BASE_GC_TYPE);
     402             : #undef IMPL_BASE_GC_TYPE
     403             : 
     404             : // Our barrier templates are parameterized on the pointer types so that we can
     405             : // share the definitions with Value and jsid. Thus, we need to strip the
     406             : // pointer before sending the type to BaseGCType and re-add it on the other
     407             : // side. As such:
     408             : template <typename T> struct PtrBaseGCType { typedef T type; };
     409             : template <typename T> struct PtrBaseGCType<T*> { typedef typename BaseGCType<T>::type* type; };
     410             : 
     411             : template <typename T>
     412             : typename PtrBaseGCType<T>::type*
     413      743549 : ConvertToBase(T* thingp)
     414             : {
     415      743549 :     return reinterpret_cast<typename PtrBaseGCType<T>::type*>(thingp);
     416             : }
     417             : 
     418             : template <typename T> void DispatchToTracer(JSTracer* trc, T* thingp, const char* name);
     419             : template <typename T> T DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name);
     420             : template <typename T> void DoMarking(GCMarker* gcmarker, T* thing);
     421             : template <typename T> void DoMarking(GCMarker* gcmarker, const T& thing);
     422             : template <typename T> void NoteWeakEdge(GCMarker* gcmarker, T** thingp);
     423             : template <typename T> void NoteWeakEdge(GCMarker* gcmarker, T* thingp);
     424             : 
     425             : template <typename T>
     426             : void
     427        5605 : js::TraceEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
     428             : {
     429        5605 :     DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
     430        5605 : }
     431             : 
     432             : template <typename T>
     433             : void
     434           0 : js::TraceEdge(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
     435             : {
     436           0 :     DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
     437           0 : }
     438             : 
     439             : template <typename T>
     440             : void
     441       15492 : js::TraceNullableEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name)
     442             : {
     443       15492 :     if (InternalBarrierMethods<T>::isMarkable(thingp->get()))
     444       15230 :         DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
     445       15492 : }
     446             : 
     447             : template <typename T>
     448             : void
     449           0 : js::TraceNullableEdge(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
     450             : {
     451           0 :     if (InternalBarrierMethods<T>::isMarkable(thingp->unbarrieredGet()))
     452           0 :         DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
     453           0 : }
     454             : 
     455             : template <typename T>
     456             : JS_PUBLIC_API(void)
     457        5525 : js::gc::TraceExternalEdge(JSTracer* trc, T* thingp, const char* name)
     458             : {
     459        5525 :     MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
     460        5525 :     DispatchToTracer(trc, ConvertToBase(thingp), name);
     461        5525 : }
     462             : 
     463             : template <typename T>
     464             : void
     465       52395 : js::TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name)
     466             : {
     467       52395 :     DispatchToTracer(trc, ConvertToBase(thingp), name);
     468       52395 : }
     469             : 
     470             : template <typename T>
     471             : JS_PUBLIC_API(void)
     472         583 : js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name)
     473             : {
     474         583 :     DispatchToTracer(trc, ConvertToBase(thingp), name);
     475         583 : }
     476             : 
     477             : template <typename T>
     478             : void
     479           0 : js::TraceWeakEdge(JSTracer* trc, WeakRef<T>* thingp, const char* name)
     480             : {
     481           0 :     if (!trc->isMarkingTracer()) {
     482             :         // Non-marking tracers can select whether or not they see weak edges.
     483           0 :         if (trc->traceWeakEdges())
     484           0 :             DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
     485           0 :         return;
     486             :     }
     487             : 
     488           0 :     NoteWeakEdge(GCMarker::fromTracer(trc),
     489             :                  ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
     490             : }
     491             : 
     492             : template <typename T>
     493             : void
     494        5520 : js::TraceRoot(JSTracer* trc, T* thingp, const char* name)
     495             : {
     496        5520 :     AssertRootMarkingPhase(trc);
     497        5520 :     DispatchToTracer(trc, ConvertToBase(thingp), name);
     498        5520 : }
     499             : 
     500             : template <typename T>
     501             : void
     502           0 : js::TraceRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
     503             : {
     504           0 :     TraceRoot(trc, thingp->unsafeGet(), name);
     505           0 : }
     506             : 
     507             : template <typename T>
     508             : void
     509        6914 : js::TraceNullableRoot(JSTracer* trc, T* thingp, const char* name)
     510             : {
     511        6914 :     AssertRootMarkingPhase(trc);
     512        6914 :     if (InternalBarrierMethods<T>::isMarkable(*thingp))
     513        5443 :         DispatchToTracer(trc, ConvertToBase(thingp), name);
     514        6914 : }
     515             : 
     516             : template <typename T>
     517             : void
     518           0 : js::TraceNullableRoot(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
     519             : {
     520           0 :     TraceNullableRoot(trc, thingp->unsafeGet(), name);
     521           0 : }
     522             : 
     523             : template <typename T>
     524             : JS_PUBLIC_API(void)
     525           0 : JS::UnsafeTraceRoot(JSTracer* trc, T* thingp, const char* name)
     526             : {
     527           0 :     MOZ_ASSERT(thingp);
     528           0 :     js::TraceNullableRoot(trc, thingp, name);
     529           0 : }
     530             : 
     531             : template <typename T>
     532             : void
     533        7151 : js::TraceRange(JSTracer* trc, size_t len, WriteBarrieredBase<T>* vec, const char* name)
     534             : {
     535       14302 :     JS::AutoTracingIndex index(trc);
     536       21460 :     for (auto i : IntegerRange(len)) {
     537       14309 :         if (InternalBarrierMethods<T>::isMarkable(vec[i].get()))
     538        2093 :             DispatchToTracer(trc, ConvertToBase(vec[i].unsafeUnbarrieredForTracing()), name);
     539       14309 :         ++index;
     540             :     }
     541        7151 : }
     542             : 
     543             : template <typename T>
     544             : void
     545         407 : js::TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name)
     546             : {
     547         407 :     AssertRootMarkingPhase(trc);
     548         814 :     JS::AutoTracingIndex index(trc);
     549        1606 :     for (auto i : IntegerRange(len)) {
     550        1199 :         if (InternalBarrierMethods<T>::isMarkable(vec[i]))
     551         911 :             DispatchToTracer(trc, ConvertToBase(&vec[i]), name);
     552        1199 :         ++index;
     553             :     }
     554         407 : }
     555             : 
     556             : // Instantiate a copy of the Tracing templates for each derived type.
     557             : #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
     558             :     template void js::TraceEdge<type>(JSTracer*, WriteBarrieredBase<type>*, const char*); \
     559             :     template void js::TraceEdge<type>(JSTracer*, ReadBarriered<type>*, const char*); \
     560             :     template void js::TraceNullableEdge<type>(JSTracer*, WriteBarrieredBase<type>*, const char*); \
     561             :     template void js::TraceNullableEdge<type>(JSTracer*, ReadBarriered<type>*, const char*); \
     562             :     template void js::TraceManuallyBarrieredEdge<type>(JSTracer*, type*, const char*); \
     563             :     template void js::TraceWeakEdge<type>(JSTracer*, WeakRef<type>*, const char*); \
     564             :     template void js::TraceRoot<type>(JSTracer*, type*, const char*); \
     565             :     template void js::TraceRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
     566             :     template void js::TraceNullableRoot<type>(JSTracer*, type*, const char*); \
     567             :     template void js::TraceNullableRoot<type>(JSTracer*, ReadBarriered<type>*, const char*); \
     568             :     template void js::TraceRange<type>(JSTracer*, size_t, WriteBarrieredBase<type>*, const char*); \
     569             :     template void js::TraceRootRange<type>(JSTracer*, size_t, type*, const char*);
     570             : FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
     571             : #undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
     572             : 
     573             : #define INSTANTIATE_PUBLIC_TRACE_FUNCTIONS(type) \
     574             :     template JS_PUBLIC_API(void) JS::UnsafeTraceRoot<type>(JSTracer*, type*, const char*); \
     575             :     template JS_PUBLIC_API(void) js::UnsafeTraceManuallyBarrieredEdge<type>(JSTracer*, type*, \
     576             :                                                                             const char*); \
     577             :     template JS_PUBLIC_API(void) js::gc::TraceExternalEdge<type>(JSTracer*, type*, const char*);
     578             : FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_PUBLIC_TRACE_FUNCTIONS)
     579             : FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_PUBLIC_TRACE_FUNCTIONS)
     580             : #undef INSTANTIATE_PUBLIC_TRACE_FUNCTIONS
     581             : 
     582             : template <typename T>
     583             : void
     584           0 : js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
     585             :                                                const char* name)
     586             : {
     587           0 :     if (ShouldTraceCrossCompartment(trc, src, *dst))
     588           0 :         DispatchToTracer(trc, dst, name);
     589           0 : }
     590             : template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(JSTracer*, JSObject*,
     591             :                                                                         JSObject**, const char*);
     592             : template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSScript*>(JSTracer*, JSObject*,
     593             :                                                                         JSScript**, const char*);
     594             : 
     595             : template <typename T>
     596             : void
     597        2112 : js::TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, WriteBarrieredBase<T>* dst,
     598             :                               const char* name)
     599             : {
     600        2112 :     if (ShouldTraceCrossCompartment(trc, src, dst->get()))
     601        2111 :         DispatchToTracer(trc, dst->unsafeUnbarrieredForTracing(), name);
     602        2112 : }
     603             : template void js::TraceCrossCompartmentEdge<Value>(JSTracer*, JSObject*,
     604             :                                                    WriteBarrieredBase<Value>*, const char*);
     605             : 
     606             : template <typename T>
     607             : void
     608        7510 : js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name)
     609             : {
     610        7510 :     AssertRootMarkingPhase(trc);
     611        7510 :     MOZ_ASSERT(ThingIsPermanentAtomOrWellKnownSymbol(thing));
     612             : 
     613             :     // We have to mark permanent atoms and well-known symbols through a special
     614             :     // method because the default DoMarking implementation automatically skips
     615             :     // them. Fortunately, atoms (permanent and non) cannot refer to other GC
     616             :     // things so they do not need to go through the mark stack and may simply
     617             :     // be marked directly.  Moreover, well-known symbols can refer only to
     618             :     // permanent atoms, so likewise require no subsquent marking.
     619        7510 :     CheckTracedThing(trc, *ConvertToBase(&thing));
     620        7510 :     if (trc->isMarkingTracer())
     621        7510 :         thing->markIfUnmarked(gc::MarkColor::Black);
     622             :     else
     623           0 :         DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
     624        7510 : }
     625             : template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*, const char*);
     626             : template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*, const char*);
     627             : 
     628             : // A typed functor adaptor for TraceRoot.
     629             : struct TraceRootFunctor {
     630             :     template <typename T>
     631           0 :     void operator()(JSTracer* trc, Cell** thingp, const char* name) {
     632           0 :         TraceRoot(trc, reinterpret_cast<T**>(thingp), name);
     633           0 :     }
     634             : };
     635             : 
     636             : void
     637           0 : js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name)
     638             : {
     639           0 :     MOZ_ASSERT(thingp);
     640           0 :     if (!*thingp)
     641           0 :         return;
     642             :     TraceRootFunctor f;
     643           0 :     DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
     644             : }
     645             : 
     646             : // A typed functor adaptor for TraceManuallyBarrieredEdge.
     647             : struct TraceManuallyBarrieredEdgeFunctor {
     648             :     template <typename T>
     649       20718 :     void operator()(JSTracer* trc, Cell** thingp, const char* name) {
     650       20718 :         TraceManuallyBarrieredEdge(trc, reinterpret_cast<T**>(thingp), name);
     651       20718 :     }
     652             : };
     653             : 
     654             : void
     655       20718 : js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name)
     656             : {
     657       20718 :     MOZ_ASSERT(thingp);
     658       20718 :     if (!*thingp)
     659           0 :         return;
     660             :     TraceManuallyBarrieredEdgeFunctor f;
     661       20718 :     DispatchTraceKindTyped(f, (*thingp)->getTraceKind(), trc, thingp, name);
     662             : }
     663             : 
     664             : // This method is responsible for dynamic dispatch to the real tracer
     665             : // implementation. Consider replacing this choke point with virtual dispatch:
     666             : // a sufficiently smart C++ compiler may be able to devirtualize some paths.
     667             : template <typename T>
     668             : void
     669       95416 : DispatchToTracer(JSTracer* trc, T* thingp, const char* name)
     670             : {
     671             : #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
     672             :     static_assert(
     673             :             JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
     674             :             mozilla::IsSame<T, JS::Value>::value ||
     675             :             mozilla::IsSame<T, jsid>::value ||
     676             :             mozilla::IsSame<T, TaggedProto>::value,
     677             :             "Only the base cell layout types are allowed into marking/tracing internals");
     678             : #undef IS_SAME_TYPE_OR
     679       95416 :     if (trc->isMarkingTracer())
     680       34036 :         return DoMarking(GCMarker::fromTracer(trc), *thingp);
     681       61380 :     if (trc->isTenuringTracer())
     682       52328 :         return static_cast<TenuringTracer*>(trc)->traverse(thingp);
     683        9052 :     MOZ_ASSERT(trc->isCallbackTracer());
     684        9052 :     DoCallback(trc->asCallbackTracer(), thingp, name);
     685             : }
     686             : 
     687             : 
     688             : /*** GC Marking Interface *************************************************************************/
     689             : 
     690             : namespace js {
     691             : 
     692             : typedef bool HasNoImplicitEdgesType;
     693             : 
     694             : template <typename T>
     695             : struct ImplicitEdgeHolderType {
     696             :     typedef HasNoImplicitEdgesType Type;
     697             : };
     698             : 
     699             : // For now, we only handle JSObject* and JSScript* keys, but the linear time
     700             : // algorithm can be easily extended by adding in more types here, then making
     701             : // GCMarker::traverse<T> call markPotentialEphemeronKey.
     702             : template <>
     703             : struct ImplicitEdgeHolderType<JSObject*> {
     704             :     typedef JSObject* Type;
     705             : };
     706             : 
     707             : template <>
     708             : struct ImplicitEdgeHolderType<JSScript*> {
     709             :     typedef JSScript* Type;
     710             : };
     711             : 
     712             : void
     713           0 : GCMarker::markEphemeronValues(gc::Cell* markedCell, WeakEntryVector& values)
     714             : {
     715           0 :     size_t initialLen = values.length();
     716           0 :     for (size_t i = 0; i < initialLen; i++)
     717           0 :         values[i].weakmap->markEntry(this, markedCell, values[i].key);
     718             : 
     719             :     // The vector should not be appended to during iteration because the key is
     720             :     // already marked, and even in cases where we have a multipart key, we
     721             :     // should only be inserting entries for the unmarked portions.
     722           0 :     MOZ_ASSERT(values.length() == initialLen);
     723           0 : }
     724             : 
     725             : template <typename T>
     726             : void
     727        4198 : GCMarker::markImplicitEdgesHelper(T markedThing)
     728             : {
     729        4198 :     if (!isWeakMarkingTracer())
     730        4198 :         return;
     731             : 
     732           0 :     Zone* zone = gc::TenuredCell::fromPointer(markedThing)->zone();
     733           0 :     MOZ_ASSERT(zone->isGCMarking());
     734           0 :     MOZ_ASSERT(!zone->isGCSweeping());
     735             : 
     736           0 :     auto p = zone->gcWeakKeys().get(JS::GCCellPtr(markedThing));
     737           0 :     if (!p)
     738           0 :         return;
     739           0 :     WeakEntryVector& markables = p->value;
     740             : 
     741           0 :     markEphemeronValues(markedThing, markables);
     742           0 :     markables.clear(); // If key address is reused, it should do nothing
     743             : }
     744             : 
     745             : template <>
     746             : void
     747        1207 : GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType)
     748             : {
     749        1207 : }
     750             : 
     751             : template <typename T>
     752             : void
     753        5405 : GCMarker::markImplicitEdges(T* thing)
     754             : {
     755        5405 :     markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
     756        5405 : }
     757             : 
     758             : } // namespace js
     759             : 
     760             : template <typename T>
     761             : static inline bool
     762       26205 : ShouldMark(GCMarker* gcmarker, T thing)
     763             : {
     764             :     // Don't trace things that are owned by another runtime.
     765       26205 :     if (IsOwnedByOtherRuntime(gcmarker->runtime(), thing))
     766           0 :         return false;
     767             : 
     768             :     // Don't mark things outside a zone if we are in a per-zone GC.
     769       26205 :     return thing->zone()->shouldMarkInZone();
     770             : }
     771             : 
     772             : template <>
     773             : bool
     774        7823 : ShouldMark<JSObject*>(GCMarker* gcmarker, JSObject* obj)
     775             : {
     776             :     // Don't trace things that are owned by another runtime.
     777        7823 :     if (IsOwnedByOtherRuntime(gcmarker->runtime(), obj))
     778           0 :         return false;
     779             : 
     780             :     // We may mark a Nursery thing outside the context of the
     781             :     // MinorCollectionTracer because of a pre-barrier. The pre-barrier is not
     782             :     // needed in this case because we perform a minor collection before each
     783             :     // incremental slice.
     784        7823 :     if (IsInsideNursery(obj))
     785           0 :         return false;
     786             : 
     787             :     // Don't mark things outside a zone if we are in a per-zone GC. It is
     788             :     // faster to check our own arena, which we can do since we know that
     789             :     // the object is tenured.
     790        7823 :     return obj->asTenured().zone()->shouldMarkInZone();
     791             : }
     792             : 
     793             : template <typename T>
     794             : void
     795       34028 : DoMarking(GCMarker* gcmarker, T* thing)
     796             : {
     797             :     // Do per-type marking precondition checks.
     798       34028 :     if (!ShouldMark(gcmarker, thing))
     799           0 :         return;
     800             : 
     801       34028 :     CheckTracedThing(gcmarker, thing);
     802       34028 :     gcmarker->traverse(thing);
     803             : 
     804             :     // Mark the compartment as live.
     805       34028 :     SetMaybeAliveFlag(thing);
     806             : }
     807             : 
     808             : template <typename S>
     809             : struct DoMarkingFunctor : public VoidDefaultAdaptor<S> {
     810         378 :     template <typename T> void operator()(T* t, GCMarker* gcmarker) { DoMarking(gcmarker, t); }
     811             : };
     812             : 
     813             : template <typename T>
     814             : void
     815         386 : DoMarking(GCMarker* gcmarker, const T& thing)
     816             : {
     817         386 :     DispatchTyped(DoMarkingFunctor<T>(), thing, gcmarker);
     818         386 : }
     819             : 
     820             : template <typename T>
     821             : void
     822           0 : NoteWeakEdge(GCMarker* gcmarker, T** thingp)
     823             : {
     824             :     // Do per-type marking precondition checks.
     825           0 :     if (!ShouldMark(gcmarker, *thingp))
     826           0 :         return;
     827             : 
     828           0 :     CheckTracedThing(gcmarker, *thingp);
     829             : 
     830             :     // If the target is already marked, there's no need to store the edge.
     831           0 :     if (IsMarkedUnbarriered(gcmarker->runtime(), thingp))
     832           0 :         return;
     833             : 
     834           0 :     gcmarker->noteWeakEdge(thingp);
     835             : }
     836             : 
     837             : template <typename T>
     838             : void
     839           0 : NoteWeakEdge(GCMarker* gcmarker, T* thingp)
     840             : {
     841           0 :     MOZ_CRASH("the gc does not support tagged pointers as weak edges");
     842             : }
     843             : 
     844             : template <typename T>
     845             : void
     846          87 : js::GCMarker::noteWeakEdge(T* edge)
     847             : {
     848             :     static_assert(IsBaseOf<Cell, typename mozilla::RemovePointer<T>::Type>::value,
     849             :                   "edge must point to a GC pointer");
     850          87 :     MOZ_ASSERT((*edge)->isTenured());
     851             : 
     852             :     // Note: we really want the *source* Zone here. The edge may start in a
     853             :     // non-gc heap location, however, so we use the fact that cross-zone weak
     854             :     // references are not allowed and use the *target's* zone.
     855          87 :     JS::Zone::WeakEdges &weakRefs = (*edge)->asTenured().zone()->gcWeakRefs();
     856         174 :     AutoEnterOOMUnsafeRegion oomUnsafe;
     857          87 :     if (!weakRefs.append(reinterpret_cast<TenuredCell**>(edge)))
     858           0 :         oomUnsafe.crash("Failed to record a weak edge for sweeping.");
     859          87 : }
     860             : 
     861             : // The simplest traversal calls out to the fully generic traceChildren function
     862             : // to visit the child edges. In the absence of other traversal mechanisms, this
     863             : // function will rapidly grow the stack past its bounds and crash the process.
     864             : // Thus, this generic tracing should only be used in cases where subsequent
     865             : // tracing will not recurse.
     866             : template <typename T>
     867             : void
     868         815 : js::GCMarker::markAndTraceChildren(T* thing)
     869             : {
     870         815 :     if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
     871          38 :         return;
     872         777 :     if (mark(thing))
     873          32 :         thing->traceChildren(this);
     874             : }
     875             : namespace js {
     876         777 : template <> void GCMarker::traverse(BaseShape* thing) { markAndTraceChildren(thing); }
     877          38 : template <> void GCMarker::traverse(JS::Symbol* thing) { markAndTraceChildren(thing); }
     878           0 : template <> void GCMarker::traverse(RegExpShared* thing) { markAndTraceChildren(thing); }
     879             : } // namespace js
     880             : 
     881             : // Strings, LazyScripts, Shapes, and Scopes are extremely common, but have
     882             : // simple patterns of recursion. We traverse trees of these edges immediately,
     883             : // with aggressive, manual inlining, implemented by eagerlyTraceChildren.
     884             : template <typename T>
     885             : void
     886       25174 : js::GCMarker::markAndScan(T* thing)
     887             : {
     888       25174 :     if (ThingIsPermanentAtomOrWellKnownSymbol(thing))
     889        2794 :         return;
     890       22380 :     if (mark(thing))
     891       10041 :         eagerlyMarkChildren(thing);
     892             : }
     893             : namespace js {
     894       17630 : template <> void GCMarker::traverse(JSString* thing) { markAndScan(thing); }
     895          87 : template <> void GCMarker::traverse(LazyScript* thing) { markAndScan(thing); }
     896        6447 : template <> void GCMarker::traverse(Shape* thing) { markAndScan(thing); }
     897        1010 : template <> void GCMarker::traverse(js::Scope* thing) { markAndScan(thing); }
     898             : } // namespace js
     899             : 
     900             : // Object and ObjectGroup are extremely common and can contain arbitrarily
     901             : // nested graphs, so are not trivially inlined. In this case we use a mark
     902             : // stack to control recursion. JitCode shares none of these properties, but is
     903             : // included for historical reasons. JSScript normally cannot recurse, but may
     904             : // be used as a weakmap key and thereby recurse into weakmapped values.
     905             : template <typename T>
     906             : void
     907       19627 : js::GCMarker::markAndPush(T* thing)
     908             : {
     909       19627 :     if (!mark(thing))
     910       15710 :         return;
     911        3917 :     pushTaggedPtr(thing);
     912        3917 :     markImplicitEdges(thing);
     913             : }
     914             : namespace js {
     915       10117 : template <> void GCMarker::traverse(JSObject* thing) { markAndPush(thing); }
     916        4379 : template <> void GCMarker::traverse(ObjectGroup* thing) { markAndPush(thing); }
     917        4266 : template <> void GCMarker::traverse(jit::JitCode* thing) { markAndPush(thing); }
     918         865 : template <> void GCMarker::traverse(JSScript* thing) { markAndPush(thing); }
     919             : } // namespace js
     920             : 
     921             : namespace js {
     922             : template <>
     923             : void
     924           0 : GCMarker::traverse(AccessorShape* thing) {
     925           0 :     MOZ_CRASH("AccessorShape must be marked as a Shape");
     926             : }
     927             : } // namespace js
     928             : 
     929             : template <typename S, typename T>
     930             : static void
     931       16238 : CheckTraversedEdge(S source, T* target)
     932             : {
     933             :     // Atoms and Symbols do not have or mark their internal pointers, respectively.
     934       16238 :     MOZ_ASSERT(!ThingIsPermanentAtomOrWellKnownSymbol(source));
     935             : 
     936             :     // The Zones must match, unless the target is an atom.
     937       16238 :     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target),
     938             :                   target->zone()->isAtomsZone() || target->zone() == source->zone());
     939             : 
     940             :     // If we are marking an atom, that atom must be marked in the source zone's
     941             :     // atom bitmap.
     942       16238 :     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target) &&
     943             :                   target->zone()->isAtomsZone() && !source->zone()->isAtomsZone(),
     944             :                   target->runtimeFromAnyThread()->gc.atomMarking
     945             :                       .atomIsMarked(source->zone(), reinterpret_cast<TenuredCell*>(target)));
     946             : 
     947             :     // Atoms and Symbols do not have access to a compartment pointer, or we'd need
     948             :     // to adjust the subsequent check to catch that case.
     949       16238 :     MOZ_ASSERT_IF(ThingIsPermanentAtomOrWellKnownSymbol(target), !target->maybeCompartment());
     950       16238 :     MOZ_ASSERT_IF(target->zoneFromAnyThread()->isAtomsZone(), !target->maybeCompartment());
     951             :     // If we have access to a compartment pointer for both things, they must match.
     952       16238 :     MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
     953             :                   source->maybeCompartment() == target->maybeCompartment());
     954       16238 : }
     955             : 
     956             : template <typename S, typename T>
     957             : void
     958       11588 : js::GCMarker::traverseEdge(S source, T* target)
     959             : {
     960       11588 :     CheckTraversedEdge(source, target);
     961       11588 :     traverse(target);
     962       11588 : }
     963             : 
     964             : template <typename V, typename S> struct TraverseEdgeFunctor : public VoidDefaultAdaptor<V> {
     965        4804 :     template <typename T> void operator()(T t, GCMarker* gcmarker, S s) {
     966        4804 :         return gcmarker->traverseEdge(s, t);
     967             :     }
     968             : };
     969             : 
     970             : template <typename S, typename T>
     971             : void
     972        5070 : js::GCMarker::traverseEdge(S source, const T& thing)
     973             : {
     974        5070 :     DispatchTyped(TraverseEdgeFunctor<T, S>(), thing, this, source);
     975        5070 : }
     976             : 
     977             : template <typename T>
     978             : bool
     979       52858 : js::GCMarker::mark(T* thing)
     980             : {
     981       52858 :     AssertShouldMarkInZone(thing);
     982       52858 :     MOZ_ASSERT(!IsInsideNursery(gc::TenuredCell::fromPointer(thing)));
     983             :     return gc::ParticipatesInCC<T>::value
     984       52858 :            ? gc::TenuredCell::fromPointer(thing)->markIfUnmarked(markColor())
     985       52858 :            : gc::TenuredCell::fromPointer(thing)->markIfUnmarked(gc::MarkColor::Black);
     986             : }
     987             : 
     988             : 
     989             : /*** Inline, Eager GC Marking *********************************************************************/
     990             : 
     991             : // Each of the eager, inline marking paths is directly preceeded by the
     992             : // out-of-line, generic tracing code for comparison. Both paths must end up
     993             : // traversing equivalent subgraphs.
     994             : 
     995             : void
     996           0 : LazyScript::traceChildren(JSTracer* trc)
     997             : {
     998           0 :     if (script_)
     999           0 :         TraceWeakEdge(trc, &script_, "script");
    1000             : 
    1001           0 :     if (function_)
    1002           0 :         TraceEdge(trc, &function_, "function");
    1003             : 
    1004           0 :     if (sourceObject_)
    1005           0 :         TraceEdge(trc, &sourceObject_, "sourceObject");
    1006             : 
    1007           0 :     if (enclosingScope_)
    1008           0 :         TraceEdge(trc, &enclosingScope_, "enclosingScope");
    1009             : 
    1010             :     // We rely on the fact that atoms are always tenured.
    1011           0 :     JSAtom** closedOverBindings = this->closedOverBindings();
    1012           0 :     for (auto i : IntegerRange(numClosedOverBindings())) {
    1013           0 :         if (closedOverBindings[i])
    1014           0 :             TraceManuallyBarrieredEdge(trc, &closedOverBindings[i], "closedOverBinding");
    1015             :     }
    1016             : 
    1017           0 :     GCPtrFunction* innerFunctions = this->innerFunctions();
    1018           0 :     for (auto i : IntegerRange(numInnerFunctions()))
    1019           0 :         TraceEdge(trc, &innerFunctions[i], "lazyScriptInnerFunction");
    1020           0 : }
    1021             : inline void
    1022          87 : js::GCMarker::eagerlyMarkChildren(LazyScript *thing)
    1023             : {
    1024          87 :     if (thing->script_)
    1025          87 :         noteWeakEdge(thing->script_.unsafeUnbarrieredForTracing());
    1026             : 
    1027          87 :     if (thing->function_)
    1028          87 :         traverseEdge(thing, static_cast<JSObject*>(thing->function_));
    1029             : 
    1030          87 :     if (thing->sourceObject_)
    1031          87 :         traverseEdge(thing, static_cast<JSObject*>(thing->sourceObject_));
    1032             : 
    1033          87 :     if (thing->enclosingScope_)
    1034          87 :         traverseEdge(thing, static_cast<Scope*>(thing->enclosingScope_));
    1035             : 
    1036             :     // We rely on the fact that atoms are always tenured.
    1037          87 :     JSAtom** closedOverBindings = thing->closedOverBindings();
    1038         180 :     for (auto i : IntegerRange(thing->numClosedOverBindings())) {
    1039          93 :         if (closedOverBindings[i])
    1040           0 :             traverseEdge(thing, static_cast<JSString*>(closedOverBindings[i]));
    1041             :     }
    1042             : 
    1043          87 :     GCPtrFunction* innerFunctions = thing->innerFunctions();
    1044          87 :     for (auto i : IntegerRange(thing->numInnerFunctions()))
    1045           0 :         traverseEdge(thing, static_cast<JSObject*>(innerFunctions[i]));
    1046          87 : }
    1047             : 
    1048             : void
    1049           0 : Shape::traceChildren(JSTracer* trc)
    1050             : {
    1051           0 :     TraceEdge(trc, &base_, "base");
    1052           0 :     TraceEdge(trc, &propidRef(), "propid");
    1053           0 :     if (parent)
    1054           0 :         TraceEdge(trc, &parent, "parent");
    1055             : 
    1056           0 :     if (hasGetterObject())
    1057           0 :         TraceManuallyBarrieredEdge(trc, &asAccessorShape().getterObj, "getter");
    1058           0 :     if (hasSetterObject())
    1059           0 :         TraceManuallyBarrieredEdge(trc, &asAccessorShape().setterObj, "setter");
    1060           0 : }
    1061             : inline void
    1062        1685 : js::GCMarker::eagerlyMarkChildren(Shape* shape)
    1063             : {
    1064        1685 :     MOZ_ASSERT_IF(markColor() == MarkColor::Gray, shape->isMarkedGray());
    1065        1685 :     MOZ_ASSERT_IF(markColor() == MarkColor::Black, shape->isMarkedBlack());
    1066             : 
    1067        4650 :     do {
    1068             :         // Special case: if a base shape has a shape table then all its pointers
    1069             :         // must point to this shape or an anscestor.  Since these pointers will
    1070             :         // be traced by this loop they do not need to be traced here as well.
    1071        4650 :         BaseShape* base = shape->base();
    1072        4650 :         CheckTraversedEdge(shape, base);
    1073        4650 :         if (mark(base)) {
    1074         377 :             MOZ_ASSERT(base->canSkipMarkingShapeTable(shape));
    1075         377 :             base->traceChildrenSkipShapeTable(this);
    1076             :         }
    1077             : 
    1078        4650 :         traverseEdge(shape, shape->propidRef().get());
    1079             : 
    1080             :         // When triggered between slices on belhalf of a barrier, these
    1081             :         // objects may reside in the nursery, so require an extra check.
    1082             :         // FIXME: Bug 1157967 - remove the isTenured checks.
    1083        4650 :         if (shape->hasGetterObject() && shape->getterObject()->isTenured())
    1084         448 :             traverseEdge(shape, shape->getterObject());
    1085        4650 :         if (shape->hasSetterObject() && shape->setterObject()->isTenured())
    1086         198 :             traverseEdge(shape, shape->setterObject());
    1087             : 
    1088        4650 :         shape = shape->previous();
    1089        4650 :     } while (shape && mark(shape));
    1090        1685 : }
    1091             : 
    1092             : void
    1093           0 : JSString::traceChildren(JSTracer* trc)
    1094             : {
    1095           0 :     if (hasBase())
    1096           0 :         traceBase(trc);
    1097           0 :     else if (isRope())
    1098           0 :         asRope().traceChildren(trc);
    1099           0 : }
    1100             : inline void
    1101        7790 : GCMarker::eagerlyMarkChildren(JSString* str)
    1102             : {
    1103        7790 :     if (str->isLinear())
    1104        7790 :         eagerlyMarkChildren(&str->asLinear());
    1105             :     else
    1106           0 :         eagerlyMarkChildren(&str->asRope());
    1107        7790 : }
    1108             : 
    1109             : void
    1110           0 : JSString::traceBase(JSTracer* trc)
    1111             : {
    1112           0 :     MOZ_ASSERT(hasBase());
    1113           0 :     TraceManuallyBarrieredEdge(trc, &d.s.u3.base, "base");
    1114           0 : }
    1115             : inline void
    1116        7790 : js::GCMarker::eagerlyMarkChildren(JSLinearString* linearStr)
    1117             : {
    1118        7790 :     AssertShouldMarkInZone(linearStr);
    1119        7790 :     MOZ_ASSERT(linearStr->isMarkedAny());
    1120        7790 :     MOZ_ASSERT(linearStr->JSString::isLinear());
    1121             : 
    1122             :     // Use iterative marking to avoid blowing out the stack.
    1123        7790 :     while (linearStr->hasBase()) {
    1124           0 :         linearStr = linearStr->base();
    1125           0 :         MOZ_ASSERT(linearStr->JSString::isLinear());
    1126           0 :         if (linearStr->isPermanentAtom())
    1127           0 :             break;
    1128           0 :         AssertShouldMarkInZone(linearStr);
    1129           0 :         if (!mark(static_cast<JSString*>(linearStr)))
    1130           0 :             break;
    1131             :     }
    1132        7790 : }
    1133             : 
    1134             : void
    1135           0 : JSRope::traceChildren(JSTracer* trc) {
    1136           0 :     js::TraceManuallyBarrieredEdge(trc, &d.s.u2.left, "left child");
    1137           0 :     js::TraceManuallyBarrieredEdge(trc, &d.s.u3.right, "right child");
    1138           0 : }
    1139             : inline void
    1140           0 : js::GCMarker::eagerlyMarkChildren(JSRope* rope)
    1141             : {
    1142             :     // This function tries to scan the whole rope tree using the marking stack
    1143             :     // as temporary storage. If that becomes full, the unscanned ropes are
    1144             :     // added to the delayed marking list. When the function returns, the
    1145             :     // marking stack is at the same depth as it was on entry. This way we avoid
    1146             :     // using tags when pushing ropes to the stack as ropes never leak to other
    1147             :     // users of the stack. This also assumes that a rope can only point to
    1148             :     // other ropes or linear strings, it cannot refer to GC things of other
    1149             :     // types.
    1150           0 :     size_t savedPos = stack.position();
    1151           0 :     JS_DIAGNOSTICS_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
    1152             : #ifdef JS_DEBUG
    1153             :     static const size_t DEEP_ROPE_THRESHOLD = 100000;
    1154             :     static const size_t ROPE_CYCLE_HISTORY = 100;
    1155           0 :     DebugOnly<size_t> ropeDepth = 0;
    1156             :     JSRope* history[ROPE_CYCLE_HISTORY];
    1157             : #endif
    1158             :     while (true) {
    1159             : #ifdef JS_DEBUG
    1160           0 :         if (++ropeDepth >= DEEP_ROPE_THRESHOLD) {
    1161             :             // Bug 1011786 comment 294 - detect cyclic ropes. There are some
    1162             :             // legitimate deep ropes, at least in tests. So if we hit a deep
    1163             :             // rope, start recording the nodes we visit and check whether we
    1164             :             // repeat. But do it on a finite window size W so that we're not
    1165             :             // scanning the full history for every node. And only check every
    1166             :             // Wth push, to add only constant overhead per node. This will only
    1167             :             // catch cycles of size up to W (but it seems most likely that any
    1168             :             // cycles will be size 1 or maybe 2.)
    1169           0 :             if ((ropeDepth > DEEP_ROPE_THRESHOLD + ROPE_CYCLE_HISTORY) &&
    1170           0 :                 (ropeDepth % ROPE_CYCLE_HISTORY) == 0)
    1171             :             {
    1172           0 :                 for (size_t i = 0; i < ROPE_CYCLE_HISTORY; i++)
    1173           0 :                     MOZ_ASSERT(history[i] != rope, "cycle detected in rope");
    1174             :             }
    1175           0 :             history[ropeDepth % ROPE_CYCLE_HISTORY] = rope;
    1176             :         }
    1177             : #endif
    1178             : 
    1179           0 :         JS_DIAGNOSTICS_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
    1180           0 :         JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
    1181           0 :         AssertShouldMarkInZone(rope);
    1182           0 :         MOZ_ASSERT(rope->isMarkedAny());
    1183           0 :         JSRope* next = nullptr;
    1184             : 
    1185           0 :         JSString* right = rope->rightChild();
    1186           0 :         if (!right->isPermanentAtom() &&
    1187           0 :             mark(right))
    1188             :         {
    1189           0 :             if (right->isLinear())
    1190           0 :                 eagerlyMarkChildren(&right->asLinear());
    1191             :             else
    1192           0 :                 next = &right->asRope();
    1193             :         }
    1194             : 
    1195           0 :         JSString* left = rope->leftChild();
    1196           0 :         if (!left->isPermanentAtom() &&
    1197           0 :             mark(left))
    1198             :         {
    1199           0 :             if (left->isLinear()) {
    1200           0 :                 eagerlyMarkChildren(&left->asLinear());
    1201             :             } else {
    1202             :                 // When both children are ropes, set aside the right one to
    1203             :                 // scan it later.
    1204           0 :                 if (next && !stack.pushTempRope(next))
    1205           0 :                     delayMarkingChildren(next);
    1206           0 :                 next = &left->asRope();
    1207             :             }
    1208             :         }
    1209           0 :         if (next) {
    1210           0 :             rope = next;
    1211           0 :         } else if (savedPos != stack.position()) {
    1212           0 :             MOZ_ASSERT(savedPos < stack.position());
    1213           0 :             rope = stack.popPtr().asTempRope();
    1214             :         } else {
    1215           0 :             break;
    1216             :         }
    1217           0 :     }
    1218           0 :     MOZ_ASSERT(savedPos == stack.position());
    1219           0 : }
    1220             : 
    1221             : static inline void
    1222           0 : TraceBindingNames(JSTracer* trc, BindingName* names, uint32_t length)
    1223             : {
    1224           0 :     for (uint32_t i = 0; i < length; i++) {
    1225           0 :         JSAtom* name = names[i].name();
    1226           0 :         MOZ_ASSERT(name);
    1227           0 :         TraceManuallyBarrieredEdge(trc, &name, "scope name");
    1228             :     }
    1229           0 : };
    1230             : static inline void
    1231           0 : TraceNullableBindingNames(JSTracer* trc, BindingName* names, uint32_t length)
    1232             : {
    1233           0 :     for (uint32_t i = 0; i < length; i++) {
    1234           0 :         if (JSAtom* name = names[i].name())
    1235           0 :             TraceManuallyBarrieredEdge(trc, &name, "scope name");
    1236             :     }
    1237           0 : };
    1238             : void
    1239           0 : BindingName::trace(JSTracer* trc)
    1240             : {
    1241           0 :     if (JSAtom* atom = name())
    1242           0 :         TraceManuallyBarrieredEdge(trc, &atom, "binding name");
    1243           0 : }
    1244             : void
    1245           0 : BindingIter::trace(JSTracer* trc)
    1246             : {
    1247           0 :     TraceNullableBindingNames(trc, names_, length_);
    1248           0 : }
    1249             : void
    1250           0 : LexicalScope::Data::trace(JSTracer* trc)
    1251             : {
    1252           0 :     TraceBindingNames(trc, names, length);
    1253           0 : }
    1254             : void
    1255           0 : FunctionScope::Data::trace(JSTracer* trc)
    1256             : {
    1257           0 :     TraceNullableEdge(trc, &canonicalFunction, "scope canonical function");
    1258           0 :     TraceNullableBindingNames(trc, names, length);
    1259           0 : }
    1260             : void
    1261           0 : VarScope::Data::trace(JSTracer* trc)
    1262             : {
    1263           0 :     TraceBindingNames(trc, names, length);
    1264           0 : }
    1265             : void
    1266           0 : GlobalScope::Data::trace(JSTracer* trc)
    1267             : {
    1268           0 :     TraceBindingNames(trc, names, length);
    1269           0 : }
    1270             : void
    1271           0 : EvalScope::Data::trace(JSTracer* trc)
    1272             : {
    1273           0 :     TraceBindingNames(trc, names, length);
    1274           0 : }
    1275             : void
    1276           0 : ModuleScope::Data::trace(JSTracer* trc)
    1277             : {
    1278           0 :     TraceNullableEdge(trc, &module, "scope module");
    1279           0 :     TraceBindingNames(trc, names, length);
    1280           0 : }
    1281             : void
    1282           0 : WasmFunctionScope::Data::trace(JSTracer* trc)
    1283             : {
    1284           0 :     TraceNullableEdge(trc, &instance, "wasm function");
    1285           0 :     TraceBindingNames(trc, names, length);
    1286           0 : }
    1287             : void
    1288           0 : Scope::traceChildren(JSTracer* trc)
    1289             : {
    1290           0 :     TraceNullableEdge(trc, &enclosing_, "scope enclosing");
    1291           0 :     TraceNullableEdge(trc, &environmentShape_, "scope env shape");
    1292           0 :     switch (kind_) {
    1293             :       case ScopeKind::Function:
    1294           0 :         reinterpret_cast<FunctionScope::Data*>(data_)->trace(trc);
    1295           0 :         break;
    1296             :       case ScopeKind::FunctionBodyVar:
    1297             :       case ScopeKind::ParameterExpressionVar:
    1298           0 :         reinterpret_cast<VarScope::Data*>(data_)->trace(trc);
    1299           0 :         break;
    1300             :       case ScopeKind::Lexical:
    1301             :       case ScopeKind::SimpleCatch:
    1302             :       case ScopeKind::Catch:
    1303             :       case ScopeKind::NamedLambda:
    1304             :       case ScopeKind::StrictNamedLambda:
    1305           0 :         reinterpret_cast<LexicalScope::Data*>(data_)->trace(trc);
    1306           0 :         break;
    1307             :       case ScopeKind::Global:
    1308             :       case ScopeKind::NonSyntactic:
    1309           0 :         reinterpret_cast<GlobalScope::Data*>(data_)->trace(trc);
    1310           0 :         break;
    1311             :       case ScopeKind::Eval:
    1312             :       case ScopeKind::StrictEval:
    1313           0 :         reinterpret_cast<EvalScope::Data*>(data_)->trace(trc);
    1314           0 :         break;
    1315             :       case ScopeKind::Module:
    1316           0 :         reinterpret_cast<ModuleScope::Data*>(data_)->trace(trc);
    1317           0 :         break;
    1318             :       case ScopeKind::With:
    1319           0 :         break;
    1320             :       case ScopeKind::WasmFunction:
    1321           0 :         reinterpret_cast<WasmFunctionScope::Data*>(data_)->trace(trc);
    1322           0 :         break;
    1323             :     }
    1324           0 : }
    1325             : inline void
    1326         479 : js::GCMarker::eagerlyMarkChildren(Scope* scope)
    1327             : {
    1328         479 :     if (scope->enclosing_)
    1329         459 :         traverseEdge(scope, static_cast<Scope*>(scope->enclosing_));
    1330         479 :     if (scope->environmentShape_)
    1331         114 :         traverseEdge(scope, static_cast<Shape*>(scope->environmentShape_));
    1332         479 :     BindingName* names = nullptr;
    1333         479 :     uint32_t length = 0;
    1334         479 :     switch (scope->kind_) {
    1335             :       case ScopeKind::Function: {
    1336         294 :         FunctionScope::Data* data = reinterpret_cast<FunctionScope::Data*>(scope->data_);
    1337         294 :         traverseEdge(scope, static_cast<JSObject*>(data->canonicalFunction));
    1338         294 :         names = data->names;
    1339         294 :         length = data->length;
    1340         294 :         break;
    1341             :       }
    1342             : 
    1343             :       case ScopeKind::FunctionBodyVar:
    1344             :       case ScopeKind::ParameterExpressionVar: {
    1345           5 :         VarScope::Data* data = reinterpret_cast<VarScope::Data*>(scope->data_);
    1346           5 :         names = data->names;
    1347           5 :         length = data->length;
    1348           5 :         break;
    1349             :       }
    1350             : 
    1351             :       case ScopeKind::Lexical:
    1352             :       case ScopeKind::SimpleCatch:
    1353             :       case ScopeKind::Catch:
    1354             :       case ScopeKind::NamedLambda:
    1355             :       case ScopeKind::StrictNamedLambda: {
    1356         160 :         LexicalScope::Data* data = reinterpret_cast<LexicalScope::Data*>(scope->data_);
    1357         160 :         names = data->names;
    1358         160 :         length = data->length;
    1359         160 :         break;
    1360             :       }
    1361             : 
    1362             :       case ScopeKind::Global:
    1363             :       case ScopeKind::NonSyntactic: {
    1364          20 :         GlobalScope::Data* data = reinterpret_cast<GlobalScope::Data*>(scope->data_);
    1365          20 :         names = data->names;
    1366          20 :         length = data->length;
    1367          20 :         break;
    1368             :       }
    1369             : 
    1370             :       case ScopeKind::Eval:
    1371             :       case ScopeKind::StrictEval: {
    1372           0 :         EvalScope::Data* data = reinterpret_cast<EvalScope::Data*>(scope->data_);
    1373           0 :         names = data->names;
    1374           0 :         length = data->length;
    1375           0 :         break;
    1376             :       }
    1377             : 
    1378             :       case ScopeKind::Module: {
    1379           0 :         ModuleScope::Data* data = reinterpret_cast<ModuleScope::Data*>(scope->data_);
    1380           0 :         traverseEdge(scope, static_cast<JSObject*>(data->module));
    1381           0 :         names = data->names;
    1382           0 :         length = data->length;
    1383           0 :         break;
    1384             :       }
    1385             : 
    1386             :       case ScopeKind::With:
    1387           0 :         break;
    1388             : 
    1389             :       case ScopeKind::WasmFunction: {
    1390           0 :         WasmFunctionScope::Data* data = reinterpret_cast<WasmFunctionScope::Data*>(scope->data_);
    1391           0 :         traverseEdge(scope, static_cast<JSObject*>(data->instance));
    1392           0 :         names = data->names;
    1393           0 :         length = data->length;
    1394           0 :         break;
    1395             :       }
    1396             :     }
    1397         479 :     if (scope->kind_ == ScopeKind::Function) {
    1398         761 :         for (uint32_t i = 0; i < length; i++) {
    1399         467 :             if (JSAtom* name = names[i].name())
    1400         461 :                 traverseEdge(scope, static_cast<JSString*>(name));
    1401             :         }
    1402             :     } else {
    1403         534 :         for (uint32_t i = 0; i < length; i++)
    1404         349 :             traverseEdge(scope, static_cast<JSString*>(names[i].name()));
    1405             :     }
    1406         479 : }
    1407             : 
    1408             : void
    1409           0 : js::ObjectGroup::traceChildren(JSTracer* trc)
    1410             : {
    1411           0 :     unsigned count = getPropertyCount();
    1412           0 :     for (unsigned i = 0; i < count; i++) {
    1413           0 :         if (ObjectGroup::Property* prop = getProperty(i))
    1414           0 :             TraceEdge(trc, &prop->id, "group_property");
    1415             :     }
    1416             : 
    1417           0 :     if (proto().isObject())
    1418           0 :         TraceEdge(trc, &proto(), "group_proto");
    1419             : 
    1420           0 :     if (trc->isMarkingTracer())
    1421           0 :         compartment()->mark();
    1422             : 
    1423           0 :     if (JSObject* global = compartment()->unsafeUnbarrieredMaybeGlobal())
    1424           0 :         TraceManuallyBarrieredEdge(trc, &global, "group_global");
    1425             : 
    1426             : 
    1427           0 :     if (newScript())
    1428           0 :         newScript()->trace(trc);
    1429             : 
    1430           0 :     if (maybePreliminaryObjects())
    1431           0 :         maybePreliminaryObjects()->trace(trc);
    1432             : 
    1433           0 :     if (maybeUnboxedLayout())
    1434           0 :         unboxedLayout().trace(trc);
    1435             : 
    1436           0 :     if (ObjectGroup* unboxedGroup = maybeOriginalUnboxedGroup()) {
    1437           0 :         TraceManuallyBarrieredEdge(trc, &unboxedGroup, "group_original_unboxed_group");
    1438           0 :         setOriginalUnboxedGroup(unboxedGroup);
    1439             :     }
    1440             : 
    1441           0 :     if (JSObject* descr = maybeTypeDescr()) {
    1442           0 :         TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
    1443           0 :         setTypeDescr(&descr->as<TypeDescr>());
    1444             :     }
    1445             : 
    1446           0 :     if (JSObject* fun = maybeInterpretedFunction()) {
    1447           0 :         TraceManuallyBarrieredEdge(trc, &fun, "group_function");
    1448           0 :         setInterpretedFunction(&fun->as<JSFunction>());
    1449             :     }
    1450           0 : }
    1451             : void
    1452         465 : js::GCMarker::lazilyMarkChildren(ObjectGroup* group)
    1453             : {
    1454         465 :     unsigned count = group->getPropertyCount();
    1455        1429 :     for (unsigned i = 0; i < count; i++) {
    1456         964 :         if (ObjectGroup::Property* prop = group->getProperty(i))
    1457         419 :             traverseEdge(group, prop->id.get());
    1458             :     }
    1459             : 
    1460         465 :     if (group->proto().isObject())
    1461         427 :         traverseEdge(group, group->proto().toObject());
    1462             : 
    1463         465 :     group->compartment()->mark();
    1464             : 
    1465         465 :     if (GlobalObject* global = group->compartment()->unsafeUnbarrieredMaybeGlobal())
    1466         465 :         traverseEdge(group, static_cast<JSObject*>(global));
    1467             : 
    1468         465 :     if (group->newScript())
    1469           2 :         group->newScript()->trace(this);
    1470             : 
    1471         465 :     if (group->maybePreliminaryObjects())
    1472          40 :         group->maybePreliminaryObjects()->trace(this);
    1473             : 
    1474         465 :     if (group->maybeUnboxedLayout())
    1475           0 :         group->unboxedLayout().trace(this);
    1476             : 
    1477         465 :     if (ObjectGroup* unboxedGroup = group->maybeOriginalUnboxedGroup())
    1478           0 :         traverseEdge(group, unboxedGroup);
    1479             : 
    1480         465 :     if (TypeDescr* descr = group->maybeTypeDescr())
    1481           0 :         traverseEdge(group, static_cast<JSObject*>(descr));
    1482             : 
    1483         465 :     if (JSFunction* fun = group->maybeInterpretedFunction())
    1484         288 :         traverseEdge(group, static_cast<JSObject*>(fun));
    1485         465 : }
    1486             : 
    1487             : struct TraverseObjectFunctor
    1488             : {
    1489             :     template <typename T>
    1490           0 :     void operator()(T* thing, GCMarker* gcmarker, JSObject* src) {
    1491           0 :         gcmarker->traverseEdge(src, *thing);
    1492           0 :     }
    1493             : };
    1494             : 
    1495             : // Call the trace hook set on the object, if present. If further tracing of
    1496             : // NativeObject fields is required, this will return the native object.
    1497             : enum class CheckGeneration { DoChecks, NoChecks};
    1498             : template <typename Functor, typename... Args>
    1499             : static inline NativeObject*
    1500       24450 : CallTraceHook(Functor f, JSTracer* trc, JSObject* obj, CheckGeneration check, Args&&... args)
    1501             : {
    1502       24450 :     const Class* clasp = obj->getClass();
    1503       24450 :     MOZ_ASSERT(clasp);
    1504       24450 :     MOZ_ASSERT(obj->isNative() == clasp->isNative());
    1505             : 
    1506       24450 :     if (!clasp->hasTrace())
    1507        9126 :         return &obj->as<NativeObject>();
    1508             : 
    1509       15324 :     if (clasp->isTrace(InlineTypedObject::obj_trace)) {
    1510           0 :         Shape** pshape = obj->as<InlineTypedObject>().addressOfShapeFromGC();
    1511           0 :         f(pshape, mozilla::Forward<Args>(args)...);
    1512             : 
    1513           0 :         InlineTypedObject& tobj = obj->as<InlineTypedObject>();
    1514           0 :         if (tobj.typeDescr().hasTraceList()) {
    1515           0 :             VisitTraceList(f, tobj.typeDescr().traceList(), tobj.inlineTypedMemForGC(),
    1516             :                            mozilla::Forward<Args>(args)...);
    1517             :         }
    1518             : 
    1519           0 :         return nullptr;
    1520             :     }
    1521             : 
    1522       15324 :     if (clasp == &UnboxedPlainObject::class_) {
    1523         158 :         JSObject** pexpando = obj->as<UnboxedPlainObject>().addressOfExpando();
    1524         158 :         if (*pexpando)
    1525           0 :             f(pexpando, mozilla::Forward<Args>(args)...);
    1526             : 
    1527         158 :         UnboxedPlainObject& unboxed = obj->as<UnboxedPlainObject>();
    1528             :         const UnboxedLayout& layout = check == CheckGeneration::DoChecks
    1529             :                                       ? unboxed.layout()
    1530         158 :                                       : unboxed.layoutDontCheckGeneration();
    1531         158 :         if (layout.traceList()) {
    1532         158 :             VisitTraceList(f, layout.traceList(), unboxed.data(),
    1533             :                            mozilla::Forward<Args>(args)...);
    1534             :         }
    1535             : 
    1536         158 :         return nullptr;
    1537             :     }
    1538             : 
    1539       15166 :     clasp->doTrace(trc, obj);
    1540             : 
    1541       15166 :     if (!clasp->isNative())
    1542        2112 :         return nullptr;
    1543       13054 :     return &obj->as<NativeObject>();
    1544             : }
    1545             : 
    1546             : template <typename F, typename... Args>
    1547             : static void
    1548         166 : VisitTraceList(F f, const int32_t* traceList, uint8_t* memory, Args&&... args)
    1549             : {
    1550         174 :     while (*traceList != -1) {
    1551           8 :         f(reinterpret_cast<JSString**>(memory + *traceList), mozilla::Forward<Args>(args)...);
    1552           8 :         traceList++;
    1553             :     }
    1554         158 :     traceList++;
    1555         568 :     while (*traceList != -1) {
    1556         205 :         JSObject** objp = reinterpret_cast<JSObject**>(memory + *traceList);
    1557         205 :         if (*objp)
    1558         203 :             f(objp, mozilla::Forward<Args>(args)...);
    1559         205 :         traceList++;
    1560             :     }
    1561         158 :     traceList++;
    1562         158 :     while (*traceList != -1) {
    1563           0 :         f(reinterpret_cast<Value*>(memory + *traceList), mozilla::Forward<Args>(args)...);
    1564           0 :         traceList++;
    1565             :     }
    1566         158 : }
    1567             : 
    1568             : 
    1569             : /*** Mark-stack Marking ***************************************************************************/
    1570             : 
    1571             : bool
    1572           3 : GCMarker::drainMarkStack(SliceBudget& budget)
    1573             : {
    1574             : #ifdef DEBUG
    1575           3 :     MOZ_ASSERT(!strictCompartmentChecking);
    1576           3 :     strictCompartmentChecking = true;
    1577           9 :     auto acc = mozilla::MakeScopeExit([&] {strictCompartmentChecking = false;});
    1578             : #endif
    1579             : 
    1580           3 :     if (budget.isOverBudget())
    1581           0 :         return false;
    1582             : 
    1583           0 :     for (;;) {
    1584        4693 :         while (!stack.isEmpty()) {
    1585        2348 :             processMarkStackTop(budget);
    1586        2348 :             if (budget.isOverBudget()) {
    1587           3 :                 saveValueRanges();
    1588           3 :                 return false;
    1589             :             }
    1590             :         }
    1591             : 
    1592           0 :         if (!hasDelayedChildren())
    1593           0 :             break;
    1594             : 
    1595             :         /*
    1596             :          * Mark children of things that caused too deep recursion during the
    1597             :          * above tracing. Don't do this until we're done with everything
    1598             :          * else.
    1599             :          */
    1600           0 :         if (!markDelayedChildren(budget)) {
    1601           0 :             saveValueRanges();
    1602           0 :             return false;
    1603             :         }
    1604             :     }
    1605             : 
    1606           0 :     return true;
    1607             : }
    1608             : 
    1609             : inline static bool
    1610        1131 : ObjectDenseElementsMayBeMarkable(NativeObject* nobj)
    1611             : {
    1612             :     /*
    1613             :      * For arrays that are large enough it's worth checking the type information
    1614             :      * to see if the object's elements contain any GC pointers.  If not, we
    1615             :      * don't need to trace them.
    1616             :      */
    1617        1131 :     const unsigned MinElementsLength = 32;
    1618        1131 :     if (nobj->getDenseInitializedLength() < MinElementsLength || nobj->isSingleton())
    1619        1124 :         return true;
    1620             : 
    1621           7 :     ObjectGroup* group = nobj->group();
    1622           7 :     if (group->needsSweep() || group->unknownProperties())
    1623           2 :         return true;
    1624             : 
    1625           5 :     HeapTypeSet* typeSet = group->maybeGetProperty(JSID_VOID);
    1626           5 :     if (!typeSet)
    1627           0 :         return true;
    1628             : 
    1629             :     static const uint32_t flagMask =
    1630             :         TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL | TYPE_FLAG_LAZYARGS | TYPE_FLAG_ANYOBJECT;
    1631           5 :     bool mayBeMarkable = typeSet->hasAnyFlag(flagMask) || typeSet->getObjectCount() != 0;
    1632             : 
    1633             : #ifdef DEBUG
    1634           5 :     if (!mayBeMarkable) {
    1635           0 :         const Value* elements = nobj->getDenseElementsAllowCopyOnWrite();
    1636           0 :         for (unsigned i = 0; i < nobj->getDenseInitializedLength(); i++)
    1637           0 :             MOZ_ASSERT(!elements[i].isGCThing());
    1638             :     }
    1639             : #endif
    1640             : 
    1641           5 :     return mayBeMarkable;
    1642             : }
    1643             : 
    1644             : inline void
    1645        2348 : GCMarker::processMarkStackTop(SliceBudget& budget)
    1646             : {
    1647             :     /*
    1648             :      * The function uses explicit goto and implements the scanning of the
    1649             :      * object directly. It allows to eliminate the tail recursion and
    1650             :      * significantly improve the marking performance, see bug 641025.
    1651             :      */
    1652             :     HeapSlot* vp;
    1653             :     HeapSlot* end;
    1654             :     JSObject* obj;
    1655             : 
    1656        2348 :     switch (stack.peekTag()) {
    1657             :       case MarkStack::ValueArrayTag: {
    1658         993 :         auto array = stack.popValueArray();
    1659         993 :         obj = array.ptr.asValueArrayObject();
    1660         993 :         vp = array.start;
    1661         993 :         end = array.end;
    1662         993 :         goto scan_value_array;
    1663             :       }
    1664             : 
    1665             :       case MarkStack::ObjectTag: {
    1666         567 :         obj = stack.popPtr().as<JSObject>();
    1667         567 :         AssertShouldMarkInZone(obj);
    1668         567 :         goto scan_obj;
    1669             :       }
    1670             : 
    1671             :       case MarkStack::GroupTag: {
    1672         465 :         auto group = stack.popPtr().as<ObjectGroup>();
    1673         465 :         return lazilyMarkChildren(group);
    1674             :       }
    1675             : 
    1676             :       case MarkStack::JitCodeTag: {
    1677          22 :         auto code = stack.popPtr().as<jit::JitCode>();
    1678          22 :         return code->traceChildren(this);
    1679             :       }
    1680             : 
    1681             :       case MarkStack::ScriptTag: {
    1682         299 :         auto script = stack.popPtr().as<JSScript>();
    1683         299 :         return script->traceChildren(this);
    1684             :       }
    1685             : 
    1686             :       case MarkStack::SavedValueArrayTag: {
    1687           2 :         auto savedArray = stack.popSavedValueArray();
    1688           2 :         JSObject* obj = savedArray.ptr.asSavedValueArrayObject();
    1689           2 :         if (restoreValueArray(savedArray, &vp, &end))
    1690           2 :             pushValueArray(obj, vp, end);
    1691             :         else
    1692           0 :             repush(obj);
    1693           2 :         return;
    1694             :       }
    1695             : 
    1696           0 :       default: MOZ_CRASH("Invalid tag in mark stack");
    1697             :     }
    1698             :     return;
    1699             : 
    1700             :   scan_value_array:
    1701        2472 :     MOZ_ASSERT(vp <= end);
    1702        3642 :     while (vp != end) {
    1703        1509 :         budget.step();
    1704        1509 :         if (budget.isOverBudget()) {
    1705           0 :             pushValueArray(obj, vp, end);
    1706           0 :             return;
    1707             :         }
    1708             : 
    1709        1509 :         const Value& v = *vp++;
    1710        1509 :         if (v.isString()) {
    1711          53 :             traverseEdge(obj, v.toString());
    1712        1456 :         } else if (v.isObject()) {
    1713        1005 :             JSObject* obj2 = &v.toObject();
    1714        1005 :             MOZ_ASSERT(obj->compartment() == obj2->compartment());
    1715        1005 :             if (mark(obj2)) {
    1716             :                 // Save the rest of this value array for later and start scanning obj2's children.
    1717         924 :                 pushValueArray(obj, vp, end);
    1718         924 :                 obj = obj2;
    1719         924 :                 goto scan_obj;
    1720             :             }
    1721         451 :         } else if (v.isSymbol()) {
    1722           0 :             traverseEdge(obj, v.toSymbol());
    1723         451 :         } else if (v.isPrivateGCThing()) {
    1724           1 :             traverseEdge(obj, v.toGCCellPtr());
    1725             :         }
    1726             :     }
    1727        1548 :     return;
    1728             : 
    1729             :   scan_obj:
    1730             :     {
    1731        1491 :         AssertShouldMarkInZone(obj);
    1732             : 
    1733        1491 :         budget.step();
    1734        1491 :         if (budget.isOverBudget()) {
    1735           3 :             repush(obj);
    1736           3 :             return;
    1737             :         }
    1738             : 
    1739        1488 :         markImplicitEdges(obj);
    1740        1488 :         ObjectGroup* group = obj->groupFromGC();
    1741        1488 :         traverseEdge(obj, group);
    1742             : 
    1743        2976 :         NativeObject *nobj = CallTraceHook(TraverseObjectFunctor(), this, obj,
    1744        1488 :                                            CheckGeneration::DoChecks, this, obj);
    1745        1488 :         if (!nobj)
    1746           9 :             return;
    1747             : 
    1748        1479 :         Shape* shape = nobj->lastProperty();
    1749        1479 :         traverseEdge(obj, shape);
    1750             : 
    1751        1479 :         unsigned nslots = nobj->slotSpan();
    1752             : 
    1753             :         do {
    1754        1479 :             if (nobj->hasEmptyElements())
    1755        1451 :                 break;
    1756             : 
    1757          28 :             if (nobj->denseElementsAreCopyOnWrite()) {
    1758          18 :                 JSObject* owner = nobj->getElementsHeader()->ownerObject();
    1759          18 :                 if (owner != nobj) {
    1760           0 :                     traverseEdge(obj, owner);
    1761           0 :                     break;
    1762             :                 }
    1763             :             }
    1764             : 
    1765          28 :             if (!ObjectDenseElementsMayBeMarkable(nobj))
    1766           0 :                 break;
    1767             : 
    1768          28 :             vp = nobj->getDenseElementsAllowCopyOnWrite();
    1769          28 :             end = vp + nobj->getDenseInitializedLength();
    1770             : 
    1771          28 :             if (!nslots)
    1772          22 :                 goto scan_value_array;
    1773           6 :             pushValueArray(nobj, vp, end);
    1774             :         } while (false);
    1775             : 
    1776        1457 :         vp = nobj->fixedSlots();
    1777        1457 :         if (nobj->slots_) {
    1778          78 :             unsigned nfixed = nobj->numFixedSlots();
    1779          78 :             if (nslots > nfixed) {
    1780          78 :                 pushValueArray(nobj, vp, vp + nfixed);
    1781          78 :                 vp = nobj->slots_;
    1782          78 :                 end = vp + (nslots - nfixed);
    1783          78 :                 goto scan_value_array;
    1784             :             }
    1785             :         }
    1786        1379 :         MOZ_ASSERT(nslots <= nobj->numFixedSlots());
    1787        1379 :         end = vp + nslots;
    1788        1379 :         goto scan_value_array;
    1789             :     }
    1790             : }
    1791             : 
    1792             : /*
    1793             :  * During incremental GC, we return from drainMarkStack without having processed
    1794             :  * the entire stack. At that point, JS code can run and reallocate slot arrays
    1795             :  * that are stored on the stack. To prevent this from happening, we replace all
    1796             :  * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
    1797             :  * pointers are replaced with slot indexes, and slot array end pointers are
    1798             :  * replaced with the kind of index (properties vs. elements).
    1799             :  */
    1800             : void
    1801           3 : GCMarker::saveValueRanges()
    1802             : {
    1803           6 :     MarkStackIter iter(stack);
    1804       10381 :     while (!iter.done()) {
    1805        5189 :         auto tag = iter.peekTag();
    1806        5189 :         if (tag == MarkStack::ValueArrayTag) {
    1807          17 :             auto array = iter.peekValueArray();
    1808             : 
    1809          17 :             NativeObject* obj = &array.ptr.asValueArrayObject()->as<NativeObject>();
    1810          17 :             MOZ_ASSERT(obj->isNative());
    1811             : 
    1812             :             uintptr_t index;
    1813             :             HeapSlot::Kind kind;
    1814          17 :             HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
    1815          17 :             if (array.end == vp + obj->getDenseInitializedLength()) {
    1816           0 :                 MOZ_ASSERT(array.start >= vp);
    1817             :                 // Add the number of shifted elements here (and subtract in
    1818             :                 // restoreValueArray) to ensure shift() calls on the array
    1819             :                 // are handled correctly.
    1820           0 :                 index = obj->unshiftedIndex(array.start - vp);
    1821           0 :                 kind = HeapSlot::Element;
    1822             :             } else {
    1823          17 :                 HeapSlot* vp = obj->fixedSlots();
    1824          17 :                 unsigned nfixed = obj->numFixedSlots();
    1825          17 :                 if (array.start == array.end) {
    1826           7 :                     index = obj->slotSpan();
    1827          10 :                 } else if (array.start >= vp && array.start < vp + nfixed) {
    1828           5 :                     MOZ_ASSERT(array.end == vp + Min(nfixed, obj->slotSpan()));
    1829           5 :                     index = array.start - vp;
    1830             :                 } else {
    1831           5 :                     MOZ_ASSERT(array.start >= obj->slots_ &&
    1832             :                                array.end == obj->slots_ + obj->slotSpan() - nfixed);
    1833           5 :                     index = (array.start - obj->slots_) + nfixed;
    1834             :                 }
    1835          17 :                 kind = HeapSlot::Slot;
    1836             :             }
    1837          17 :             iter.saveValueArray(obj, index, kind);
    1838          17 :             iter.nextArray();
    1839        5172 :         } else if (tag == MarkStack::SavedValueArrayTag) {
    1840          19 :             iter.nextArray();
    1841             :         } else {
    1842        5153 :             iter.nextPtr();
    1843             :         }
    1844             :     }
    1845           3 : }
    1846             : 
    1847             : bool
    1848           2 : GCMarker::restoreValueArray(const MarkStack::SavedValueArray& array,
    1849             :                             HeapSlot** vpp, HeapSlot** endp)
    1850             : {
    1851           2 :     JSObject* objArg = array.ptr.asSavedValueArrayObject();
    1852           2 :     if (!objArg->isNative())
    1853           0 :         return false;
    1854           2 :     NativeObject* obj = &objArg->as<NativeObject>();
    1855             : 
    1856           2 :     uintptr_t start = array.index;
    1857           2 :     if (array.kind == HeapSlot::Element) {
    1858           0 :         uint32_t initlen = obj->getDenseInitializedLength();
    1859             : 
    1860             :         // Account for shifted elements.
    1861           0 :         uint32_t numShifted = obj->getElementsHeader()->numShiftedElements();
    1862           0 :         start = (numShifted < start) ? start - numShifted : 0;
    1863             : 
    1864           0 :         HeapSlot* vp = obj->getDenseElementsAllowCopyOnWrite();
    1865           0 :         if (start < initlen) {
    1866           0 :             *vpp = vp + start;
    1867           0 :             *endp = vp + initlen;
    1868             :         } else {
    1869             :             /* The object shrunk, in which case no scanning is needed. */
    1870           0 :             *vpp = *endp = vp;
    1871             :         }
    1872             :     } else {
    1873           2 :         MOZ_ASSERT(array.kind == HeapSlot::Slot);
    1874           2 :         HeapSlot* vp = obj->fixedSlots();
    1875           2 :         unsigned nfixed = obj->numFixedSlots();
    1876           2 :         unsigned nslots = obj->slotSpan();
    1877           2 :         if (start < nslots) {
    1878           1 :             if (start < nfixed) {
    1879           0 :                 *vpp = vp + start;
    1880           0 :                 *endp = vp + Min(nfixed, nslots);
    1881             :             } else {
    1882           1 :                 *vpp = obj->slots_ + start - nfixed;
    1883           1 :                 *endp = obj->slots_ + nslots - nfixed;
    1884             :             }
    1885             :         } else {
    1886             :             /* The object shrunk, in which case no scanning is needed. */
    1887           1 :             *vpp = *endp = vp;
    1888             :         }
    1889             :     }
    1890             : 
    1891           2 :     MOZ_ASSERT(*vpp <= *endp);
    1892           2 :     return true;
    1893             : }
    1894             : 
    1895             : 
    1896             : /*** Mark Stack ***********************************************************************************/
    1897             : 
    1898             : static_assert(sizeof(MarkStack::TaggedPtr) == sizeof(uintptr_t),
    1899             :               "A TaggedPtr should be the same size as a pointer");
    1900             : static_assert(sizeof(MarkStack::ValueArray) == sizeof(MarkStack::SavedValueArray),
    1901             :               "ValueArray and SavedValueArray should be the same size");
    1902             : static_assert((sizeof(MarkStack::ValueArray) % sizeof(uintptr_t)) == 0,
    1903             :               "ValueArray and SavedValueArray should be multiples of the pointer size");
    1904             : 
    1905             : static const size_t ValueArrayWords = sizeof(MarkStack::ValueArray) / sizeof(uintptr_t);
    1906             : 
    1907             : template <typename T>
    1908             : struct MapTypeToMarkStackTag {};
    1909             : template <>
    1910             : struct MapTypeToMarkStackTag<JSObject*> { static const auto value = MarkStack::ObjectTag; };
    1911             : template <>
    1912             : struct MapTypeToMarkStackTag<ObjectGroup*> { static const auto value = MarkStack::GroupTag; };
    1913             : template <>
    1914             : struct MapTypeToMarkStackTag<jit::JitCode*> { static const auto value = MarkStack::JitCodeTag; };
    1915             : template <>
    1916             : struct MapTypeToMarkStackTag<JSScript*> { static const auto value = MarkStack::ScriptTag; };
    1917             : 
    1918             : static inline bool
    1919        6542 : TagIsArrayTag(MarkStack::Tag tag)
    1920             : {
    1921        6542 :     return tag == MarkStack::ValueArrayTag || tag == MarkStack::SavedValueArrayTag;
    1922             : }
    1923             : 
    1924             : static inline void
    1925        2020 : CheckValueArray(const MarkStack::ValueArray& array)
    1926             : {
    1927        2020 :     MOZ_ASSERT(array.ptr.tag() == MarkStack::ValueArrayTag);
    1928        2020 :     MOZ_ASSERT(uintptr_t(array.start) <= uintptr_t(array.end));
    1929        2020 :     MOZ_ASSERT((uintptr_t(array.end) - uintptr_t(array.start)) % sizeof(Value) == 0);
    1930        2020 : }
    1931             : 
    1932             : static inline void
    1933          19 : CheckSavedValueArray(const MarkStack::SavedValueArray& array)
    1934             : {
    1935          19 :     MOZ_ASSERT(array.ptr.tag() == MarkStack::SavedValueArrayTag);
    1936          19 :     MOZ_ASSERT(array.kind == HeapSlot::Slot || array.kind == HeapSlot::Element);
    1937          19 : }
    1938             : 
    1939             : inline
    1940        4947 : MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
    1941        4947 :   : bits(tag | uintptr_t(ptr))
    1942             : {
    1943        4947 :     MOZ_ASSERT(tag <= LastTag);
    1944        4947 :     MOZ_ASSERT((uintptr_t(ptr) & CellAlignMask) == 0);
    1945        4947 : }
    1946             : 
    1947             : inline MarkStack::Tag
    1948       20558 : MarkStack::TaggedPtr::tag() const
    1949             : {
    1950       20558 :     auto tag = Tag(bits & TagMask);
    1951       20558 :     MOZ_ASSERT(tag <= LastTag);
    1952       20558 :     return tag;
    1953             : }
    1954             : 
    1955             : inline Cell*
    1956        4768 : MarkStack::TaggedPtr::ptr() const
    1957             : {
    1958        4768 :     return reinterpret_cast<Cell*>(bits & ~TagMask);
    1959             : }
    1960             : 
    1961             : template <typename T>
    1962             : inline T*
    1963        1353 : MarkStack::TaggedPtr::as() const
    1964             : {
    1965        1353 :     MOZ_ASSERT(tag() == MapTypeToMarkStackTag<T*>::value);
    1966        1353 :     MOZ_ASSERT(ptr()->asTenured().getTraceKind() == MapTypeToTraceKind<T>::kind);
    1967        1353 :     return static_cast<T*>(ptr());
    1968             : }
    1969             : 
    1970             : inline JSObject*
    1971        1027 : MarkStack::TaggedPtr::asValueArrayObject() const
    1972             : {
    1973        1027 :     MOZ_ASSERT(tag() == ValueArrayTag);
    1974        1027 :     MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::Object);
    1975        1027 :     return static_cast<JSObject*>(ptr());
    1976             : }
    1977             : 
    1978             : inline JSObject*
    1979           4 : MarkStack::TaggedPtr::asSavedValueArrayObject() const
    1980             : {
    1981           4 :     MOZ_ASSERT(tag() == SavedValueArrayTag);
    1982           4 :     MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::Object);
    1983           4 :     return static_cast<JSObject*>(ptr());
    1984             : }
    1985             : 
    1986             : inline JSRope*
    1987           0 : MarkStack::TaggedPtr::asTempRope() const
    1988             : {
    1989           0 :     MOZ_ASSERT(tag() == TempRopeTag);
    1990           0 :     MOZ_ASSERT(ptr()->asTenured().getTraceKind() == JS::TraceKind::String);
    1991           0 :     return static_cast<JSRope*>(ptr());
    1992             : }
    1993             : 
    1994             : inline
    1995        1010 : MarkStack::ValueArray::ValueArray(JSObject* obj, HeapSlot* startArg, HeapSlot* endArg)
    1996        1010 :   : end(endArg), start(startArg), ptr(ValueArrayTag, obj)
    1997        1010 : {}
    1998             : 
    1999             : inline
    2000          17 : MarkStack::SavedValueArray::SavedValueArray(JSObject* obj, size_t indexArg, HeapSlot::Kind kindArg)
    2001          17 :   : kind(kindArg), index(indexArg), ptr(SavedValueArrayTag, obj)
    2002          17 : {}
    2003             : 
    2004           4 : MarkStack::MarkStack(size_t maxCapacity)
    2005             :   : stack_(nullptr)
    2006             :   , tos_(nullptr)
    2007             :   , end_(nullptr)
    2008             :   , baseCapacity_(0)
    2009             :   , maxCapacity_(maxCapacity)
    2010             : #ifdef DEBUG
    2011           4 :   , iteratorCount_(0)
    2012             : #endif
    2013           4 : {}
    2014             : 
    2015           0 : MarkStack::~MarkStack()
    2016             : {
    2017           0 :     MOZ_ASSERT(iteratorCount_ == 0);
    2018           0 :     js_free(stack_);
    2019           0 : }
    2020             : 
    2021             : bool
    2022           4 : MarkStack::init(JSGCMode gcMode)
    2023             : {
    2024           4 :     setBaseCapacity(gcMode);
    2025             : 
    2026           4 :     MOZ_ASSERT(!stack_);
    2027           4 :     auto newStack = js_pod_malloc<TaggedPtr>(baseCapacity_);
    2028           4 :     if (!newStack)
    2029           0 :         return false;
    2030             : 
    2031           4 :     setStack(newStack, 0, baseCapacity_);
    2032           4 :     return true;
    2033             : }
    2034             : 
    2035             : inline void
    2036           4 : MarkStack::setStack(TaggedPtr* stack, size_t tosIndex, size_t capacity)
    2037             : {
    2038           4 :     MOZ_ASSERT(iteratorCount_ == 0);
    2039           4 :     stack_ = stack;
    2040           4 :     tos_ = stack + tosIndex;
    2041           4 :     end_ = stack + capacity;
    2042           4 : }
    2043             : 
    2044             : void
    2045           8 : MarkStack::setBaseCapacity(JSGCMode mode)
    2046             : {
    2047           8 :     switch (mode) {
    2048             :       case JSGC_MODE_GLOBAL:
    2049             :       case JSGC_MODE_ZONE:
    2050           8 :         baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
    2051           8 :         break;
    2052             :       case JSGC_MODE_INCREMENTAL:
    2053           0 :         baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
    2054           0 :         break;
    2055             :       default:
    2056           0 :         MOZ_CRASH("bad gc mode");
    2057             :     }
    2058             : 
    2059           8 :     if (baseCapacity_ > maxCapacity_)
    2060           0 :         baseCapacity_ = maxCapacity_;
    2061           8 : }
    2062             : 
    2063             : void
    2064           0 : MarkStack::setMaxCapacity(size_t maxCapacity)
    2065             : {
    2066           0 :     MOZ_ASSERT(maxCapacity != 0);
    2067           0 :     MOZ_ASSERT(isEmpty());
    2068           0 :     maxCapacity_ = maxCapacity;
    2069           0 :     if (baseCapacity_ > maxCapacity_)
    2070           0 :         baseCapacity_ = maxCapacity_;
    2071             : 
    2072           0 :     reset();
    2073           0 : }
    2074             : 
    2075             : inline bool
    2076        3920 : MarkStack::pushTaggedPtr(Tag tag, Cell* ptr)
    2077             : {
    2078        3920 :     if (!ensureSpace(1))
    2079           0 :         return false;
    2080             : 
    2081        3920 :     MOZ_ASSERT(tos_ < end_);
    2082        3920 :     *tos_++ = TaggedPtr(tag, ptr);
    2083        3920 :     return true;
    2084             : }
    2085             : 
    2086             : template <typename T>
    2087             : inline bool
    2088        3920 : MarkStack::push(T* ptr)
    2089             : {
    2090        3920 :     return pushTaggedPtr(MapTypeToMarkStackTag<T*>::value, ptr);
    2091             : }
    2092             : 
    2093             : inline bool
    2094           0 : MarkStack::pushTempRope(JSRope* rope)
    2095             : {
    2096           0 :     return pushTaggedPtr(TempRopeTag, rope);
    2097             : }
    2098             : 
    2099             : inline bool
    2100        1010 : MarkStack::push(JSObject* obj, HeapSlot* start, HeapSlot* end)
    2101             : {
    2102        1010 :     return push(ValueArray(obj, start, end));
    2103             : }
    2104             : 
    2105             : inline bool
    2106        1010 : MarkStack::push(const ValueArray& array)
    2107             : {
    2108        1010 :     CheckValueArray(array);
    2109             : 
    2110        1010 :     if (!ensureSpace(ValueArrayWords))
    2111           0 :         return false;
    2112             : 
    2113        1010 :     *reinterpret_cast<ValueArray*>(tos_.ref()) = array;
    2114        1010 :     tos_ += ValueArrayWords;
    2115        1010 :     MOZ_ASSERT(tos_ <= end_);
    2116        1010 :     MOZ_ASSERT(peekTag() == ValueArrayTag);
    2117        1010 :     return true;
    2118             : }
    2119             : 
    2120             : inline bool
    2121             : MarkStack::push(const SavedValueArray& array)
    2122             : {
    2123             :     CheckSavedValueArray(array);
    2124             : 
    2125             :     if (!ensureSpace(ValueArrayWords))
    2126             :         return false;
    2127             : 
    2128             :     *reinterpret_cast<SavedValueArray*>(tos_.ref()) = array;
    2129             :     tos_ += ValueArrayWords;
    2130             :     MOZ_ASSERT(tos_ <= end_);
    2131             :     MOZ_ASSERT(peekTag() == SavedValueArrayTag);
    2132             :     return true;
    2133             : }
    2134             : 
    2135             : inline const MarkStack::TaggedPtr&
    2136        5706 : MarkStack::peekPtr() const
    2137             : {
    2138        5706 :     MOZ_ASSERT(!isEmpty());
    2139        5706 :     return tos_[-1];
    2140             : }
    2141             : 
    2142             : inline MarkStack::Tag
    2143        5706 : MarkStack::peekTag() const
    2144             : {
    2145        5706 :     return peekPtr().tag();
    2146             : }
    2147             : 
    2148             : inline MarkStack::TaggedPtr
    2149        1353 : MarkStack::popPtr()
    2150             : {
    2151        1353 :     MOZ_ASSERT(!isEmpty());
    2152        1353 :     MOZ_ASSERT(!TagIsArrayTag(peekTag()));
    2153        1353 :     tos_--;
    2154        1353 :     return *tos_;
    2155             : }
    2156             : 
    2157             : inline MarkStack::ValueArray
    2158         993 : MarkStack::popValueArray()
    2159             : {
    2160         993 :     MOZ_ASSERT(peekTag() == ValueArrayTag);
    2161         993 :     MOZ_ASSERT(position() >= ValueArrayWords);
    2162             : 
    2163         993 :     tos_ -= ValueArrayWords;
    2164         993 :     const auto& array = *reinterpret_cast<ValueArray*>(tos_.ref());
    2165         993 :     CheckValueArray(array);
    2166         993 :     return array;
    2167             : }
    2168             : 
    2169             : inline MarkStack::SavedValueArray
    2170           2 : MarkStack::popSavedValueArray()
    2171             : {
    2172           2 :     MOZ_ASSERT(peekTag() == SavedValueArrayTag);
    2173           2 :     MOZ_ASSERT(position() >= ValueArrayWords);
    2174             : 
    2175           2 :     tos_ -= ValueArrayWords;
    2176           2 :     const auto& array = *reinterpret_cast<SavedValueArray*>(tos_.ref());
    2177           2 :     CheckSavedValueArray(array);
    2178           2 :     return array;
    2179             : }
    2180             : 
    2181             : void
    2182           0 : MarkStack::reset()
    2183             : {
    2184           0 :     if (capacity() == baseCapacity_) {
    2185             :         // No size change; keep the current stack.
    2186           0 :         setStack(stack_, 0, baseCapacity_);
    2187           0 :         return;
    2188             :     }
    2189             : 
    2190           0 :     MOZ_ASSERT(baseCapacity_ != 0);
    2191           0 :     auto newStack = js_pod_realloc<TaggedPtr>(stack_, capacity(), baseCapacity_);
    2192           0 :     if (!newStack) {
    2193             :         // If the realloc fails, just keep using the existing stack; it's
    2194             :         // not ideal but better than failing.
    2195           0 :         newStack = stack_;
    2196           0 :         baseCapacity_ = capacity();
    2197             :     }
    2198           0 :     setStack(newStack, 0, baseCapacity_);
    2199             : }
    2200             : 
    2201             : inline bool
    2202        4930 : MarkStack::ensureSpace(size_t count)
    2203             : {
    2204        4930 :     if ((tos_ + count) <= end_)
    2205        4930 :         return true;
    2206             : 
    2207           0 :     return enlarge(count);
    2208             : }
    2209             : 
    2210             : bool
    2211           0 : MarkStack::enlarge(size_t count)
    2212             : {
    2213           0 :     size_t newCapacity = Min(maxCapacity_.ref(), capacity() * 2);
    2214           0 :     if (newCapacity < capacity() + count)
    2215           0 :         return false;
    2216             : 
    2217           0 :     size_t tosIndex = position();
    2218             : 
    2219           0 :     MOZ_ASSERT(newCapacity != 0);
    2220           0 :     auto newStack = js_pod_realloc<TaggedPtr>(stack_, capacity(), newCapacity);
    2221           0 :     if (!newStack)
    2222           0 :         return false;
    2223             : 
    2224           0 :     setStack(newStack, tosIndex, newCapacity);
    2225           0 :     return true;
    2226             : }
    2227             : 
    2228             : void
    2229           4 : MarkStack::setGCMode(JSGCMode gcMode)
    2230             : {
    2231             :     // The mark stack won't be resized until the next call to reset(), but
    2232             :     // that will happen at the end of the next GC.
    2233           4 :     setBaseCapacity(gcMode);
    2234           4 : }
    2235             : 
    2236             : size_t
    2237           0 : MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
    2238             : {
    2239           0 :     return mallocSizeOf(stack_);
    2240             : }
    2241             : 
    2242           3 : MarkStackIter::MarkStackIter(const MarkStack& stack)
    2243             :   : stack_(stack),
    2244           3 :     pos_(stack.tos_)
    2245             : {
    2246             : #ifdef DEBUG
    2247           3 :     stack.iteratorCount_++;
    2248             : #endif
    2249           3 : }
    2250             : 
    2251           6 : MarkStackIter::~MarkStackIter()
    2252             : {
    2253             : #ifdef DEBUG
    2254           3 :     MOZ_ASSERT(stack_.iteratorCount_);
    2255           3 :     stack_.iteratorCount_--;
    2256             : #endif
    2257           3 : }
    2258             : 
    2259             : inline size_t
    2260       20861 : MarkStackIter::position() const
    2261             : {
    2262       20861 :     return pos_ - stack_.stack_;
    2263             : }
    2264             : 
    2265             : inline bool
    2266       20791 : MarkStackIter::done() const
    2267             : {
    2268       20791 :     return position() == 0;
    2269             : }
    2270             : 
    2271             : inline MarkStack::TaggedPtr
    2272       10446 : MarkStackIter::peekPtr() const
    2273             : {
    2274       10446 :     MOZ_ASSERT(!done());
    2275       10446 :     return pos_[-1];
    2276             : }
    2277             : 
    2278             : inline MarkStack::Tag
    2279       10429 : MarkStackIter::peekTag() const
    2280             : {
    2281       10429 :     return peekPtr().tag();
    2282             : }
    2283             : 
    2284             : inline MarkStack::ValueArray
    2285          17 : MarkStackIter::peekValueArray() const
    2286             : {
    2287          17 :     MOZ_ASSERT(peekTag() == MarkStack::ValueArrayTag);
    2288          17 :     MOZ_ASSERT(position() >= ValueArrayWords);
    2289             : 
    2290          17 :     const auto& array = *reinterpret_cast<MarkStack::ValueArray*>(pos_ - ValueArrayWords);
    2291          17 :     CheckValueArray(array);
    2292          17 :     return array;
    2293             : }
    2294             : 
    2295             : inline void
    2296        5153 : MarkStackIter::nextPtr()
    2297             : {
    2298        5153 :     MOZ_ASSERT(!done());
    2299        5153 :     MOZ_ASSERT(!TagIsArrayTag(peekTag()));
    2300        5153 :     pos_--;
    2301        5153 : }
    2302             : 
    2303             : inline void
    2304           0 : MarkStackIter::next()
    2305             : {
    2306           0 :     if (TagIsArrayTag(peekTag()))
    2307           0 :         nextArray();
    2308             :     else
    2309           0 :         nextPtr();
    2310           0 : }
    2311             : 
    2312             : inline void
    2313          36 : MarkStackIter::nextArray()
    2314             : {
    2315          36 :     MOZ_ASSERT(TagIsArrayTag(peekTag()));
    2316          36 :     MOZ_ASSERT(position() >= ValueArrayWords);
    2317          36 :     pos_ -= ValueArrayWords;
    2318          36 : }
    2319             : 
    2320             : void
    2321          17 : MarkStackIter::saveValueArray(NativeObject* obj, uintptr_t index, HeapSlot::Kind kind)
    2322             : {
    2323          17 :     MOZ_ASSERT(peekTag() == MarkStack::ValueArrayTag);
    2324          17 :     MOZ_ASSERT(peekPtr().asValueArrayObject() == obj);
    2325          17 :     MOZ_ASSERT(position() >= ValueArrayWords);
    2326             : 
    2327          17 :     auto& array = *reinterpret_cast<MarkStack::SavedValueArray*>(pos_ - ValueArrayWords);
    2328          17 :     array = MarkStack::SavedValueArray(obj, index, kind);
    2329          17 :     CheckSavedValueArray(array);
    2330          17 :     MOZ_ASSERT(peekTag() == MarkStack::SavedValueArrayTag);
    2331          17 : }
    2332             : 
    2333             : 
    2334             : /*** GCMarker *************************************************************************************/
    2335             : 
    2336             : /*
    2337             :  * ExpandWeakMaps: the GC is recomputing the liveness of WeakMap entries by
    2338             :  * expanding each live WeakMap into its constituent key->value edges, a table
    2339             :  * of which will be consulted in a later phase whenever marking a potential
    2340             :  * key.
    2341             :  */
    2342           4 : GCMarker::GCMarker(JSRuntime* rt)
    2343             :   : JSTracer(rt, JSTracer::TracerKindTag::Marking, ExpandWeakMaps),
    2344             :     stack(size_t(-1)),
    2345             :     color(MarkColor::Black),
    2346             :     unmarkedArenaStackTop(nullptr)
    2347             : #ifdef DEBUG
    2348             :   , markLaterArenas(0)
    2349             :   , started(false)
    2350           4 :   , strictCompartmentChecking(false)
    2351             : #endif
    2352             : {
    2353           4 : }
    2354             : 
    2355             : bool
    2356           4 : GCMarker::init(JSGCMode gcMode)
    2357             : {
    2358           4 :     return stack.init(gcMode);
    2359             : }
    2360             : 
    2361             : void
    2362           1 : GCMarker::start()
    2363             : {
    2364             : #ifdef DEBUG
    2365           1 :     MOZ_ASSERT(!started);
    2366           1 :     started = true;
    2367             : #endif
    2368           1 :     color = MarkColor::Black;
    2369           1 :     linearWeakMarkingDisabled_ = false;
    2370             : 
    2371           1 :     MOZ_ASSERT(!unmarkedArenaStackTop);
    2372           1 :     MOZ_ASSERT(markLaterArenas == 0);
    2373           1 : }
    2374             : 
    2375             : void
    2376           0 : GCMarker::stop()
    2377             : {
    2378             : #ifdef DEBUG
    2379           0 :     MOZ_ASSERT(isDrained());
    2380             : 
    2381           0 :     MOZ_ASSERT(started);
    2382           0 :     started = false;
    2383             : 
    2384           0 :     MOZ_ASSERT(!unmarkedArenaStackTop);
    2385           0 :     MOZ_ASSERT(markLaterArenas == 0);
    2386             : #endif
    2387             : 
    2388             :     /* Free non-ballast stack memory. */
    2389           0 :     stack.reset();
    2390           0 :     AutoEnterOOMUnsafeRegion oomUnsafe;
    2391           0 :     for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) {
    2392           0 :         if (!zone->gcWeakKeys().clear())
    2393           0 :             oomUnsafe.crash("clearing weak keys in GCMarker::stop()");
    2394             :     }
    2395           0 : }
    2396             : 
    2397             : void
    2398           0 : GCMarker::reset()
    2399             : {
    2400           0 :     color = MarkColor::Black;
    2401             : 
    2402           0 :     stack.reset();
    2403           0 :     MOZ_ASSERT(isMarkStackEmpty());
    2404             : 
    2405           0 :     while (unmarkedArenaStackTop) {
    2406           0 :         Arena* arena = unmarkedArenaStackTop;
    2407           0 :         MOZ_ASSERT(arena->hasDelayedMarking);
    2408           0 :         MOZ_ASSERT(markLaterArenas);
    2409           0 :         unmarkedArenaStackTop = arena->getNextDelayedMarking();
    2410           0 :         arena->unsetDelayedMarking();
    2411           0 :         arena->markOverflow = 0;
    2412           0 :         arena->allocatedDuringIncremental = 0;
    2413             : #ifdef DEBUG
    2414           0 :         markLaterArenas--;
    2415             : #endif
    2416             :     }
    2417           0 :     MOZ_ASSERT(isDrained());
    2418           0 :     MOZ_ASSERT(!markLaterArenas);
    2419           0 : }
    2420             : 
    2421             : 
    2422             : template <typename T>
    2423             : void
    2424        3920 : GCMarker::pushTaggedPtr(T* ptr)
    2425             : {
    2426        3920 :     checkZone(ptr);
    2427        3920 :     if (!stack.push(ptr))
    2428           0 :         delayMarkingChildren(ptr);
    2429        3920 : }
    2430             : 
    2431             : void
    2432        1010 : GCMarker::pushValueArray(JSObject* obj, HeapSlot* start, HeapSlot* end)
    2433             : {
    2434        1010 :     checkZone(obj);
    2435        1010 :     if (!stack.push(obj, start, end))
    2436           0 :         delayMarkingChildren(obj);
    2437        1010 : }
    2438             : 
    2439             : void
    2440           3 : GCMarker::repush(JSObject* obj)
    2441             : {
    2442           3 :     MOZ_ASSERT_IF(markColor() == MarkColor::Gray, gc::TenuredCell::fromPointer(obj)->isMarkedGray());
    2443           3 :     MOZ_ASSERT_IF(markColor() == MarkColor::Black, gc::TenuredCell::fromPointer(obj)->isMarkedBlack());
    2444           3 :     pushTaggedPtr(obj);
    2445           3 : }
    2446             : 
    2447             : void
    2448           0 : GCMarker::enterWeakMarkingMode()
    2449             : {
    2450           0 :     MOZ_ASSERT(tag_ == TracerKindTag::Marking);
    2451           0 :     if (linearWeakMarkingDisabled_)
    2452           0 :         return;
    2453             : 
    2454             :     // During weak marking mode, we maintain a table mapping weak keys to
    2455             :     // entries in known-live weakmaps. Initialize it with the keys of marked
    2456             :     // weakmaps -- or more precisely, the keys of marked weakmaps that are
    2457             :     // mapped to not yet live values. (Once bug 1167452 implements incremental
    2458             :     // weakmap marking, this initialization step will become unnecessary, as
    2459             :     // the table will already hold all such keys.)
    2460           0 :     if (weakMapAction() == ExpandWeakMaps) {
    2461           0 :         tag_ = TracerKindTag::WeakMarking;
    2462             : 
    2463           0 :         for (GCSweepGroupIter zone(runtime()); !zone.done(); zone.next()) {
    2464           0 :             for (WeakMapBase* m : zone->gcWeakMapList()) {
    2465           0 :                 if (m->marked)
    2466           0 :                     (void) m->markIteratively(this);
    2467             :             }
    2468             :         }
    2469             :     }
    2470             : }
    2471             : 
    2472             : void
    2473           0 : GCMarker::leaveWeakMarkingMode()
    2474             : {
    2475           0 :     MOZ_ASSERT_IF(weakMapAction() == ExpandWeakMaps && !linearWeakMarkingDisabled_,
    2476             :                   tag_ == TracerKindTag::WeakMarking);
    2477           0 :     tag_ = TracerKindTag::Marking;
    2478             : 
    2479             :     // Table is expensive to maintain when not in weak marking mode, so we'll
    2480             :     // rebuild it upon entry rather than allow it to contain stale data.
    2481           0 :     AutoEnterOOMUnsafeRegion oomUnsafe;
    2482           0 :     for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) {
    2483           0 :         if (!zone->gcWeakKeys().clear())
    2484           0 :             oomUnsafe.crash("clearing weak keys in GCMarker::leaveWeakMarkingMode()");
    2485             :     }
    2486           0 : }
    2487             : 
    2488             : void
    2489           0 : GCMarker::markDelayedChildren(Arena* arena)
    2490             : {
    2491           0 :     if (arena->markOverflow) {
    2492           0 :         bool always = arena->allocatedDuringIncremental;
    2493           0 :         arena->markOverflow = 0;
    2494             : 
    2495           0 :         for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
    2496           0 :             TenuredCell* t = i.getCell();
    2497           0 :             if (always || t->isMarkedAny()) {
    2498           0 :                 t->markIfUnmarked();
    2499           0 :                 js::TraceChildren(this, t, MapAllocToTraceKind(arena->getAllocKind()));
    2500             :             }
    2501             :         }
    2502             :     } else {
    2503           0 :         MOZ_ASSERT(arena->allocatedDuringIncremental);
    2504           0 :         PushArena(this, arena);
    2505             :     }
    2506           0 :     arena->allocatedDuringIncremental = 0;
    2507             :     /*
    2508             :      * Note that during an incremental GC we may still be allocating into
    2509             :      * the arena. However, prepareForIncrementalGC sets the
    2510             :      * allocatedDuringIncremental flag if we continue marking.
    2511             :      */
    2512           0 : }
    2513             : 
    2514             : bool
    2515           0 : GCMarker::markDelayedChildren(SliceBudget& budget)
    2516             : {
    2517           0 :     GCRuntime& gc = runtime()->gc;
    2518           0 :     gcstats::AutoPhase ap(gc.stats(), gc.state() == State::Mark, gcstats::PhaseKind::MARK_DELAYED);
    2519             : 
    2520           0 :     MOZ_ASSERT(unmarkedArenaStackTop);
    2521           0 :     do {
    2522             :         /*
    2523             :          * If marking gets delayed at the same arena again, we must repeat
    2524             :          * marking of its things. For that we pop arena from the stack and
    2525             :          * clear its hasDelayedMarking flag before we begin the marking.
    2526             :          */
    2527           0 :         Arena* arena = unmarkedArenaStackTop;
    2528           0 :         MOZ_ASSERT(arena->hasDelayedMarking);
    2529           0 :         MOZ_ASSERT(markLaterArenas);
    2530           0 :         unmarkedArenaStackTop = arena->getNextDelayedMarking();
    2531           0 :         arena->unsetDelayedMarking();
    2532             : #ifdef DEBUG
    2533           0 :         markLaterArenas--;
    2534             : #endif
    2535           0 :         markDelayedChildren(arena);
    2536             : 
    2537           0 :         budget.step(150);
    2538           0 :         if (budget.isOverBudget())
    2539           0 :             return false;
    2540           0 :     } while (unmarkedArenaStackTop);
    2541           0 :     MOZ_ASSERT(!markLaterArenas);
    2542             : 
    2543           0 :     return true;
    2544             : }
    2545             : 
    2546             : template<typename T>
    2547             : static void
    2548           0 : PushArenaTyped(GCMarker* gcmarker, Arena* arena)
    2549             : {
    2550           0 :     for (ArenaCellIterUnderGC i(arena); !i.done(); i.next())
    2551           0 :         gcmarker->traverse(i.get<T>());
    2552           0 : }
    2553             : 
    2554             : struct PushArenaFunctor {
    2555           0 :     template <typename T> void operator()(GCMarker* gcmarker, Arena* arena) {
    2556           0 :         PushArenaTyped<T>(gcmarker, arena);
    2557           0 :     }
    2558             : };
    2559             : 
    2560             : void
    2561           0 : gc::PushArena(GCMarker* gcmarker, Arena* arena)
    2562             : {
    2563           0 :     DispatchTraceKindTyped(PushArenaFunctor(),
    2564           0 :                            MapAllocToTraceKind(arena->getAllocKind()), gcmarker, arena);
    2565           0 : }
    2566             : 
    2567             : #ifdef DEBUG
    2568             : void
    2569        4930 : GCMarker::checkZone(void* p)
    2570             : {
    2571        4930 :     MOZ_ASSERT(started);
    2572        9860 :     DebugOnly<Cell*> cell = static_cast<Cell*>(p);
    2573        4930 :     MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
    2574        4930 : }
    2575             : #endif
    2576             : 
    2577             : size_t
    2578           0 : GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
    2579             : {
    2580           0 :     size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
    2581           0 :     for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
    2582           0 :         size += zone->gcGrayRoots().sizeOfExcludingThis(mallocSizeOf);
    2583           0 :     return size;
    2584             : }
    2585             : 
    2586             : #ifdef DEBUG
    2587             : Zone*
    2588           0 : GCMarker::stackContainsCrossZonePointerTo(const Cell* target) const
    2589             : {
    2590           0 :     MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
    2591             : 
    2592           0 :     Zone* targetZone = target->asTenured().zone();
    2593             : 
    2594           0 :     for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
    2595           0 :         if (iter.peekTag() != MarkStack::ObjectTag)
    2596           0 :             continue;
    2597             : 
    2598           0 :         auto source = iter.peekPtr().as<JSObject>();
    2599           0 :         Zone* sourceZone = source->zone();
    2600           0 :         if (sourceZone == targetZone)
    2601           0 :             continue;
    2602             : 
    2603             :         // The private slot of proxy objects might contain a cross-compartment
    2604             :         // pointer.
    2605           0 :         if (source->is<ProxyObject>()) {
    2606           0 :             Value value = source->as<ProxyObject>().private_();
    2607           0 :             MOZ_ASSERT_IF(!IsCrossCompartmentWrapper(source),
    2608             :                           IsObjectValueInCompartment(value, source->compartment()));
    2609           0 :             if (value.isObject() && &value.toObject() == target)
    2610           0 :                 return sourceZone;
    2611             :         }
    2612             : 
    2613           0 :         if (Debugger::isDebuggerCrossCompartmentEdge(source, target))
    2614           0 :             return sourceZone;
    2615             :     }
    2616             : 
    2617           0 :     return nullptr;
    2618             : }
    2619             : #endif // DEBUG
    2620             : 
    2621             : 
    2622             : /*** Tenuring Tracer *****************************************************************************/
    2623             : 
    2624             : namespace js {
    2625             : template <typename T>
    2626             : void
    2627       34578 : TenuringTracer::traverse(T** tp)
    2628             : {
    2629       34578 : }
    2630             : 
    2631             : template <>
    2632             : void
    2633       55349 : TenuringTracer::traverse(JSObject** objp)
    2634             : {
    2635             :     // We only ever visit the internals of objects after moving them to tenured.
    2636       55349 :     MOZ_ASSERT(!nursery().isInside(objp));
    2637             : 
    2638       55349 :     if (IsInsideNursery(*objp) && !nursery().getForwardedPointer(objp))
    2639       22681 :         *objp = moveToTenured(*objp);
    2640       55349 : }
    2641             : 
    2642             : template <typename S>
    2643             : struct TenuringTraversalFunctor : public IdentityDefaultAdaptor<S> {
    2644       42075 :     template <typename T> S operator()(T* t, TenuringTracer* trc) {
    2645       42075 :         trc->traverse(&t);
    2646       42075 :         return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
    2647             :     }
    2648             : };
    2649             : 
    2650             : template <typename T>
    2651             : void
    2652       53601 : TenuringTracer::traverse(T* thingp)
    2653             : {
    2654       53601 :     *thingp = DispatchTyped(TenuringTraversalFunctor<T>(), *thingp, this);
    2655       53601 : }
    2656             : } // namespace js
    2657             : 
    2658             : template <typename T>
    2659             : void
    2660          63 : js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(StoreBuffer* owner, TenuringTracer& mover)
    2661             : {
    2662         126 :     mozilla::ReentrancyGuard g(*owner);
    2663          63 :     MOZ_ASSERT(owner->isEnabled());
    2664          63 :     MOZ_ASSERT(stores_.initialized());
    2665          63 :     if (last_)
    2666          58 :         last_.trace(mover);
    2667        6858 :     for (typename StoreSet::Range r = stores_.all(); !r.empty(); r.popFront())
    2668        6795 :         r.front().trace(mover);
    2669          63 : }
    2670             : 
    2671             : namespace js {
    2672             : namespace gc {
    2673             : template void
    2674             : StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>::trace(StoreBuffer*, TenuringTracer&);
    2675             : template void
    2676             : StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>::trace(StoreBuffer*, TenuringTracer&);
    2677             : template void
    2678             : StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>::trace(StoreBuffer*, TenuringTracer&);
    2679             : } // namespace gc
    2680             : } // namespace js
    2681             : 
    2682             : void
    2683        3983 : js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const
    2684             : {
    2685        3983 :     NativeObject* obj = object();
    2686             : 
    2687             :     // Beware JSObject::swap exchanging a native object for a non-native one.
    2688        3983 :     if (!obj->isNative())
    2689          66 :         return;
    2690             : 
    2691        3917 :     if (IsInsideNursery(obj))
    2692           0 :         return;
    2693             : 
    2694        3917 :     if (kind() == ElementKind) {
    2695          15 :         int32_t initLen = obj->getDenseInitializedLength();
    2696          15 :         int32_t numShifted = obj->getElementsHeader()->numShiftedElements();
    2697          15 :         int32_t clampedStart = Min(Max(0, start_ - numShifted), initLen);
    2698          15 :         int32_t clampedEnd = Min(Max(0, start_ + count_ - numShifted), initLen);
    2699          15 :         MOZ_ASSERT(clampedStart >= 0);
    2700          15 :         MOZ_ASSERT(clampedStart <= clampedEnd);
    2701          30 :         mover.traceSlots(static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
    2702          30 :                             ->unsafeUnbarrieredForTracing(), clampedEnd - clampedStart);
    2703             :     } else {
    2704        3902 :         int32_t start = Min(uint32_t(start_), obj->slotSpan());
    2705        3902 :         int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
    2706        3902 :         MOZ_ASSERT(end >= start);
    2707        3902 :         mover.traceObjectSlots(obj, start, end - start);
    2708             :     }
    2709             : }
    2710             : 
    2711             : static inline void
    2712         281 : TraceWholeCell(TenuringTracer& mover, JSObject* object)
    2713             : {
    2714         281 :     mover.traceObject(object);
    2715             : 
    2716             :     // Additionally trace the expando object attached to any unboxed plain
    2717             :     // objects. Baseline and Ion can write properties to the expando while
    2718             :     // only adding a post barrier to the owning unboxed object. Note that
    2719             :     // it isn't possible for a nursery unboxed object to have a tenured
    2720             :     // expando, so that adding a post barrier on the original object will
    2721             :     // capture any tenured->nursery edges in the expando as well.
    2722             : 
    2723         281 :     if (object->is<UnboxedPlainObject>()) {
    2724         131 :         if (UnboxedExpandoObject* expando = object->as<UnboxedPlainObject>().maybeExpando())
    2725           0 :             expando->traceChildren(&mover);
    2726             :     }
    2727         281 : }
    2728             : 
    2729             : static inline void
    2730           0 : TraceWholeCell(TenuringTracer& mover, JSScript* script)
    2731             : {
    2732           0 :     script->traceChildren(&mover);
    2733           0 : }
    2734             : 
    2735             : static inline void
    2736           0 : TraceWholeCell(TenuringTracer& mover, jit::JitCode* jitcode)
    2737             : {
    2738           0 :     jitcode->traceChildren(&mover);
    2739           0 : }
    2740             : 
    2741             : template <typename T>
    2742             : static void
    2743          63 : TraceBufferedCells(TenuringTracer& mover, Arena* arena, ArenaCellSet* cells)
    2744             : {
    2745       32319 :     for (size_t i = 0; i < MaxArenaCellIndex; i++) {
    2746       32256 :         if (cells->hasCell(i)) {
    2747         281 :             auto cell = reinterpret_cast<T*>(uintptr_t(arena) + ArenaCellIndexBytes * i);
    2748         281 :             TraceWholeCell(mover, cell);
    2749             :         }
    2750             :     }
    2751          63 : }
    2752             : 
    2753             : void
    2754          21 : js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
    2755             : {
    2756          84 :     for (ArenaCellSet* cells = bufferWholeCell; cells; cells = cells->next) {
    2757          63 :         Arena* arena = cells->arena;
    2758             : 
    2759          63 :         MOZ_ASSERT(arena->bufferedCells() == cells);
    2760          63 :         arena->bufferedCells() = &ArenaCellSet::Empty;
    2761             : 
    2762          63 :         JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
    2763          63 :         switch (kind) {
    2764             :           case JS::TraceKind::Object:
    2765          63 :             TraceBufferedCells<JSObject>(mover, arena, cells);
    2766          63 :             break;
    2767             :           case JS::TraceKind::Script:
    2768           0 :             TraceBufferedCells<JSScript>(mover, arena, cells);
    2769           0 :             break;
    2770             :           case JS::TraceKind::JitCode:
    2771           0 :             TraceBufferedCells<jit::JitCode>(mover, arena, cells);
    2772           0 :             break;
    2773             :           default:
    2774           0 :             MOZ_CRASH("Unexpected trace kind");
    2775             :         }
    2776             :     }
    2777             : 
    2778          21 :     bufferWholeCell = nullptr;
    2779          21 : }
    2780             : 
    2781             : void
    2782        2475 : js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
    2783             : {
    2784        2475 :     if (!*edge)
    2785           0 :         return;
    2786             : 
    2787        2475 :     MOZ_ASSERT((*edge)->getTraceKind() == JS::TraceKind::Object);
    2788        2475 :     mover.traverse(reinterpret_cast<JSObject**>(edge));
    2789             : }
    2790             : 
    2791             : void
    2792         395 : js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const
    2793             : {
    2794         395 :     if (deref())
    2795         395 :         mover.traverse(edge);
    2796         395 : }
    2797             : 
    2798             : /* Insert the given relocation entry into the list of things to visit. */
    2799             : void
    2800       22681 : js::TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
    2801       22681 :     *tail = entry;
    2802       22681 :     tail = &entry->nextRef();
    2803       22681 :     *tail = nullptr;
    2804       22681 : }
    2805             : 
    2806             : JSObject*
    2807       22681 : js::TenuringTracer::moveToTenured(JSObject* src)
    2808             : {
    2809       22681 :     MOZ_ASSERT(IsInsideNursery(src));
    2810       22681 :     MOZ_ASSERT(!src->zone()->usedByHelperThread());
    2811             : 
    2812       22681 :     AllocKind dstKind = src->allocKindForTenure(nursery());
    2813       22681 :     Zone* zone = src->zone();
    2814             : 
    2815       22681 :     TenuredCell* t = zone->arenas.allocateFromFreeList(dstKind, Arena::thingSize(dstKind));
    2816       22681 :     if (!t) {
    2817         842 :         AutoEnterOOMUnsafeRegion oomUnsafe;
    2818         421 :         t = runtime()->gc.refillFreeListInGC(zone, dstKind);
    2819         421 :         if (!t)
    2820           0 :             oomUnsafe.crash(ChunkSize, "Failed to allocate object while tenuring.");
    2821             :     }
    2822       22681 :     JSObject* dst = reinterpret_cast<JSObject*>(t);
    2823       22681 :     tenuredSize += moveObjectToTenured(dst, src, dstKind);
    2824             : 
    2825       22681 :     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
    2826       22681 :     overlay->forwardTo(dst);
    2827       22681 :     insertIntoFixupList(overlay);
    2828             : 
    2829       22681 :     TracePromoteToTenured(src, dst);
    2830       22681 :     MemProfiler::MoveNurseryToTenured(src, dst);
    2831       22681 :     return dst;
    2832             : }
    2833             : 
    2834             : void
    2835          21 : js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
    2836             : {
    2837       22702 :     for (RelocationOverlay* p = mover.head; p; p = p->next()) {
    2838       22681 :         JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
    2839       22681 :         mover.traceObject(obj);
    2840             : 
    2841       22681 :         TenureCount& entry = tenureCounts.findEntry(obj->groupRaw());
    2842       22681 :         if (entry.group == obj->groupRaw()) {
    2843         728 :             entry.count++;
    2844       21953 :         } else if (!entry.group) {
    2845         300 :             entry.group = obj->groupRaw();
    2846         300 :             entry.count = 1;
    2847             :         }
    2848             :     }
    2849          21 : }
    2850             : 
    2851             : struct TenuringFunctor
    2852             : {
    2853             :     template <typename T>
    2854         211 :     void operator()(T* thing, TenuringTracer& mover) {
    2855         211 :         mover.traverse(thing);
    2856         211 :     }
    2857             : };
    2858             : 
    2859             : // Visit all object children of the object and trace them.
    2860             : void
    2861       22962 : js::TenuringTracer::traceObject(JSObject* obj)
    2862             : {
    2863       45924 :     NativeObject *nobj = CallTraceHook(TenuringFunctor(), this, obj,
    2864       22962 :                                        CheckGeneration::NoChecks, *this);
    2865       22962 :     if (!nobj)
    2866        2261 :         return;
    2867             : 
    2868             :     // Note: the contents of copy on write elements pointers are filled in
    2869             :     // during parsing and cannot contain nursery pointers.
    2870       42997 :     if (!nobj->hasEmptyElements() &&
    2871       21804 :         !nobj->denseElementsAreCopyOnWrite() &&
    2872        1103 :         ObjectDenseElementsMayBeMarkable(nobj))
    2873             :     {
    2874        1103 :         Value* elems = static_cast<HeapSlot*>(nobj->getDenseElements())->unsafeUnbarrieredForTracing();
    2875        1103 :         traceSlots(elems, elems + nobj->getDenseInitializedLength());
    2876             :     }
    2877             : 
    2878       20701 :     traceObjectSlots(nobj, 0, nobj->slotSpan());
    2879             : }
    2880             : 
    2881             : void
    2882       24603 : js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length)
    2883             : {
    2884             :     HeapSlot* fixedStart;
    2885             :     HeapSlot* fixedEnd;
    2886             :     HeapSlot* dynStart;
    2887             :     HeapSlot* dynEnd;
    2888       24603 :     nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
    2889       24603 :     if (fixedStart)
    2890        7383 :         traceSlots(fixedStart->unsafeUnbarrieredForTracing(), fixedEnd->unsafeUnbarrieredForTracing());
    2891       24603 :     if (dynStart)
    2892        3779 :         traceSlots(dynStart->unsafeUnbarrieredForTracing(), dynEnd->unsafeUnbarrieredForTracing());
    2893       24603 : }
    2894             : 
    2895             : void
    2896       58324 : js::TenuringTracer::traceSlots(Value* vp, Value* end)
    2897             : {
    2898      104368 :     for (; vp != end; ++vp)
    2899       46044 :         traverse(vp);
    2900       12280 : }
    2901             : 
    2902             : #ifdef DEBUG
    2903             : static inline ptrdiff_t
    2904       22681 : OffsetToChunkEnd(void* p)
    2905             : {
    2906       22681 :     return ChunkLocationOffset - (uintptr_t(p) & gc::ChunkMask);
    2907             : }
    2908             : #endif
    2909             : 
    2910             : size_t
    2911       22681 : js::TenuringTracer::moveObjectToTenured(JSObject* dst, JSObject* src, AllocKind dstKind)
    2912             : {
    2913       22681 :     size_t srcSize = Arena::thingSize(dstKind);
    2914       22681 :     size_t tenuredSize = srcSize;
    2915             : 
    2916             :     /*
    2917             :      * Arrays do not necessarily have the same AllocKind between src and dst.
    2918             :      * We deal with this by copying elements manually, possibly re-inlining
    2919             :      * them if there is adequate room inline in dst.
    2920             :      *
    2921             :      * For Arrays we're reducing tenuredSize to the smaller srcSize
    2922             :      * because moveElementsToTenured() accounts for all Array elements,
    2923             :      * even if they are inlined.
    2924             :      */
    2925       22681 :     if (src->is<ArrayObject>()) {
    2926        1594 :         tenuredSize = srcSize = sizeof(NativeObject);
    2927       21087 :     } else if (src->is<TypedArrayObject>()) {
    2928           0 :         TypedArrayObject* tarray = &src->as<TypedArrayObject>();
    2929             :         // Typed arrays with inline data do not necessarily have the same
    2930             :         // AllocKind between src and dst. The nursery does not allocate an
    2931             :         // inline data buffer that has the same size as the slow path will do.
    2932             :         // In the slow path, the Typed Array Object stores the inline data
    2933             :         // in the allocated space that fits the AllocKind. In the fast path,
    2934             :         // the nursery will allocate another buffer that is directly behind the
    2935             :         // minimal JSObject. That buffer size plus the JSObject size is not
    2936             :         // necessarily as large as the slow path's AllocKind size.
    2937           0 :         if (tarray->hasInlineElements()) {
    2938           0 :             AllocKind srcKind = GetGCObjectKind(TypedArrayObject::FIXED_DATA_START);
    2939           0 :             size_t headerSize = Arena::thingSize(srcKind);
    2940           0 :             srcSize = headerSize + tarray->byteLength();
    2941             :         }
    2942             :     }
    2943             : 
    2944             :     // Copy the Cell contents.
    2945       22681 :     MOZ_ASSERT(OffsetToChunkEnd(src) >= ptrdiff_t(srcSize));
    2946       22681 :     js_memcpy(dst, src, srcSize);
    2947             : 
    2948             :     // Move any hash code attached to the object.
    2949       22681 :     src->zone()->transferUniqueId(dst, src);
    2950             : 
    2951             :     // Move the slots and elements, if we need to.
    2952       22681 :     if (src->isNative()) {
    2953       20553 :         NativeObject* ndst = &dst->as<NativeObject>();
    2954       20553 :         NativeObject* nsrc = &src->as<NativeObject>();
    2955       20553 :         tenuredSize += moveSlotsToTenured(ndst, nsrc, dstKind);
    2956       20553 :         tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
    2957             : 
    2958             :         // The shape's list head may point into the old object. This can only
    2959             :         // happen for dictionaries, which are native objects.
    2960       20553 :         if (&nsrc->shape_ == ndst->shape_->listp) {
    2961           6 :             MOZ_ASSERT(nsrc->shape_->inDictionary());
    2962           6 :             ndst->shape_->listp = &ndst->shape_;
    2963             :         }
    2964             :     }
    2965             : 
    2966       22681 :     if (src->is<InlineTypedObject>()) {
    2967           0 :         InlineTypedObject::objectMovedDuringMinorGC(this, dst, src);
    2968       22681 :     } else if (src->is<TypedArrayObject>()) {
    2969           0 :         tenuredSize += TypedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
    2970       22681 :     } else if (src->is<UnboxedArrayObject>()) {
    2971           0 :         tenuredSize += UnboxedArrayObject::objectMovedDuringMinorGC(this, dst, src, dstKind);
    2972       22681 :     } else if (src->is<ArgumentsObject>()) {
    2973           3 :         tenuredSize += ArgumentsObject::objectMovedDuringMinorGC(this, dst, src);
    2974       22678 :     } else if (src->is<ProxyObject>()) {
    2975             :         // Objects in the nursery are never swapped so the proxy must have an
    2976             :         // inline ProxyValueArray.
    2977        2102 :         MOZ_ASSERT(src->as<ProxyObject>().usingInlineValueArray());
    2978        2102 :         dst->as<ProxyObject>().setInlineValueArray();
    2979        2102 :         if (JSObjectMovedOp op = dst->getClass()->extObjectMovedOp())
    2980        2102 :             op(dst, src);
    2981       20576 :     } else if (JSObjectMovedOp op = dst->getClass()->extObjectMovedOp()) {
    2982          39 :         op(dst, src);
    2983       20537 :     } else if (src->getClass()->hasFinalize()) {
    2984             :         // Such objects need to be handled specially above to ensure any
    2985             :         // additional nursery buffers they hold are moved.
    2986           0 :         MOZ_RELEASE_ASSERT(CanNurseryAllocateFinalizedClass(src->getClass()));
    2987           0 :         MOZ_CRASH("Unhandled JSCLASS_SKIP_NURSERY_FINALIZE Class");
    2988             :     }
    2989             : 
    2990       22681 :     return tenuredSize;
    2991             : }
    2992             : 
    2993             : size_t
    2994       20553 : js::TenuringTracer::moveSlotsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
    2995             : {
    2996             :     /* Fixed slots have already been copied over. */
    2997       20553 :     if (!src->hasDynamicSlots())
    2998       19324 :         return 0;
    2999             : 
    3000        1229 :     if (!nursery().isInside(src->slots_)) {
    3001           1 :         nursery().removeMallocedBuffer(src->slots_);
    3002           1 :         return 0;
    3003             :     }
    3004             : 
    3005        1228 :     Zone* zone = src->zone();
    3006        1228 :     size_t count = src->numDynamicSlots();
    3007             : 
    3008             :     {
    3009        2456 :         AutoEnterOOMUnsafeRegion oomUnsafe;
    3010        1228 :         dst->slots_ = zone->pod_malloc<HeapSlot>(count);
    3011        1228 :         if (!dst->slots_)
    3012           0 :             oomUnsafe.crash(sizeof(HeapSlot) * count, "Failed to allocate slots while tenuring.");
    3013             :     }
    3014             : 
    3015        1228 :     PodCopy(dst->slots_, src->slots_, count);
    3016        1228 :     nursery().setSlotsForwardingPointer(src->slots_, dst->slots_, count);
    3017        1228 :     return count * sizeof(HeapSlot);
    3018             : }
    3019             : 
    3020             : size_t
    3021       20553 : js::TenuringTracer::moveElementsToTenured(NativeObject* dst, NativeObject* src, AllocKind dstKind)
    3022             : {
    3023       20553 :     if (src->hasEmptyElements() || src->denseElementsAreCopyOnWrite())
    3024       19450 :         return 0;
    3025             : 
    3026        1103 :     void* srcAllocatedHeader = src->getUnshiftedElementsHeader();
    3027             : 
    3028             :     /* TODO Bug 874151: Prefer to put element data inline if we have space. */
    3029        1103 :     if (!nursery().isInside(srcAllocatedHeader)) {
    3030           2 :         MOZ_ASSERT(src->elements_ == dst->elements_);
    3031           2 :         nursery().removeMallocedBuffer(srcAllocatedHeader);
    3032           2 :         return 0;
    3033             :     }
    3034             : 
    3035        1101 :     ObjectElements* srcHeader = src->getElementsHeader();
    3036             : 
    3037             :     // Shifted elements are copied too.
    3038        1101 :     uint32_t numShifted = srcHeader->numShiftedElements();
    3039        1101 :     size_t nslots = srcHeader->numAllocatedElements();
    3040             : 
    3041             :     /* Unlike other objects, Arrays can have fixed elements. */
    3042        1101 :     if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
    3043        1084 :         dst->as<ArrayObject>().setFixedElements();
    3044        1084 :         js_memcpy(dst->getElementsHeader(), srcAllocatedHeader, nslots * sizeof(HeapSlot));
    3045        1084 :         dst->elements_ += numShifted;
    3046        1084 :         nursery().setElementsForwardingPointer(srcHeader, dst->getElementsHeader(),
    3047        1084 :                                                srcHeader->capacity);
    3048        1084 :         return nslots * sizeof(HeapSlot);
    3049             :     }
    3050             : 
    3051          17 :     MOZ_ASSERT(nslots >= 2);
    3052             : 
    3053          17 :     Zone* zone = src->zone();
    3054             :     ObjectElements* dstHeader;
    3055             :     {
    3056          34 :         AutoEnterOOMUnsafeRegion oomUnsafe;
    3057          17 :         dstHeader = reinterpret_cast<ObjectElements*>(zone->pod_malloc<HeapSlot>(nslots));
    3058          17 :         if (!dstHeader) {
    3059           0 :             oomUnsafe.crash(sizeof(HeapSlot) * nslots,
    3060           0 :                             "Failed to allocate elements while tenuring.");
    3061             :         }
    3062             :     }
    3063             : 
    3064          17 :     js_memcpy(dstHeader, srcAllocatedHeader, nslots * sizeof(HeapSlot));
    3065          17 :     dst->elements_ = dstHeader->elements() + numShifted;
    3066          17 :     nursery().setElementsForwardingPointer(srcHeader, dst->getElementsHeader(),
    3067          17 :                                            srcHeader->capacity);
    3068          17 :     return nslots * sizeof(HeapSlot);
    3069             : }
    3070             : 
    3071             : 
    3072             : /*** IsMarked / IsAboutToBeFinalized **************************************************************/
    3073             : 
    3074             : template <typename T>
    3075             : static inline void
    3076      642435 : CheckIsMarkedThing(T* thingp)
    3077             : {
    3078             : #define IS_SAME_TYPE_OR(name, type, _) mozilla::IsSame<type*, T>::value ||
    3079             :     static_assert(
    3080             :             JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
    3081             :             false, "Only the base cell layout types are allowed into marking/tracing internals");
    3082             : #undef IS_SAME_TYPE_OR
    3083             : 
    3084             : #ifdef DEBUG
    3085      642435 :     MOZ_ASSERT(thingp);
    3086      642435 :     MOZ_ASSERT(*thingp);
    3087      642435 :     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
    3088      642435 :     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(*thingp),
    3089             :                   CurrentThreadCanAccessRuntime(rt) ||
    3090             :                   CurrentThreadCanAccessZone((*thingp)->zoneFromAnyThread()) ||
    3091             :                   (JS::CurrentThreadIsHeapCollecting() && rt->gc.state() == State::Sweep));
    3092             : #endif
    3093      642436 : }
    3094             : 
    3095             : template <typename T>
    3096             : static bool
    3097          63 : IsMarkedInternalCommon(T* thingp)
    3098             : {
    3099          63 :     CheckIsMarkedThing(thingp);
    3100          63 :     MOZ_ASSERT(!IsInsideNursery(*thingp));
    3101             : 
    3102          63 :     TenuredCell& thing = (*thingp)->asTenured();
    3103          63 :     Zone* zone = thing.zoneFromAnyThread();
    3104          63 :     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
    3105           0 :         return true;
    3106             : 
    3107          63 :     if (zone->isGCCompacting() && IsForwarded(*thingp)) {
    3108           0 :         *thingp = Forwarded(*thingp);
    3109           0 :         return true;
    3110             :     }
    3111             : 
    3112          63 :     return thing.isMarkedAny() || thing.arena()->allocatedDuringIncremental;
    3113             : }
    3114             : 
    3115             : template <typename T>
    3116             : static bool
    3117           0 : IsMarkedInternal(JSRuntime* rt, T** thingp)
    3118             : {
    3119           0 :     if (IsOwnedByOtherRuntime(rt, *thingp))
    3120           0 :         return true;
    3121             : 
    3122           0 :     return IsMarkedInternalCommon(thingp);
    3123             : }
    3124             : 
    3125             : template <>
    3126             : /* static */ bool
    3127          63 : IsMarkedInternal(JSRuntime* rt, JSObject** thingp)
    3128             : {
    3129          63 :     if (IsOwnedByOtherRuntime(rt, *thingp))
    3130           0 :         return true;
    3131             : 
    3132          63 :     if (IsInsideNursery(*thingp)) {
    3133           0 :         MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    3134           0 :         return Nursery::getForwardedPointer(thingp);
    3135             :     }
    3136          63 :     return IsMarkedInternalCommon(thingp);
    3137             : }
    3138             : 
    3139             : template <typename S>
    3140             : struct IsMarkedFunctor : public IdentityDefaultAdaptor<S> {
    3141           0 :     template <typename T> S operator()(T* t, JSRuntime* rt, bool* rv) {
    3142           0 :         *rv = IsMarkedInternal(rt, &t);
    3143           0 :         return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
    3144             :     }
    3145             : };
    3146             : 
    3147             : template <typename T>
    3148             : static bool
    3149           0 : IsMarkedInternal(JSRuntime* rt, T* thingp)
    3150             : {
    3151           0 :     bool rv = true;
    3152           0 :     *thingp = DispatchTyped(IsMarkedFunctor<T>(), *thingp, rt, &rv);
    3153           0 :     return rv;
    3154             : }
    3155             : 
    3156             : bool
    3157           0 : js::gc::IsAboutToBeFinalizedDuringSweep(TenuredCell& tenured)
    3158             : {
    3159           0 :     MOZ_ASSERT(!IsInsideNursery(&tenured));
    3160           0 :     MOZ_ASSERT(tenured.zoneFromAnyThread()->isGCSweeping());
    3161           0 :     if (tenured.arena()->allocatedDuringIncremental)
    3162           0 :         return false;
    3163           0 :     return !tenured.isMarkedAny();
    3164             : }
    3165             : 
    3166             : template <typename T>
    3167             : static bool
    3168      642372 : IsAboutToBeFinalizedInternal(T** thingp)
    3169             : {
    3170      642372 :     CheckIsMarkedThing(thingp);
    3171      642373 :     T* thing = *thingp;
    3172      642373 :     JSRuntime* rt = thing->runtimeFromAnyThread();
    3173             : 
    3174             :     /* Permanent atoms are never finalized by non-owning runtimes. */
    3175      642373 :     if (ThingIsPermanentAtomOrWellKnownSymbol(thing) && TlsContext.get()->runtime() != rt)
    3176           0 :         return false;
    3177             : 
    3178      642373 :     if (IsInsideNursery(thing)) {
    3179      115606 :         return JS::CurrentThreadIsHeapMinorCollecting() &&
    3180      115606 :                !Nursery::getForwardedPointer(reinterpret_cast<JSObject**>(thingp));
    3181             :     }
    3182             : 
    3183      531958 :     Zone* zone = thing->asTenured().zoneFromAnyThread();
    3184      531957 :     if (zone->isGCSweeping()) {
    3185           0 :         return IsAboutToBeFinalizedDuringSweep(thing->asTenured());
    3186      531957 :     } else if (zone->isGCCompacting() && IsForwarded(thing)) {
    3187           0 :         *thingp = Forwarded(thing);
    3188           0 :         return false;
    3189             :     }
    3190             : 
    3191      531957 :     return false;
    3192             : }
    3193             : 
    3194             : template <typename S>
    3195             : struct IsAboutToBeFinalizedFunctor : public IdentityDefaultAdaptor<S> {
    3196        7775 :     template <typename T> S operator()(T* t, bool* rv) {
    3197        7775 :         *rv = IsAboutToBeFinalizedInternal(&t);
    3198        7775 :         return js::gc::RewrapTaggedPointer<S, T>::wrap(t);
    3199             :     }
    3200             : };
    3201             : 
    3202             : template <typename T>
    3203             : static bool
    3204        8074 : IsAboutToBeFinalizedInternal(T* thingp)
    3205             : {
    3206        8074 :     bool rv = false;
    3207        8074 :     *thingp = DispatchTyped(IsAboutToBeFinalizedFunctor<T>(), *thingp, &rv);
    3208        8074 :     return rv;
    3209             : }
    3210             : 
    3211             : namespace js {
    3212             : namespace gc {
    3213             : 
    3214             : template <typename T>
    3215             : bool
    3216           0 : IsMarkedUnbarriered(JSRuntime* rt, T* thingp)
    3217             : {
    3218           0 :     return IsMarkedInternal(rt, ConvertToBase(thingp));
    3219             : }
    3220             : 
    3221             : template <typename T>
    3222             : bool
    3223          63 : IsMarked(JSRuntime* rt, WriteBarrieredBase<T>* thingp)
    3224             : {
    3225          63 :     return IsMarkedInternal(rt, ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
    3226             : }
    3227             : 
    3228             : template <typename T>
    3229             : bool
    3230      438102 : IsAboutToBeFinalizedUnbarriered(T* thingp)
    3231             : {
    3232      438102 :     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
    3233             : }
    3234             : 
    3235             : template <typename T>
    3236             : bool
    3237           0 : IsAboutToBeFinalized(WriteBarrieredBase<T>* thingp)
    3238             : {
    3239           0 :     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
    3240             : }
    3241             : 
    3242             : template <typename T>
    3243             : bool
    3244        3088 : IsAboutToBeFinalized(ReadBarrieredBase<T>* thingp)
    3245             : {
    3246        3088 :     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
    3247             : }
    3248             : 
    3249             : template <typename T>
    3250             : JS_PUBLIC_API(bool)
    3251           0 : EdgeNeedsSweep(JS::Heap<T>* thingp)
    3252             : {
    3253           0 :     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp->unsafeGet()));
    3254             : }
    3255             : 
    3256             : template <typename T>
    3257             : JS_PUBLIC_API(bool)
    3258      201484 : EdgeNeedsSweepUnbarrieredSlow(T* thingp)
    3259             : {
    3260      201484 :     return IsAboutToBeFinalizedInternal(ConvertToBase(thingp));
    3261             : }
    3262             : 
    3263             : // Instantiate a copy of the Tracing templates for each derived type.
    3264             : #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(type) \
    3265             :     template bool IsMarkedUnbarriered<type>(JSRuntime*, type*);                \
    3266             :     template bool IsMarked<type>(JSRuntime*, WriteBarrieredBase<type>*); \
    3267             :     template bool IsAboutToBeFinalizedUnbarriered<type>(type*); \
    3268             :     template bool IsAboutToBeFinalized<type>(WriteBarrieredBase<type>*); \
    3269             :     template bool IsAboutToBeFinalized<type>(ReadBarrieredBase<type>*);
    3270             : #define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type) \
    3271             :     template JS_PUBLIC_API(bool) EdgeNeedsSweep<type>(JS::Heap<type>*); \
    3272             :     template JS_PUBLIC_API(bool) EdgeNeedsSweepUnbarrieredSlow<type>(type*);
    3273             : FOR_EACH_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS)
    3274             : FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
    3275             : FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
    3276             : #undef INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS
    3277             : 
    3278             : } /* namespace gc */
    3279             : } /* namespace js */
    3280             : 
    3281             : 
    3282             : /*** Cycle Collector Barrier Implementation *******************************************************/
    3283             : 
    3284             : /*
    3285             :  * The GC and CC are run independently. Consequently, the following sequence of
    3286             :  * events can occur:
    3287             :  * 1. GC runs and marks an object gray.
    3288             :  * 2. The mutator runs (specifically, some C++ code with access to gray
    3289             :  *    objects) and creates a pointer from a JS root or other black object to
    3290             :  *    the gray object. If we re-ran a GC at this point, the object would now be
    3291             :  *    black.
    3292             :  * 3. Now we run the CC. It may think it can collect the gray object, even
    3293             :  *    though it's reachable from the JS heap.
    3294             :  *
    3295             :  * To prevent this badness, we unmark the gray bit of an object when it is
    3296             :  * accessed by callers outside XPConnect. This would cause the object to go
    3297             :  * black in step 2 above. This must be done on everything reachable from the
    3298             :  * object being returned. The following code takes care of the recursive
    3299             :  * re-coloring.
    3300             :  *
    3301             :  * There is an additional complication for certain kinds of edges that are not
    3302             :  * contained explicitly in the source object itself, such as from a weakmap key
    3303             :  * to its value, and from an object being watched by a watchpoint to the
    3304             :  * watchpoint's closure. These "implicit edges" are represented in some other
    3305             :  * container object, such as the weakmap or the watchpoint itself. In these
    3306             :  * cases, calling unmark gray on an object won't find all of its children.
    3307             :  *
    3308             :  * Handling these implicit edges has two parts:
    3309             :  * - A special pass enumerating all of the containers that know about the
    3310             :  *   implicit edges to fix any black-gray edges that have been created. This
    3311             :  *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
    3312             :  * - To prevent any incorrectly gray objects from escaping to live JS outside
    3313             :  *   of the containers, we must add unmark-graying read barriers to these
    3314             :  *   containers.
    3315             :  */
    3316             : 
    3317             : #ifdef DEBUG
    3318             : struct AssertNonGrayTracer : public JS::CallbackTracer {
    3319           0 :     explicit AssertNonGrayTracer(JSRuntime* rt) : JS::CallbackTracer(rt) {}
    3320           0 :     void onChild(const JS::GCCellPtr& thing) override {
    3321           0 :         MOZ_ASSERT(!thing.asCell()->isMarkedGray());
    3322           0 :     }
    3323             : };
    3324             : #endif
    3325             : 
    3326             : class UnmarkGrayTracer : public JS::CallbackTracer
    3327             : {
    3328             :   public:
    3329             :     // We set weakMapAction to DoNotTraceWeakMaps because the cycle collector
    3330             :     // will fix up any color mismatches involving weakmaps when it runs.
    3331           0 :     explicit UnmarkGrayTracer(JSRuntime *rt)
    3332           0 :       : JS::CallbackTracer(rt, DoNotTraceWeakMaps)
    3333             :       , unmarkedAny(false)
    3334             :       , oom(false)
    3335           0 :       , stack(rt->gc.unmarkGrayStack)
    3336           0 :     {}
    3337             : 
    3338             :     void unmark(JS::GCCellPtr cell);
    3339             : 
    3340             :     // Whether we unmarked anything.
    3341             :     bool unmarkedAny;
    3342             : 
    3343             :     // Whether we ran out of memory.
    3344             :     bool oom;
    3345             : 
    3346             :   private:
    3347             :     // Stack of cells to traverse.
    3348             :     Vector<JS::GCCellPtr, 0, SystemAllocPolicy>& stack;
    3349             : 
    3350             :     void onChild(const JS::GCCellPtr& thing) override;
    3351             : };
    3352             : 
    3353             : void
    3354           0 : UnmarkGrayTracer::onChild(const JS::GCCellPtr& thing)
    3355             : {
    3356           0 :     Cell* cell = thing.asCell();
    3357             : 
    3358             :     // Cells in the nursery cannot be gray, and therefore must necessarily point
    3359             :     // to only black edges.
    3360           0 :     if (!cell->isTenured()) {
    3361             : #ifdef DEBUG
    3362           0 :         AssertNonGrayTracer nongray(runtime());
    3363           0 :         TraceChildren(&nongray, cell, thing.kind());
    3364             : #endif
    3365           0 :         return;
    3366             :     }
    3367             : 
    3368           0 :     TenuredCell& tenured = cell->asTenured();
    3369           0 :     if (!tenured.isMarkedGray())
    3370           0 :         return;
    3371             : 
    3372           0 :     tenured.markBlack();
    3373           0 :     unmarkedAny = true;
    3374             : 
    3375           0 :     if (!stack.append(thing))
    3376           0 :         oom = true;
    3377             : }
    3378             : 
    3379             : void
    3380           0 : UnmarkGrayTracer::unmark(JS::GCCellPtr cell)
    3381             : {
    3382           0 :     MOZ_ASSERT(stack.empty());
    3383             : 
    3384           0 :     onChild(cell);
    3385             : 
    3386           0 :     while (!stack.empty() && !oom)
    3387           0 :         TraceChildren(this, stack.popCopy());
    3388             : 
    3389           0 :     if (oom) {
    3390             :          // If we run out of memory, we take a drastic measure: require that we
    3391             :          // GC again before the next CC.
    3392           0 :         stack.clear();
    3393           0 :         runtime()->gc.setGrayBitsInvalid();
    3394           0 :         return;
    3395             :     }
    3396             : }
    3397             : 
    3398             : template <typename T>
    3399             : static bool
    3400           0 : TypedUnmarkGrayCellRecursively(T* t)
    3401             : {
    3402           0 :     MOZ_ASSERT(t);
    3403             : 
    3404           0 :     JSRuntime* rt = t->runtimeFromActiveCooperatingThread();
    3405           0 :     MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
    3406           0 :     MOZ_ASSERT(!JS::CurrentThreadIsHeapCycleCollecting());
    3407             : 
    3408           0 :     UnmarkGrayTracer unmarker(rt);
    3409           0 :     gcstats::AutoPhase outerPhase(rt->gc.stats(), gcstats::PhaseKind::BARRIER);
    3410           0 :     gcstats::AutoPhase innerPhase(rt->gc.stats(), gcstats::PhaseKind::UNMARK_GRAY);
    3411           0 :     unmarker.unmark(JS::GCCellPtr(t, MapTypeToTraceKind<T>::kind));
    3412           0 :     return unmarker.unmarkedAny;
    3413             : }
    3414             : 
    3415             : struct UnmarkGrayCellRecursivelyFunctor {
    3416           0 :     template <typename T> bool operator()(T* t) { return TypedUnmarkGrayCellRecursively(t); }
    3417             : };
    3418             : 
    3419             : bool
    3420           0 : js::UnmarkGrayCellRecursively(Cell* cell, JS::TraceKind kind)
    3421             : {
    3422           0 :     return DispatchTraceKindTyped(UnmarkGrayCellRecursivelyFunctor(), cell, kind);
    3423             : }
    3424             : 
    3425             : bool
    3426           0 : js::UnmarkGrayShapeRecursively(Shape* shape)
    3427             : {
    3428           0 :     return TypedUnmarkGrayCellRecursively(shape);
    3429             : }
    3430             : 
    3431             : JS_FRIEND_API(bool)
    3432           0 : JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing)
    3433             : {
    3434           0 :     return js::UnmarkGrayCellRecursively(thing.asCell(), thing.kind());
    3435             : }
    3436             : 
    3437             : namespace js {
    3438             : namespace debug {
    3439             : 
    3440             : MarkInfo
    3441           0 : GetMarkInfo(Cell* rawCell)
    3442             : {
    3443           0 :     if (!rawCell->isTenured())
    3444           0 :         return MarkInfo::NURSERY;
    3445             : 
    3446           0 :     TenuredCell* cell = &rawCell->asTenured();
    3447           0 :     if (cell->isMarkedGray())
    3448           0 :         return MarkInfo::GRAY;
    3449           0 :     if (cell->isMarkedBlack())
    3450           0 :         return MarkInfo::BLACK;
    3451           0 :     return MarkInfo::UNMARKED;
    3452             : }
    3453             : 
    3454             : uintptr_t*
    3455           0 : GetMarkWordAddress(Cell* cell)
    3456             : {
    3457           0 :     if (!cell->isTenured())
    3458           0 :         return nullptr;
    3459             : 
    3460             :     uintptr_t* wordp;
    3461             :     uintptr_t mask;
    3462           0 :     js::gc::detail::GetGCThingMarkWordAndMask(uintptr_t(cell), ColorBit::BlackBit, &wordp, &mask);
    3463           0 :     return wordp;
    3464             : }
    3465             : 
    3466             : uintptr_t
    3467           0 : GetMarkMask(Cell* cell, uint32_t colorBit)
    3468             : {
    3469           0 :     MOZ_ASSERT(colorBit == 0 || colorBit == 1);
    3470             : 
    3471           0 :     if (!cell->isTenured())
    3472           0 :         return 0;
    3473             : 
    3474           0 :     ColorBit bit = colorBit == 0 ? ColorBit::BlackBit : ColorBit::GrayOrBlackBit;
    3475             :     uintptr_t* wordp;
    3476             :     uintptr_t mask;
    3477           0 :     js::gc::detail::GetGCThingMarkWordAndMask(uintptr_t(cell), bit, &wordp, &mask);
    3478           0 :     return mask;
    3479             : }
    3480             : 
    3481             : }
    3482             : }

Generated by: LCOV version 1.13