tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Marking.cpp (98958B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "gc/Marking-inl.h"
      8 
      9 #include "mozilla/DebugOnly.h"
     10 #include "mozilla/IntegerRange.h"
     11 #include "mozilla/MathAlgorithms.h"
     12 #include "mozilla/Maybe.h"
     13 #include "mozilla/PodOperations.h"
     14 #include "mozilla/ScopeExit.h"
     15 
     16 #include <algorithm>
     17 #include <type_traits>
     18 
     19 #include "jsmath.h"
     20 
     21 #include "gc/BufferAllocator.h"
     22 #include "gc/GCInternals.h"
     23 #include "gc/ParallelMarking.h"
     24 #include "gc/TraceKind.h"
     25 #include "jit/JitCode.h"
     26 #include "js/GCTypeMacros.h"  // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
     27 #include "js/SliceBudget.h"
     28 #include "util/Poison.h"
     29 #include "vm/GeneratorObject.h"
     30 
     31 #include "gc/BufferAllocator-inl.h"
     32 #include "gc/GC-inl.h"
     33 #include "gc/PrivateIterators-inl.h"
     34 #include "gc/TraceMethods-inl.h"
     35 #include "gc/WeakMap-inl.h"
     36 #include "vm/GeckoProfiler-inl.h"
     37 
     38 using namespace js;
     39 using namespace js::gc;
     40 
     41 using JS::MapTypeToTraceKind;
     42 using JS::SliceBudget;
     43 
     44 using mozilla::DebugOnly;
     45 using mozilla::IntegerRange;
     46 
     47 // [SMDOC] GC Tracing
     48 //
     49 // Tracing Overview
     50 // ================
     51 //
     52 // Tracing, in this context, refers to an abstract visitation of some or all of
     53 // the GC-controlled heap. The effect of tracing an edge of the graph depends
     54 // on the subclass of the JSTracer on whose behalf we are tracing.
     55 //
     56 // Marking
     57 // -------
     58 //
     59 // The primary JSTracer is the GCMarker. The marking tracer causes the target
     60 // of each traversed edge to be marked black and the target edge's children to
     61 // be marked either gray (in the gc algorithm sense) or immediately black.
     62 //
     63 // Callback
     64 // --------
     65 //
     66 // The secondary JSTracer is the CallbackTracer. This simply invokes a callback
     67 // on each edge in a child.
     68 //
     69 // The following is a rough outline of the general structure of the tracing
     70 // internals.
     71 //
     72 /* clang-format off */
     73 //
     74 //  +-------------------+                             ......................
     75 //  |                   |                             :                    :
     76 //  |                   v                             v                +---+---+
     77 //  |   TraceRoot   TraceEdge   TraceRange        GCMarker::           |       |
     78 //  |       |           |           |         processMarkStackTop      | Mark  |
     79 //  |       +-----------------------+                 |                | Stack |
     80 //  |                   |                             |                |       |
     81 //  |                   v                             |                +---+---+
     82 //  |           TraceEdgeInternal                     |                    ^
     83 //  |                   |                             +<-------------+     :
     84 //  |                   |                             |              |     :
     85 //  |                   v                             v              |     :
     86 //  |            CallbackTracer::             markAndTraverseEdge    |     :
     87 //  |              onSomeEdge                         |              |     :
     88 //  |                   |                             |              |     :
     89 //  |                   |                             |              |     :
     90 //  |                   +-------------+---------------+              |     :
     91 //  |                                 |                              |     :
     92 //  |                                 v                              |     :
     93 //  |                          markAndTraverse                       |     :
     94 //  |                                 |                              |     :
     95 //  |                                 |                              |     :
     96 //  |                              traverse                          |     :
     97 //  |                                 |                              |     :
     98 //  |             +--------------------------------------+           |     :
     99 //  |             |                   |                  |           |     :
    100 //  |             v                   v                  v           |     :
    101 //  |    markAndTraceChildren    markAndPush    eagerlyMarkChildren  |     :
    102 //  |             |                   :                  |           |     :
    103 //  |             v                   :                  +-----------+     :
    104 //  |      T::traceChildren           :                                    :
    105 //  |             |                   :                                    :
    106 //  +-------------+                   ......................................
    107 //
    108 //   Legend:
    109 //     ------- Direct calls
    110 //     ....... Data flow
    111 //
    112 /* clang-format on */
    113 
// Number of mark-stack words occupied by one SlotsOrElementsRange entry.
static const size_t ValueRangeWords =
    sizeof(MarkStack::SlotsOrElementsRange) / sizeof(uintptr_t);
    116 
    117 /*** Tracing Invariants *****************************************************/
    118 
    119 template <typename T>
    120 static inline bool IsOwnedByOtherRuntime(JSRuntime* rt, T thing) {
    121  bool other = thing->runtimeFromAnyThread() != rt;
    122  MOZ_ASSERT_IF(other, thing->isPermanentAndMayBeShared());
    123  return other;
    124 }
    125 
    126 #ifdef DEBUG
    127 
// Debug helper: returns whether |cell| currently lies on its arena's free
// list (i.e. it is not an allocated cell).
static inline bool IsInFreeList(TenuredCell* cell) {
  Arena* arena = cell->arena();
  uintptr_t addr = reinterpret_cast<uintptr_t>(cell);
  // Addresses handed in here must be aligned to the arena's thing size.
  MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
  return arena->inFreeList(addr);
}
    134 
// Debug-only validation of a thing reached by a tracer: checks alignment,
// trace kind, thread access, runtime ownership and (when marking) that the
// cell is actually allocated.
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T* thing) {
  MOZ_ASSERT(trc);
  MOZ_ASSERT(thing);

  // A forwarded cell is only expected for tracer kinds that run while cells
  // are being moved or checked; follow the forwarding pointer before
  // validating.
  if (IsForwarded(thing)) {
    JS::TracerKind kind = trc->kind();
    MOZ_ASSERT(kind == JS::TracerKind::Tenuring ||
               kind == JS::TracerKind::MinorSweeping ||
               kind == JS::TracerKind::Moving ||
               kind == JS::TracerKind::HeapCheck);
    thing = Forwarded(thing);
  }

  /* This function uses data that's not available in the nursery. */
  if (IsInsideNursery(thing)) {
    return;
  }

  /*
   * Permanent shared things that are not associated with this runtime will be
   * ignored during marking.
   */
  Zone* zone = thing->zoneFromAnyThread();
  if (IsOwnedByOtherRuntime(trc->runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  JSRuntime* rt = trc->runtime();
  MOZ_ASSERT(zone->runtimeFromAnyThread() == rt);

  bool isGcMarkingTracer = trc->isMarkingTracer();
  bool isUnmarkGrayTracer = IsTracerKind(trc, JS::TracerKind::UnmarkGray);
  bool isClearEdgesTracer = IsTracerKind(trc, JS::TracerKind::ClearEdges);

  if (TlsContext.get()) {
    // If we're on the main thread we must have access to the runtime and zone.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  } else {
    // Off the main thread, only these tracer kinds are expected to run.
    MOZ_ASSERT(isGcMarkingTracer || isUnmarkGrayTracer || isClearEdgesTracer ||
               IsTracerKind(trc, JS::TracerKind::Moving) ||
               IsTracerKind(trc, JS::TracerKind::Sweeping));
    MOZ_ASSERT_IF(!isClearEdgesTracer, CurrentThreadIsPerformingGC());
  }

  // The static trace kind of T must agree with the cell's dynamic kind.
  MOZ_ASSERT(thing->isAligned());
  MOZ_ASSERT(MapTypeToTraceKind<std::remove_pointer_t<T>>::kind ==
             thing->getTraceKind());

  /*
   * Check that we only mark allocated cells.
   *
   * This check is restricted to marking for two reasons: Firstly, if background
   * sweeping is running and concurrently modifying the free list then it is not
   * safe. Secondly, it was thought to be slow so this is a compromise so as to
   * not affect test times too much.
   */
  MOZ_ASSERT_IF(zone->isGCMarking(), !IsInFreeList(&thing->asTenured()));
}
    197 
    198 template <typename T>
    199 void js::CheckTracedThing(JSTracer* trc, const T& thing) {
    200  ApplyGCThingTyped(thing, [trc](auto t) { CheckTracedThing(trc, t); });
    201 }
    202 
// Debug-only checks for a thing about to be marked: its zone must be one we
// are marking at the current color (the atoms zone is always permitted), the
// zone must not already be past the marking phase, and marking must not have
// strayed across a compartment/zone boundary without going through
// TraceCrossCompartmentEdge.
template <typename T>
static void CheckMarkedThing(GCMarker* gcMarker, T* thing) {
  Zone* zone = thing->zoneFromAnyThread();

  MOZ_ASSERT(zone->shouldMarkInZone(gcMarker->markColor()) ||
             zone->isAtomsZone());

  MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                zone->isCollectingFromAnyThread() || zone->isAtomsZone());

  // Gray marking is only valid once the zone has moved past black-only
  // marking.
  MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
                !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());

  // Nothing should be marked once the zone has moved on to sweeping,
  // finishing or compacting.
  MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
               zone->isGCCompacting()));

  // Check that we don't stray from the current compartment and zone without
  // using TraceCrossCompartmentEdge.
  Compartment* comp = thing->maybeCompartment();
  MOZ_ASSERT_IF(gcMarker->tracingCompartment && comp,
                gcMarker->tracingCompartment == comp);
  MOZ_ASSERT_IF(gcMarker->tracingZone,
                gcMarker->tracingZone == zone || zone->isAtomsZone());
}
    227 
namespace js {

// Explicitly instantiate CheckTracedThing for every trace kind and for the
// tagged types that can hold a GC thing.
#  define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
    template void CheckTracedThing<type>(JSTracer*, type*);
JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
#  undef IMPL_CHECK_TRACED_THING

template void CheckTracedThing<Value>(JSTracer*, const Value&);
template void CheckTracedThing<wasm::AnyRef>(JSTracer*, const wasm::AnyRef&);

}  // namespace js
    239 
    240 #endif
    241 
// Decide whether the marking tracer should follow a cross-compartment edge
// from |src| to |dstCell|. As a side effect this maintains the black/gray
// invariants the cycle collector depends on, either by unmarking gray
// targets or by recording gray marking work to be done later.
static inline bool ShouldMarkCrossCompartment(GCMarker* marker, JSObject* src,
                                              Cell* dstCell, const char* name) {
#ifdef DEBUG
  if (src->isMarkedGray() && !dstCell->isTenured()) {
    // Bug 1743098: This shouldn't be possible but it does seem to happen. Log
    // some useful information in debug builds.
    SEprinter printer;
    printer.printf(
        "ShouldMarkCrossCompartment: cross compartment edge '%s' from gray "
        "object to nursery thing\n",
        name);
    printer.put("src: ");
    src->dump(printer);
    printer.put("dst: ");
    dstCell->dump(printer);
    MOZ_CRASH("Found cross compartment edge from gray object to nursery thing");
  }
#endif

  CellColor targetColor = AsCellColor(marker->markColor());
  CellColor currentColor = dstCell->color();
  if (currentColor >= targetColor) {
    // Cell is already sufficiently marked. Nothing to do.
    return false;
  }

  // If neither the source nor the destination zone is being marked there is
  // nothing to do or record.
  TenuredCell& dst = dstCell->asTenured();
  JS::Zone* dstZone = dst.zone();
  if (!src->zone()->isGCMarking() && !dstZone->isGCMarking()) {
    return false;
  }

  if (targetColor == CellColor::Black) {
    // Check our sweep groups are correct: we should never have to
    // mark something in a zone that we have started sweeping.
    MOZ_ASSERT(currentColor < CellColor::Black);
    MOZ_ASSERT(!dstZone->isGCSweeping());

    /*
     * Having black->gray edges violates our promise to the cycle collector so
     * we ensure that gray things we encounter when marking black end up getting
     * marked black.
     *
     * This can happen for two reasons:
     *
     * 1) If we're collecting a compartment and it has an edge to an uncollected
     * compartment it's possible that the source and destination of the
     * cross-compartment edge should be gray, but the source was marked black by
     * the write barrier.
     *
     * 2) If we yield during gray marking and the write barrier marks a gray
     * thing black.
     *
     * We handle the first case before returning whereas the second case happens
     * as part of normal marking.
     */
    if (currentColor == CellColor::Gray && !dstZone->isGCMarking()) {
      UnmarkGrayGCThingUnchecked(marker,
                                 JS::GCCellPtr(&dst, dst.getTraceKind()));
      return false;
    }

    return dstZone->isGCMarking();
  }

  // Check our sweep groups are correct as above.
  MOZ_ASSERT(currentColor == CellColor::White);
  MOZ_ASSERT(!dstZone->isGCSweeping());

  if (dstZone->isGCMarkingBlackOnly()) {
    /*
     * The destination compartment is not being marked gray now, but it will
     * be later, so record the cell so it can be marked gray at the
     * appropriate time.
     */
    DelayCrossCompartmentGrayMarking(marker, src);
    return false;
  }

  return dstZone->isGCMarkingBlackAndGray();
}
    323 
    324 static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
    325                                        Cell* dstCell, const char* name) {
    326  if (!trc->isMarkingTracer()) {
    327    return true;
    328  }
    329 
    330  return ShouldMarkCrossCompartment(GCMarker::fromTracer(trc), src, dstCell,
    331                                    name);
    332 }
    333 
    334 static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
    335                                        const Value& val, const char* name) {
    336  return val.isGCThing() &&
    337         ShouldTraceCrossCompartment(trc, src, val.toGCThing(), name);
    338 }
    339 
    340 template <typename T>
    341 static inline bool ShouldMark(GCMarker* gcmarker, T* thing) {
    342  // We may encounter nursery things during normal marking since we don't
    343  // collect the nursery at the start of every GC slice.
    344  if (!thing->isTenured()) {
    345    return false;
    346  }
    347 
    348  // Allow marking symbols even if we're not collecting the atoms zone. This is
    349  // necessary to unmark gray symbols during an incremental GC. Failing to do
    350  // this will break our promise to the cycle collector that there are no black
    351  // to gray edges.
    352  if (std::is_same_v<T, JS::Symbol> &&
    353      gcmarker->markColor() == MarkColor::Black) {
    354    return true;
    355  }
    356 
    357  // Otherwise don't mark things outside a collected zone if we are in a
    358  // per-zone GC. Don't mark permanent shared things owned by other runtimes (we
    359  // will never observe their zone being collected).
    360  Zone* zone = thing->asTenured().zoneFromAnyThread();
    361  return zone->shouldMarkInZone(gcmarker->markColor());
    362 }
    363 
    364 #ifdef DEBUG
    365 
    366 template <typename T>
    367 void js::gc::AssertShouldMarkInZone(GCMarker* marker, T* thing) {
    368  if (thing->isMarkedBlack()) {
    369    return;
    370  }
    371 
    372  Zone* zone = thing->zone();
    373  MOZ_ASSERT(zone->shouldMarkInZone(marker->markColor()) ||
    374             zone->isAtomsZone());
    375 }
    376 
    377 void js::gc::AssertRootMarkingPhase(JSTracer* trc) {
    378  MOZ_ASSERT_IF(trc->isMarkingTracer(),
    379                trc->runtime()->gc.state() == State::NotActive ||
    380                    trc->runtime()->gc.state() == State::MarkRoots);
    381 }
    382 
    383 #endif  // DEBUG
    384 
    385 /*** Tracing Interface ******************************************************/
    386 
// Helper behind the public TraceExternalEdge functions; the referent must be
// a markable GC thing.
template <typename T>
static void TraceExternalEdgeHelper(JSTracer* trc, T* thingp,
                                    const char* name) {
  MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}

// Public entry point for tracing a JSObject* edge whose barrier the embedder
// manages manually.
JS_PUBLIC_API void js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc,
                                                        JSObject** thingp,
                                                        const char* name) {
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}

// Helper behind the public JS::TraceRoot functions; a null *thingp is
// tolerated via TraceNullableRoot.
template <typename T>
static void TraceRootHelper(JSTracer* trc, T* thingp, const char* name) {
  MOZ_ASSERT(thingp);
  js::TraceNullableRoot(trc, thingp, name);
}

// Forward declarations for the internal types given TraceRoot definitions
// below.
namespace js {
class AbstractGeneratorObject;
class SavedFrame;
}  // namespace js
    410 
// Generates a js::gc::TraceExternalEdge overload for one pointer type.
#define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type)                           \
  JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
                                               const char* name) {          \
    TraceExternalEdgeHelper(trc, thingp, name);                             \
  }

// Define TraceExternalEdge for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)

#undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION

// Generates a JS::TraceRoot overload for one pointer type.
#define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type)                 \
  JS_PUBLIC_API void JS::TraceRoot(JSTracer* trc, type* thingp, \
                                   const char* name) {          \
    TraceRootHelper(trc, thingp, name);                         \
  }

// Define TraceRoot for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)

// Also, for the moment, define TraceRoot for internal GC pointer types.
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(wasm::AnyRef)

#undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
    439 
namespace js::gc {

// Explicitly instantiate TraceRangeInternal for every trace kind, every
// public tagged GC pointer type, and TaggedProto.
#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type)                     \
  template void TraceRangeInternal<type>(JSTracer*, size_t len, type*, \
                                         const char*);

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
  INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS)
INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(TaggedProto)

#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS

}  // namespace js::gc
    457 
    458 // In debug builds, makes a note of the current compartment before calling a
    459 // trace hook or traceChildren() method on a GC thing.
class MOZ_RAII AutoSetTracingSource {
#ifndef DEBUG
 public:
  // No-op in release builds.
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {}
  ~AutoSetTracingSource() {}
#else
  GCMarker* marker = nullptr;

 public:
  // Record |thing|'s zone and compartment on the marker so that subsequent
  // edge checks (CheckMarkedThing) can detect accidental cross-compartment
  // tracing. Only marking tracers participate.
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {
    if (trc->isMarkingTracer() && thing) {
      marker = GCMarker::fromTracer(trc);
      // Nesting is not expected: the previous source must have been cleared.
      MOZ_ASSERT(!marker->tracingZone);
      marker->tracingZone = thing->asTenured().zone();
      MOZ_ASSERT(!marker->tracingCompartment);
      marker->tracingCompartment = thing->maybeCompartment();
    }
  }

  // Reset the recorded source on scope exit.
  ~AutoSetTracingSource() {
    if (marker) {
      marker->tracingZone = nullptr;
      marker->tracingCompartment = nullptr;
    }
  }
#endif
};
    489 
    490 // In debug builds, clear the trace hook compartment. This happens after the
    491 // trace hook has called back into one of our trace APIs and we've checked the
    492 // traced thing.
class MOZ_RAII AutoClearTracingSource {
#ifndef DEBUG
 public:
  // No-op in release builds.
  explicit AutoClearTracingSource(GCMarker* marker) {}
  explicit AutoClearTracingSource(JSTracer* trc) {}
  ~AutoClearTracingSource() {}
#else
  GCMarker* marker = nullptr;
  JS::Zone* prevZone = nullptr;
  Compartment* prevCompartment = nullptr;

 public:
  // Save and clear the marker's expected tracing zone/compartment; used when
  // an edge is legitimately allowed to cross a compartment boundary.
  explicit AutoClearTracingSource(JSTracer* trc) {
    if (trc->isMarkingTracer()) {
      marker = GCMarker::fromTracer(trc);
      prevZone = marker->tracingZone;
      marker->tracingZone = nullptr;
      prevCompartment = marker->tracingCompartment;
      marker->tracingCompartment = nullptr;
    }
  }
  // Restore the previously recorded source on scope exit.
  ~AutoClearTracingSource() {
    if (marker) {
      marker->tracingZone = prevZone;
      marker->tracingCompartment = prevCompartment;
    }
  }
#endif
};
    522 
// Trace a cross-compartment edge from |src| to |*dst|. For marking tracers
// this first consults ShouldTraceCrossCompartment to preserve the gray
// marking invariants; other tracers always trace the edge.
template <typename T>
void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc,
                                                    JSObject* src, T* dst,
                                                    const char* name) {
  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  if (ShouldTraceCrossCompartment(trc, src, *dst, name)) {
    TraceEdgeInternal(trc, dst, name);
  }
}
template void js::TraceManuallyBarrieredCrossCompartmentEdge<Value>(
    JSTracer*, JSObject*, Value*, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
    JSTracer*, JSObject*, JSObject**, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<BaseScript*>(
    JSTracer*, JSObject*, BaseScript**, const char*);
    540 
// Trace an edge that crosses a compartment boundary but stays within the
// same zone (only Shape* is instantiated here).
template <typename T>
void js::TraceSameZoneCrossCompartmentEdge(JSTracer* trc,
                                           const BarrieredBase<T>* dst,
                                           const char* name) {
#ifdef DEBUG
  if (trc->isMarkingTracer()) {
    T thing = *dst->unbarrieredAddress();
    MOZ_ASSERT(thing->maybeCompartment(),
               "Use TraceEdge for GC things without a compartment");

    // Same-zone requirement: the target must live in the zone being traced.
    GCMarker* gcMarker = GCMarker::fromTracer(trc);
    MOZ_ASSERT_IF(gcMarker->tracingZone,
                  thing->zone() == gcMarker->tracingZone);
  }

  // Skip compartment checks for this edge.
  if (trc->kind() == JS::TracerKind::CompartmentCheck) {
    return;
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);
  TraceEdgeInternal(trc, ConvertToBase(dst->unbarrieredAddress()), name);
}
template void js::TraceSameZoneCrossCompartmentEdge(
    JSTracer*, const BarrieredBase<Shape*>*, const char*);
    568 
// Trace a weak map key edge held through a double pointer. We'd like to
// assert that the thing's zone is currently being marked but that's not
// always true when tracing debugger weak maps which have keys in other
// compartments.
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T** thingp, const char* name) {
  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}

// Variant taking the thing by value-pointer (e.g. JS::Value, per the
// instantiations below).
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T* thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark it.
#ifdef DEBUG
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() ==
               gc::ToMarkable(*thingp)->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}

template void js::TraceWeakMapKeyEdgeInternal<JSObject>(JSTracer*, Zone*,
                                                        JSObject**,
                                                        const char*);
template void js::TraceWeakMapKeyEdgeInternal<BaseScript>(JSTracer*, Zone*,
                                                          BaseScript**,
                                                          const char*);
template void js::TraceWeakMapKeyEdgeInternal<JS::Value>(JSTracer*, Zone*,
                                                         JS::Value*,
                                                         const char*);
    612 
    613 static Cell* TraceGenericPointerRootAndType(JSTracer* trc, Cell* thing,
    614                                            JS::TraceKind kind,
    615                                            const char* name) {
    616  return MapGCThingTyped(thing, kind, [trc, name](auto t) -> Cell* {
    617    TraceRoot(trc, &t, name);
    618    return t;
    619  });
    620 }
    621 
    622 void js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp,
    623                                 const char* name) {
    624  MOZ_ASSERT(thingp);
    625  Cell* thing = *thingp;
    626  if (!thing) {
    627    return;
    628  }
    629 
    630  Cell* traced =
    631      TraceGenericPointerRootAndType(trc, thing, thing->getTraceKind(), name);
    632  if (traced != thing) {
    633    *thingp = traced;
    634  }
    635 }
    636 
    637 void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp,
    638                                                  const char* name) {
    639  MOZ_ASSERT(thingp);
    640  Cell* thing = *thingp;
    641  if (!*thingp) {
    642    return;
    643  }
    644 
    645  auto* traced = MapGCThingTyped(thing, thing->getTraceKind(),
    646                                 [trc, name](auto t) -> Cell* {
    647                                   TraceManuallyBarrieredEdge(trc, &t, name);
    648                                   return t;
    649                                 });
    650  if (traced != thing) {
    651    *thingp = traced;
    652  }
    653 }
    654 
    655 void js::TraceGCCellPtrRoot(JSTracer* trc, JS::GCCellPtr* thingp,
    656                            const char* name) {
    657  Cell* thing = thingp->asCell();
    658  if (!thing) {
    659    return;
    660  }
    661 
    662  Cell* traced =
    663      TraceGenericPointerRootAndType(trc, thing, thingp->kind(), name);
    664 
    665  if (!traced) {
    666    *thingp = JS::GCCellPtr();
    667  } else if (traced != thingp->asCell()) {
    668    *thingp = JS::GCCellPtr(traced, thingp->kind());
    669  }
    670 }
    671 
// Trace the cell held in a manually-barriered GCCellPtr, updating it if the
// tracer moved or cleared the referent.
void js::TraceManuallyBarrieredGCCellPtr(JSTracer* trc, JS::GCCellPtr* thingp,
                                         const char* name) {
  Cell* thing = thingp->asCell();
  if (!thing) {
    return;
  }

  Cell* traced = MapGCThingTyped(thing, thing->getTraceKind(),
                                 [trc, name](auto t) -> Cell* {
                                   TraceManuallyBarrieredEdge(trc, &t, name);
                                   return t;
                                 });

  if (!traced) {
    // If we are clearing edges, also erase the type. This happens when using
    // ClearEdgesTracer.
    *thingp = JS::GCCellPtr();
  } else if (traced != thingp->asCell()) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());
  }
}
    693 
// Trace an edge held in a tagged pointer type (Value, jsid, TaggedProto,
// wasm::AnyRef). Returns false if the tracer cleared the edge.
template <typename T>
inline bool TraceTaggedPtrEdge(JSTracer* trc, T* thingp, const char* name) {
  // Return true by default. For some types the lambda below won't be called.
  bool ret = true;
  auto thing = MapGCThingTyped(*thingp, [&](auto thing) {
    if (!TraceEdgeInternal(trc, &thing, name)) {
      ret = false;
      return TaggedPtr<T>::empty();
    }

    return TaggedPtr<T>::wrap(thing);
  });

  // Only update *thingp if the value changed, to avoid TSan false positives for
  // template objects when using DumpHeapTracer or UbiNode tracers while Ion
  // compiling off-thread.
  if (thing.isSome() && thing.value() != *thingp) {
    *thingp = thing.value();
  }

  return ret;
}
    716 
// TraceEdgeInternal overloads for the tagged pointer types; all delegate to
// TraceTaggedPtrEdge above.
bool js::gc::TraceEdgeInternal(JSTracer* trc, Value* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, jsid* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, TaggedProto* thingp,
                               const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, wasm::AnyRef* thingp,
                               const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
    731 
    732 template <typename T>
    733 void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec,
    734                                const char* name) {
    735  JS::AutoTracingIndex index(trc);
    736  for (auto i : IntegerRange(len)) {
    737    if (InternalBarrierMethods<T>::isMarkable(vec[i])) {
    738      TraceEdgeInternal(trc, &vec[i], name);
    739    }
    740    ++index;
    741  }
    742 }
    743 
    744 /*** GC Marking Interface ***************************************************/
    745 
    746 namespace js {
    747 
// Mark the targets of the given ephemeron (weak map) edges whose effective
// color matches the current mark color.
void GCMarker::markEphemeronEdges(EphemeronEdgeVector& edges,
                                  gc::MarkColor srcColor) {
  // This is called as part of GC weak marking or by barriers outside of GC.
  MOZ_ASSERT_IF(CurrentThreadIsPerformingGC(),
                state == MarkingState::WeakMarking);

  DebugOnly<size_t> initialLength = edges.length();

  for (auto& edge : edges) {
    // A target is marked with the weaker of the source's color and the color
    // recorded on the edge itself.
    MarkColor targetColor = std::min(srcColor, MarkColor(edge.color()));
    MOZ_ASSERT(markColor() >= targetColor);
    if (targetColor == markColor()) {
      ApplyGCThingTyped(edge.target(), edge.target()->getTraceKind(),
                        [this](auto t) {
                          markAndTraverse<MarkingOptions::MarkImplicitEdges>(t);
                        });
    }
  }

  // The above marking always goes through pushThing, which will not cause
  // 'edges' to be appended to while iterating.
  MOZ_ASSERT(edges.length() == initialLength);

  // This is not just an optimization. When nuking a CCW, we conservatively
  // mark through the related edges and then lose the CCW->target connection
  // that induces a sweep group edge. As a result, it is possible for the
  // delegate zone to get marked later, look up an edge in this table, and
  // then try to mark something in a Zone that is no longer marking.
  if (srcColor == MarkColor::Black && markColor() == MarkColor::Black) {
    edges.eraseIf([](auto& edge) { return edge.color() == MarkColor::Black; });
  }
}
    780 
    781 template <typename T>
    782 struct TypeCanHaveImplicitEdges : std::false_type {};
    783 template <>
    784 struct TypeCanHaveImplicitEdges<JSObject> : std::true_type {};
    785 template <>
    786 struct TypeCanHaveImplicitEdges<BaseScript> : std::true_type {};
    787 template <>
    788 struct TypeCanHaveImplicitEdges<JS::Symbol> : std::true_type {};
    789 
    790 template <typename T>
    791 void GCMarker::markImplicitEdges(T* markedThing) {
    792  if constexpr (!TypeCanHaveImplicitEdges<T>::value) {
    793    return;
    794  }
    795 
    796  if (!isWeakMarking()) {
    797    return;
    798  }
    799 
    800  Zone* zone = markedThing->asTenured().zone();
    801  MOZ_ASSERT(zone->isGCMarking() || zone->isAtomsZone());
    802  MOZ_ASSERT(!zone->isGCSweeping());
    803 
    804  auto& ephemeronTable = zone->gcEphemeronEdges();
    805  auto p = ephemeronTable.lookup(&markedThing->asTenured());
    806  if (!p) {
    807    return;
    808  }
    809 
    810  EphemeronEdgeVector& edges = p->value();
    811 
    812  // markedThing might be a key in a debugger weakmap, which can end up marking
    813  // values that are in a different compartment.
    814  AutoClearTracingSource acts(tracer());
    815 
    816  MarkColor thingColor = markColor();
    817  MOZ_ASSERT(CellColor(thingColor) ==
    818             gc::detail::GetEffectiveColor(this, markedThing));
    819 
    820  markEphemeronEdges(edges, thingColor);
    821 
    822  if (edges.empty()) {
    823    ephemeronTable.remove(p);
    824  }
    825 }
    826 
    827 template void GCMarker::markImplicitEdges(JSObject*);
    828 template void GCMarker::markImplicitEdges(BaseScript*);
    829 template void GCMarker::markImplicitEdges(JS::Symbol*);
    830 
    831 }  // namespace js
    832 
// Construct a marking tracer embedded in |marker|. Weak maps are expanded and
// weak edges skipped: weak marking is handled separately via ephemeron edges.
template <uint32_t opts>
MarkingTracerT<opts>::MarkingTracerT(JSRuntime* runtime, GCMarker* marker)
    : GenericTracerImpl<MarkingTracerT<opts>>(
          runtime, JS::TracerKind::Marking,
          JS::TraceOptions(JS::WeakMapTraceAction::Expand,
                           JS::WeakEdgeTraceAction::Skip)) {
  // Marking tracers are owned by (and part of) a GCMarker.
  MOZ_ASSERT(this == marker->tracer());
  MOZ_ASSERT(getMarker() == marker);
}

// Recover the owning GCMarker from this embedded tracer.
template <uint32_t opts>
MOZ_ALWAYS_INLINE GCMarker* MarkingTracerT<opts>::getMarker() {
  return GCMarker::fromTracer(this);
}
    848 
    849 template <uint32_t opts>
    850 template <typename T>
    851 void MarkingTracerT<opts>::onEdge(T** thingp, const char* name) {
    852  T* thing = *thingp;
    853 
    854  // Do per-type marking precondition checks.
    855  GCMarker* marker = getMarker();
    856  if (!ShouldMark(marker, thing)) {
    857    MOZ_ASSERT(gc::detail::GetEffectiveColor(marker, thing) ==
    858               js::gc::CellColor::Black);
    859    return;
    860  }
    861 
    862  MOZ_ASSERT_IF(IsOwnedByOtherRuntime(this->runtime(), thing),
    863                thing->isMarkedBlack());
    864 
    865 #ifdef DEBUG
    866  CheckMarkedThing(marker, thing);
    867 #endif
    868 
    869  AutoClearTracingSource acts(this);
    870  marker->markAndTraverse<opts>(thing);
    871 }
    872 
// Explicitly instantiate onEdge for every trace kind, once per marking option
// variant used by the marking tracers.
#define INSTANTIATE_ONEDGE_METHOD(name, type, _1, _2)                 \
  template void MarkingTracerT<MarkingOptions::None>::onEdge<type>(   \
      type * *thingp, const char* name);                              \
  template void                                                       \
  MarkingTracerT<MarkingOptions::MarkImplicitEdges>::onEdge<type>(    \
      type * *thingp, const char* name);                              \
  template void                                                       \
  MarkingTracerT<MarkingOptions::MarkRootCompartments>::onEdge<type>( \
      type * *thingp, const char* name);
JS_FOR_EACH_TRACEKIND(INSTANTIATE_ONEDGE_METHOD)
#undef INSTANTIATE_ONEDGE_METHOD
    884 
// Mark a single cell for a read or pre-write barrier. Callers have already
// established that marking is required, so ShouldMark is asserted rather than
// checked here.
static void TraceEdgeForBarrier(GCMarker* gcmarker, TenuredCell* thing,
                                JS::TraceKind kind) {
  // Dispatch to markAndTraverse without checking ShouldMark.
  ApplyGCThingTyped(thing, kind, [gcmarker](auto thing) {
    MOZ_ASSERT(ShouldMark(gcmarker, thing));
    CheckTracedThing(gcmarker->tracer(), thing);
    AutoClearTracingSource acts(gcmarker->tracer());
#ifdef DEBUG
    AutoSetThreadIsMarking threadIsMarking;
#endif  // DEBUG
    gcmarker->markAndTraverse<NormalMarkingOptions>(thing);
  });
}
    898 
    899 JS_PUBLIC_API void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing) {
    900  // Optimized marking for read barriers. This is called from
    901  // ExposeGCThingToActiveJS which has already checked the prerequisites for
    902  // performing a read barrier. This means we can skip a bunch of checks and
    903  // call into the tracer directly.
    904 
    905  MOZ_ASSERT(thing);
    906  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
    907 
    908  TenuredCell* cell = &thing.asCell()->asTenured();
    909  MOZ_ASSERT(!cell->isMarkedBlack());
    910 
    911  Zone* zone = cell->zone();
    912  MOZ_ASSERT(zone->needsIncrementalBarrier());
    913 
    914  // Skip dispatching on known tracer type.
    915  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
    916  TraceEdgeForBarrier(gcmarker, cell, thing.kind());
    917 }
    918 
    919 void js::gc::PerformIncrementalReadBarrier(TenuredCell* cell) {
    920  // Internal version of previous function.
    921 
    922  MOZ_ASSERT(cell);
    923  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
    924 
    925  if (cell->isMarkedBlack()) {
    926    return;
    927  }
    928 
    929  Zone* zone = cell->zone();
    930  MOZ_ASSERT(zone->needsIncrementalBarrier());
    931 
    932  // Skip dispatching on known tracer type.
    933  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
    934  TraceEdgeForBarrier(gcmarker, cell, cell->getTraceKind());
    935 }
    936 
    937 void js::gc::PerformIncrementalPreWriteBarrier(TenuredCell* cell) {
    938  // The same as PerformIncrementalReadBarrier except for an extra check on the
    939  // runtime for cells in atoms zone.
    940 
    941  Zone* zone = cell->zoneFromAnyThread();
    942 
    943  MOZ_ASSERT(cell);
    944  if (cell->isMarkedBlack()) {
    945    return;
    946  }
    947 
    948  // Barriers can be triggered off the main thread by background finalization of
    949  // HeapPtrs to the atoms zone. We don't want to trigger the barrier in this
    950  // case.
    951  bool checkThread = zone->isAtomsZone();
    952  JSRuntime* runtime = cell->runtimeFromAnyThread();
    953  if (checkThread && !CurrentThreadCanAccessRuntime(runtime)) {
    954    MOZ_ASSERT(CurrentThreadIsGCFinalizing());
    955    return;
    956  }
    957 
    958  MOZ_ASSERT(zone->needsIncrementalBarrier());
    959  MOZ_ASSERT(CurrentThreadIsMainThread());
    960  MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
    961 
    962  // Skip dispatching on known tracer type.
    963  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
    964  TraceEdgeForBarrier(gcmarker, cell, cell->getTraceKind());
    965 }
    966 
    967 void js::gc::PerformIncrementalBarrierDuringFlattening(JSString* str) {
    968  TenuredCell* cell = &str->asTenured();
    969 
    970  // Skip eager marking of ropes during flattening. Their children will also be
    971  // barriered by flattening process so we don't need to traverse them.
    972  if (str->isRope()) {
    973    cell->markBlack();
    974    return;
    975  }
    976 
    977  PerformIncrementalPreWriteBarrier(cell);
    978 }
    979 
    980 template <uint32_t opts, typename T>
    981 void js::GCMarker::markAndTraverse(T* thing) {
    982  if (mark<opts>(thing)) {
    983    // We only mark permanent things during initialization.
    984    MOZ_ASSERT_IF(thing->isPermanentAndMayBeShared(),
    985                  !runtime()->permanentAtomsPopulated());
    986 
    987    // We don't need to pass MarkRootCompartments options on to children.
    988    constexpr uint32_t traverseOpts =
    989        opts & ~MarkingOptions::MarkRootCompartments;
    990 
    991    traverse<traverseOpts>(thing);
    992 
    993    if constexpr (bool(opts & MarkingOptions::MarkRootCompartments)) {
    994      // Mark the compartment as live.
    995      SetCompartmentHasMarkedCells(thing);
    996    }
    997  }
    998 }
    999 
   1000 // The |traverse| method overloads select the traversal strategy for each kind.
   1001 //
   1002 // There are three possible strategies:
   1003 //
   1004 // 1. traceChildren
   1005 //
   1006 //    The simplest traversal calls out to the fully generic traceChildren
   1007 //    function to visit the child edges. In the absence of other traversal
   1008 //    mechanisms, this function will rapidly grow the stack past its bounds and
   1009 //    crash the process. Thus, this generic tracing should only be used in cases
   1010 //    where subsequent tracing will not recurse.
   1011 //
   1012 // 2. scanChildren
   1013 //
   1014 //    Strings, Shapes, and Scopes are extremely common, but have simple patterns
   1015 //    of recursion. We traverse trees of these edges immediately, with
   1016 //    aggressive, manual inlining, implemented by eagerlyTraceChildren.
   1017 //
   1018 // 3. pushThing
   1019 //
   1020 //    Objects are extremely common and can contain arbitrarily nested graphs, so
   1021 //    are not trivially inlined. In this case we use the mark stack to control
   1022 //    recursion. JitCode shares none of these properties, but is included for
   1023 //    historical reasons. JSScript normally cannot recurse, but may be used as a
   1024 //    weakmap key and thereby recurse into weakmapped values.
   1025 
// Strategy 1: kinds that do not recurse deeply trace children directly.
template <uint32_t opts>
void GCMarker::traverse(BaseShape* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(GetterSetter* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::Symbol* thing) {
  // Symbols can have implicit (ephemeron) edges, so when marking those we
  // defer to the mark stack instead of tracing in place.
  if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
    pushThing<opts>(thing);
    return;
  }
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::BigInt* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(RegExpShared* thing) {
  traceChildren<opts>(thing);
}
// Strategy 2: common kinds with simple recursion patterns are scanned eagerly.
template <uint32_t opts>
void GCMarker::traverse(JSString* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(Shape* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(PropMap* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(js::Scope* thing) {
  scanChildren<opts>(thing);
}
// Strategy 3: kinds that can recurse arbitrarily go via the mark stack.
template <uint32_t opts>
void GCMarker::traverse(JSObject* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(jit::JitCode* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(BaseScript* thing) {
  pushThing<opts>(thing);
}
   1078 
// Strategy 1 helper: mark children through the generic traceChildren hook.
template <uint32_t opts, typename T>
void js::GCMarker::traceChildren(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  AutoSetTracingSource asts(tracer(), thing);
  thing->traceChildren(tracer());
}

// Strategy 2 helper: eagerly scan children with manual inlining.
template <uint32_t opts, typename T>
void js::GCMarker::scanChildren(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  eagerlyMarkChildren<opts>(thing);
}

// Strategy 3 helper: defer the thing to the mark stack to bound recursion.
template <uint32_t opts, typename T>
void js::GCMarker::pushThing(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  pushTaggedPtr(thing);
}
   1100 
// Explicit instantiations of markAndTraverse for JSObject, one per marking
// option variant used by callers outside this file.
template void js::GCMarker::markAndTraverse<MarkingOptions::None, JSObject>(
    JSObject* thing);
template void js::GCMarker::markAndTraverse<MarkingOptions::MarkImplicitEdges,
                                            JSObject>(JSObject* thing);
template void js::GCMarker::markAndTraverse<
    MarkingOptions::MarkRootCompartments, JSObject>(JSObject* thing);

#ifdef DEBUG
// Toggle the debug-only atom marking validation used by checkTraversedEdge.
void GCMarker::setCheckAtomMarking(bool check) {
  MOZ_ASSERT(check != checkAtomMarking);
  checkAtomMarking = check;
}
#endif
   1114 
// Debug-only validation of an edge about to be traversed: checks zone and
// compartment agreement between source and target, and that atoms reached
// from a zone are marked in that zone's atom bitmap. No-op in release builds.
template <typename S, typename T>
inline void GCMarker::checkTraversedEdge(S source, T* target) {
#ifdef DEBUG
  // Atoms and Symbols do not have or mark their internal pointers,
  // respectively.
  MOZ_ASSERT(!source->isPermanentAndMayBeShared());

  // Shared things are already black so we will not mark them.
  if (target->isPermanentAndMayBeShared()) {
    Zone* zone = target->zoneFromAnyThread();
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(target->isMarkedBlack());
    MOZ_ASSERT(!target->maybeCompartment());
    return;
  }

  Zone* sourceZone = source->zone();
  Zone* targetZone = target->zone();

  // Atoms and Symbols do not have access to a compartment pointer, or we'd need
  // to adjust the subsequent check to catch that case.
  MOZ_ASSERT_IF(targetZone->isAtomsZone(), !target->maybeCompartment());

  // The Zones must match, unless the target is an atom.
  MOZ_ASSERT(targetZone == sourceZone || targetZone->isAtomsZone());

  // If we are marking an atom, that atom must be marked in the source zone's
  // atom bitmap.
  if (checkAtomMarking && !sourceZone->isAtomsZone() &&
      targetZone->isAtomsZone()) {
    GCRuntime* gc = &target->runtimeFromAnyThread()->gc;
    TenuredCell* atom = &target->asTenured();
    MOZ_ASSERT(gc->atomMarking.getAtomMarkColor(sourceZone, atom) >=
               AsCellColor(markColor()));
  }

  // If we have access to a compartment pointer for both things, they must
  // match.
  MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                source->maybeCompartment() == target->maybeCompartment());
#endif
}
   1158 
   1159 template <uint32_t opts, typename S, typename T>
   1160 void js::GCMarker::markAndTraverseEdge(S* source, T* target) {
   1161  if constexpr (std::is_same_v<T, JS::Symbol>) {
   1162    // Unmark gray symbols in incremental GC.
   1163    if (markColor() == MarkColor::Black) {
   1164      GCRuntime* gc = &runtime()->gc;
   1165      MOZ_ASSERT(gc->atomMarking.atomIsMarked(source->zone(), target));
   1166      gc->atomMarking.maybeUnmarkGrayAtomically(source->zone(), target);
   1167    }
   1168  }
   1169  checkTraversedEdge(source, target);
   1170  markAndTraverse<opts>(target);
   1171 }
   1172 
   1173 template <uint32_t opts, typename S, typename T>
   1174 void js::GCMarker::markAndTraverseEdge(S* source, const T& target) {
   1175  ApplyGCThingTyped(target, [this, source](auto t) {
   1176    this->markAndTraverseEdge<opts>(source, t);
   1177  });
   1178 }
   1179 
   1180 template <uint32_t opts>
   1181 MOZ_NEVER_INLINE bool js::GCMarker::markAndTraversePrivateGCThing(
   1182    JSObject* source, Cell* target) {
   1183  JS::TraceKind kind = target->getTraceKind();
   1184  ApplyGCThingTyped(target, kind, [this, source](auto t) {
   1185    this->markAndTraverseEdge<opts>(source, t);
   1186  });
   1187 
   1188  // Ensure stack headroom in case we pushed.
   1189  if (MOZ_UNLIKELY(!stack.ensureSpace(ValueRangeWords))) {
   1190    delayMarkingChildrenOnOOM(source);
   1191    return false;
   1192  }
   1193 
   1194  return true;
   1195 }
   1196 
   1197 template <uint32_t opts>
   1198 bool js::GCMarker::markAndTraverseSymbol(JSObject* source, JS::Symbol* target) {
   1199  this->markAndTraverseEdge<opts>(source, target);
   1200 
   1201  // Ensure stack headroom in case we pushed.
   1202  if (MOZ_UNLIKELY(!stack.ensureSpace(ValueRangeWords))) {
   1203    delayMarkingChildrenOnOOM(source);
   1204    return false;
   1205  }
   1206 
   1207  return true;
   1208 }
   1209 
// Mark |thing| with the current color if it is not already marked. Returns
// the result of markIfUnmarked: whether this call newly marked the thing (in
// which case the caller should traverse its children). Nursery things are
// never marked here.
template <uint32_t opts, typename T>
bool js::GCMarker::mark(T* thing) {
  if (!thing->isTenured()) {
    return false;
  }

  if constexpr (std::is_same_v<T, JS::Symbol>) {
    // Don't mark symbols owned by other runtimes. Mark symbols black in
    // uncollected zones for gray unmarking, but don't mark symbols gray in
    // uncollected zones.
    if (IsOwnedByOtherRuntime(runtime(), thing) ||
        (markColor() == MarkColor::Gray &&
         !thing->zone()->isGCMarkingOrVerifyingPreBarriers())) {
      return false;
    }
  }

  AssertShouldMarkInZone(this, thing);

  // Trace kinds that can never be gray are always marked black.
  MarkColor color =
      TraceKindCanBeGray<T>::value ? markColor() : MarkColor::Black;

  if constexpr (bool(opts & MarkingOptions::ParallelMarking)) {
    // Parallel marking requires a thread-safe update of the mark bits.
    return thing->asTenured().markIfUnmarkedThreadSafe(color);
  }

  return thing->asTenured().markIfUnmarked(color);
}
   1238 
   1239 /*** Mark-stack Marking *****************************************************/
   1240 
   1241 // Call the trace hook set on the object, if present.
   1242 static inline void CallTraceHook(JSTracer* trc, JSObject* obj) {
   1243  const JSClass* clasp = obj->getClass();
   1244  MOZ_ASSERT(clasp);
   1245 
   1246  if (clasp->hasTrace()) {
   1247    AutoSetTracingSource asts(trc, obj);
   1248    clasp->doTrace(trc, obj);
   1249  }
   1250 }
   1251 
   1252 static gcstats::PhaseKind GrayMarkingPhaseForCurrentPhase(
   1253    const gcstats::Statistics& stats) {
   1254  using namespace gcstats;
   1255  switch (stats.currentPhaseKind()) {
   1256    case PhaseKind::MARK:
   1257      return PhaseKind::MARK_GRAY;
   1258    case PhaseKind::MARK_WEAK:
   1259      return PhaseKind::MARK_GRAY_WEAK;
   1260    default:
   1261      MOZ_CRASH("Unexpected current phase");
   1262  }
   1263 }
   1264 
// Transfer marking work from |src|'s stack onto |dst|'s empty stack (parallel
// marking work distribution). Returns the amount moved; see
// MarkStack::moveWork for the units.
size_t GCMarker::moveWork(GCMarker* dst, GCMarker* src, bool allowDistribute) {
  MOZ_ASSERT(dst->stack.isEmpty());
  MOZ_ASSERT(src->canDonateWork());

  return MarkStack::moveWork(src, dst->stack, src->stack, allowDistribute);
}

// Allocate the mark stack. Only valid while the marker is inactive and black.
bool GCMarker::initStack() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  return stack.init();
}

// Reset the mark stack's capacity; the result is deliberately ignored as this
// is best-effort only.
void GCMarker::resetStackCapacity() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  (void)stack.resetStackCapacity();
}

// Release the mark stack's memory.
void GCMarker::freeStack() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  stack.clearAndFreeStack();
}
   1289 
   1290 bool GCMarker::markUntilBudgetExhausted(SliceBudget& budget,
   1291                                        ShouldReportMarkTime reportTime) {
   1292 #ifdef DEBUG
   1293  MOZ_ASSERT(!strictCompartmentChecking);
   1294  strictCompartmentChecking = true;
   1295  auto acc = mozilla::MakeScopeExit([&] { strictCompartmentChecking = false; });
   1296 #endif
   1297 
   1298  if (budget.isOverBudget()) {
   1299    return false;
   1300  }
   1301 
   1302  if (isWeakMarking()) {
   1303    return doMarking<MarkingOptions::MarkImplicitEdges>(budget, reportTime);
   1304  }
   1305 
   1306  return doMarking<MarkingOptions::None>(budget, reportTime);
   1307 }
   1308 
// Drain the mark stack: black entries first, then gray, then any marking that
// was delayed due to deep recursion. Returns false if the budget ran out.
template <uint32_t opts>
bool GCMarker::doMarking(SliceBudget& budget, ShouldReportMarkTime reportTime) {
  GCRuntime& gc = runtime()->gc;

  // This method leaves the mark color as it found it.

  if (hasBlackEntries() && !markOneColor<opts, MarkColor::Black>(budget)) {
    return false;
  }

  if (hasGrayEntries()) {
    // Optionally report gray marking under its own profiling phase.
    mozilla::Maybe<gcstats::AutoPhase> ap;
    if (reportTime) {
      auto& stats = runtime()->gc.stats();
      ap.emplace(stats, GrayMarkingPhaseForCurrentPhase(stats));
    }

    if (!markOneColor<opts, MarkColor::Gray>(budget)) {
      return false;
    }
  }

  // Mark children of things that caused too deep recursion during the above
  // tracing. All normal marking happens before any delayed marking.
  if (gc.hasDelayedMarking()) {
    gc.markAllDelayedChildren(reportTime);
  }

  MOZ_ASSERT(!gc.hasDelayedMarking());
  MOZ_ASSERT(isDrained());

  return true;
}
   1342 
   1343 template <uint32_t opts, MarkColor color>
   1344 bool GCMarker::markOneColor(SliceBudget& budget) {
   1345  AutoSetMarkColor setColor(*this, color);
   1346  AutoUpdateMarkStackRanges updateRanges(*this);
   1347 
   1348  while (processMarkStackTop<opts>(budget)) {
   1349    if (stack.isEmpty()) {
   1350      return true;
   1351    }
   1352  }
   1353 
   1354  return false;
   1355 }
   1356 
// Parallel-marking equivalent of markOneColor: drain this marker's stack,
// donating work to waiting tasks when there is enough to share. Returns false
// if the budget ran out first.
bool GCMarker::markCurrentColorInParallel(ParallelMarkTask* task,
                                          SliceBudget& budget) {
  MOZ_ASSERT(stack.elementsRangesAreValid);

  ParallelMarkTask::AtomicCount& waitingTaskCount = task->waitingTaskCountRef();

  while (processMarkStackTop<MarkingOptions::ParallelMarking>(budget)) {
    if (stack.isEmpty()) {
      return true;
    }

    // TODO: It might be better to only check this occasionally, possibly
    // combined with the slice budget check. Experiments with giving this its
    // own counter resulted in worse performance.
    if (waitingTaskCount && shouldDonateWork()) {
      task->donateWork();
    }
  }

  return false;
}
   1378 
#ifdef DEBUG
// Test helper: mark a single object and process whatever that pushed onto the
// mark stack. Returns false if marking the object pushed nothing.
bool GCMarker::markOneObjectForTest(JSObject* obj) {
  MOZ_ASSERT(obj->zone()->isGCMarking());
  MOZ_ASSERT(!obj->isMarked(markColor()));

  size_t oldPosition = stack.position();
  markAndTraverse<NormalMarkingOptions>(obj);
  if (stack.position() == oldPosition) {
    return false;
  }

  AutoUpdateMarkStackRanges updateRanges(*this);

  SliceBudget unlimited = SliceBudget::unlimited();
  processMarkStackTop<NormalMarkingOptions>(unlimited);

  return true;
}
#endif
   1398 
   1399 static inline void CheckForCompartmentMismatch(JSObject* obj, JSObject* obj2) {
   1400 #ifdef DEBUG
   1401  if (MOZ_UNLIKELY(obj->compartment() != obj2->compartment())) {
   1402    fprintf(
   1403        stderr,
   1404        "Compartment mismatch in pointer from %s object slot to %s object\n",
   1405        obj->getClass()->name, obj2->getClass()->name);
   1406    MOZ_CRASH("Compartment mismatch");
   1407  }
   1408 #endif
   1409 }
   1410 
   1411 static inline size_t NumUsedFixedSlots(NativeObject* obj) {
   1412  return std::min(obj->numFixedSlots(), obj->slotSpan());
   1413 }
   1414 
   1415 static inline size_t NumUsedDynamicSlots(NativeObject* obj) {
   1416  size_t nfixed = obj->numFixedSlots();
   1417  size_t nslots = obj->slotSpan();
   1418  if (nslots < nfixed) {
   1419    return 0;
   1420  }
   1421 
   1422  return nslots - nfixed;
   1423 }
   1424 
   1425 void GCMarker::updateRangesAtStartOfSlice() {
   1426  MOZ_ASSERT(!stack.elementsRangesAreValid);
   1427 
   1428  for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
   1429    if (iter.isSlotsOrElementsRange()) {
   1430      MarkStack::SlotsOrElementsRange range = iter.slotsOrElementsRange();
   1431      JSObject* obj = range.ptr().asRangeObject();
   1432      if (!obj->is<NativeObject>()) {
   1433        // The object owning the range was swapped with a non-native object by
   1434        // the mutator. The barriers at the end of JSObject::swap ensure that
   1435        // everything gets marked so there's nothing to do here.
   1436        range.setEmpty();
   1437        iter.setSlotsOrElementsRange(range);
   1438      } else if (range.kind() == SlotsOrElementsKind::Elements) {
   1439        NativeObject* obj = &range.ptr().asRangeObject()->as<NativeObject>();
   1440        size_t index = range.start();
   1441        size_t numShifted = obj->getElementsHeader()->numShiftedElements();
   1442        index -= std::min(numShifted, index);
   1443        range.setStart(index);
   1444        iter.setSlotsOrElementsRange(range);
   1445      }
   1446    }
   1447  }
   1448 
   1449 #ifdef DEBUG
   1450  stack.elementsRangesAreValid = true;
   1451 #endif
   1452 }
   1453 
// Inverse of updateRangesAtStartOfSlice: before yielding to the mutator, add
// the current element shift back into element range indices so they can be
// rebased against the (possibly changed) shift at the start of the next slice.
void GCMarker::updateRangesAtEndOfSlice() {
  MOZ_ASSERT(stack.elementsRangesAreValid);

  for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
    if (iter.isSlotsOrElementsRange()) {
      MarkStack::SlotsOrElementsRange range = iter.slotsOrElementsRange();
      if (range.kind() == SlotsOrElementsKind::Elements) {
        NativeObject* obj = &range.ptr().asRangeObject()->as<NativeObject>();
        size_t numShifted = obj->getElementsHeader()->numShiftedElements();
        range.setStart(range.start() + numShifted);
        iter.setSlotsOrElementsRange(range);
      }
    }
  }

#ifdef DEBUG
  // Ranges are only valid while a slice is running.
  stack.elementsRangesAreValid = false;
#endif
}
   1473 
   1474 template <uint32_t opts>
   1475 inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
   1476  /*
   1477   * This function uses explicit goto and scans objects directly. This allows us
   1478   * to eliminate tail recursion and significantly improve the marking
   1479   * performance, see bug 641025.
   1480   *
   1481   * Note that the mutator can change the size and layout of objects between
   1482   * marking slices, so we must check slots and element ranges read from the
   1483   * stack.
   1484   */
   1485 
   1486  MOZ_ASSERT(!stack.isEmpty());
   1487  MOZ_ASSERT(stack.elementsRangesAreValid);
   1488  MOZ_ASSERT_IF(markColor() == MarkColor::Gray, !hasBlackEntries());
   1489 
   1490  JSObject* obj;             // The object being scanned.
   1491  SlotsOrElementsKind kind;  // The kind of slot range being scanned, if any.
   1492  HeapSlot* base;            // Slot range base pointer.
   1493  size_t index;              // Index of the next slot to mark.
   1494  size_t end;                // End of slot range to mark.
   1495 
   1496  if (stack.peekTag() == MarkStack::SlotsOrElementsRangeTag) {
   1497    auto range = stack.popSlotsOrElementsRange();
   1498    obj = range.ptr().asRangeObject();
   1499    NativeObject* nobj = &obj->as<NativeObject>();
   1500    kind = range.kind();
   1501    index = range.start();
   1502 
   1503    switch (kind) {
   1504      case SlotsOrElementsKind::FixedSlots: {
   1505        base = nobj->fixedSlots();
   1506        end = NumUsedFixedSlots(nobj);
   1507        break;
   1508      }
   1509 
   1510      case SlotsOrElementsKind::DynamicSlots: {
   1511        base = nobj->slots_;
   1512        end = NumUsedDynamicSlots(nobj);
   1513        break;
   1514      }
   1515 
   1516      case SlotsOrElementsKind::Elements: {
   1517        base = nobj->getDenseElements();
   1518        end = nobj->getDenseInitializedLength();
   1519        break;
   1520      }
   1521 
   1522      case SlotsOrElementsKind::Unused: {
   1523        MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unused SlotsOrElementsKind");
   1524      }
   1525    }
   1526 
   1527    goto scan_value_range;
   1528  }
   1529 
   1530  budget.step();
   1531  if (budget.isOverBudget()) {
   1532    return false;
   1533  }
   1534 
   1535  {
   1536    MarkStack::TaggedPtr ptr = stack.popPtr();
   1537    switch (ptr.tag()) {
   1538      case MarkStack::ObjectTag: {
   1539        obj = ptr.as<JSObject>();
   1540        AssertShouldMarkInZone(this, obj);
   1541        goto scan_obj;
   1542      }
   1543 
   1544      case MarkStack::SymbolTag: {
   1545        auto* symbol = ptr.as<JS::Symbol>();
   1546        if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
   1547          markImplicitEdges(symbol);
   1548        }
   1549        AutoSetTracingSource asts(tracer(), symbol);
   1550        symbol->traceChildren(tracer());
   1551        return true;
   1552      }
   1553 
   1554      case MarkStack::JitCodeTag: {
   1555        auto* code = ptr.as<jit::JitCode>();
   1556        AutoSetTracingSource asts(tracer(), code);
   1557        code->traceChildren(tracer());
   1558        return true;
   1559      }
   1560 
   1561      case MarkStack::ScriptTag: {
   1562        auto* script = ptr.as<BaseScript>();
   1563        if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
   1564          markImplicitEdges(script);
   1565        }
   1566        AutoSetTracingSource asts(tracer(), script);
   1567        script->traceChildren(tracer());
   1568        return true;
   1569      }
   1570 
   1571      default:
   1572        MOZ_CRASH("Invalid tag in mark stack");
   1573    }
   1574  }
   1575 
   1576  return true;
   1577 
   1578 scan_value_range:
   1579  while (index < end) {
   1580    MOZ_ASSERT(stack.capacity() >= stack.position() + ValueRangeWords);
   1581 
   1582    budget.step();
   1583    if (budget.isOverBudget()) {
   1584      pushValueRange(obj, kind, index, end);
   1585      return false;
   1586    }
   1587 
   1588    const Value& v = base[index];
   1589    index++;
   1590 
   1591    if (!v.isGCThing()) {
   1592      continue;
   1593    }
   1594 
   1595    if (v.isString()) {
   1596      markAndTraverseEdge<opts>(obj, v.toString());
   1597    } else if (v.isObject()) {
   1598      JSObject* obj2 = &v.toObject();
   1599 #ifdef DEBUG
   1600      if (!obj2) {
   1601        fprintf(stderr,
   1602                "processMarkStackTop found ObjectValue(nullptr) "
   1603                "at %zu Values from end of range in object:\n",
   1604                size_t(end - (index - 1)));
   1605        obj->dump();
   1606      }
   1607 #endif
   1608      CheckForCompartmentMismatch(obj, obj2);
   1609      if (mark<opts>(obj2)) {
   1610        // Save the rest of this value range for later and start scanning obj2's
   1611        // children.
   1612        pushValueRange(obj, kind, index, end);
   1613        obj = obj2;
   1614        goto scan_obj;
   1615      }
   1616    } else if (v.isSymbol()) {
   1617      if (!markAndTraverseSymbol<opts>(obj, v.toSymbol())) {
   1618        return true;
   1619      }
   1620    } else if (v.isBigInt()) {
   1621      markAndTraverseEdge<opts>(obj, v.toBigInt());
   1622    } else {
   1623      MOZ_ASSERT(v.isPrivateGCThing());
   1624      if (!markAndTraversePrivateGCThing<opts>(obj, v.toGCThing())) {
   1625        return true;
   1626      }
   1627    }
   1628  }
   1629 
   1630  return true;
   1631 
   1632 scan_obj: {
   1633  AssertShouldMarkInZone(this, obj);
   1634 
   1635  if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
   1636    markImplicitEdges(obj);
   1637  }
   1638  markAndTraverseEdge<opts>(obj, obj->shape());
   1639 
   1640  CallTraceHook(tracer(), obj);
   1641 
   1642  if (!obj->is<NativeObject>()) {
   1643    return true;
   1644  }
   1645 
   1646  NativeObject* nobj = &obj->as<NativeObject>();
   1647 
   1648  // Ensure stack headroom for three ranges (fixed slots, dynamic slots and
   1649  // elements).
   1650  if (MOZ_UNLIKELY(!stack.ensureSpace(ValueRangeWords * 3))) {
   1651    delayMarkingChildrenOnOOM(obj);
   1652    return true;
   1653  }
   1654 
   1655  unsigned nslots = nobj->slotSpan();
   1656 
   1657  if (nobj->hasDynamicSlots()) {
   1658    ObjectSlots* slots = nobj->getSlotsHeader();
   1659    MarkTenuredBuffer(nobj->zone(), slots);
   1660  }
   1661 
   1662  if (nobj->hasDynamicElements()) {
   1663    void* elements = nobj->getUnshiftedElementsHeader();
   1664    MarkTenuredBuffer(nobj->zone(), elements);
   1665  }
   1666 
   1667  if (!nobj->hasEmptyElements()) {
   1668    base = nobj->getDenseElements();
   1669    kind = SlotsOrElementsKind::Elements;
   1670    index = 0;
   1671    end = nobj->getDenseInitializedLength();
   1672 
   1673    if (!nslots) {
   1674      // No slots at all. Scan elements immediately.
   1675      goto scan_value_range;
   1676    }
   1677 
   1678    pushValueRange(nobj, kind, index, end);
   1679  }
   1680 
   1681  unsigned nfixed = nobj->numFixedSlots();
   1682  base = nobj->fixedSlots();
   1683  kind = SlotsOrElementsKind::FixedSlots;
   1684  index = 0;
   1685 
   1686  if (nslots > nfixed) {
   1687    // Push dynamic slots for later scan.
   1688    pushValueRange(nobj, SlotsOrElementsKind::DynamicSlots, 0, nslots - nfixed);
   1689    end = nfixed;
   1690  } else {
   1691    end = nslots;
   1692  }
   1693 
   1694  // Scan any fixed slots.
   1695  goto scan_value_range;
   1696 }
   1697 }
   1698 
   1699 /*** Mark Stack *************************************************************/
   1700 
// Compile-time layout checks: the mark stack stores entries as raw words, so
// both entry types must pack exactly into whole machine words.
static_assert(sizeof(MarkStack::TaggedPtr) == sizeof(uintptr_t),
              "A TaggedPtr should be the same size as a pointer");
static_assert((sizeof(MarkStack::SlotsOrElementsRange) % sizeof(uintptr_t)) ==
                  0,
              "SlotsOrElementsRange size should be a multiple of "
              "the pointer size");
   1707 
// Map a GC thing type to the MarkStack tag used when pushing a pointer of
// that type. Only the types specialized below can be pushed directly.
template <typename T>
struct MapTypeToMarkStackTag {};
template <>
struct MapTypeToMarkStackTag<JSObject*> {
  static const auto value = MarkStack::ObjectTag;
};
template <>
struct MapTypeToMarkStackTag<JS::Symbol*> {
  static const auto value = MarkStack::SymbolTag;
};
template <>
struct MapTypeToMarkStackTag<jit::JitCode*> {
  static const auto value = MarkStack::JitCodeTag;
};
template <>
struct MapTypeToMarkStackTag<BaseScript*> {
  static const auto value = MarkStack::ScriptTag;
};
   1726 
   1727 static inline bool TagIsRangeTag(MarkStack::Tag tag) {
   1728  return tag == MarkStack::SlotsOrElementsRangeTag;
   1729 }
   1730 
// Construct a tagged pointer from a tag and a cell pointer; the tag occupies
// the low bits of the (suitably aligned) pointer.
inline MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
    : bits(tag | uintptr_t(ptr)) {
  assertValid();
}

/* static */
// Reconstitute a TaggedPtr from a raw word previously stored on the stack.
inline MarkStack::TaggedPtr MarkStack::TaggedPtr::fromBits(uintptr_t bits) {
  return TaggedPtr(bits);
}

inline MarkStack::TaggedPtr::TaggedPtr(uintptr_t bits) : bits(bits) {
  assertValid();
}

// Raw word representation, as stored on the mark stack.
inline uintptr_t MarkStack::TaggedPtr::asBits() const { return bits; }
   1746 
inline MarkStack::Tag MarkStack::TaggedPtr::tag() const {
  auto tag = Tag(bits & TagMask);
  MOZ_ASSERT(tag <= LastTag);
  return tag;
}

// The cell pointer with the tag bits masked off.
inline Cell* MarkStack::TaggedPtr::ptr() const {
  return reinterpret_cast<Cell*>(bits & ~TagMask);
}

inline void MarkStack::TaggedPtr::assertValid() const {
  (void)tag();  // tag() itself asserts the tag is within range.
  MOZ_ASSERT(IsCellPointerValid(ptr()));
}
   1761 
// Checked downcast to the concrete cell type implied by the tag.
template <typename T>
inline T* MarkStack::TaggedPtr::as() const {
  MOZ_ASSERT(tag() == MapTypeToMarkStackTag<T*>::value);
  MOZ_ASSERT(ptr()->isTenured());
  MOZ_ASSERT(ptr()->is<T>());
  return static_cast<T*>(ptr());
}

// The object that owns the slots/elements described by a range entry.
inline JSObject* MarkStack::TaggedPtr::asRangeObject() const {
  MOZ_ASSERT(TagIsRangeTag(tag()));
  MOZ_ASSERT(ptr()->isTenured());
  return ptr()->as<JSObject>();
}

// A rope pushed with the dedicated TempRopeTag.
inline JSRope* MarkStack::TaggedPtr::asTempRope() const {
  MOZ_ASSERT(tag() == TempRopeTag);
  return &ptr()->as<JSString>()->asRope();
}
   1780 
// A SlotsOrElementsRange is a two-word mark stack entry describing a
// partially-scanned range of an object's slots or elements: the first word
// packs the start index with the kind, the second is a TaggedPtr to the
// owning object.
inline MarkStack::SlotsOrElementsRange::SlotsOrElementsRange(
    SlotsOrElementsKind kindArg, JSObject* obj, size_t startArg)
    : startAndKind_((startArg << StartShift) | size_t(kindArg)),
      ptr_(SlotsOrElementsRangeTag, obj) {
  assertValid();
  MOZ_ASSERT(kind() == kindArg);
  MOZ_ASSERT(start() == startArg);
}

/* static */
// Reconstitute a range entry from its two raw stack words.
inline MarkStack::SlotsOrElementsRange
MarkStack::SlotsOrElementsRange::fromBits(uintptr_t startAndKind,
                                          uintptr_t ptr) {
  return SlotsOrElementsRange(startAndKind, ptr);
}

inline MarkStack::SlotsOrElementsRange::SlotsOrElementsRange(
    uintptr_t startAndKind, uintptr_t ptr)
    : startAndKind_(startAndKind), ptr_(TaggedPtr::fromBits(ptr)) {
  assertValid();
}

inline void MarkStack::SlotsOrElementsRange::assertValid() const {
  ptr_.assertValid();
  MOZ_ASSERT(TagIsRangeTag(ptr_.tag()));
}
   1807 
inline SlotsOrElementsKind MarkStack::SlotsOrElementsRange::kind() const {
  return SlotsOrElementsKind(startAndKind_ & KindMask);
}

// Index of the first not-yet-scanned slot or element in the range.
inline size_t MarkStack::SlotsOrElementsRange::start() const {
  return startAndKind_ >> StartShift;
}

inline void MarkStack::SlotsOrElementsRange::setStart(size_t newStart) {
  // Preserve the kind bits while replacing the start index.
  startAndKind_ = (newStart << StartShift) | uintptr_t(kind());
  MOZ_ASSERT(start() == newStart);
}
   1820 
inline void MarkStack::SlotsOrElementsRange::setEmpty() {
  // Replace this SlotsOrElementsRange with something that's valid for marking
  // but doesn't involve accessing this range, which is now invalid. This
  // replaces the two-word range with two single-word entries for the owning
  // object.
  TaggedPtr entry(ObjectTag, ptr().asRangeObject());
  ptr_ = entry;
  startAndKind_ = entry.asBits();  // Both words now hold the same entry.
}
   1830 
// Tagged pointer to the owning object (tagged SlotsOrElementsRangeTag).
inline MarkStack::TaggedPtr MarkStack::SlotsOrElementsRange::ptr() const {
  return ptr_;
}

// Raw words for storage on the mark stack: asBits0 is the start/kind word,
// asBits1 is the tagged object pointer.
inline uintptr_t MarkStack::SlotsOrElementsRange::asBits0() const {
  return startAndKind_;
}

inline uintptr_t MarkStack::SlotsOrElementsRange::asBits1() const {
  return ptr_.asBits();
}
   1842 
MarkStack::MarkStack() { MOZ_ASSERT(isEmpty()); }

// The stack must be drained before destruction; the backing storage is freed
// here.
MarkStack::~MarkStack() {
  MOZ_ASSERT(isEmpty());
  clearAndFreeStack();
}
   1849 
// Exchange the entire contents (storage, capacity and position) of two
// stacks. Used by GCMarker::setMarkColor to switch between the stacks for
// each mark color.
void MarkStack::swap(MarkStack& other) {
  std::swap(stack_, other.stack_);
  std::swap(capacity_, other.capacity_);
  std::swap(topIndex_, other.topIndex_);
#ifdef JS_GC_ZEAL
  std::swap(maxCapacity_, other.maxCapacity_);
#endif
#ifdef DEBUG
  std::swap(elementsRangesAreValid, other.elementsRangesAreValid);
#endif
}
   1861 
bool MarkStack::init() { return resetStackCapacity(); }

// (Re)allocate the stack at its initial base capacity, possibly clamped by
// the JS_GC_ZEAL maximum. The stack must be empty.
bool MarkStack::resetStackCapacity() {
  MOZ_ASSERT(isEmpty());

  size_t capacity = MARK_STACK_BASE_CAPACITY;

#ifdef JS_GC_ZEAL
  capacity = std::min(capacity, maxCapacity_.ref());
#endif

  return resize(capacity);
}
   1875 
#ifdef JS_GC_ZEAL
// Testing hook: set the stack's maximum capacity, shrinking the current
// allocation if it already exceeds the new limit.
void MarkStack::setMaxCapacity(size_t maxCapacity) {
  MOZ_ASSERT(maxCapacity != 0);
  MOZ_ASSERT(isEmpty());

  maxCapacity_ = maxCapacity;
  if (capacity() > maxCapacity_) {
    // If the realloc fails, just keep using the existing stack; it's
    // not ideal but better than failing.
    (void)resize(maxCapacity_);
  }
}
#endif
   1889 
MOZ_ALWAYS_INLINE bool MarkStack::indexIsEntryBase(size_t index) const {
  // The mark stack holds both TaggedPtr and SlotsOrElementsRange entries, which
  // are one or two words long respectively. Determine whether |index| points to
  // the base of an entry (i.e. the lowest word in memory).
  //
  // The possible cases are that |index| points to:
  //  1. a single word TaggedPtr entry => true
  //  2. the startAndKind_ word of SlotsOrElementsRange => true
  //     (startAndKind_ is a uintptr_t tagged with SlotsOrElementsKind)
  //  3. the ptr_ word of SlotsOrElementsRange (itself a TaggedPtr) => false
  //
  // To check for case 3, interpret the word as a TaggedPtr: if it is tagged as
  // a SlotsOrElementsRange tagged pointer then we are inside such a range and
  // |index| does not point to the base of an entry. This requires that no
  // startAndKind_ word can be interpreted as such, which is arranged by making
  // SlotsOrElementsRangeTag zero and all SlotsOrElementsKind tags non-zero.

  MOZ_ASSERT(index < capacity_);
  // A word whose tag bits are SlotsOrElementsRangeTag is a range's ptr_ word.
  return (stack_[index] & TagMask) != SlotsOrElementsRangeTag;
}
   1910 
/* static */
size_t MarkStack::moveWork(GCMarker* marker, MarkStack& dst, MarkStack& src,
                           bool allowDistribute) {
  // Move some work from |src| to |dst|. Assumes |dst| is empty.
  //
  // When this method runs during parallel marking, we are on the thread that
  // owns |src|, and the thread that owns |dst| is blocked waiting on the
  // ParallelMarkTask::resumed condition variable.

  MOZ_ASSERT(dst.isEmpty());
  MOZ_ASSERT(src.elementsRangesAreValid == dst.elementsRangesAreValid);

  // Limit the size of moves to stop threads with work spending too much time
  // donating.
  static const size_t MaxWordsToMove = 4096;

  size_t totalWords = src.position();
  size_t wordsToMove = std::min(totalWords / 2, MaxWordsToMove);

  // Mark stack entries do not represent uniform amounts of marking work (they
  // are either single GC things or arbitrarily large arrays) and when the mark
  // stack is small the situation often arises where one thread repeatedly takes
  // what is in effect a small amount of marking work while leaving the other
  // thread with a whole lot more. To split the work up more effectively we
  // randomly distribute stack entries for small stacks.
  //
  // This works by randomly choosing one of every pair of entries in |src| and
  // moving it to |dst| (rather than moving half of the stack as a contiguous
  // region).
  //
  // This has the effect of reducing the number of donations between threads. It
  // does not decrease average marking time but it does decrease variance of
  // marking time.
  static constexpr size_t MaxWordsToDistribute = 30;
  if (allowDistribute && totalWords <= MaxWordsToDistribute) {
    if (!dst.ensureSpace(totalWords)) {
      return 0;
    }

    // Logically empty |src| up front; its entries are re-pushed onto either
    // stack below.
    src.topIndex_ = 0;

    // We will use bits from a single 64-bit random number.
    static_assert(HowMany(MaxWordsToDistribute, 2) <= 64);
    uint64_t randomBits = marker->random.ref().next();
    DebugOnly<size_t> randomBitCount = 64;

    size_t i = 0;    // Entry index.
    size_t pos = 0;  // Source stack position.
    uintptr_t* data = src.stack_;
    while (pos < totalWords) {
      MOZ_ASSERT(src.indexIsEntryBase(pos));

      // Randomly choose which stack to copy the entry to, with each half of
      // each pair of entries moving to different stacks.
      MOZ_ASSERT(randomBitCount != 0);
      bool whichStack = (randomBits & 1) ^ (i & 1);
      randomBits <<= i & 1;  // Consume one random bit per pair of entries.
      randomBitCount -= i & 1;

      MarkStack& stack = whichStack ? dst : src;

      // A two-word range entry has its ptr_ word (tagged with the range tag)
      // above its base word.
      bool isRange =
          pos < totalWords - 1 && TagIsRangeTag(Tag(data[pos + 1] & TagMask));
      if (isRange) {
        stack.infalliblePush(
            SlotsOrElementsRange::fromBits(data[pos], data[pos + 1]));
        pos += ValueRangeWords;
      } else {
        stack.infalliblePush(TaggedPtr::fromBits(data[pos]));
        pos++;
      }

      i++;
    }

    return totalWords;
  }

  size_t targetPos = src.position() - wordsToMove;

  // Adjust the target position in case it points to the middle of a two word
  // entry.
  if (!src.indexIsEntryBase(targetPos)) {
    targetPos--;
    wordsToMove++;
  }
  MOZ_ASSERT(src.indexIsEntryBase(targetPos));
  MOZ_ASSERT(targetPos < src.position());
  MOZ_ASSERT(targetPos > 0);
  MOZ_ASSERT(wordsToMove == src.position() - targetPos);

  if (!dst.ensureSpace(wordsToMove)) {
    return 0;
  }

  // TODO: This doesn't have good cache behaviour when moving work between
  // threads. It might be better if the original thread ended up with the top
  // part of the stack, in other words if this method stole from the bottom of
  // the stack rather than the top.

  mozilla::PodCopy(dst.end(), src.stack_ + targetPos, wordsToMove);
  dst.topIndex_ += wordsToMove;
  dst.peekPtr().assertValid();

  src.topIndex_ = targetPos;
#ifdef DEBUG
  src.poisonUnused();
#endif
  src.peekPtr().assertValid();
  return wordsToMove;
}
   2022 
void MarkStack::clearAndResetCapacity() {
  // Fall back to the smaller initial capacity so we don't hold on to excess
  // memory between GCs.
  topIndex_ = 0;
  (void)resetStackCapacity();
}

// Also called from the destructor.
void MarkStack::clearAndFreeStack() {
  // Free all stack memory so we don't hold on to excess memory between GCs.
  js_free(stack_);
  stack_ = nullptr;
  capacity_ = 0;
  topIndex_ = 0;
}
   2037 
// Push a single GC thing, choosing the tag from the pointer's static type.
template <typename T>
inline bool MarkStack::push(T* ptr) {
  return push(TaggedPtr(MapTypeToMarkStackTag<T*>::value, ptr));
}

// Push a rope with the dedicated temporary-rope tag.
inline bool MarkStack::pushTempRope(JSRope* rope) {
  return push(TaggedPtr(TempRopeTag, rope));
}
   2046 
   2047 inline bool MarkStack::push(const TaggedPtr& ptr) {
   2048  if (!ensureSpace(1)) {
   2049    return false;
   2050  }
   2051 
   2052  infalliblePush(ptr);
   2053  return true;
   2054 }
   2055 
// Append a single-word entry. Capacity must have been ensured beforehand.
inline void MarkStack::infalliblePush(const TaggedPtr& ptr) {
  MOZ_ASSERT(position() + 1 <= capacity());
  *end() = ptr.asBits();
  topIndex_++;
}

inline void MarkStack::infalliblePush(JSObject* obj, SlotsOrElementsKind kind,
                                      size_t start) {
  SlotsOrElementsRange range(kind, obj, start);
  infalliblePush(range);
}

// Append a two-word range entry. Capacity must have been ensured beforehand.
inline void MarkStack::infalliblePush(const SlotsOrElementsRange& range) {
  MOZ_ASSERT(position() + ValueRangeWords <= capacity());

  range.assertValid();
  end()[0] = range.asBits0();
  end()[1] = range.asBits1();
  topIndex_ += ValueRangeWords;
  MOZ_ASSERT(TagIsRangeTag(peekTag()));
}
   2077 
// Read the topmost word as a tagged pointer without popping it.
inline MarkStack::TaggedPtr MarkStack::peekPtr() const {
  MOZ_ASSERT(!isEmpty());
  return TaggedPtr::fromBits(at(topIndex_ - 1));
}

inline MarkStack::Tag MarkStack::peekTag() const {
  MOZ_ASSERT(!isEmpty());
  return peekPtr().tag();
}
   2087 
// Pop a single-word entry. The top of the stack must not be a range entry.
inline MarkStack::TaggedPtr MarkStack::popPtr() {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(!TagIsRangeTag(peekTag()));
  peekPtr().assertValid();
  topIndex_--;
  return TaggedPtr::fromBits(*end());
}

// Pop a two-word SlotsOrElementsRange entry.
inline MarkStack::SlotsOrElementsRange MarkStack::popSlotsOrElementsRange() {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(TagIsRangeTag(peekTag()));
  MOZ_ASSERT(position() >= ValueRangeWords);

  topIndex_ -= ValueRangeWords;
  return SlotsOrElementsRange::fromBits(end()[0], end()[1]);
}
   2104 
   2105 inline bool MarkStack::ensureSpace(size_t count) {
   2106  if (MOZ_LIKELY((topIndex_ + count) <= capacity())) {
   2107    return true;
   2108  }
   2109 
   2110  return enlarge(count);
   2111 }
   2112 
   2113 MOZ_NEVER_INLINE bool MarkStack::enlarge(size_t count) {
   2114  size_t required = capacity() + count;
   2115  size_t newCapacity = mozilla::RoundUpPow2(required);
   2116 
   2117 #ifdef JS_GC_ZEAL
   2118  newCapacity = std::min(newCapacity, maxCapacity_.ref());
   2119  if (newCapacity < required) {
   2120    return false;
   2121  }
   2122 #endif
   2123 
   2124  return resize(newCapacity);
   2125 }
   2126 
   2127 bool MarkStack::resize(size_t newCapacity) {
   2128  MOZ_ASSERT(newCapacity != 0);
   2129  MOZ_ASSERT(newCapacity >= position());
   2130 
   2131  auto poisonOnExit = mozilla::MakeScopeExit([this]() { poisonUnused(); });
   2132 
   2133  if (newCapacity == capacity_) {
   2134    return true;
   2135  }
   2136 
   2137  uintptr_t* newStack =
   2138      js_pod_realloc<uintptr_t>(stack_, capacity_, newCapacity);
   2139  if (!newStack) {
   2140    return false;
   2141  }
   2142 
   2143  stack_ = newStack;
   2144  capacity_ = newCapacity;
   2145  return true;
   2146 }
   2147 
inline void MarkStack::poisonUnused() {
  static_assert((JS_FRESH_MARK_STACK_PATTERN & TagMask) > LastTag,
                "The mark stack poison pattern must not look like a valid "
                "tagged pointer");

  MOZ_ASSERT(topIndex_ <= capacity_);
  // Poison everything above the current top so stale entries are detectable.
  AlwaysPoison(stack_ + topIndex_, JS_FRESH_MARK_STACK_PATTERN,
               capacity_ - topIndex_, MemCheckKind::MakeUndefined);
}
   2157 
size_t MarkStack::sizeOfExcludingThis(
    mozilla::MallocSizeOf mallocSizeOf) const {
  // Reports the logical allocation size; |mallocSizeOf| is unused here.
  return capacity_ * sizeof(uintptr_t);
}
   2162 
// Iterates the mark stack from the top (most recently pushed entry)
// downwards.
MarkStackIter::MarkStackIter(MarkStack& stack)
    : stack_(stack), pos_(stack.position()) {}

inline size_t MarkStackIter::position() const { return pos_; }

inline bool MarkStackIter::done() const { return position() == 0; }
   2169 
   2170 inline void MarkStackIter::next() {
   2171  if (isSlotsOrElementsRange()) {
   2172    MOZ_ASSERT(position() >= ValueRangeWords);
   2173    pos_ -= ValueRangeWords;
   2174    return;
   2175  }
   2176 
   2177  MOZ_ASSERT(!done());
   2178  pos_--;
   2179 }
   2180 
// Whether the current entry is a two-word SlotsOrElementsRange.
inline bool MarkStackIter::isSlotsOrElementsRange() const {
  return TagIsRangeTag(peekTag());
}

inline MarkStack::Tag MarkStackIter::peekTag() const { return peekPtr().tag(); }

inline MarkStack::TaggedPtr MarkStackIter::peekPtr() const {
  MOZ_ASSERT(!done());
  return MarkStack::TaggedPtr::fromBits(stack_.at(pos_ - 1));
}
   2191 
// Read the two-word range entry at the current position.
inline MarkStack::SlotsOrElementsRange MarkStackIter::slotsOrElementsRange()
    const {
  MOZ_ASSERT(TagIsRangeTag(peekTag()));
  MOZ_ASSERT(position() >= ValueRangeWords);

  uintptr_t* ptr = stack_.ptr(pos_ - ValueRangeWords);
  return MarkStack::SlotsOrElementsRange::fromBits(ptr[0], ptr[1]);
}

// Overwrite the range entry at the current position in place.
inline void MarkStackIter::setSlotsOrElementsRange(
    const MarkStack::SlotsOrElementsRange& range) {
  MOZ_ASSERT(isSlotsOrElementsRange());

  uintptr_t* ptr = stack_.ptr(pos_ - ValueRangeWords);
  ptr[0] = range.asBits0();
  ptr[1] = range.asBits1();
}
   2209 
   2210 /*** GCMarker ***************************************************************/
   2211 
   2212 /*
   2213 * WeakMapTraceAction::Expand: the GC is recomputing the liveness of WeakMap
   2214 * entries by expanding each live WeakMap into its constituent key->value edges,
   2215 * a table of which will be consulted in a later phase whenever marking a
   2216 * potential key.
   2217 */
// Markers are created inactive; start() transitions to RegularMarking.
GCMarker::GCMarker(JSRuntime* rt)
    : tracer_(mozilla::VariantType<MarkingTracer>(), rt, this),
      runtime_(rt),
      haveSwappedStacks(false),
      markColor_(MarkColor::Black),
      state(NotActive),
      incrementalWeakMapMarkingEnabled(
          TuningDefaults::IncrementalWeakMapMarkingEnabled),
      random(js::GenerateRandomSeed(), js::GenerateRandomSeed())
#ifdef DEBUG
      ,
      checkAtomMarking(true),
      strictCompartmentChecking(false)
#endif
{
}
   2234 
bool GCMarker::init() { return stack.init(); }

// Begin marking: move from the inactive state to regular black marking.
void GCMarker::start() {
  MOZ_ASSERT(state == NotActive);
  MOZ_ASSERT(stack.isEmpty());
  state = RegularMarking;
  haveAllImplicitEdges = true;
  setMarkColor(MarkColor::Black);
}
   2244 
// Discard the ephemeron edge tables of all zones being collected.
static void ClearEphemeronEdges(JSRuntime* rt) {
  for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
    zone->gcEphemeronEdges().clearAndCompact();
  }
}
   2250 
// Finish marking and tear down auxiliary state. All marking work must be
// complete: stacks drained and back on the black stack.
void GCMarker::stop() {
  MOZ_ASSERT(isDrained());
  MOZ_ASSERT(markColor() == MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  if (state == NotActive) {
    return;
  }
  state = NotActive;

  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  unmarkGrayStack.clearAndFree();
}
   2265 
// Abandon any marking in progress: drop all stack entries and return to the
// inactive, black-marking state.
void GCMarker::reset() {
  state = NotActive;

  stack.clearAndResetCapacity();
  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  MOZ_ASSERT(isDrained());

  setMarkColor(MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  unmarkGrayStack.clearAndFree();
}
   2279 
void GCMarker::setMarkColor(gc::MarkColor newColor) {
  if (markColor_ == newColor) {
    return;
  }

  // We don't support gray marking while there is black marking work to do.
  MOZ_ASSERT(!hasBlackEntries());

  markColor_ = newColor;

  // Switch stacks. We only need to do this if there are any stack entries (as
  // empty stacks are interchangeable) or to switch back to the original stack.
  if (!isDrained() || haveSwappedStacks) {
    stack.swap(otherStack);
    haveSwappedStacks = !haveSwappedStacks;
  }
}
   2297 
   2298 bool GCMarker::hasEntries(MarkColor color) const {
   2299  const MarkStack& stackForColor = color == markColor() ? stack : otherStack;
   2300  return stackForColor.hasEntries();
   2301 }
   2302 
// Push a single GC thing, falling back to the delayed marking list if the
// mark stack cannot be grown.
template <typename T>
inline void GCMarker::pushTaggedPtr(T* ptr) {
  checkZone(ptr);
  if (!stack.push(ptr)) {
    delayMarkingChildrenOnOOM(ptr);
  }
}
   2310 
   2311 inline void GCMarker::pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
   2312                                     size_t start, size_t end) {
   2313  checkZone(obj);
   2314  MOZ_ASSERT(obj->is<NativeObject>());
   2315  MOZ_ASSERT(start <= end);
   2316 
   2317  if (start != end) {
   2318    stack.infalliblePush(obj, kind, start);
   2319  }
   2320 }
   2321 
// Toggle between root-only marking and regular marking, installing the
// matching tracer for the new state.
void GCMarker::setRootMarkingMode(bool newState) {
  if (newState) {
    setMarkingStateAndTracer<RootMarkingTracer>(RegularMarking, RootMarking);
  } else {
    setMarkingStateAndTracer<MarkingTracer>(RootMarking, RegularMarking);
  }
}
   2329 
// Switch between the regular and parallel marking states, installing the
// corresponding tracer.
void GCMarker::enterParallelMarkingMode() {
  setMarkingStateAndTracer<ParallelMarkingTracer>(RegularMarking,
                                                  ParallelMarking);
}

void GCMarker::leaveParallelMarkingMode() {
  setMarkingStateAndTracer<MarkingTracer>(ParallelMarking, RegularMarking);
}
   2338 
   2339 // It may not be worth the overhead of donating very few mark stack entries. For
   2340 // some (non-parallelizable) workloads this could lead to constantly
   2341 // interrupting marking work and makes parallel marking slower than single
   2342 // threaded.
   2343 //
   2344 // Conversely, we do want to try splitting up work occasionally or we may fail
   2345 // to parallelize workloads that result in few mark stack entries.
   2346 //
   2347 // Therefore we try hard to split work up at the start of a slice (calling
   2348 // canDonateWork) but when a slice is running we only donate if there is enough
   2349 // work to make it worthwhile (calling shouldDonateWork).
bool GCMarker::canDonateWork() const {
  // Donate only if there is more than one range entry's worth of words.
  return stack.position() > ValueRangeWords;
}
bool GCMarker::shouldDonateWork() const {
  // Higher threshold used while a slice is running; see comment above.
  constexpr size_t MinWordCount = 12;
  static_assert(MinWordCount >= ValueRangeWords,
                "We must always leave at least one stack entry.");

  return stack.position() > MinWordCount;
}
   2360 
// Transition from marking state |prev| to |next| and install the tracer type
// corresponding to the new state.
template <typename Tracer>
void GCMarker::setMarkingStateAndTracer(MarkingState prev, MarkingState next) {
  MOZ_ASSERT(state == prev);
  state = next;
  tracer_.emplace<Tracer>(runtime(), this);
}
   2367 
// Try to enter weak marking mode. Returns false when this is not possible
// because not all implicit edges were recorded (see abortLinearWeakMarking).
bool GCMarker::enterWeakMarkingMode() {
  MOZ_ASSERT(tracer()->weakMapAction() == JS::WeakMapTraceAction::Expand);
  if (!haveAllImplicitEdges) {
    return false;
  }

  // During weak marking mode, we maintain a table mapping weak keys to
  // entries in known-live weakmaps. Initialize it with the keys of marked
  // weakmaps -- or more precisely, the keys of marked weakmaps that are
  // mapped to not yet live values. (Once bug 1167452 implements incremental
  // weakmap marking, this initialization step will become unnecessary, as
  // the table will already hold all such keys.)

  // Set state before doing anything else, so any new key that is marked
  // during the following gcEphemeronEdges scan will itself be looked up in
  // gcEphemeronEdges and marked according to ephemeron rules.
  setMarkingStateAndTracer<WeakMarkingTracer>(RegularMarking, WeakMarking);

  return true;
}
   2388 
// Scan this zone's ephemeron edge table and mark the values of entries whose
// keys are already marked. Returns NotFinished if |budget| is exhausted before
// the scan completes; the caller can resume later.
IncrementalProgress JS::Zone::enterWeakMarkingMode(GCMarker* marker,
                                                   SliceBudget& budget) {
  MOZ_ASSERT(marker->isWeakMarking());

  if (!marker->incrementalWeakMapMarkingEnabled) {
    // Incremental weakmap marking is disabled: mark the entries of every
    // already-marked weakmap in this zone in one pass, ignoring the budget.
    for (WeakMapBase* m : gcWeakMapList()) {
      if (IsMarked(m->mapColor())) {
        (void)m->markEntries(marker);
      }
    }
    return IncrementalProgress::Finished;
  }

  // gcEphemeronEdges contains the keys from all weakmaps marked so far, or at
  // least the keys that might still need to be marked through. Scan through
  // gcEphemeronEdges and mark all values whose keys are marked. This marking
  // may recursively mark through other weakmap entries (immediately since we
  // are now in WeakMarking mode). The end result is a consistent state where
  // all values are marked if both their map and key are marked -- though note
  // that we may later leave weak marking mode, do some more marking, and then
  // enter back in.
  if (!isGCMarking()) {
    return IncrementalProgress::Finished;
  }

  for (auto r = gcEphemeronEdges().all(); !r.empty(); r.popFront()) {
    Cell* src = r.front().key();
    CellColor srcColor = gc::detail::GetEffectiveColor(marker, src);
    auto& edges = r.front().value();

    // Snapshot the edge count up front so the budget is charged for the work
    // as it was seen here.
    size_t numEdges = edges.length();
    if (IsMarked(srcColor) && edges.length() > 0) {
      marker->markEphemeronEdges(edges, AsMarkColor(srcColor));
    }
    // Charge one unit for visiting the key plus one per edge.
    budget.step(1 + numEdges);
    if (budget.isOverBudget()) {
      return NotFinished;
    }
  }

  return IncrementalProgress::Finished;
}
   2431 
   2432 void GCMarker::leaveWeakMarkingMode() {
   2433  if (state == RegularMarking) {
   2434    return;
   2435  }
   2436 
   2437  setMarkingStateAndTracer<MarkingTracer>(WeakMarking, RegularMarking);
   2438 
   2439  // The gcEphemeronEdges table is still populated and may be used during a
   2440  // future weak marking mode within this GC.
   2441 }
   2442 
// Abandon linear-time weak marking: clear the flag recording that all implicit
// (ephemeron) edges are known, and drop out of weak marking mode if we are in
// it.
void GCMarker::abortLinearWeakMarking() {
  haveAllImplicitEdges = false;
  if (state == WeakMarking) {
    leaveWeakMarkingMode();
  }
}
   2449 
// Out-of-line OOM path: record that |cell|'s children could not be pushed onto
// the mark stack and must be marked later via the delayed marking list.
MOZ_NEVER_INLINE void GCMarker::delayMarkingChildrenOnOOM(Cell* cell) {
  runtime()->gc.delayMarkingChildren(cell, markColor());
}
   2453 
   2454 bool GCRuntime::hasDelayedMarking() const {
   2455  bool result = delayedMarkingList;
   2456  MOZ_ASSERT(result == (markLaterArenas != 0));
   2457  return result;
   2458 }
   2459 
// Queue |cell|'s arena so that the children of its cells marked |color| are
// traced later, when the mark stack has room again.
void GCRuntime::delayMarkingChildren(Cell* cell, MarkColor color) {
  // Synchronize access to delayed marking state during parallel marking.
  LockGuard<Mutex> lock(delayedMarkingLock);

  // Push the arena onto the head of the delayed marking list if it isn't
  // already on it.
  Arena* arena = cell->asTenured().arena();
  if (!arena->onDelayedMarkingList()) {
    arena->setNextDelayedMarkingArena(delayedMarkingList);
    delayedMarkingList = arena;
#ifdef DEBUG
    markLaterArenas++;
#endif
  }

  // Flag the arena as having delayed work for |color|, and note that new work
  // arrived so processDelayedMarkingList re-scans the list.
  if (!arena->hasDelayedMarking(color)) {
    arena->setHasDelayedMarking(color, true);
    delayedMarkingWorkAdded = true;
  }
}
   2478 
   2479 void GCRuntime::markDelayedChildren(Arena* arena, MarkColor color) {
   2480  JSTracer* trc = marker().tracer();
   2481  JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
   2482  MarkColor colorToCheck =
   2483      TraceKindCanBeMarkedGray(kind) ? color : MarkColor::Black;
   2484 
   2485  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
   2486    if (cell->isMarked(colorToCheck)) {
   2487      ApplyGCThingTyped(cell, kind, [trc, this](auto t) {
   2488        t->traceChildren(trc);
   2489        marker().markImplicitEdges(t);
   2490      });
   2491    }
   2492  }
   2493 }
   2494 
   2495 /*
   2496 * Process arenas from |delayedMarkingList| by marking the unmarked children of
   2497 * marked cells of color |color|.
   2498 *
   2499 * This is called twice, first to mark gray children and then to mark black
   2500 * children.
   2501 */
   2502 void GCRuntime::processDelayedMarkingList(MarkColor color) {
   2503  // Marking delayed children may add more arenas to the list, including arenas
   2504  // we are currently processing or have previously processed. Handle this by
   2505  // clearing a flag on each arena before marking its children. This flag will
   2506  // be set again if the arena is re-added. Iterate the list until no new arenas
   2507  // were added.
   2508 
   2509  AutoSetMarkColor setColor(marker(), color);
   2510  AutoUpdateMarkStackRanges updateRanges(marker());
   2511 
   2512  do {
   2513    delayedMarkingWorkAdded = false;
   2514    for (Arena* arena = delayedMarkingList; arena;
   2515         arena = arena->getNextDelayedMarking()) {
   2516      if (arena->hasDelayedMarking(color)) {
   2517        arena->setHasDelayedMarking(color, false);
   2518        markDelayedChildren(arena, color);
   2519      }
   2520    }
   2521    while (marker().hasEntriesForCurrentColor()) {
   2522      SliceBudget budget = SliceBudget::unlimited();
   2523      MOZ_ALWAYS_TRUE(
   2524          marker().processMarkStackTop<NormalMarkingOptions>(budget));
   2525    }
   2526  } while (delayedMarkingWorkAdded);
   2527 
   2528  MOZ_ASSERT(marker().isDrained());
   2529 }
   2530 
   2531 void GCRuntime::markAllDelayedChildren(ShouldReportMarkTime reportTime) {
   2532  MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsPerformingGC());
   2533  MOZ_ASSERT(marker().isDrained());
   2534  MOZ_ASSERT(hasDelayedMarking());
   2535 
   2536  mozilla::Maybe<gcstats::AutoPhase> ap;
   2537  if (reportTime) {
   2538    ap.emplace(stats(), gcstats::PhaseKind::MARK_DELAYED);
   2539  }
   2540 
   2541  // We have a list of arenas containing marked cells with unmarked children
   2542  // where we ran out of stack space during marking. Both black and gray cells
   2543  // in these arenas may have unmarked children. Mark black children first.
   2544 
   2545  const MarkColor colors[] = {MarkColor::Black, MarkColor::Gray};
   2546  for (MarkColor color : colors) {
   2547    processDelayedMarkingList(color);
   2548    rebuildDelayedMarkingList();
   2549  }
   2550 
   2551  MOZ_ASSERT(!hasDelayedMarking());
   2552 }
   2553 
   2554 void GCRuntime::rebuildDelayedMarkingList() {
   2555  // Rebuild the delayed marking list, removing arenas which do not need further
   2556  // marking.
   2557 
   2558  Arena* listTail = nullptr;
   2559  forEachDelayedMarkingArena([&](Arena* arena) {
   2560    if (!arena->hasAnyDelayedMarking()) {
   2561      arena->clearDelayedMarkingState();
   2562 #ifdef DEBUG
   2563      MOZ_ASSERT(markLaterArenas);
   2564      markLaterArenas--;
   2565 #endif
   2566      return;
   2567    }
   2568 
   2569    appendToDelayedMarkingList(&listTail, arena);
   2570  });
   2571  appendToDelayedMarkingList(&listTail, nullptr);
   2572 }
   2573 
// Drop all delayed marking state without processing it, clearing every arena's
// flags and emptying the list.
void GCRuntime::resetDelayedMarking() {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  forEachDelayedMarkingArena([&](Arena* arena) {
    MOZ_ASSERT(arena->onDelayedMarkingList());
    arena->clearDelayedMarkingState();
#ifdef DEBUG
    MOZ_ASSERT(markLaterArenas);
    markLaterArenas--;
#endif
  });
  delayedMarkingList = nullptr;
  MOZ_ASSERT(!markLaterArenas);
}
   2588 
   2589 inline void GCRuntime::appendToDelayedMarkingList(Arena** listTail,
   2590                                                  Arena* arena) {
   2591  if (*listTail) {
   2592    (*listTail)->updateNextDelayedMarkingArena(arena);
   2593  } else {
   2594    delayedMarkingList = arena;
   2595  }
   2596  *listTail = arena;
   2597 }
   2598 
   2599 template <typename F>
   2600 inline void GCRuntime::forEachDelayedMarkingArena(F&& f) {
   2601  Arena* arena = delayedMarkingList;
   2602  Arena* next;
   2603  while (arena) {
   2604    next = arena->getNextDelayedMarking();
   2605    f(arena);
   2606    arena = next;
   2607  }
   2608 }
   2609 
#ifdef DEBUG
// Debug check that a tenured |cell| belongs to a zone this marker may touch:
// one that is marking (or verifying pre-barriers) or the atoms zone.
void GCMarker::checkZone(Cell* cell) {
  MOZ_ASSERT(state != NotActive);
  if (cell->isTenured()) {
    Zone* zone = cell->asTenured().zone();
    MOZ_ASSERT(zone->isGCMarkingOrVerifyingPreBarriers() ||
               zone->isAtomsZone());
  }
}
#endif
   2620 
   2621 size_t GCMarker::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
   2622  return mallocSizeOf(this) + stack.sizeOfExcludingThis(mallocSizeOf) +
   2623         otherStack.sizeOfExcludingThis(mallocSizeOf);
   2624 }
   2625 
   2626 /*** IsMarked / IsAboutToBeFinalized ****************************************/
   2627 
// Debug-only validation that |thing| is a legal argument for the mark-state
// queries below: T must be one of the base cell layout types, and the current
// thread must be permitted to inspect the thing's zone.
template <typename T>
static inline void CheckIsMarkedThing(T* thing) {
#define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type, T> ||
  static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR) false,
                "Only the base cell layout types are allowed into "
                "marking/tracing internals");
#undef IS_SAME_TYPE_OR

#ifdef DEBUG
  MOZ_ASSERT(thing);

  // Allow any thread access to uncollected things.
  Zone* zone = thing->zoneFromAnyThread();
  if (thing->isPermanentAndMayBeShared()) {
    // Shared things are not collected and should always be marked, except
    // during shutdown when we've merged shared atoms back into the main atoms
    // zone.
    if (zone->wasGCStarted()) {
      MOZ_ASSERT(!zone->runtimeFromAnyThread()->gc.maybeSharedAtomsZone());
      return;
    }
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  // Allow the current thread access if it is sweeping or in sweep-marking, but
  // try to check the zone. Some threads have access to all zones when sweeping.
  JS::GCContext* gcx = TlsGCContext.get();
  MOZ_ASSERT(gcx->gcUse() != GCUse::Finalizing);
  if (gcx->gcUse() == GCUse::Sweeping || gcx->gcUse() == GCUse::Marking) {
    MOZ_ASSERT_IF(gcx->gcSweepZone(),
                  gcx->gcSweepZone() == zone || zone->isAtomsZone());
    return;
  }

  // Otherwise only allow access from the main thread or this zone's associated
  // thread.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()) ||
             CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));
#endif
}
   2670 
// Return whether |thing| is marked. Things in zones that are not currently
// marking are treated as (trivially) marked.
template <typename T>
bool js::gc::IsMarkedInternal(JSRuntime* rt, T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
  MOZ_ASSERT(rt->heapState() != JS::HeapState::MinorCollecting);
  MOZ_ASSERT(thing);
  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  // Permanent things are never marked by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();
#ifdef DEBUG
  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
  }
#endif

  return !zone->isGCMarking() || TenuredThingIsMarkedAny(thing);
}
   2694 
// Return whether |thing| is about to be finalized: its zone is sweeping and
// the thing is unmarked. Nursery things are never reported as dying here.
template <typename T>
bool js::gc::IsAboutToBeFinalizedInternal(T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
  MOZ_ASSERT(thing);
  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  if (!thing->isTenured()) {
    return false;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();
#ifdef DEBUG
  JSRuntime* rt = TlsGCContext.get()->runtimeFromAnyThread();
  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
  }
#endif

  return zone->isGCSweeping() && !TenuredThingIsMarkedAny(thing);
}
   2722 
   2723 template <typename T>
   2724 bool js::gc::IsAboutToBeFinalizedInternal(const T& thing) {
   2725  bool dying = false;
   2726  ApplyGCThingTyped(
   2727      thing, [&dying](auto t) { dying = IsAboutToBeFinalizedInternal(t); });
   2728  return dying;
   2729 }
   2730 
// Tracer used during sweeping; its onEdge clears edges to cells that did not
// survive (see below).
SweepingTracer::SweepingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Sweeping,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}
   2734 
// Clear *thingp if it points to an unmarked cell in a zone that is being swept
// (or the atoms zone). Nursery cells and cells owned by other runtimes are
// left untouched.
template <typename T>
inline void SweepingTracer::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;
  CheckIsMarkedThing(thing);

  // Nursery cells are not swept by this tracer.
  if (!thing->isTenured()) {
    return;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();
#ifdef DEBUG
  if (IsOwnedByOtherRuntime(runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
  }
#endif

  // It would be nice if we could assert that the zone of the tenured cell is in
  // the Sweeping state, but that isn't always true for:
  //  - atoms
  //  - the jitcode map
  //  - the mark queue
  if ((zone->isGCSweeping() || zone->isAtomsZone()) && !cell->isMarkedAny()) {
    *thingp = nullptr;
  }
}
   2763 
   2764 namespace js::gc {
   2765 
   2766 template <typename T>
   2767 JS_PUBLIC_API bool TraceWeakEdge(JSTracer* trc, JS::Heap<T>* thingp) {
   2768  return TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeAddress()),
   2769                           "JS::Heap edge");
   2770 }
   2771 
// Public slow path: report whether the edge at |thingp| points at a cell that
// is about to be finalized.
template <typename T>
JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp) {
  return IsAboutToBeFinalizedInternal(*ConvertToBase(thingp));
}
   2776 
// Instantiate a copy of the Tracing templates for each public GC type.
#define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type)            \
  template JS_PUBLIC_API bool TraceWeakEdge<type>(JSTracer * trc,   \
                                                  JS::Heap<type>*); \
  template JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow<type>(type*);
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)

// Instantiate the internal mark-state queries: IsMarkedInternal and
// IsAboutToBeFinalizedInternal for every trace kind (pointer form), plus
// IsAboutToBeFinalizedInternal for each tagged pointer type (const-ref form).
#define INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type) \
  template bool IsMarkedInternal(JSRuntime* rt, type thing);

#define INSTANTIATE_INTERNAL_IATBF_FUNCTION(type) \
  template bool IsAboutToBeFinalizedInternal(type thingp);

#define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, \
                                                              _3)           \
  INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type*)                            \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND)

#define INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER(type) \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(const type&)

JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER)

// The helper macros are local to this instantiation block.
#undef INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION
#undef INSTANTIATE_INTERNAL_IATBF_FUNCTION
#undef INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER
   2809 
   2810 }  // namespace js::gc
   2811 
   2812 /*** Cycle Collector Barrier Implementation *********************************/
   2813 
   2814 /*
   2815 * The GC and CC are run independently. Consequently, the following sequence of
   2816 * events can occur:
   2817 * 1. GC runs and marks an object gray.
   2818 * 2. The mutator runs (specifically, some C++ code with access to gray
   2819 *    objects) and creates a pointer from a JS root or other black object to
   2820 *    the gray object. If we re-ran a GC at this point, the object would now be
   2821 *    black.
   2822 * 3. Now we run the CC. It may think it can collect the gray object, even
   2823 *    though it's reachable from the JS heap.
   2824 *
   2825 * To prevent this badness, we unmark the gray bit of an object when it is
   2826 * accessed by callers outside XPConnect. This would cause the object to go
   2827 * black in step 2 above. This must be done on everything reachable from the
   2828 * object being returned. The following code takes care of the recursive
   2829 * re-coloring.
   2830 *
   2831 * There is an additional complication for certain kinds of edges that are not
   2832 * contained explicitly in the source object itself, such as from a weakmap key
   2833 * to its value. These "implicit edges" are represented in some other
   2834 * container object, such as the weakmap itself. In these
   2835 * cases, calling unmark gray on an object won't find all of its children.
   2836 *
   2837 * Handling these implicit edges has two parts:
   2838 * - A special pass enumerating all of the containers that know about the
   2839 *   implicit edges to fix any black-gray edges that have been created. This
   2840 *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
   2841 * - To prevent any incorrectly gray objects from escaping to live JS outside
   2842 *   of the containers, we must add unmark-graying read barriers to these
   2843 *   containers.
   2844 */
   2845 
#ifdef DEBUG
// Debug tracer asserting that no traversed child is marked gray.
struct AssertNonGrayTracer final : public JS::CallbackTracer {
  // This is used by the UnmarkGray tracer only, and needs to report itself as
  // the non-gray tracer to not trigger assertions.  Do not use it in another
  // context without making this more generic.
  explicit AssertNonGrayTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt, JS::TracerKind::UnmarkGray) {}
  void onChild(JS::GCCellPtr thing, const char* name) override {
    MOZ_ASSERT(!thing.asCell()->isMarkedGray());
  }
};
#endif
   2858 
   2859 class js::gc::UnmarkGrayTracer final : public JS::CallbackTracer {
   2860 public:
   2861  // We set weakMapAction to WeakMapTraceAction::Skip because the cycle
   2862  // collector will fix up any color mismatches involving weakmaps when it runs.
   2863  explicit UnmarkGrayTracer(GCMarker* marker)
   2864      : JS::CallbackTracer(marker->runtime(), JS::TracerKind::UnmarkGray,
   2865                           JS::WeakMapTraceAction::Skip),
   2866        unmarkedAny(false),
   2867        oom(false),
   2868        marker(marker),
   2869        stack(marker->unmarkGrayStack) {}
   2870 
   2871  void unmark(JS::GCCellPtr cell);
   2872 
   2873  // Whether we unmarked anything.
   2874  bool unmarkedAny;
   2875 
   2876  // Whether we ran out of memory.
   2877  bool oom;
   2878 
   2879 private:
   2880  // Marker to use if we need to unmark in zones that are currently being
   2881  // marked.
   2882  GCMarker* marker;
   2883 
   2884  // The source of edges traversed by onChild.
   2885  Zone* sourceZone;
   2886 
   2887  // Stack of cells to traverse.
   2888  Vector<JS::GCCellPtr, 0, SystemAllocPolicy>& stack;
   2889 
   2890  void onChild(JS::GCCellPtr thing, const char* name) override;
   2891 };
   2892 
// Visit one edge reached while unmarking gray. Gray cells are marked black and
// pushed for recursive traversal; cells in zones that are currently marking
// are handled via the write barrier instead.
void UnmarkGrayTracer::onChild(JS::GCCellPtr thing, const char* name) {
  Cell* cell = thing.asCell();

  // Cells in the nursery cannot be gray, and nor can certain kinds of tenured
  // cells. These must necessarily point only to black edges.
  if (!cell->isTenured() || !TraceKindCanBeMarkedGray(thing.kind())) {
#ifdef DEBUG
    MOZ_ASSERT(!cell->isMarkedGray());
    AssertNonGrayTracer nongray(runtime());
    JS::TraceChildren(&nongray, thing);
#endif
    return;
  }

  TenuredCell& tenured = cell->asTenured();
  Zone* zone = tenured.zoneFromAnyThread();

  // As well as updating the mark bits, we may need to update the color in the
  // atom marking bitmap for symbols to record that |sourceZone| now has a black
  // edge to |thing|.
  if (zone->isAtomsZone() && sourceZone) {
    GCRuntime* gc = &runtime()->gc;
    if (tenured.is<JS::Symbol>()) {
      JS::Symbol* symbol = tenured.as<JS::Symbol>();
      gc->atomMarking.maybeUnmarkGrayAtomically(sourceZone, symbol);
    }
  }

  // If the cell is in a zone whose mark bits are being cleared, then it will
  // end up being marked black by GC marking.
  if (zone->isGCPreparing()) {
    return;
  }

  // If the cell is already marked black then there's nothing more to do.
  if (tenured.isMarkedBlack()) {
    return;
  }

  if (zone->isGCMarking()) {
    // If the cell is in a zone that we're currently marking, then it's
    // possible that it is currently white but will end up gray. To handle
    // this case, trigger the barrier for any cells in zones that are
    // currently being marked. This will ensure they will eventually get
    // marked black.
    TraceEdgeForBarrier(marker, &tenured, thing.kind());
  } else if (tenured.isMarkedGray()) {
    // TODO: It may be a small improvement to only use the atomic version
    // during parallel marking.
    tenured.markBlackAtomic();
    if (!stack.append(thing)) {
      oom = true;
    }
  }

  unmarkedAny = true;
}
   2950 
// Entry point: unmark |cell| and then drain the work stack, tracing the
// children of each popped cell in turn.
void UnmarkGrayTracer::unmark(JS::GCCellPtr cell) {
  MOZ_ASSERT(stack.empty());

  // TODO: We probably don't need to do anything if the gray bits are
  // invalid. However an early return here causes ExposeGCThingToActiveJS to
  // fail because it asserts that something gets unmarked.

  // The root being unmarked has no source zone.
  sourceZone = nullptr;
  onChild(cell, "unmarking root");

  while (!stack.empty() && !oom) {
    JS::GCCellPtr thing = stack.popCopy();
    sourceZone = thing.asCell()->zone();
    TraceChildren(this, thing);
  }

  if (oom) {
    // If we run out of memory, we take a drastic measure: require that we
    // GC again before the next CC.
    stack.clear();
    runtime()->gc.setGrayBitsInvalid();
  }
}
   2974 
   2975 bool js::gc::UnmarkGrayGCThingUnchecked(GCMarker* marker, JS::GCCellPtr thing) {
   2976  MOZ_ASSERT(thing);
   2977 
   2978  mozilla::Maybe<AutoGeckoProfilerEntry> profilingStackFrame;
   2979  if (JSContext* cx = TlsContext.get()) {
   2980    profilingStackFrame.emplace(cx, "UnmarkGrayGCThing",
   2981                                JS::ProfilingCategoryPair::GCCC_UnmarkGray);
   2982  }
   2983 
   2984  UnmarkGrayTracer unmarker(marker);
   2985  unmarker.unmark(thing);
   2986  return unmarker.unmarkedAny;
   2987 }
   2988 
// Public API: recursively unmark gray starting at |thing|. Returns whether
// anything was unmarked. Must not be called during GC or cycle collection.
JS_PUBLIC_API bool JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing) {
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());

  JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
  if (thing.asCell()->zone()->isGCPreparing()) {
    // Mark bits are being cleared in preparation for GC.
    return false;
  }

  MOZ_ASSERT(thing.asCell()->isMarkedGray());
  return UnmarkGrayGCThingUnchecked(&rt->gc.marker(), thing);
}
   3002 
   3003 void js::gc::UnmarkGrayGCThingRecursively(TenuredCell* cell) {
   3004  JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(cell, cell->getTraceKind()));
   3005 }
   3006 
#ifdef DEBUG
// Non-inlined wrapper around Forwarded() (debug builds only).
Cell* js::gc::UninlinedForwarded(const Cell* cell) { return Forwarded(cell); }
#endif
   3010 
   3011 namespace js::debug {
   3012 
   3013 MarkInfo GetMarkInfo(void* vp) {
   3014  GCRuntime& gc = TlsGCContext.get()->runtime()->gc;
   3015  if (gc.nursery().isInside(vp)) {
   3016    ChunkBase* chunk = js::gc::detail::GetGCAddressChunkBase(vp);
   3017    return chunk->getKind() == js::gc::ChunkKind::NurseryFromSpace
   3018               ? MarkInfo::NURSERY_FROMSPACE
   3019               : MarkInfo::NURSERY_TOSPACE;
   3020  }
   3021 
   3022  if (gc.isPointerWithinBufferAlloc(vp)) {
   3023    return MarkInfo::BUFFER;
   3024  }
   3025 
   3026  if (!gc.isPointerWithinTenuredCell(vp)) {
   3027    return MarkInfo::UNKNOWN;
   3028  }
   3029 
   3030  if (!IsCellPointerValid(vp)) {
   3031    return MarkInfo::UNKNOWN;
   3032  }
   3033 
   3034  TenuredCell* cell = reinterpret_cast<TenuredCell*>(vp);
   3035  if (cell->isMarkedGray()) {
   3036    return MarkInfo::GRAY;
   3037  }
   3038  if (cell->isMarkedBlack()) {
   3039    return MarkInfo::BLACK;
   3040  }
   3041  return MarkInfo::UNMARKED;
   3042 }
   3043 
   3044 uintptr_t* GetMarkWordAddress(Cell* cell) {
   3045  if (!cell->isTenured()) {
   3046    return nullptr;
   3047  }
   3048 
   3049  AtomicBitmapWord* wordp;
   3050  uintptr_t mask;
   3051  ArenaChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
   3052  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), ColorBit::BlackBit,
   3053                                     &wordp, &mask);
   3054  return reinterpret_cast<uintptr_t*>(wordp);
   3055 }
   3056 
   3057 uintptr_t GetMarkMask(Cell* cell, uint32_t colorBit) {
   3058  MOZ_ASSERT(colorBit == 0 || colorBit == 1);
   3059 
   3060  if (!cell->isTenured()) {
   3061    return 0;
   3062  }
   3063 
   3064  ColorBit bit = colorBit == 0 ? ColorBit::BlackBit : ColorBit::GrayOrBlackBit;
   3065  AtomicBitmapWord* wordp;
   3066  uintptr_t mask;
   3067  ArenaChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
   3068  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), bit, &wordp, &mask);
   3069  return mask;
   3070 }
   3071 
   3072 }  // namespace js::debug