tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Sweeping.cpp (83419B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 /*
      8 * Implementation of GC sweeping.
      9 *
     10 * In the SpiderMonkey GC, 'sweeping' is used to mean two things:
     11 *  - updating data structures to remove pointers to dead GC things and updating
     12 *    pointers to moved GC things
     13 *  - finalizing dead GC things
     14 *
     15 * Furthermore, the GC carries out gray and weak marking after the start of the
     16 * sweep phase. This is also implemented in this file.
     17 */
     18 
     19 #include "mozilla/DebugOnly.h"
     20 #include "mozilla/Maybe.h"
     21 #include "mozilla/ScopeExit.h"
     22 #include "mozilla/TimeStamp.h"
     23 
     24 #include "builtin/FinalizationRegistryObject.h"
     25 #include "builtin/WeakRefObject.h"
     26 #include "debugger/DebugAPI.h"
     27 #include "gc/AllocKind.h"
     28 #include "gc/BufferAllocator.h"
     29 #include "gc/FinalizationObservers.h"
     30 #include "gc/GCInternals.h"
     31 #include "gc/GCLock.h"
     32 #include "gc/GCProbes.h"
     33 #include "gc/GCRuntime.h"
     34 #include "gc/ParallelWork.h"
     35 #include "gc/Statistics.h"
     36 #include "gc/TraceKind.h"
     37 #include "gc/WeakMap.h"
     38 #include "gc/Zone.h"
     39 #include "jit/JitFrames.h"
     40 #include "jit/JitRuntime.h"
     41 #include "jit/JitZone.h"
     42 #include "proxy/DeadObjectProxy.h"
     43 #include "vm/BigIntType.h"
     44 #include "vm/HelperThreads.h"
     45 #include "vm/JSContext.h"
     46 #include "vm/Probes.h"
     47 #include "vm/Time.h"
     48 #include "vm/WrapperObject.h"
     49 
     50 #include "gc/PrivateIterators-inl.h"
     51 #include "vm/GeckoProfiler-inl.h"
     52 #include "vm/JSObject-inl.h"
     53 #include "vm/PropMap-inl.h"
     54 #include "vm/Shape-inl.h"
     55 #include "vm/StringType-inl.h"
     56 
     57 using namespace js;
     58 using namespace js::gc;
     59 
     60 using mozilla::DebugOnly;
     61 using mozilla::TimeStamp;
     62 
     63 using JS::SliceBudget;
     64 
     65 /*
     66 * Sweeping of arenas and possible finalization of dead cells proceeds in a
     67 * sequence of phases.
     68 *
     69 *  1. ForegroundObjectFinalizePhase
     70 *     JSObjects with finalizers. Swept incrementally on the main thread.
     71 *
     72 *  2. ForegroundNonObjectFinalizePhase
     73 *     Non-JSObjects with finalizers. Swept incrementally on the main thread.
     74 *
     75 *  3. BackgroundObjectFinalizePhase
     76 *     JSObjects with finalizers that can run off main thread. Swept
     77 *     non-incrementally on a helper thread.
     78 *
     79 *  4. BackgroundTrivialFinalizePhase
     80 *     Everything else. These may or may not have finalizers. Any finalizers
     81 *     must not delete HeapPtrs. Swept non-incrementally on a helper thread.
     82 */
     83 
// Phase 1/3: JSObjects with finalizers that must run on the main thread.
static constexpr AllocKinds ForegroundObjectFinalizePhase = {
    AllocKind::OBJECT0_FOREGROUND, AllocKind::OBJECT2_FOREGROUND,
    AllocKind::OBJECT4_FOREGROUND, AllocKind::OBJECT6_FOREGROUND,
    AllocKind::OBJECT8_FOREGROUND, AllocKind::OBJECT12_FOREGROUND,
    AllocKind::OBJECT16_FOREGROUND};

// Phase 2: non-object cells with finalizers that must run on the main thread.
static constexpr AllocKinds ForegroundNonObjectFinalizePhase = {
    AllocKind::SCRIPT, AllocKind::JITCODE};

// Phase 3: JSObjects whose finalizers can run on a helper thread.
static constexpr AllocKinds BackgroundObjectFinalizePhase = {
    AllocKind::OBJECT0_BACKGROUND, AllocKind::OBJECT2_BACKGROUND,
    AllocKind::ARRAYBUFFER4,       AllocKind::OBJECT4_BACKGROUND,
    AllocKind::ARRAYBUFFER6,       AllocKind::OBJECT6_BACKGROUND,
    AllocKind::ARRAYBUFFER8,       AllocKind::OBJECT8_BACKGROUND,
    AllocKind::ARRAYBUFFER12,      AllocKind::OBJECT12_BACKGROUND,
    AllocKind::ARRAYBUFFER16,      AllocKind::OBJECT16_BACKGROUND};

// Phase 4: everything else. These cells have trivial finalizers (or none) and
// are swept on a helper thread.
static constexpr AllocKinds BackgroundTrivialFinalizePhase = {
    AllocKind::FUNCTION,        AllocKind::FUNCTION_EXTENDED,
    AllocKind::OBJECT0,         AllocKind::OBJECT2,
    AllocKind::OBJECT4,         AllocKind::OBJECT6,
    AllocKind::OBJECT8,         AllocKind::OBJECT12,
    AllocKind::OBJECT16,        AllocKind::SCOPE,
    AllocKind::REGEXP_SHARED,   AllocKind::FAT_INLINE_STRING,
    AllocKind::STRING,          AllocKind::EXTERNAL_STRING,
    AllocKind::FAT_INLINE_ATOM, AllocKind::ATOM,
    AllocKind::SYMBOL,          AllocKind::BIGINT,
    AllocKind::SHAPE,           AllocKind::BASE_SHAPE,
    AllocKind::GETTER_SETTER,   AllocKind::COMPACT_PROP_MAP,
    AllocKind::NORMAL_PROP_MAP, AllocKind::DICT_PROP_MAP};

// All alloc kinds that are swept off the main thread.
static constexpr AllocKinds AllBackgroundSweptKinds =
    BackgroundObjectFinalizePhase + BackgroundTrivialFinalizePhase;

// Number of empty arenas to accumulate before releasing them in one batch,
// so the GC lock is dropped and retaken periodically between batches.
static constexpr size_t ArenaReleaseBatchSize = 32;
    119 
// Finalize all dead cells in this arena and rebuild its free list in place.
//
// T is the C++ type of the cells stored in the arena; finalizeKind says
// whether dead cells need their finalizer invoked. Returns the number of
// cells that survived (were marked).
template <typename T, FinalizeKind finalizeKind>
inline size_t Arena::finalize(JS::GCContext* gcx, AllocKind thingKind,
                              size_t thingSize) {
  /* Enforce requirements on size of T. */
  MOZ_ASSERT(thingSize % CellAlignBytes == 0);
  MOZ_ASSERT(thingSize >= MinCellSize);
  MOZ_ASSERT(thingSize <= 255);

  MOZ_ASSERT(allocated());
  MOZ_ASSERT(thingKind == getAllocKind());
  MOZ_ASSERT(thingSize == getThingSize());
  MOZ_ASSERT(!onDelayedMarkingList_);

  MOZ_ASSERT(finalizeKind == GetFinalizeKind(thingKind));

  // Offset within the arena where the current run of free cells begins.
  uint_fast16_t freeStart = firstThingOffset(thingKind);

  // Update the free list as we go along. The cell iterator will always be ahead
  // of this pointer when it is written through, so the write will not interfere
  // with the iteration.
  FreeSpan* newListTail = &firstFreeSpan;

  size_t nmarked = 0;
  size_t nfinalized = 0;

  for (ArenaCellIterUnderFinalize cell(this); !cell.done(); cell.next()) {
    T* t = cell.as<T>();
    if (TenuredThingIsMarkedAny(t)) {
      // Live cell: close off any preceding run of dead cells as a free span.
      uint_fast16_t thing = uintptr_t(t) & ArenaMask;
      if (thing != freeStart) {
        // We just finished passing over one or more free things,
        // so record a new FreeSpan.
        newListTail->initBounds(freeStart, thing - thingSize, this);
        newListTail = newListTail->nextSpanUnchecked(this);
      }
      freeStart = thing + thingSize;
      nmarked++;
    } else {
      // Dead cell: run the finalizer if required, then poison the memory so
      // use-after-free is detectable.
      if constexpr (std::is_same_v<T, JSObject>) {
        js::probes::FinalizeObject(t);
      }
      if constexpr (finalizeKind != FinalizeKind::None) {
        t->finalize(gcx);
      }
      AlwaysPoison(t, JS_SWEPT_TENURED_PATTERN, thingSize,
                   MemCheckKind::MakeUndefined);
      gcprobes::TenuredFinalize(t);
      nfinalized++;
    }
  }

  // Feed survival counts into the pretenuring heuristics for cell types that
  // track allocation sites.
  if constexpr (std::is_same_v<T, JSObject> || std::is_same_v<T, JSString> ||
                std::is_same_v<T, JS::BigInt>) {
    if (isNewlyCreated_) {
      zone()->pretenuring.updateCellCountsInNewlyCreatedArenas(
          nmarked + nfinalized, nmarked);
    }
  }
  isNewlyCreated_ = 0;

  if (freeStart == ArenaSize) {
    // If the last thing was marked, we will have already set the bounds of
    // the final span, and we just need to terminate the list.
    newListTail->initAsEmpty();
  } else {
    // Otherwise, end the list with a span that covers the final stretch of free
    // things.
    newListTail->initFinal(freeStart, ArenaSize - thingSize, this);
  }

#ifdef DEBUG
  size_t nfree = numFreeThings(thingSize);
  MOZ_ASSERT(nfree + nmarked == thingsPerArena(thingKind));
#endif

  return nmarked;
}
    197 
// Finalize arenas from the |src| list, inserting swept arenas into the
// appropriate size bins in |dest|. If releaseEmpty is specified, completely
// empty arenas are released in batches as we go instead of being retained.
//
// Returns false if the slice budget was exhausted before all arenas were
// processed; in that case the unprocessed arenas remain on |src|.
template <typename T, FinalizeKind finalizeKind, ReleaseEmpty releaseEmpty>
static inline bool FinalizeTypedArenas(JS::GCContext* gcx, ArenaList& src,
                                       SortedArenaList& dest,
                                       AllocKind thingKind,
                                       SliceBudget& budget) {
  MOZ_ASSERT(gcx->isFinalizing());

  size_t thingSize = Arena::thingSize(thingKind);
  size_t thingsPerArena = Arena::thingsPerArena(thingKind);
  size_t markCount = 0;
  size_t emptyCount = 0;

  GCRuntime* gc = &gcx->runtimeFromAnyThread()->gc;

  // Report the number of surviving cells to the stats system on every exit
  // path, including when the budget is exhausted.
  auto updateMarkCount = mozilla::MakeScopeExit(
      [&] { gc->stats().addCount(gcstats::COUNT_CELLS_MARKED, markCount); });

  while (!src.isEmpty()) {
    Arena* arena = src.popFront();
    size_t nmarked =
        arena->finalize<T, finalizeKind>(gcx, thingKind, thingSize);
    size_t nfree = thingsPerArena - nmarked;

    markCount += nmarked;

    // Bin the arena in |dest| by its free cell count.
    dest.insertAt(arena, nfree);

    if constexpr (bool(releaseEmpty)) {
      if (nmarked == 0) {
        // Accumulate fully-empty arenas and release them a batch at a time so
        // the GC lock is not held for too long.
        emptyCount++;
        MOZ_ASSERT(emptyCount <= ArenaReleaseBatchSize);
        if (emptyCount == ArenaReleaseBatchSize) {
          Arena* emptyArenas = nullptr;
          dest.extractEmptyTo(&emptyArenas);
          emptyArenas =
              gc->releaseSomeEmptyArenas(emptyArenas->zone(), emptyArenas);
          MOZ_ASSERT(!emptyArenas);
          emptyCount = 0;
        }
      }
    }

    // Charge the budget one step per cell in the arena.
    budget.step(thingsPerArena);
    if (budget.isOverBudget()) {
      return false;
    }
  }

  if constexpr (bool(releaseEmpty)) {
    // Release any leftover empty arenas from a partial batch.
    if (emptyCount) {
      Arena* emptyArenas = nullptr;
      dest.extractEmptyTo(&emptyArenas);
      emptyArenas =
          gc->releaseSomeEmptyArenas(emptyArenas->zone(), emptyArenas);
      MOZ_ASSERT(!emptyArenas);
    }
  }

  return true;
}
    260 
/*
 * Finalize the list of arenas, dispatching on the alloc kind to instantiate
 * the correctly-typed FinalizeTypedArenas specialization.
 */
template <ReleaseEmpty releaseEmpty>
static bool FinalizeArenas(JS::GCContext* gcx, ArenaList& src,
                           SortedArenaList& dest, AllocKind thingKind,
                           SliceBudget& budget) {
  switch (thingKind) {
#define EXPAND_CASE(allocKind, _1, type, _2, finalizeKind, _3, _4)      \
  case AllocKind::allocKind:                                            \
    return FinalizeTypedArenas<type, FinalizeKind::finalizeKind,        \
                               releaseEmpty>(gcx, src, dest, thingKind, \
                                             budget);
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind");
  }
}
    281 
    282 void GCRuntime::initBackgroundSweep(Zone* zone, JS::GCContext* gcx,
    283                                    const AllocKinds& kinds) {
    284  for (AllocKind kind : kinds) {
    285    zone->arenas.initBackgroundSweep(kind);
    286  }
    287 }
    288 
    289 void ArenaLists::initBackgroundSweep(AllocKind thingKind) {
    290  MOZ_ASSERT(IsBackgroundSwept(thingKind));
    291  MOZ_ASSERT(concurrentUse(thingKind) == ConcurrentUse::None);
    292 
    293  if (!collectingArenaList(thingKind).isEmpty()) {
    294    concurrentUse(thingKind) = ConcurrentUse::BackgroundFinalize;
    295  }
    296 }
    297 
// Finalize all collecting arenas of the given kind. Runs off the main thread.
//
// If releaseEmpty is Yes, empty arenas are released as part of finalization;
// otherwise |empty| must be non-null and empty arenas are accumulated into the
// list it points at.
template <ReleaseEmpty releaseEmpty>
void ArenaLists::backgroundFinalize(JS::GCContext* gcx, AllocKind kind,
                                    Arena** empty) {
  MOZ_ASSERT(IsBackgroundSwept(kind));
  // |empty| is provided exactly when we are not releasing empty arenas here.
  MOZ_ASSERT(bool(empty) != bool(releaseEmpty));

  ArenaList& arenas = collectingArenaList(kind);
  if (arenas.isEmpty()) {
    MOZ_ASSERT(concurrentUse(kind) == ConcurrentUse::None);
    return;
  }
  MOZ_ASSERT(concurrentUse(kind) == ConcurrentUse::BackgroundFinalize);

  SortedArenaList finalizedSorted(kind);

  // Background finalization is not incremental, so use an unlimited budget.
  auto unlimited = SliceBudget::unlimited();
  FinalizeArenas<releaseEmpty>(gcx, arenas, finalizedSorted, kind, unlimited);
  MOZ_ASSERT(arenas.isEmpty());

  if constexpr (!bool(releaseEmpty)) {
    // Hand empty arenas back to the caller instead of releasing them.
    finalizedSorted.extractEmptyTo(empty);
  }
  MOZ_ASSERT(!finalizedSorted.hasEmptyArenas());

  // Set the collectingArenaList to the possibly empty list of swept arenas
  // while holding the GC lock. Set concurrentUse to indicate to the main thread
  // that sweeping has finished.
  ArenaList sweptArenas = finalizedSorted.convertToArenaList();

  AutoLockGC lock(gcx->runtimeFromAnyThread());
  collectingArenaList(kind) = std::move(sweptArenas);
  concurrentUse(kind) = ConcurrentUse::BackgroundFinalizeFinished;
}
    331 
    332 void ArenaLists::mergeBackgroundSweptArenas() {
    333  // Merge swept arenas into main arena lists on the main thread.
    334  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
    335 
    336  for (AllocKind kind : AllBackgroundSweptKinds) {
    337    maybeMergeSweptArenas(kind);
    338  }
    339 }
    340 
    341 void ArenaLists::maybeMergeSweptArenas(AllocKind kind) {
    342  MOZ_ASSERT(zone_->isGCFinished());
    343  MOZ_ASSERT(concurrentUse(kind) != ConcurrentUse::BackgroundFinalize);
    344 
    345  if (concurrentUse(kind) == ConcurrentUse::BackgroundFinalizeFinished) {
    346    concurrentUse(kind) = ConcurrentUse::None;
    347    mergeSweptArenas(kind, collectingArenaList(kind));
    348  }
    349 
    350  MOZ_ASSERT(collectingArenaList(kind).isEmpty());
    351 }
    352 
    353 // This methods merges the following to get the final state of an arena
    354 // list:
    355 //  - swept arenas
    356 //  - arenas allocated during marking
    357 //  - arenas allocated during sweeping
    358 void ArenaLists::mergeSweptArenas(AllocKind kind, ArenaList& sweptArenas) {
    359  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
    360  MOZ_ASSERT(concurrentUse(kind) == ConcurrentUse::None);
    361 
    362  arenaList(kind).prepend(std::move(sweptArenas));
    363 }
    364 
    365 void ArenaLists::queueForegroundThingsForSweep() {
    366  gcCompactPropMapArenasToUpdate =
    367      collectingArenaList(AllocKind::COMPACT_PROP_MAP).getFirst();
    368  gcNormalPropMapArenasToUpdate =
    369      collectingArenaList(AllocKind::NORMAL_PROP_MAP).getFirst();
    370 }
    371 
// Run the background portion of sweeping for the given zones: finalize dead
// cells, release empty arenas and sweep buffer memory. Called via the
// background sweep task (or on the main thread when background threads are
// disabled).
void GCRuntime::sweepBackgroundThings(ZoneList& zones) {
  if (zones.isEmpty()) {
    return;
  }

  JS::GCContext* gcx = TlsGCContext.get();
  MOZ_ASSERT(gcx->isFinalizing());

  // Sweep zones in order. The atoms zone must be finalized last as other
  // zones may have direct pointers into it.
  while (!zones.isEmpty()) {
    Zone* zone = zones.removeFront();
    MOZ_ASSERT(zone->isGCFinished());

    TimeStamp startTime = TimeStamp::Now();

    ArenaLists& arenaLists = zone->arenas;
    Arena* emptyArenas = arenaLists.takeSweptEmptyArenas();

    // We must finalize kinds in the order specified at the top of this file.

    for (auto kind : BackgroundObjectFinalizePhase) {
      MOZ_ASSERT(IsBackgroundFinalized(kind));
      arenaLists.backgroundFinalize<ReleaseEmpty::No>(gcx, kind, &emptyArenas);
    }

    // Release any arenas that are now empty.
    //
    // Finalizers can still access the zone pointer in now-empty arenas because
    // of the write barrier in the HeapPtr destructor. This means we can't
    // release any empty arenas until all such finalizers have been run.
    //
    // At this point this has happened and we can release empty arenas
    // immediately from now on.

    AutoDisallowPreWriteBarrier disallowBarrier(gcx);

    // Release in batches; releaseSomeEmptyArenas returns the unprocessed tail.
    while (emptyArenas) {
      emptyArenas = releaseSomeEmptyArenas(zone, emptyArenas);
    }

    // Now everything with a non-trivial finalizer has been finalized we can
    // sweep buffer memory.
    //
    // Note we depend on this happening before the BUFFER alloc kinds in
    // BackgroundTrivialFinalizePhase are swept!
    bool decommit = shouldDecommit() && DecommitEnabled();
    zone->bufferAllocator.sweepForMajorCollection(decommit);

    // TODO: The remaining sweeping work can be parallelised between multiple
    // threads.
    for (AllocKind kind : BackgroundTrivialFinalizePhase) {
      MOZ_ASSERT(IsBackgroundSwept(kind));
      arenaLists.backgroundFinalize<ReleaseEmpty::Yes>(gcx, kind);
    }

    // Record time spent sweeping this zone.
    TimeStamp endTime = TimeStamp::Now();
    zone->perZoneGCTime += endTime - startTime;
  }
}
    433 
// Release up to ArenaReleaseBatchSize arenas from the |emptyArenas| list back
// to their chunks, returning the unprocessed remainder of the list (nullptr
// once all arenas have been released).
Arena* GCRuntime::releaseSomeEmptyArenas(Zone* zone, Arena* emptyArenas) {
  // Batch releases so as to periodically drop and reacquire the GC lock to
  // avoid blocking the main thread from allocating arenas. This is important
  // for allocation-heavy workloads such as the splay benchmark.
  //
  // This block is equivalent to calling GCRuntime::releaseArena on each arena
  // individually.
  bool isAtomsZone = zone->isAtomsZone();

  Arena* arenasToRelease[ArenaReleaseBatchSize];
  size_t atomsBitmapIndexes[ArenaReleaseBatchSize];
  size_t count = 0;

  size_t gcHeapBytesFreed = 0;

  // Take up to ArenaReleaseBatchSize arenas from emptyArenas list.
  for (size_t i = 0; emptyArenas && i < ArenaReleaseBatchSize; i++) {
    Arena* arena = emptyArenas;
    emptyArenas = arena->next;

    gcHeapBytesFreed += ArenaSize;

    if (isAtomsZone) {
      // Remember the atom bitmap index so it can be freed while holding the
      // GC lock below.
      atomsBitmapIndexes[i] = arena->atomBitmapStart();
#ifdef DEBUG
      arena->atomBitmapStart() = 0;
#endif
    }

    arena->release();
    arenasToRelease[i] = arena;
    count++;
  }

  // Update heap size accounting before taking the lock.
  zone->gcHeapSize.removeBytes(gcHeapBytesFreed, true, heapSize);

  AutoLockGC lock(this);
  for (size_t i = 0; i < count; i++) {
    Arena* arena = arenasToRelease[i];
    if (isAtomsZone) {
      atomMarking.freeIndex(atomsBitmapIndexes[i], lock);
    }
    arena->chunk()->releaseArena(this, arena, lock);
  }

  return emptyArenas;
}
    481 
    482 void GCRuntime::assertBackgroundSweepingFinished() {
    483 #ifdef DEBUG
    484  {
    485    AutoLockHelperThreadState lock;
    486    MOZ_ASSERT(backgroundSweepZones.ref().isEmpty());
    487  }
    488 
    489  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    490    for (auto kind : AllAllocKinds()) {
    491      MOZ_ASSERT_IF(state() == State::NotActive || state() >= State::Compact,
    492                    zone->arenas.collectingArenaList(kind).isEmpty());
    493      MOZ_ASSERT(zone->arenas.doneBackgroundFinalize(kind));
    494    }
    495  }
    496 #endif
    497 }
    498 
// Queue zones for background sweeping and start the sweep task. When
// background threads are disabled, run the sweeping synchronously on the
// main thread instead.
void GCRuntime::queueZonesAndStartBackgroundSweep(ZoneList&& zones) {
  {
    AutoLockHelperThreadState lock;
    MOZ_ASSERT(!requestSliceAfterBackgroundTask);
    backgroundSweepZones.ref().appendList(std::move(zones));
    if (useBackgroundThreads) {
      sweepTask.startOrRunIfIdle(lock);
    }
  }
  if (!useBackgroundThreads) {
    // Wait for any previous run to finish, then sweep on this thread.
    sweepTask.join();
    sweepTask.runFromMainThread();
  }
}
    513 
// Parallel task that performs background sweeping. Runs with the GC context
// in the Finalizing state and is accounted to the SWEEP stats phase.
BackgroundSweepTask::BackgroundSweepTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::SWEEP, GCUse::Finalizing) {}
    516 
void BackgroundSweepTask::run(AutoLockHelperThreadState& lock) {
  // Task entry point: delegate to the GC runtime's sweep loop.
  gc->sweepFromBackgroundThread(lock);
}
    520 
// Main loop of the background sweep task: drain the queue of zones to sweep,
// dropping the helper thread lock while the actual sweeping runs.
void GCRuntime::sweepFromBackgroundThread(AutoLockHelperThreadState& lock) {
  do {
    // Take ownership of the queued zones while holding the lock.
    ZoneList zones;
    zones.appendList(std::move(backgroundSweepZones.ref()));

    AutoUnlockHelperThreadState unlock(lock);
    sweepBackgroundThings(zones);

    // The main thread may call queueZonesAndStartBackgroundSweep() while this
    // is running so we must check there is no more work after releasing the
    // lock.
  } while (!backgroundSweepZones.ref().isEmpty());

  maybeRequestGCAfterBackgroundTask(lock);
}
    536 
    537 void GCRuntime::waitBackgroundSweepEnd() {
    538  sweepTask.join();
    539  for (AllZonesIter zone(this); !zone.done(); zone.next()) {
    540    if (zone->isGCFinished()) {
    541      zone->arenas.mergeBackgroundSweptArenas();
    542    }
    543  }
    544  if (state() != State::Sweep) {
    545    assertBackgroundSweepingFinished();
    546  }
    547 }
    548 
// Block until the background decommit task has finished.
void GCRuntime::waitBackgroundDecommitEnd() { decommitTask.join(); }
    550 
    551 void GCRuntime::startBackgroundFree() {
    552  AutoLockHelperThreadState lock;
    553 
    554  if (!hasBuffersForBackgroundFree()) {
    555    return;
    556  }
    557 
    558  freeTask.startOrRunIfIdle(lock);
    559 }
    560 
// Parallel task that frees memory queued for background freeing.
BackgroundFreeTask::BackgroundFreeTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::NONE) {
  // This can occur outside GCs so doesn't have a stats phase.
}
    565 
void BackgroundFreeTask::run(AutoLockHelperThreadState& lock) {
  // Task entry point: delegate to the GC runtime's free loop.
  gc->freeFromBackgroundThread(lock);
}
    569 
// Main loop of the background free task: free queued LifoAlloc blocks and
// nursery buffers, dropping the helper thread lock while the actual freeing
// runs.
void GCRuntime::freeFromBackgroundThread(AutoLockHelperThreadState& lock) {
  do {
    // Take ownership of the queued work while holding the lock.
    LifoAlloc lifoBlocks(JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE,
                         js::BackgroundMallocArena);
    lifoBlocks.transferFrom(&lifoBlocksToFree.ref());

    Nursery::BufferSet buffers;
    std::swap(buffers, buffersToFreeAfterMinorGC.ref());

    Nursery::StringBufferVector stringBuffers;
    std::swap(stringBuffers, stringBuffersToReleaseAfterMinorGC.ref());

    AutoUnlockHelperThreadState unlock(lock);

    lifoBlocks.freeAll();

    JS::GCContext* gcx = TlsGCContext.get();
    for (Nursery::BufferSet::Range r = buffers.all(); !r.empty();
         r.popFront()) {
      // Malloc memory associated with nursery objects is not tracked as these
      // are assumed to be short lived.
      gcx->freeUntracked(r.front());
    }

    for (auto* buffer : stringBuffers) {
      buffer->Release();
    }

    // More work may have been queued while the lock was released, so loop
    // until the queues are empty.
  } while (hasBuffersForBackgroundFree());
}
    599 
// Block until the background free task has finished.
void GCRuntime::waitBackgroundFreeEnd() { freeTask.join(); }
    601 
// Mark weakly-held things: weak map entries whose keys have become marked and
// JIT code global table entries, iterating until no more marking work is
// generated or the budget is exhausted.
//
// ZoneIterT selects which zones are processed (e.g. the current sweep group
// or all collected zones). Returns NotFinished if the incremental budget ran
// out before reaching a fixed point.
template <class ZoneIterT>
IncrementalProgress GCRuntime::markWeakReferences(
    SliceBudget& incrementalBudget) {
  MOZ_ASSERT(!marker().isWeakMarking());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::MARK_WEAK);

  // Only honor the incremental budget when incremental weak map marking is
  // enabled; otherwise run to completion.
  auto unlimited = SliceBudget::unlimited();
  SliceBudget& budget =
      marker().incrementalWeakMapMarkingEnabled ? incrementalBudget : unlimited;

  // Ensure we don't return to the mutator while we're still in weak marking
  // mode.
  auto leaveOnExit =
      mozilla::MakeScopeExit([&] { marker().leaveWeakMarkingMode(); });

  // If enterWeakMarkingMode takes up at least 80% of a slice, finish marking
  // completely in the next slice before yielding again. This avoids the problem
  // where scanning gcEphemeronEdges (which must be done at the beginning of
  // each slice) takes longer than a slice and therefore no (or little) progress
  // can be made per slice.
  double progressBeforeEnterWMM = budget.progress();
  auto checkSlowEnter = mozilla::MakeScopeExit([&] {
    // Called only when returning NotFinished.
    if (budget.progress() - progressBeforeEnterWMM > 0.8) {
      // Overran the budget. Finish the marking synchronously in the next slice.
      // Repeatedly returning to the mutator would require re-scanning the full
      // edge table in every slice, and we already know that this will take up
      // most or all of a single slice budget.
      finishMarkingDuringSweeping = true;
    }
  });

  // The previous logic is for the first enterWeakMarkingMode slice. This logic
  // then kicks in for the next slice, to update the budget to actually keep
  // going.
  if (!budget.isUnlimited() && finishMarkingDuringSweeping) {
    JS_LOG(gc, Info, "enterWeakMarkingMode finishing marking in next slice");
    budget.keepGoing = true;
  }

  if (marker().enterWeakMarkingMode()) {
    // If there was an 'enter-weak-marking-mode' token in the queue, then it
    // and everything after it will still be in the queue so we can process
    // them now.
    while (processTestMarkQueue() == QueueYielded) {
    };

    // Do not rely on the information about not-yet-marked weak keys that have
    // been collected by barriers. Clear out the gcEphemeronEdges entries and
    // rebuild the full table. Note that this a cross-zone operation; delegate
    // zone entries will be populated by map zone traversals, so everything
    // needs to be cleared first, then populated.
    if (!marker().incrementalWeakMapMarkingEnabled) {
      for (ZoneIterT zone(this); !zone.done(); zone.next()) {
        zone->gcEphemeronEdges().clearAndCompact();
      }
    }

    for (ZoneIterT zone(this); !zone.done(); zone.next()) {
      if (zone->enterWeakMarkingMode(&marker(), budget) == NotFinished) {
        return NotFinished;
      }
    }
  }

  // Alternate between draining the mark stack and discovering newly markable
  // weak map entries until a fixed point is reached.
  bool markedAny = true;
  while (markedAny) {
    if (!marker().markUntilBudgetExhausted(budget)) {
      MOZ_ASSERT(marker().incrementalWeakMapMarkingEnabled);
      return NotFinished;
    }

    markedAny = false;

    if (!marker().isWeakMarking()) {
      for (ZoneIterT zone(this); !zone.done(); zone.next()) {
        markedAny |= WeakMapBase::markZoneIteratively(zone, &marker());
      }
    }

    markedAny |= jit::JitRuntime::MarkJitcodeGlobalTableIteratively(&marker());
  }

  assertNoMarkingWork();
  checkSlowEnter.release();  // No need to lengthen next slice.

  return Finished;
}
    691 
// Mark weak references for the zones in the current sweep group only.
IncrementalProgress GCRuntime::markWeakReferencesInCurrentGroup(
    SliceBudget& budget) {
  return markWeakReferences<SweepGroupZonesIter>(budget);
}
    696 
// Mark gray roots supplied by the embedding plus incoming cross-compartment
// edges. Returns NotFinished if the budget was exhausted while tracing the
// embedding's gray roots.
IncrementalProgress GCRuntime::markGrayRoots(SliceBudget& budget,
                                             gcstats::PhaseKind phase) {
  MOZ_ASSERT(marker().markColor() == MarkColor::Black);

  gcstats::AutoPhase ap(stats(), phase);

  {
    // Mark with the gray color only within this scope.
    AutoSetMarkColor setColorGray(marker(), MarkColor::Gray);

    AutoUpdateLiveCompartments updateLive(this);
    marker().setRootMarkingMode(true);
    auto guard = mozilla::MakeScopeExit(
        [this]() { marker().setRootMarkingMode(false); });

    IncrementalProgress result =
        traceEmbeddingGrayRoots(marker().tracer(), budget);
    if (result == NotFinished) {
      return NotFinished;
    }

    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        marker().tracer(), Compartment::GrayEdges);
  }

  // Also mark any incoming cross compartment edges that were originally gray
  // but have been marked black by a barrier.
  Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
      marker().tracer(), Compartment::BlackEdges);

  return Finished;
}
    728 
    729 IncrementalProgress GCRuntime::markAllWeakReferences() {
    730  SliceBudget budget = SliceBudget::unlimited();
    731  return markWeakReferences<GCZonesIter>(budget);
    732 }
    733 
    734 void GCRuntime::markAllGrayReferences(gcstats::PhaseKind phase) {
    735 #ifdef DEBUG
    736  // Check zones are in the correct state to be marked.
    737  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    738    MOZ_ASSERT(zone->isGCMarkingBlackAndGray());
    739  }
    740 #endif
    741 
    742  SliceBudget budget = SliceBudget::unlimited();
    743  markGrayRoots(budget, phase);
    744  drainMarkStack();
    745 }
    746 
    747 void GCRuntime::dropStringWrappers() {
    748  /*
    749   * String "wrappers" are dropped on GC because their presence would require
    750   * us to sweep the wrappers in all compartments every time we sweep a
    751   * compartment group.
    752   */
    753  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    754    zone->dropStringWrappersOnGC();
    755  }
    756 }
    757 
    758 /*
    759 * Group zones that must be swept at the same time.
    760 *
    761 * From the point of view of the mutator, groups of zones transition atomically
    762 * from marking to sweeping. If compartment A has an edge to an unmarked object
    763 * in compartment B, then we must not start sweeping A in a later slice than we
    764 * start sweeping B. That's because a write barrier in A could lead to the
    765 * unmarked object in B becoming marked. However, if we had already swept that
    766 * object, we would be in trouble.
    767 *
    768 * If we consider these dependencies as a graph, then all the compartments in
    769 * any strongly-connected component of this graph must start sweeping in the
    770 * same slice.
    771 *
    772 * Tarjan's algorithm is used to calculate the components.
    773 */
    774 
// Add sweep group edges from this compartment's zone to the zones of any
// compartments it wraps objects from, where the wrapped object is not yet
// marked black. Returns false on failure to add an edge.
bool Compartment::findSweepGroupEdges() {
  Zone* source = zone();
  for (WrappedObjectCompartmentEnum e(this); !e.empty(); e.popFront()) {
    Compartment* targetComp = e.front();
    Zone* target = targetComp->zone();

    // Skip zones that aren't being collected and targets we already have an
    // edge to.
    if (!target->isGCMarking() || source->hasSweepGroupEdgeTo(target)) {
      continue;
    }

    for (ObjectWrapperEnum e(this, targetComp); !e.empty(); e.popFront()) {
      JSObject* key = e.front().mutableKey();
      MOZ_ASSERT(key->zone() == target);

      // Add an edge to the wrapped object's zone to ensure that the wrapper
      // zone is not still being marked when we start sweeping the wrapped zone.
      // As an optimization, if the wrapped object is already marked black there
      // is no danger of later marking and we can skip this.
      if (key->isMarkedBlack()) {
        continue;
      }

      if (!source->addSweepGroupEdgeTo(target)) {
        return false;
      }

      // We don't need to consider any more wrappers for this target
      // compartment since we already added an edge.
      break;
    }
  }

  return true;
}
    809 
// Collect this zone's sweep group edges: an implicit edge to the atoms zone,
// edges implied by each compartment's cross-compartment wrappers, and edges
// required by weak maps. Returns false on OOM.
bool Zone::findSweepGroupEdges(Zone* atomsZone) {
  MOZ_ASSERT_IF(this != atomsZone, !isAtomsZone());

  // Any zone may have a pointer to an atom in the atoms zone, and these aren't
  // in the cross compartment map.
  if (atomsZone->wasGCStarted() && !addSweepGroupEdgeTo(atomsZone)) {
    return false;
  }

  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    if (!comp->findSweepGroupEdges()) {
      return false;
    }
  }

  // Finally add any edges implied by weak map entries involving this zone.
  return WeakMapBase::findSweepGroupEdgesForZone(atomsZone, this);
}
    827 
// Testing only: link consecutive zones of the test mark queue with sweep
// group edges so queue order is compatible with group order. Compiled to a
// trivial 'return true' in release builds.
bool GCRuntime::addEdgesForMarkQueue() {
#ifdef DEBUG
  // For testing only.
  //
  // Add edges between all objects mentioned in the test mark queue, since
  // otherwise they will get marked in a different order than their sweep
  // groups. Note that this is only done at the beginning of an incremental
  // collection, so it is possible for objects to be added later that do not
  // follow the sweep group ordering. These objects will wait until their sweep
  // group comes up, or will be skipped if their sweep group is already past.
  JS::Zone* prevZone = nullptr;
  for (Value val : testMarkQueue) {
    if (!val.isObject()) {
      continue;
    }
    JSObject* obj = &val.toObject();
    JS::Zone* zone = obj->zone();
    if (!zone->isGCMarking()) {
      continue;
    }
    // Edge from the previously seen zone to this one; OOM propagates out.
    if (prevZone && prevZone != zone) {
      if (!prevZone->addSweepGroupEdgeTo(zone)) {
        return false;
      }
    }
    prevZone = zone;
  }
#endif
  return true;
}
    858 
    859 bool GCRuntime::findSweepGroupEdges() {
    860  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    861    if (!zone->findSweepGroupEdges(atomsZone())) {
    862      return false;
    863    }
    864  }
    865 
    866  if (!addEdgesForMarkQueue()) {
    867    return false;
    868  }
    869 
    870  return DebugAPI::findSweepGroupEdges(rt);
    871 }
    872 
// Partition the collected zones into sweep groups (strongly-connected
// components of the zone edge graph) and initialize the list of groups that
// incremental sweeping will walk.
void GCRuntime::groupZonesForSweeping(JS::GCReason reason) {
#ifdef DEBUG
  // No edges should be left over from a previous collection.
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->gcSweepGroupEdges().empty());
  }
#endif

  JSContext* cx = rt->mainContextFromOwnThread();
  ZoneComponentFinder finder(cx);
  // Non-incremental collections sweep everything together; also collapse to
  // one component if computing the edges failed (OOM).
  if (!isIncremental || !findSweepGroupEdges()) {
    finder.useOneComponent();
  }

  // Use one component for zeal modes that yield at specific points.
  if (useZeal && zealModeControlsYieldPoint()) {
    finder.useOneComponent();
  }

  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->isGCMarking());
    finder.addNode(zone);
  }
  sweepGroups = finder.getResultsList();
  currentSweepGroup = sweepGroups;
  sweepGroupIndex = 1;

  // The edges were only needed by the component finder; drop them now.
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    zone->clearSweepGroupEdges();
  }

#ifdef DEBUG
  // Record each zone's group index, used by assertions elsewhere.
  unsigned idx = sweepGroupIndex;
  for (Zone* head = currentSweepGroup; head; head = head->nextGroup()) {
    for (Zone* zone = head; zone; zone = zone->nextNodeInGroup()) {
      MOZ_ASSERT(zone->isGCMarking());
      zone->gcSweepGroupIndex = idx;
    }
    idx++;
  }

  MOZ_ASSERT_IF(!isIncremental, !currentSweepGroup->nextGroup());
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->gcSweepGroupEdges().empty());
  }
#endif
}
    919 
// Advance currentSweepGroup to the next group. If abortSweepAfterCurrentGroup
// was requested, instead unwind the remaining (not yet swept) groups back to
// the Finished state and clear the group list.
void GCRuntime::moveToNextSweepGroup() {
  currentSweepGroup = currentSweepGroup->nextGroup();
  ++sweepGroupIndex;
  if (!currentSweepGroup) {
    // All groups processed; nothing left to abort.
    abortSweepAfterCurrentGroup = false;
    return;
  }

  // The assertion records that aborting only happens once the collection is
  // no longer incremental.
  MOZ_ASSERT_IF(abortSweepAfterCurrentGroup, !isIncremental);
  if (!isIncremental) {
    // Non-incremental: fold all remaining groups into one.
    ZoneComponentFinder::mergeGroups(currentSweepGroup);
  }

  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->gcState() == zone->initialMarkingState());
    MOZ_ASSERT(!zone->isQueuedForBackgroundSweep());
  }

  if (abortSweepAfterCurrentGroup) {
    // Wait for background marking to finish before unwinding zone state.
    markTask.join();

    // Abort collection of subsequent sweep groups.
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      MOZ_ASSERT(!zone->gcNextGraphComponent);
      zone->changeGCState(zone->initialMarkingState(), Zone::Finished);
      zone->arenas.unmarkPreMarkedFreeCells();
      zone->arenas.mergeArenasFromCollectingLists();
      zone->clearGCSliceThresholds();
#ifdef DEBUG
      zone->cellsToAssertNotGray().clearAndFree();
#endif
    }

    // Drop any deferred incoming gray pointers for these compartments.
    for (SweepGroupCompartmentsIter comp(rt); !comp.done(); comp.next()) {
      resetGrayList(comp);
    }

    abortSweepAfterCurrentGroup = false;
    currentSweepGroup = nullptr;
  }
}
    961 
    962 /*
    963 * Gray marking:
    964 *
    965 * At the end of collection, anything reachable from a gray root that has not
    966 * otherwise been marked black must be marked gray.
    967 *
    968 * This means that when marking things gray we must not allow marking to leave
    969 * the current compartment group, as that could result in things being marked
    970 * gray when they might subsequently be marked black.  To achieve this, when we
    971 * find a cross compartment pointer we don't mark the referent but add it to a
    972 * singly-linked list of incoming gray pointers that is stored with each
    973 * compartment.
    974 *
    975 * The list head is stored in Compartment::gcIncomingGrayPointers and contains
    976 * cross compartment wrapper objects. The next pointer is stored in the second
    977 * extra slot of the cross compartment wrapper.
    978 *
    979 * The list is created during gray marking when one of the
    980 * MarkCrossCompartmentXXX functions is called for a pointer that leaves the
 * current compartment group.  This calls DelayCrossCompartmentGrayMarking to
    982 * push the referring object onto the list.
    983 *
    984 * The list is traversed and then unlinked in
    985 * GCRuntime::markIncomingGrayCrossCompartmentPointers.
    986 */
    987 
    988 static bool IsGrayListObject(JSObject* obj) {
    989  MOZ_ASSERT(obj);
    990  return obj->is<CrossCompartmentWrapperObject>() && !IsDeadProxyObject(obj);
    991 }
    992 
/* static */
// Reserved slot used to chain a wrapper onto a compartment's incoming-gray
// list (see the gray marking comment block above this section).
unsigned ProxyObject::grayLinkReservedSlot(JSObject* obj) {
  MOZ_ASSERT(IsGrayListObject(obj));
  return CrossCompartmentWrapperObject::GrayLinkReservedSlot;
}
    998 
#ifdef DEBUG
// Assert that a gray-list candidate's link slot is unset, i.e. it is not
// currently queued on any compartment's incoming-gray list.
static void AssertNotOnGrayList(JSObject* obj) {
  MOZ_ASSERT_IF(
      IsGrayListObject(obj),
      GetProxyReservedSlot(obj, ProxyObject::grayLinkReservedSlot(obj))
          .isUndefined());
}
#endif
   1007 
// Debug check: no compartment has a pending incoming-gray list head, and no
// object wrapper is still linked onto one.
static void AssertNoWrappersInGrayList(JSRuntime* rt) {
#ifdef DEBUG
  for (CompartmentsIter c(rt); !c.done(); c.next()) {
    MOZ_ASSERT(!c->gcIncomingGrayPointers);
    for (Compartment::ObjectWrapperEnum e(c); !e.empty(); e.popFront()) {
      AssertNotOnGrayList(e.front().value().unbarrieredGet());
    }
  }
#endif
}
   1018 
// Return the object a cross-compartment wrapper points at (the proxy's
// private slot value).
static JSObject* CrossCompartmentPointerReferent(JSObject* obj) {
  MOZ_ASSERT(IsGrayListObject(obj));
  return &obj->as<ProxyObject>().private_().toObject();
}
   1023 
   1024 static JSObject* NextIncomingCrossCompartmentPointer(JSObject* prev,
   1025                                                     bool unlink) {
   1026  unsigned slot = ProxyObject::grayLinkReservedSlot(prev);
   1027  JSObject* next = GetProxyReservedSlot(prev, slot).toObjectOrNull();
   1028  MOZ_ASSERT_IF(next, IsGrayListObject(next));
   1029 
   1030  if (unlink) {
   1031    SetProxyReservedSlot(prev, slot, UndefinedValue());
   1032  }
   1033 
   1034  return next;
   1035 }
   1036 
// Called from the MarkCrossCompartmentXXX functions when gray marking finds a
// pointer that leaves the current sweep group: rather than marking the
// referent now, push the wrapper |src| onto the destination compartment's
// gcIncomingGrayPointers list so it is processed when that group is marked.
void js::gc::DelayCrossCompartmentGrayMarking(GCMarker* maybeMarker,
                                              JSObject* src) {
  MOZ_ASSERT_IF(!maybeMarker, !JS::RuntimeHeapIsBusy());
  MOZ_ASSERT(IsGrayListObject(src));
  MOZ_ASSERT(src->isMarkedGray());

  AutoTouchingGrayThings tgt;

  mozilla::Maybe<AutoLockGC> lock;
  if (maybeMarker && maybeMarker->isParallelMarking()) {
    // Synchronize access to JSCompartment::gcIncomingGrayPointers.
    //
    // TODO: Instead of building this list we could scan all incoming CCWs and
    // mark through gray ones when marking gray roots for a sweep group.
    lock.emplace(maybeMarker->runtime());
  }

  /* Called from MarkCrossCompartmentXXX functions. */
  unsigned slot = ProxyObject::grayLinkReservedSlot(src);
  JSObject* dest = CrossCompartmentPointerReferent(src);
  Compartment* comp = dest->compartment();

  // An undefined link slot means |src| is not yet on any list: push it onto
  // the front. A defined slot means it was already linked; nothing to do.
  if (GetProxyReservedSlot(src, slot).isUndefined()) {
    SetProxyReservedSlot(src, slot,
                         ObjectOrNullValue(comp->gcIncomingGrayPointers));
    comp->gcIncomingGrayPointers = src;
  } else {
    MOZ_ASSERT(GetProxyReservedSlot(src, slot).isObjectOrNull());
  }

#ifdef DEBUG
  /*
   * Assert that the object is in our list, also walking the list to check its
   * integrity.
   */
  JSObject* obj = comp->gcIncomingGrayPointers;
  bool found = false;
  while (obj) {
    if (obj == src) {
      found = true;
    }
    obj = NextIncomingCrossCompartmentPointer(obj, false);
  }
  MOZ_ASSERT(found);
#endif
}
   1083 
// Process the incoming-gray lists built by DelayCrossCompartmentGrayMarking
// for compartments in the current sweep group: mark through gray wrappers'
// referents, unlinking each list entry as we go, then clear the list heads.
void GCRuntime::markIncomingGrayCrossCompartmentPointers() {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_INCOMING_GRAY);

  for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
    MOZ_ASSERT(c->zone()->isGCMarkingBlackAndGray());
    MOZ_ASSERT_IF(c->gcIncomingGrayPointers,
                  IsGrayListObject(c->gcIncomingGrayPointers));

    // The 'true' argument unlinks each entry as the list is traversed.
    for (JSObject* src = c->gcIncomingGrayPointers; src;
         src = NextIncomingCrossCompartmentPointer(src, true)) {
      JSObject* dst = CrossCompartmentPointerReferent(src);
      MOZ_ASSERT(dst->compartment() == c);
      MOZ_ASSERT_IF(src->asTenured().isMarkedBlack(),
                    dst->asTenured().isMarkedBlack());

      // Only gray wrappers need their referent marked here; per the assert
      // above, a black wrapper implies the referent is already black.
      if (src->asTenured().isMarkedGray()) {
        TraceManuallyBarrieredEdge(marker().tracer(), &dst,
                                   "cross-compartment gray pointer");
      }
    }

    c->gcIncomingGrayPointers = nullptr;
  }
}
   1108 
// Remove |wrapper| from its referent compartment's incoming-gray list, if it
// is on it. Returns true if the wrapper was found and removed.
static bool RemoveFromGrayList(JSObject* wrapper) {
  AutoTouchingGrayThings tgt;

  if (!IsGrayListObject(wrapper)) {
    return false;
  }

  unsigned slot = ProxyObject::grayLinkReservedSlot(wrapper);
  if (GetProxyReservedSlot(wrapper, slot).isUndefined()) {
    return false; /* Not on our list. */
  }

  // Detach |wrapper| from the chain, remembering its successor.
  JSObject* tail = GetProxyReservedSlot(wrapper, slot).toObjectOrNull();
  SetProxyReservedSlot(wrapper, slot, UndefinedValue());

  Compartment* comp = CrossCompartmentPointerReferent(wrapper)->compartment();
  JSObject* obj = comp->gcIncomingGrayPointers;
  if (obj == wrapper) {
    // |wrapper| was the list head: its successor becomes the new head.
    comp->gcIncomingGrayPointers = tail;
    return true;
  }

  // Otherwise walk the list and splice |wrapper| out at its predecessor.
  while (obj) {
    unsigned slot = ProxyObject::grayLinkReservedSlot(obj);
    JSObject* next = GetProxyReservedSlot(obj, slot).toObjectOrNull();
    if (next == wrapper) {
      js::detail::SetProxyReservedSlotUnchecked(obj, slot,
                                                ObjectOrNullValue(tail));
      return true;
    }
    obj = next;
  }

  // The slot said we were on the list but the walk did not find us: the list
  // is corrupt.
  MOZ_CRASH("object not found in gray link list");
}
   1144 
   1145 void GCRuntime::resetGrayList(Compartment* comp) {
   1146  JSObject* src = comp->gcIncomingGrayPointers;
   1147  while (src) {
   1148    src = NextIncomingCrossCompartmentPointer(src, true);
   1149  }
   1150  comp->gcIncomingGrayPointers = nullptr;
   1151 }
   1152 
   1153 #ifdef DEBUG
   1154 static bool HasIncomingCrossCompartmentPointers(JSRuntime* rt) {
   1155  for (SweepGroupCompartmentsIter c(rt); !c.done(); c.next()) {
   1156    if (c->gcIncomingGrayPointers) {
   1157      return true;
   1158    }
   1159  }
   1160 
   1161  return false;
   1162 }
   1163 #endif
   1164 
// Called when a cross-compartment wrapper is nuked: drop any pending
// gray-marking bookkeeping for it.
void js::NotifyGCNukeWrapper(JSContext* cx, JSObject* wrapper) {
  MOZ_ASSERT(IsCrossCompartmentWrapper(wrapper));

  /*
   * References to target of wrapper are being removed, we no longer have to
   * remember to mark it.
   */
  RemoveFromGrayList(wrapper);
}
   1174 
// Bitflags returned by NotifyGCPreSwap recording which of the two objects was
// removed from a gray pointer list, so NotifyGCPostSwap can re-add them.
enum {
  JS_GC_SWAP_OBJECT_A_REMOVED = 1 << 0,
  JS_GC_SWAP_OBJECT_B_REMOVED = 1 << 1
};
   1179 
   1180 unsigned js::NotifyGCPreSwap(JSObject* a, JSObject* b) {
   1181  /*
   1182   * Two objects in the same compartment are about to have had their contents
   1183   * swapped.  If either of them are in our gray pointer list, then we remove
   1184   * them from the lists, returning a bitset indicating what happened.
   1185   */
   1186  return (RemoveFromGrayList(a) ? JS_GC_SWAP_OBJECT_A_REMOVED : 0) |
   1187         (RemoveFromGrayList(b) ? JS_GC_SWAP_OBJECT_B_REMOVED : 0);
   1188 }
   1189 
void js::NotifyGCPostSwap(JSObject* a, JSObject* b, unsigned removedFlags) {
  /*
   * Two objects in the same compartment have had their contents swapped.  If
   * either of them were in our gray pointer list, we re-add them again.
   */
  // Note the deliberate crossover: the objects' contents have been exchanged,
  // so the list entry removed for the old |a| now corresponds to |b|, and
  // vice versa.
  if (removedFlags & JS_GC_SWAP_OBJECT_A_REMOVED) {
    DelayCrossCompartmentGrayMarking(nullptr, b);
  }
  if (removedFlags & JS_GC_SWAP_OBJECT_B_REMOVED) {
    DelayCrossCompartmentGrayMarking(nullptr, a);
  }
}
   1202 
// Verify weak map marking invariants for every zone in the current sweep
// group: always in DEBUG builds, otherwise only under the CheckWeakMapMarking
// zeal mode. Compiles to nothing in non-debug, non-zeal builds.
static inline void MaybeCheckWeakMapMarking(GCRuntime* gc) {
#if defined(JS_GC_ZEAL) || defined(DEBUG)

  bool shouldCheck;
#  if defined(DEBUG)
  shouldCheck = true;
#  else
  shouldCheck = gc->hasZealMode(ZealMode::CheckWeakMapMarking);
#  endif

  if (shouldCheck) {
    for (SweepGroupZonesIter zone(gc); !zone.done(); zone.next()) {
      MOZ_RELEASE_ASSERT(WeakMapBase::checkMarkingForZone(zone));
    }
  }

#endif
}
   1221 
// Transition the current sweep group's zones into the MarkBlackAndGray state
// (restricting gray marking to this group) and mark any incoming gray
// pointers recorded by previously swept groups.
IncrementalProgress GCRuntime::beginMarkingSweepGroup(JS::GCContext* gcx,
                                                      SliceBudget& budget) {
#ifdef DEBUG
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);
  assertNoMarkingWork();
  for (auto& marker : markers) {
    MOZ_ASSERT(marker->markColor() == MarkColor::Black);
  }
#endif

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);

  // Change state of current group to MarkBlackAndGray to restrict gray marking
  // to this group. Note that there may be pointers to the atoms zone, and these
  // will be marked through, as they are not marked with
  // TraceCrossCompartmentEdge.
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    MOZ_ASSERT_IF(!zone->isGCMarkingBlackAndGray(),
                  zone->cellsToAssertNotGray().empty());
    zone->changeGCState(zone->initialMarkingState(), Zone::MarkBlackAndGray);
  }

  // Use the gray color for the marking below; restored on scope exit.
  AutoSetMarkColor setColorGray(marker(), MarkColor::Gray);

  // Mark incoming gray pointers from previously swept compartments.
  markIncomingGrayCrossCompartmentPointers();

  return Finished;
}
   1251 
#ifdef DEBUG
// A zone belongs to the current sweep group if it was collected in this GC
// and its next-graph-component pointer is the group after the current one.
bool GCRuntime::zoneInCurrentSweepGroup(Zone* zone) const {
  MOZ_ASSERT_IF(!zone->wasGCStarted(), !zone->gcNextGraphComponent);
  return zone->wasGCStarted() &&
         zone->gcNextGraphComponent == currentSweepGroup->nextGroup();
}
#endif
   1259 
// Mark gray roots for the current sweep group. What gets marked is driven by
// zone state, which the assertions below check against the group membership.
IncrementalProgress GCRuntime::markGrayRootsInCurrentGroup(
    JS::GCContext* gcx, SliceBudget& budget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);

  // Check that the zone state is set correctly for the current sweep group as
  // that determines what gets marked.
  MOZ_ASSERT(atomsZone()->wasGCStarted() ==
             atomsZone()->isGCMarkingBlackAndGray());
  for (NonAtomZonesIter zone(this); !zone.done(); zone.next()) {
    MOZ_ASSERT(zone->isGCMarkingBlackAndGray() ==
               zoneInCurrentSweepGroup(zone));
  }

  return markGrayRoots(budget, gcstats::PhaseKind::MARK_GRAY);
}
   1275 
   1276 IncrementalProgress GCRuntime::markGray(JS::GCContext* gcx,
   1277                                        SliceBudget& budget) {
   1278  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);
   1279 
   1280  if (markUntilBudgetExhausted(budget, useParallelMarking) == NotFinished) {
   1281    return NotFinished;
   1282  }
   1283 
   1284  return Finished;
   1285 }
   1286 
// Finish all marking for the current sweep group: weak references under the
// black color, then again under gray, after which sweeping of the group can
// begin and we must not yield.
IncrementalProgress GCRuntime::endMarkingSweepGroup(JS::GCContext* gcx,
                                                    SliceBudget& budget) {
#ifdef DEBUG
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);
  assertNoMarkingWork();
  for (auto& marker : markers) {
    MOZ_ASSERT(marker->markColor() == MarkColor::Black);
  }
  MOZ_ASSERT(!HasIncomingCrossCompartmentPointers(rt));
#endif

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);

  // First pass: weak references, with the markers still in black mode.
  if (markWeakReferencesInCurrentGroup(budget) == NotFinished) {
    return NotFinished;
  }

  AutoSetMarkColor setColorGray(marker(), MarkColor::Gray);

  // Mark transitively inside the current compartment group.
  // Second pass: the same weak reference processing, now under gray.
  if (markWeakReferencesInCurrentGroup(budget) == NotFinished) {
    return NotFinished;
  }

  MOZ_ASSERT(marker().isDrained());

  // We must not yield after this point before we start sweeping the group.
  safeToYield = false;

  // If we temporarily prevented yielding during marking, release the hold now.
  budget.keepGoing = false;

  MaybeCheckWeakMapMarking(this);

  return Finished;
}
   1323 
// Causes the given WeakCache to be swept when run.
class ImmediateSweepWeakCacheTask : public GCParallelTask {
  // Zone the cache belongs to, or null for runtime-wide caches (see
  // IterateWeakCaches / PrepareWeakCacheTasks).
  Zone* zone;
  JS::detail::WeakCacheBase& cache;

 public:
  ImmediateSweepWeakCacheTask(GCRuntime* gc, Zone* zone,
                              JS::detail::WeakCacheBase& wc)
      : GCParallelTask(gc, gcstats::PhaseKind::SWEEP_WEAK_CACHES),
        zone(zone),
        cache(wc) {}

  // Move constructor so tasks can live in a Vector.
  ImmediateSweepWeakCacheTask(ImmediateSweepWeakCacheTask&& other) noexcept
      : GCParallelTask(std::move(other)),
        zone(other.zone),
        cache(other.cache) {}

  ImmediateSweepWeakCacheTask(const ImmediateSweepWeakCacheTask&) = delete;

  // Runs on a helper thread: drop the helper-thread lock while sweeping the
  // cache with a SweepingTracer.
  void run(AutoLockHelperThreadState& lock) override {
    AutoUnlockHelperThreadState unlock(lock);
    AutoSetThreadIsSweeping threadIsSweeping(zone);
    SweepingTracer trc(gc->rt);
    cache.traceWeak(&trc, JS::detail::WeakCacheBase::LockStoreBuffer);
  }
};
   1350 
// Update atom marking bitmaps after marking: refine collected zones' bitmaps,
// then account for atoms used by zones that were not collected, and sweep the
// small atoms-related tables while we are at it.
void GCRuntime::updateAtomsBitmap() {
  atomMarking.refineZoneBitmapsForCollectedZones(this);

  // Mark atoms used by uncollected zones after refining the atoms bitmaps.
  auto& atomsToMark = atomsUsedByUncollectedZones.ref();
  if (atomsToMark) {
    atomMarking.markAtomsUsedByUncollectedZones(this, std::move(atomsToMark));
  }

  // For convenience sweep these tables non-incrementally as part of bitmap
  // sweeping; they are likely to be much smaller than the main atoms table.
  SweepingTracer trc(rt);
  rt->symbolRegistry().traceWeak(&trc);
}
   1365 
   1366 void GCRuntime::sweepCCWrappers() {
   1367  SweepingTracer trc(rt);
   1368  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
   1369    zone->traceWeakCCWEdges(&trc);
   1370  }
   1371 }
   1372 
   1373 void GCRuntime::sweepRealmGlobals() {
   1374  SweepingTracer trc(rt);
   1375  for (SweepGroupRealmsIter r(this); !r.done(); r.next()) {
   1376    AutoSetThreadIsSweeping threadIsSweeping(r->zone());
   1377    r->traceWeakGlobalEdge(&trc);
   1378  }
   1379 }
   1380 
   1381 void GCRuntime::sweepMisc() {
   1382  SweepingTracer trc(rt);
   1383  for (SweepGroupRealmsIter r(this); !r.done(); r.next()) {
   1384    AutoSetThreadIsSweeping threadIsSweeping(r->zone());
   1385    r->traceWeakSavedStacks(&trc);
   1386  }
   1387  for (SweepGroupCompartmentsIter c(this); !c.done(); c.next()) {
   1388    AutoSetThreadIsSweeping threadIsSweeping(c->zone());
   1389    c->traceWeakNativeIterators(&trc);
   1390  }
   1391 }
   1392 
// Cancel compression tasks whose ScriptSource died in this GC and attach any
// compression results that have already completed.
void GCRuntime::sweepCompressionTasks() {
  // Discard pending compression entries for ScriptSources that have no
  // other references.
  rt->pendingCompressions().eraseIf(
      [&](const auto& entry) { return entry.shouldCancel(); });

  // Attach finished compression tasks.
  AutoLockHelperThreadState lock;
  AttachFinishedCompressions(rt, lock);
}
   1403 
// Sweep the weak maps of every zone in the current sweep group, removing
// entries whose keys died, and drop the now-unneeded ephemeron edge tables.
void GCRuntime::sweepWeakMaps() {
  SweepingTracer trc(rt);
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    /* No need to look up any more weakmap keys from this sweep group. */
    zone->gcEphemeronEdges().clearAndCompact();

    // Lock the storebuffer since this may access it when rehashing or resizing
    // the tables.
    AutoLockStoreBuffer lock(rt);
    zone->sweepWeakMaps(&trc);
  }
}
   1416 
   1417 void GCRuntime::sweepUniqueIds() {
   1418  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
   1419    AutoSetThreadIsSweeping threadIsSweeping(zone);
   1420    zone->sweepUniqueIds();
   1421  }
   1422 }
   1423 
// Remove unique-id table entries whose cells died in this GC (see
// UniqueIdGCPolicy::traceWeak below for the per-entry predicate).
void JS::Zone::sweepUniqueIds() {
  SweepingTracer trc(runtimeFromAnyThread());
  uniqueIds().traceWeak(&trc);
}
   1428 
/* static */
// Per-entry predicate for sweeping the unique-ids table: keep the entry iff
// its cell is still marked (black or gray).
bool UniqueIdGCPolicy::traceWeak(JSTracer* trc, Cell** keyp, uint64_t* valuep) {
  // Since this is only ever used for sweeping, we can optimize it for that
  // case. (Compacting GC updates this table manually when it moves a cell.)
  MOZ_ASSERT(trc->kind() == JS::TracerKind::Sweeping);
  return (*keyp)->isMarkedAny();
}
   1436 
// Sweep finalization observer edges for each zone in the current sweep group.
void GCRuntime::sweepFinalizationObserversOnMainThread() {
  // This calls back into the browser which expects to be called from the main
  // thread.
  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);
  gcstats::AutoPhase ap2(stats(),
                         gcstats::PhaseKind::SWEEP_FINALIZATION_OBSERVERS);
  SweepingTracer trc(rt);
  // Hold the store buffer lock across the whole sweep.
  AutoLockStoreBuffer lock(rt);
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    traceWeakFinalizationObserverEdges(&trc, zone);
  }
}
   1449 
   1450 void GCRuntime::startTask(GCParallelTask& task,
   1451                          AutoLockHelperThreadState& lock) {
   1452  if (!CanUseExtraThreads()) {
   1453    AutoUnlockHelperThreadState unlock(lock);
   1454    task.runFromMainThread();
   1455    stats().recordParallelPhase(task.phaseKind, task.duration());
   1456    return;
   1457  }
   1458 
   1459  task.startWithLockHeld(lock);
   1460 }
   1461 
// Wait for a parallel GC task to complete, accounting the wait time under the
// JOIN_PARALLEL_TASKS phase.
void GCRuntime::joinTask(GCParallelTask& task,
                         AutoLockHelperThreadState& lock) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::JOIN_PARALLEL_TASKS);
  task.joinWithLockHeld(lock);
}
   1467 
// Main-thread-only sweeping of debugger-related data structures.
void GCRuntime::sweepDebuggerOnMainThread(JS::GCContext* gcx) {
  SweepingTracer trc(rt);
  AutoLockStoreBuffer lock(rt);

  // Detach unreachable debuggers and global objects from each other.
  // This can modify weakmaps and so must happen before weakmap sweeping.
  DebugAPI::sweepAll(gcx);

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);

  // Sweep debug environment information. This performs lookups in the Zone's
  // unique IDs table and so must not happen in parallel with sweeping that
  // table.
  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::SWEEP_MISC);
    for (SweepGroupRealmsIter r(rt); !r.done(); r.next()) {
      r->traceWeakDebugEnvironmentEdges(&trc);
    }
  }
}
   1488 
// Main-thread sweeping of JIT data: the jitcode global table, per-zone
// JitScripts, then JitZones and JIT activations. The ordering between the
// phases below is load-bearing; see the comments.
void GCRuntime::sweepJitDataOnMainThread(JS::GCContext* gcx) {
  SweepingTracer trc(rt);
  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);

    // Bug 1071218: the following method has not yet been refactored to
    // work on a single zone-group at once.

    // Sweep entries containing about-to-be-finalized JitCode in the
    // JitcodeGlobalTable.
    jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  }

  // Trace weak edges in JitScripts to remove edges to dying GC things.
  {
    gcstats::AutoPhase apdc(stats(), gcstats::PhaseKind::SWEEP_JIT_SCRIPTS);
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      zone->traceWeakJitScripts(&trc);
    }
  }

  // JitZone must be swept *after* sweeping JitScripts, because
  // Zone::traceWeakJitScripts might access CacheIRStubInfos deleted here.
  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_JIT_DATA);

    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      if (jit::JitZone* jitZone = zone->jitZone()) {
        jitZone->traceWeak(&trc, zone);
      }
    }

    JSContext* cx = rt->mainContextFromOwnThread();
    jit::TraceWeakJitActivationsInSweepingZones(cx, &trc);
  }
}
   1525 
   1526 void GCRuntime::sweepObjectsWithWeakPointers() {
   1527  SweepingTracer trc(rt);
   1528  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
   1529    AutoSetThreadIsSweeping threadIsSweeping(zone);
   1530    zone->sweepObjectsWithWeakPointers(&trc);
   1531  }
   1532 }
   1533 
// Sweep the zone's registered objects-with-weak-pointers: remove dead objects
// from the list, and give each surviving object's trace hook a chance to
// clear its weak pointers.
void JS::Zone::sweepObjectsWithWeakPointers(JSTracer* trc) {
  MOZ_ASSERT(trc->traceWeakEdges());

  // mutableEraseIf: returning true removes the entry from the list.
  objectsWithWeakPointers.ref().mutableEraseIf([&](JSObject*& obj) {
    if (!TraceManuallyBarrieredWeakEdge(trc, &obj, "objectsWithWeakPointers")) {
      // Object itself is dead.
      return true;
    }

    // Call trace hook to sweep weak pointers.
    obj->getClass()->doTrace(trc, obj);
    return false;
  });
}
   1548 
// Tasks for eagerly sweeping weak caches off-thread, populated by
// PrepareWeakCacheTasks.
using WeakCacheTaskVector =
    mozilla::Vector<ImmediateSweepWeakCacheTask, 0, SystemAllocPolicy>;
   1551 
// Call a functor for all weak caches that need to be swept in the current
// sweep group. Per-zone caches are visited first (with their zone), then
// runtime-wide caches (with a null zone). Stops and returns false as soon as
// the functor does.
template <typename Functor>
static inline bool IterateWeakCaches(JSRuntime* rt, Functor f) {
  for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
    for (JS::detail::WeakCacheBase* cache : zone->weakCaches()) {
      if (!f(cache, zone.get())) {
        return false;
      }
    }
  }

  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    if (!f(cache, nullptr)) {
      return false;
    }
  }

  return true;
}
   1572 
static bool PrepareWeakCacheTasks(JSRuntime* rt,
                                  WeakCacheTaskVector* immediateTasks) {
  // Start incremental sweeping for caches that support it or add to a vector
  // of sweep tasks to run on a helper thread.

  MOZ_ASSERT(immediateTasks->empty());

  GCRuntime* gc = &rt->gc;
  bool ok =
      IterateWeakCaches(rt, [&](JS::detail::WeakCacheBase* cache, Zone* zone) {
        // Empty caches need no sweeping at all.
        if (cache->empty()) {
          return true;
        }

        // Caches that support incremental sweeping will be swept later.
        if (zone && cache->setIncrementalBarrierTracer(&gc->sweepingTracer)) {
          return true;
        }

        // Runtime-wide caches (zone == nullptr) and caches without
        // incremental support get an eager sweep task; OOM aborts iteration.
        return immediateTasks->emplaceBack(gc, zone, *cache);
      });

  // On failure (OOM) drop any tasks created so far; the caller is expected to
  // fall back to sweeping on the main thread instead.
  if (!ok) {
    immediateTasks->clearAndFree();
  }

  return ok;
}
   1601 
// Fallback used when PrepareWeakCacheTasks fails: sweep every weak cache
// synchronously, cancelling any incremental barriers already installed.
static void SweepAllWeakCachesOnMainThread(JSRuntime* rt) {
  // If we ran out of memory, do all the work on the main thread.
  gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::SWEEP_WEAK_CACHES);
  SweepingTracer trc(rt);
  IterateWeakCaches(rt, [&](JS::detail::WeakCacheBase* cache, Zone* zone) {
    if (cache->needsIncrementalBarrier()) {
      cache->setIncrementalBarrierTracer(nullptr);
    }
    cache->traceWeak(&trc, JS::detail::WeakCacheBase::LockStoreBuffer);
    return true;
  });
}
   1614 
// Give the embedding a chance to update or drop its weak pointers into GC
// things in the current sweep group. Runs the JSFINALIZE_GROUP_PREPARE and
// JSFINALIZE_GROUP_START finalize callbacks around the per-zone and
// per-compartment weak pointer callbacks.
void GCRuntime::sweepEmbeddingWeakPointers(JS::GCContext* gcx) {
  using namespace gcstats;

  // Hold the store buffer lock across all embedding callbacks below.
  AutoLockStoreBuffer lock(rt);

  AutoPhase ap(stats(), PhaseKind::FINALIZE_START);
  callFinalizeCallbacks(gcx, JSFINALIZE_GROUP_PREPARE);
  {
    AutoPhase ap2(stats(), PhaseKind::WEAK_ZONES_CALLBACK);
    callWeakPointerZonesCallbacks(&sweepingTracer);
  }
  {
    AutoPhase ap2(stats(), PhaseKind::WEAK_COMPARTMENT_CALLBACK);
    // Compartment callbacks run for every compartment of every zone in the
    // current sweep group.
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
        callWeakPointerCompartmentCallbacks(&sweepingTracer, comp);
      }
    }
  }
  callFinalizeCallbacks(gcx, JSFINALIZE_GROUP_START);
}
   1636 
IncrementalProgress GCRuntime::beginSweepingSweepGroup(JS::GCContext* gcx,
                                                       SliceBudget& budget) {
  /*
   * Begin sweeping the group of zones in currentSweepGroup, performing
   * actions that must be done before yielding to caller.
   */

  using namespace gcstats;

  AutoSCC scc(stats(), sweepGroupIndex);
  finishMarkingDuringSweeping = false;

  bool sweepingAtoms = false;
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    /* Set the GC state to sweeping. */
    zone->changeGCState(Zone::MarkBlackAndGray, Zone::Sweep);

    /* Purge the ArenaLists before sweeping. */
    zone->arenas.checkSweepStateNotInUse();
    zone->arenas.unmarkPreMarkedFreeCells();
    zone->arenas.clearFreeLists();

    if (zone->isAtomsZone()) {
      sweepingAtoms = true;
    }
  }

  // Update the atom marking bitmaps. This marks atoms referenced by
  // uncollected zones so cannot be done in parallel with the other sweeping
  // work below.
  if (sweepingAtoms) {
    AutoPhase ap(stats(), PhaseKind::UPDATE_ATOMS_BITMAP);
    updateAtomsBitmap();
  }

#ifdef DEBUG
  // Now that the final mark state has been computed check any gray marking
  // assertions we delayed until this point.
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    for (const auto* cell : zone->cellsToAssertNotGray()) {
      JS::AssertCellIsNotGray(cell);
    }
    zone->cellsToAssertNotGray().clearAndFree();
  }
#endif

#ifdef JS_GC_ZEAL
  validateIncrementalMarking();
#endif

  AutoSetThreadIsSweeping threadIsSweeping;

  // Disable incremental barriers for all zones while we are sweeping/finalizing
  // zones in this sweep group. Set the |disableBarriersForSweeping| flag so we
  // enable/disable the barriers on yield/resume.
  MOZ_ASSERT(!disableBarriersForSweeping);
  disableBarriersForSweeping = true;
  disableIncrementalBarriers();

  // This must happen before sweeping realm globals.
  sweepDebuggerOnMainThread(gcx);

  // FinalizationRegistry sweeping touches weak maps and so must not run in
  // parallel with that. This triggers a read barrier and can add marking work
  // for zones that are still marking. Must happen before sweeping realm
  // globals.
  sweepFinalizationObserversOnMainThread();

  // This must happen before updating embedding weak pointers.
  sweepRealmGlobals();

  sweepEmbeddingWeakPointers(gcx);

  // Run the bulk of the per-compartment sweeping as parallel tasks where
  // possible; each AutoRunParallelTask starts on construction and is joined
  // when it goes out of scope at the end of this block.
  {
    AutoLockHelperThreadState lock;

    AutoPhase ap(stats(), PhaseKind::SWEEP_COMPARTMENTS);

    AutoRunParallelTask sweepCCWrappers(this, &GCRuntime::sweepCCWrappers,
                                        PhaseKind::SWEEP_CC_WRAPPER,
                                        GCUse::Sweeping, lock);
    AutoRunParallelTask sweepMisc(this, &GCRuntime::sweepMisc,
                                  PhaseKind::SWEEP_MISC, GCUse::Sweeping, lock);
    AutoRunParallelTask sweepCompTasks(this, &GCRuntime::sweepCompressionTasks,
                                       PhaseKind::SWEEP_COMPRESSION,
                                       GCUse::Sweeping, lock);
    AutoRunParallelTask sweepWeakMaps(this, &GCRuntime::sweepWeakMaps,
                                      PhaseKind::SWEEP_WEAKMAPS,
                                      GCUse::Sweeping, lock);
    AutoRunParallelTask sweepUniqueIds(this, &GCRuntime::sweepUniqueIds,
                                       PhaseKind::SWEEP_UNIQUEIDS,
                                       GCUse::Sweeping, lock);
    AutoRunParallelTask sweepWeakPointers(
        this, &GCRuntime::sweepObjectsWithWeakPointers,
        PhaseKind::SWEEP_WEAK_POINTERS, GCUse::Sweeping, lock);

    // Weak caches are swept off-thread if task creation succeeds; otherwise
    // they are all swept on the main thread below.
    WeakCacheTaskVector sweepCacheTasks;
    bool canSweepWeakCachesOffThread =
        PrepareWeakCacheTasks(rt, &sweepCacheTasks);
    if (canSweepWeakCachesOffThread) {
      weakCachesToSweep.ref().emplace(currentSweepGroup);
      for (auto& task : sweepCacheTasks) {
        startTask(task, lock);
      }
    }

    {
      AutoUnlockHelperThreadState unlock(lock);
      sweepJitDataOnMainThread(gcx);

      if (!canSweepWeakCachesOffThread) {
        MOZ_ASSERT(sweepCacheTasks.empty());
        SweepAllWeakCachesOnMainThread(rt);
      }
    }

    for (auto& task : sweepCacheTasks) {
      joinTask(task, lock);
    }
  }

  if (sweepingAtoms) {
    startSweepingAtomsTable();
  }

  // Queue all GC things in all zones for sweeping, either on the foreground
  // or on the background thread.

  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    zone->arenas.queueForegroundThingsForSweep();
    constexpr AllocKinds backgroundKinds =
        BackgroundObjectFinalizePhase + BackgroundTrivialFinalizePhase;
    initBackgroundSweep(zone, gcx, backgroundKinds);
  }

  MOZ_ASSERT(!sweepZone);

  safeToYield = true;
  markOnBackgroundThreadDuringSweeping = CanUseExtraThreads();

  return Finished;
}
   1779 
   1780 #ifdef JS_GC_ZEAL
   1781 bool GCRuntime::shouldYieldForZeal(ZealMode mode) {
   1782  bool yield = useZeal && hasZealMode(mode);
   1783 
   1784  // Only yield on the first sweep slice for this mode.
   1785  bool firstSweepSlice = initialState != State::Sweep;
   1786  if (mode == ZealMode::IncrementalMultipleSlices && !firstSweepSlice) {
   1787    yield = false;
   1788  }
   1789 
   1790  return yield;
   1791 }
   1792 #endif
   1793 
// Finish sweeping the current sweep group: join background marking, run the
// JSFINALIZE_GROUP_END callbacks, move swept zones to the Finished state and
// hand the zones off to the background sweep thread.
IncrementalProgress GCRuntime::endSweepingSweepGroup(JS::GCContext* gcx,
                                                     SliceBudget& budget) {
  // This is to prevent a race between markTask checking the zone state and
  // us changing it below.
  if (joinBackgroundMarkTask() == NotFinished) {
    return NotFinished;
  }

  assertNoMarkingWork();

  // Disable background marking during sweeping until we start sweeping the next
  // zone group.
  markOnBackgroundThreadDuringSweeping = false;

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
    AutoLockStoreBuffer lock(rt);
    callFinalizeCallbacks(gcx, JSFINALIZE_GROUP_END);
  }

  /* Free LIFO blocks on a background thread if possible. */
  startBackgroundFree();

  /* Update the GC state for zones we have swept. */
  for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
    if (jit::JitZone* jitZone = zone->jitZone()) {
      // Clear out any small pools that we're hanging on to.
      jitZone->execAlloc().purge();
    }
    AutoLockGC lock(this);
    zone->changeGCState(Zone::Sweep, Zone::Finished);
    zone->arenas.unmarkPreMarkedFreeCells();
    zone->arenas.checkNoArenasToUpdate();
    zone->pretenuring.clearCellCountsInNewlyCreatedArenas();
  }

  /* Ensure the initial minor GC has finished sweeping. */
  MOZ_ASSERT(minorGCNumber >= initialMinorGCNumber);
  if (minorGCNumber == initialMinorGCNumber) {
    nursery().joinSweepTask();
  }

  /*
   * Start background thread to sweep zones if required, sweeping any atoms
   * zones last if present.
   */
  ZoneList zones;
  {
    BufferAllocator::MaybeLock lock;
    for (SweepGroupZonesIter zone(this); !zone.done(); zone.next()) {
      // Atoms zones are appended so they end up at the back of the list and
      // are swept last; other zones are prepended.
      if (zone->isAtomsZone()) {
        zones.append(zone);
      } else {
        zones.prepend(zone);
      }

      zone->bufferAllocator.startMajorSweeping(lock);
    }
  }
  queueZonesAndStartBackgroundSweep(std::move(zones));

  // Re-enable incremental barriers for all zones now that we are done sweeping
  // zones in this group.
  MOZ_ASSERT(disableBarriersForSweeping);
  disableBarriersForSweeping = false;
  enableIncrementalBarriers();

  return Finished;
}
   1863 
// Perform any marking work queued during sweeping, either by kicking off the
// background mark task or by marking directly on the main thread.
IncrementalProgress GCRuntime::markDuringSweeping(JS::GCContext* gcx,
                                                  SliceBudget& budget) {
  MOZ_ASSERT(markTask.isIdle());

  if (markOnBackgroundThreadDuringSweeping) {
    // Only start the task if there is marking work to do.
    if (!marker().isDrained() || hasDelayedMarking()) {
      AutoLockHelperThreadState lock;
      MOZ_ASSERT(markTask.isIdle(lock));
      markTask.setBudget(budget);
      markTask.startOrRunIfIdle(lock);
    }
    return Finished;  // This means don't yield to the mutator here.
  }

  // Foreground path: mark on this thread until the budget runs out.
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK);
  return markUntilBudgetExhausted(budget, useParallelMarking);
}
   1881 
// One-time setup at the start of the whole sweep phase: drop string wrappers,
// partition the collecting zones into sweep groups and verify the sweep
// action state machine is in its initial state.
void GCRuntime::beginSweepPhase(JS::GCReason reason, AutoGCSession& session) {
  /*
   * Sweep phase.
   *
   * Finalize as we sweep, outside of lock but with RuntimeHeapIsBusy()
   * true so that any attempt to allocate a GC-thing from a finalizer will
   * fail, rather than nest badly and leave the unmarked newborn to be swept.
   */

  MOZ_ASSERT(!abortSweepAfterCurrentGroup);
  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);

#ifdef DEBUG
  releaseHeldRelocatedArenas();
  verifyAllChunks();
#endif

#ifdef JS_GC_ZEAL
  computeNonIncrementalMarkingForValidation(session);
#endif

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);

  AssertNoWrappersInGrayList(rt);
  dropStringWrappers();

  groupZonesForSweeping(reason);

  // No sweep actions should be in progress from a previous collection.
  sweepActions->assertFinished();
}
   1912 
// Finalize arenas of |thingKind| on the main thread, within |sliceBudget|.
// Returns false if the budget was exhausted before finishing; in that case
// |sweepList| holds the partial state and this is called again to resume.
bool ArenaLists::foregroundFinalize(JS::GCContext* gcx, AllocKind thingKind,
                                    SliceBudget& sliceBudget,
                                    SortedArenaList& sweepList) {
  checkNoArenasToUpdateForKind(thingKind);

  // Non-empty arenas are reused for use for new allocations as soon as the
  // finalizers for that allocation kind have run. Empty arenas are only
  // released when everything in the zone has been swept (see
  // GCRuntime::sweepBackgroundThings for more details).
  ArenaList& arenas = collectingArenaList(thingKind);
  if (!FinalizeArenas<ReleaseEmpty::No>(gcx, arenas, sweepList, thingKind,
                                        sliceBudget)) {
    return false;
  }

  // Save empty arenas for later release and merge the swept arenas back into
  // the zone's live arena lists.
  sweepList.extractEmptyTo(&savedEmptyArenas.ref());
  ArenaList sweptArenas = sweepList.convertToArenaList();
  mergeSweptArenas(thingKind, sweptArenas);
  return true;
}
   1933 
// Helper-thread task that continues marking during sweeping. The budget is
// replaced via setBudget() before each run.
BackgroundMarkTask::BackgroundMarkTask(GCRuntime* gc)
    : GCParallelTask(gc, gcstats::PhaseKind::MARK, GCUse::Marking),
      budget(SliceBudget::unlimited()) {}
   1937 
void js::gc::BackgroundMarkTask::run(AutoLockHelperThreadState& lock) {
  // Marking runs without the helper thread lock held.
  AutoUnlockHelperThreadState unlock(lock);

  // Time reporting is handled separately for parallel tasks. The result is
  // stored so GCRuntime::joinBackgroundMarkTask can pick it up.
  gc->sweepMarkResult = gc->markUntilBudgetExhausted(
      this->budget, GCRuntime::SingleThreadedMarking, DontReportMarkTime);
}
   1945 
// Wait for the background mark task to finish (if running) and return the
// progress it made, resetting the stored result for the next run.
IncrementalProgress GCRuntime::joinBackgroundMarkTask() {
  AutoLockHelperThreadState lock;
  if (markTask.isIdle(lock)) {
    return Finished;
  }

  joinTask(markTask, lock);

  // Consume the result recorded by BackgroundMarkTask::run.
  IncrementalProgress result = sweepMarkResult;
  sweepMarkResult = Finished;
  return result;
}
   1958 
// Sweep a single GC thing: call its sweep() method if it was not marked
// during this collection.
template <typename T>
static void SweepThing(JS::GCContext* gcx, T* thing) {
  if (!TenuredThingIsMarkedAny(thing)) {
    thing->sweep(gcx);
  }
}
   1965 
// Incrementally sweep the arenas of |arenaList| starting from *arenasToSweep,
// calling SweepThing on every cell. Returns false if the slice budget ran
// out, leaving *arenasToSweep pointing at the arena to resume from; returns
// true and clears *arenasToSweep when the whole list has been swept.
template <typename T>
static bool SweepArenaList(JS::GCContext* gcx, ArenaList& arenaList,
                           Arena** arenasToSweep, SliceBudget& sliceBudget) {
  if (!*arenasToSweep) {
    return true;
  }

  DebugOnly<Zone*> zone = (*arenasToSweep)->zone();
  MOZ_ASSERT(zone->isGCSweeping());

  AllocKind kind = MapTypeToAllocKind<T>::kind;
  size_t steps = Arena::thingsPerArena(kind);

  for (auto arena = arenaList.iterFrom(*arenasToSweep); !arena.done();
       arena.next()) {
    MOZ_ASSERT(arena->zone() == zone);
    MOZ_ASSERT(arena->getAllocKind() == kind);

    // The budget check happens before sweeping each arena, so a single arena
    // is always swept atomically.
    if (sliceBudget.isOverBudget()) {
      *arenasToSweep = arena.get();
      return false;
    }

    for (ArenaCellIterUnderGC cell(arena.get()); !cell.done(); cell.next()) {
      SweepThing(gcx, cell.as<T>());
    }

    // Charge the budget one step per cell capacity of the arena.
    sliceBudget.step(steps);
  }

  *arenasToSweep = nullptr;
  return true;
}
   1999 
// Begin incremental sweeping of the atoms table, populating
// |maybeAtomsToSweep| with the state needed by sweepAtomsTable. If the
// incremental setup fails, the table is swept immediately instead.
void GCRuntime::startSweepingAtomsTable() {
  auto& maybeAtoms = maybeAtomsToSweep.ref();
  MOZ_ASSERT(maybeAtoms.isNothing());

  AtomsTable* atomsTable = rt->atomsForSweeping();
  if (!atomsTable) {
    return;
  }

  // Create secondary tables to hold new atoms added while we're sweeping the
  // main tables incrementally.
  if (!atomsTable->startIncrementalSweep(maybeAtoms)) {
    // Fallback: sweep the whole table non-incrementally right now.
    SweepingTracer trc(rt);
    atomsTable->traceWeak(&trc);
  }
}
   2016 
// Sweep action: incrementally sweep the atoms table within |budget|. Returns
// Finished if there is nothing (left) to sweep, NotFinished to be resumed in
// a later slice.
IncrementalProgress GCRuntime::sweepAtomsTable(JS::GCContext* gcx,
                                                SliceBudget& budget) {
  if (!atomsZone()->isGCSweeping()) {
    return Finished;
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_ATOMS_TABLE);

  // State is set up by startSweepingAtomsTable; nothing to do if that didn't
  // run or sweeping already completed.
  auto& maybeAtoms = maybeAtomsToSweep.ref();
  if (!maybeAtoms) {
    return Finished;
  }

  if (!rt->atomsForSweeping()->sweepIncrementally(maybeAtoms.ref(), budget)) {
    return NotFinished;
  }

  maybeAtoms.reset();

  return Finished;
}
   2038 
// Parallel-work callback: sweep one weak cache and clear its incremental
// barrier. Returns the number of budget steps the sweep consumed.
static size_t IncrementalSweepWeakCache(GCRuntime* gc,
                                        const WeakCacheToSweep& item) {
  AutoSetThreadIsSweeping threadIsSweeping(item.zone);

  JS::detail::WeakCacheBase* cache = item.cache;
  // Only caches set up for incremental sweeping should reach this point.
  MOZ_ASSERT(cache->needsIncrementalBarrier());

  SweepingTracer trc(gc->rt);
  size_t steps =
      cache->traceWeak(&trc, JS::detail::WeakCacheBase::LockStoreBuffer);
  cache->setIncrementalBarrierTracer(nullptr);

  return steps;
}
   2053 
// Iterate over all weak caches needing an incremental sweep in the zones of
// |sweepGroup|, starting from the group's first zone.
WeakCacheSweepIterator::WeakCacheSweepIterator(JS::Zone* sweepGroup)
    : sweepZone(sweepGroup), sweepCache(sweepZone->weakCaches().getFirst()) {
  settle();
}
   2058 
// The iterator is exhausted once settle() has run off the end of the zone list.
bool WeakCacheSweepIterator::done() const { return !sweepZone; }
   2060 
// Return the current (cache, zone) pair to sweep.
WeakCacheToSweep WeakCacheSweepIterator::get() const {
  MOZ_ASSERT(!done());

  return {sweepCache, sweepZone};
}
   2066 
// Advance to the next cache needing an incremental sweep, moving on to the
// next zone if necessary (see settle()).
void WeakCacheSweepIterator::next() {
  MOZ_ASSERT(!done());

  sweepCache = sweepCache->getNext();
  settle();
}
   2073 
// Advance past caches that don't need an incremental barrier sweep, moving to
// the next zone in the group when the current zone's cache list is exhausted.
// Leaves the iterator either on a cache needing a sweep or fully done.
void WeakCacheSweepIterator::settle() {
  while (sweepZone) {
    // Skip caches in this zone that don't need incremental sweeping.
    while (sweepCache && !sweepCache->needsIncrementalBarrier()) {
      sweepCache = sweepCache->getNext();
    }

    if (sweepCache) {
      break;
    }

    // Move to the next zone in the sweep group, if any.
    sweepZone = sweepZone->nextNodeInGroup();
    if (sweepZone) {
      sweepCache = sweepZone->weakCaches().getFirst();
    }
  }

  // Postcondition: either fully done or positioned on a sweepable cache.
  MOZ_ASSERT((!sweepZone && !sweepCache) ||
             (sweepCache && sweepCache->needsIncrementalBarrier()));
}
   2093 
// Sweep action: incrementally sweep the weak caches set up by
// PrepareWeakCacheTasks, distributing the work across helper threads within
// |budget|. Returns NotFinished if work remains for a later slice.
IncrementalProgress GCRuntime::sweepWeakCaches(JS::GCContext* gcx,
                                                SliceBudget& budget) {
  if (weakCachesToSweep.ref().isNothing()) {
    return Finished;
  }

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_COMPARTMENTS);

  WeakCacheSweepIterator& work = weakCachesToSweep.ref().ref();

  AutoLockHelperThreadState lock;

  {
    // Parallel work starts on construction of |runWork| and is joined when it
    // goes out of scope; the helper lock is released while it runs.
    AutoRunParallelWork runWork(this, IncrementalSweepWeakCache,
                                gcstats::PhaseKind::SWEEP_WEAK_CACHES,
                                GCUse::Sweeping, work, budget, lock);
    AutoUnlockHelperThreadState unlock(lock);
  }

  if (work.done()) {
    weakCachesToSweep.ref().reset();
    return Finished;
  }

  return NotFinished;
}
   2120 
// Sweep action: finalize arenas of |sweepAllocKind| in |sweepZone| on the
// main thread. Partial progress is carried between slices in
// |foregroundFinalizedArenas| along with the zone/kind it belongs to.
IncrementalProgress GCRuntime::finalizeAllocKind(JS::GCContext* gcx,
                                                  SliceBudget& budget) {
  MOZ_ASSERT(sweepZone->isGCSweeping());

  auto& finalizedArenas = foregroundFinalizedArenas.ref();
  if (!finalizedArenas) {
    // First slice for this zone/kind: set up the resume state.
    finalizedArenas.emplace(sweepAllocKind);
    foregroundFinalizedZone = sweepZone;
    foregroundFinalizedAllocKind = sweepAllocKind;
  } else {
    // Resuming: the saved state must match the current zone and kind.
    MOZ_ASSERT(finalizedArenas->allocKind() == sweepAllocKind);
    MOZ_ASSERT(foregroundFinalizedZone == sweepZone);
    MOZ_ASSERT(foregroundFinalizedAllocKind == sweepAllocKind);
  }

  AutoSetThreadIsFinalizing threadIsFinalizing(gcx);
  ArenaLists& arenaLists = sweepZone->arenas;
  if (!arenaLists.foregroundFinalize(gcx, sweepAllocKind, budget,
                                     finalizedArenas.ref())) {
    return NotFinished;
  }

  // Done with this zone/kind: clear the resume state.
  finalizedArenas.reset();
  foregroundFinalizedZone = nullptr;
  foregroundFinalizedAllocKind = AllocKind::LIMIT;

  return Finished;
}
   2149 
   2150 SortedArenaList* GCRuntime::maybeGetForegroundFinalizedArenas(Zone* zone,
   2151                                                              AllocKind kind) {
   2152  MOZ_ASSERT(zone);
   2153  MOZ_ASSERT(IsValidAllocKind(kind));
   2154 
   2155  auto& finalizedArenas = foregroundFinalizedArenas.ref();
   2156 
   2157  if (finalizedArenas.isNothing() || zone != foregroundFinalizedZone ||
   2158      kind != foregroundFinalizedAllocKind) {
   2159    return nullptr;
   2160  }
   2161 
   2162  return finalizedArenas.ptr();
   2163 }
   2164 
IncrementalProgress GCRuntime::sweepPropMapTree(JS::GCContext* gcx,
                                                 SliceBudget& budget) {
  // Remove dead SharedPropMaps from the tree. This happens incrementally on the
  // main thread. PropMaps are finalized later on a background thread.

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_PROP_MAP);

  ArenaLists& al = sweepZone->arenas;

  // Sweep compact then normal prop maps; each call resumes from the position
  // saved in the corresponding *ArenasToUpdate field when over budget.
  if (!SweepArenaList<CompactPropMap>(
          gcx, al.collectingArenaList(AllocKind::COMPACT_PROP_MAP),
          &al.gcCompactPropMapArenasToUpdate.ref(), budget)) {
    return NotFinished;
  }
  if (!SweepArenaList<NormalPropMap>(
          gcx, al.collectingArenaList(AllocKind::NORMAL_PROP_MAP),
          &al.gcNormalPropMapArenasToUpdate.ref(), budget)) {
    return NotFinished;
  }

  return Finished;
}
   2187 
   2188 // An iterator for a standard container that provides an STL-like begin()/end()
   2189 // interface. This iterator provides a done()/get()/next() style interface.
   2190 template <typename Container>
   2191 class ContainerIter {
   2192  using Iter = decltype(std::declval<const Container>().begin());
   2193  using Elem = decltype(*std::declval<Iter>());
   2194 
   2195  Iter iter;
   2196  const Iter end;
   2197 
   2198 public:
   2199  explicit ContainerIter(const Container& container)
   2200      : iter(container.begin()), end(container.end()) {}
   2201 
   2202  bool done() const { return iter == end; }
   2203 
   2204  Elem get() const { return *iter; }
   2205 
   2206  void next() {
   2207    MOZ_ASSERT(!done());
   2208    ++iter;
   2209  }
   2210 };
   2211 
// IncrementalIter is a template class that makes a normal iterator into one
// that can be used to perform incremental work by using external state that
// persists between instantiations. The state is only initialised on the first
// use and subsequent uses carry on from the previous state.
template <typename Iter>
struct IncrementalIter {
  using State = mozilla::Maybe<Iter>;
  using Elem = decltype(std::declval<Iter>().get());

 private:
  // Reference to the caller-owned persistent state.
  State& maybeIter;

 public:
  template <typename... Args>
  explicit IncrementalIter(State& maybeIter, Args&&... args)
      : maybeIter(maybeIter) {
    // First use: construct the underlying iterator. Otherwise resume from
    // wherever the previous instantiation left off.
    if (maybeIter.isNothing()) {
      maybeIter.emplace(std::forward<Args>(args)...);
    }
  }

  ~IncrementalIter() {
    // Only clear the persistent state when iteration has completed, so an
    // interrupted iteration can be resumed later.
    if (done()) {
      maybeIter.reset();
    }
  }

  bool done() const { return maybeIter.ref().done(); }

  Elem get() const { return maybeIter.ref().get(); }

  void next() { maybeIter.ref().next(); }
};
   2245 
// Iterate through the sweep groups created by
// GCRuntime::groupZonesForSweeping(). Each element is the first zone of a
// group; advancing moves the GC on to the next group.
class js::gc::SweepGroupsIter {
  GCRuntime* gc;

 public:
  explicit SweepGroupsIter(JSRuntime* rt) : gc(&rt->gc) {
    MOZ_ASSERT(gc->currentSweepGroup);
  }

  bool done() const { return !gc->currentSweepGroup; }

  Zone* get() const { return gc->currentSweepGroup; }

  void next() {
    MOZ_ASSERT(!done());
    // Note: this mutates GC state, advancing currentSweepGroup.
    gc->moveToNextSweepGroup();
  }
};
   2265 
   2266 namespace sweepaction {
   2267 
// Implementation of the SweepAction interface that calls a method on GCRuntime.
class SweepActionCall final : public SweepAction {
  using Method = IncrementalProgress (GCRuntime::*)(JS::GCContext* gcx,
                                                    SliceBudget& budget);

  Method method;

 public:
  explicit SweepActionCall(Method m) : method(m) {}
  IncrementalProgress run(Args& args) override {
    return (args.gc->*method)(args.gcx, args.budget);
  }
  // Calls hold no iteration state, so there is nothing to assert.
  void assertFinished() const override {}
};
   2282 
// Implementation of the SweepAction interface that yields in a specified zeal
// mode. Returns NotFinished once (forcing a yield to the mutator) and then
// Finished on the resumed slice.
class SweepActionMaybeYield final : public SweepAction {
#ifdef JS_GC_ZEAL
  ZealMode mode;
  // True while we are in the middle of the forced yield, so we don't yield
  // again on resume.
  bool isYielding;
#endif

 public:
  explicit SweepActionMaybeYield(ZealMode mode)
#ifdef JS_GC_ZEAL
      : mode(mode),
        isYielding(false)
#endif
  {
  }

  IncrementalProgress run(Args& args) override {
#ifdef JS_GC_ZEAL
    if (!isYielding && args.gc->shouldYieldForZeal(mode)) {
      isYielding = true;
      return NotFinished;
    }

    isYielding = false;
#endif
    return Finished;
  }

  void assertFinished() const override { MOZ_ASSERT(!isYielding); }

  // These actions should be skipped if GC zeal is not configured.
#ifndef JS_GC_ZEAL
  bool shouldSkip() override { return true; }
#endif
};
   2319 
// Implementation of the SweepAction interface that calls a list of actions in
// sequence. Iteration state persists across slices so a NotFinished action is
// resumed rather than restarted.
class SweepActionSequence final : public SweepAction {
  using ActionVector = Vector<UniquePtr<SweepAction>, 0, SystemAllocPolicy>;
  using Iter = IncrementalIter<ContainerIter<ActionVector>>;

  ActionVector actions;
  typename Iter::State iterState;

 public:
  // Takes ownership of |count| actions from |acts|, dropping those marked to
  // be skipped. Returns false on OOM or if any action is null.
  bool init(UniquePtr<SweepAction>* acts, size_t count) {
    for (size_t i = 0; i < count; i++) {
      auto& action = acts[i];
      if (!action) {
        return false;
      }
      if (action->shouldSkip()) {
        continue;
      }
      if (!actions.emplaceBack(std::move(action))) {
        return false;
      }
    }
    return true;
  }

  IncrementalProgress run(Args& args) override {
    for (Iter iter(iterState, actions); !iter.done(); iter.next()) {
      if (iter.get()->run(args) == NotFinished) {
        return NotFinished;
      }
    }
    return Finished;
  }

  void assertFinished() const override {
    MOZ_ASSERT(iterState.isNothing());
    for (const auto& action : actions) {
      action->assertFinished();
    }
  }
};
   2362 
// SweepAction that runs a sub-action once per element produced by an Iter
// constructed from |Init|. The current element is optionally published
// through |elemOut| for the duration of each sub-action run, and iteration
// state persists across slices.
template <typename Iter, typename Init>
class SweepActionForEach final : public SweepAction {
  using Elem = decltype(std::declval<Iter>().get());
  using IncrIter = IncrementalIter<Iter>;

  Init iterInit;
  Elem* elemOut;
  UniquePtr<SweepAction> action;
  typename IncrIter::State iterState;

 public:
  SweepActionForEach(const Init& init, Elem* maybeElemOut,
                     UniquePtr<SweepAction> action)
      : iterInit(init), elemOut(maybeElemOut), action(std::move(action)) {}

  IncrementalProgress run(Args& args) override {
    MOZ_ASSERT_IF(elemOut, *elemOut == Elem());
    // Ensure *elemOut is cleared on every exit path, including NotFinished.
    auto clearElem = mozilla::MakeScopeExit([&] { setElem(Elem()); });
    for (IncrIter iter(iterState, iterInit); !iter.done(); iter.next()) {
      setElem(iter.get());
      if (action->run(args) == NotFinished) {
        return NotFinished;
      }
    }
    return Finished;
  }

  void assertFinished() const override {
    MOZ_ASSERT(iterState.isNothing());
    MOZ_ASSERT_IF(elemOut, *elemOut == Elem());
    action->assertFinished();
  }

 private:
  // Publish |value| through elemOut if the caller asked for it.
  void setElem(const Elem& value) {
    if (elemOut) {
      *elemOut = value;
    }
  }
};
   2403 
// Factory: wrap a GCRuntime method in a SweepAction. Returns null on OOM.
static UniquePtr<SweepAction> Call(IncrementalProgress (GCRuntime::*method)(
    JS::GCContext* gcx, SliceBudget& budget)) {
  return MakeUnique<SweepActionCall>(method);
}
   2408 
// Factory: action that yields once when |zealMode| is active. Null on OOM.
static UniquePtr<SweepAction> MaybeYield(ZealMode zealMode) {
  return MakeUnique<SweepActionMaybeYield>(zealMode);
}
   2412 
// Factory: compose the given actions into a SweepActionSequence. Returns null
// on OOM or if any input action is null.
template <typename... Rest>
static UniquePtr<SweepAction> Sequence(UniquePtr<SweepAction> first,
                                       Rest... rest) {
  UniquePtr<SweepAction> actions[] = {std::move(first), std::move(rest)...};
  auto seq = MakeUnique<SweepActionSequence>();
  if (!seq || !seq->init(actions, std::size(actions))) {
    return nullptr;
  }

  return UniquePtr<SweepAction>(std::move(seq));
}
   2424 
// Factory: run |action| once for each sweep group. Propagates null (OOM).
static UniquePtr<SweepAction> RepeatForSweepGroup(
    JSRuntime* rt, UniquePtr<SweepAction> action) {
  if (!action) {
    return nullptr;
  }

  using Action = SweepActionForEach<SweepGroupsIter, JSRuntime*>;
  return js::MakeUnique<Action>(rt, nullptr, std::move(action));
}
   2434 
// Factory: run |action| once per zone in the current sweep group, publishing
// the current zone through |zoneOut|. Propagates null (OOM).
static UniquePtr<SweepAction> ForEachZoneInSweepGroup(
    JSRuntime* rt, Zone** zoneOut, UniquePtr<SweepAction> action) {
  if (!action) {
    return nullptr;
  }

  using Action = SweepActionForEach<SweepGroupZonesIter, JSRuntime*>;
  return js::MakeUnique<Action>(rt, zoneOut, std::move(action));
}
   2444 
// Factory: run |action| once per alloc kind in |kinds|, publishing the
// current kind through |kindOut|. Propagates null (OOM).
static UniquePtr<SweepAction> ForEachAllocKind(const AllocKinds& kinds,
                                               AllocKind* kindOut,
                                               UniquePtr<SweepAction> action) {
  if (!action) {
    return nullptr;
  }

  using Action = SweepActionForEach<ContainerIter<AllocKinds>, AllocKinds>;
  return js::MakeUnique<Action>(kinds, kindOut, std::move(action));
}
   2455 
   2456 }  // namespace sweepaction
   2457 
// Build the declarative tree of sweep actions that drives incremental
// sweeping: for each sweep group, mark gray, begin sweeping, sweep the atoms
// table and weak caches, then finalize each zone's foreground alloc kinds and
// prop map trees. Returns false on OOM (any null sub-action propagates up).
bool GCRuntime::initSweepActions() {
  using namespace sweepaction;
  using sweepaction::Call;

  sweepActions.ref() = RepeatForSweepGroup(
      rt,
      Sequence(
          Call(&GCRuntime::beginMarkingSweepGroup),
          Call(&GCRuntime::markGrayRootsInCurrentGroup),
          MaybeYield(ZealMode::YieldWhileGrayMarking),
          Call(&GCRuntime::markGray), Call(&GCRuntime::endMarkingSweepGroup),
          Call(&GCRuntime::beginSweepingSweepGroup),
          MaybeYield(ZealMode::IncrementalMultipleSlices),
          MaybeYield(ZealMode::YieldBeforeSweepingAtoms),
          Call(&GCRuntime::sweepAtomsTable),
          MaybeYield(ZealMode::YieldBeforeSweepingCaches),
          Call(&GCRuntime::sweepWeakCaches),
          ForEachZoneInSweepGroup(
              rt, &sweepZone.ref(),
              Sequence(MaybeYield(ZealMode::YieldBeforeSweepingObjects),
                       ForEachAllocKind(ForegroundObjectFinalizePhase,
                                        &sweepAllocKind.ref(),
                                        Call(&GCRuntime::finalizeAllocKind)),
                       MaybeYield(ZealMode::YieldBeforeSweepingNonObjects),
                       ForEachAllocKind(ForegroundNonObjectFinalizePhase,
                                        &sweepAllocKind.ref(),
                                        Call(&GCRuntime::finalizeAllocKind)),
                       MaybeYield(ZealMode::YieldBeforeSweepingPropMapTrees),
                       Call(&GCRuntime::sweepPropMapTree))),
          Call(&GCRuntime::endSweepingSweepGroup)));

  return sweepActions != nullptr;
}
   2491 
void GCRuntime::prepareForSweepSlice(JS::GCReason reason) {
  // Work that must be done at the start of each slice where we sweep.
  //
  // Since this must happen at the start of the slice, it must be called in
  // marking slices before any sweeping happens. Therefore it is called
  // conservatively since we may not always transition to sweeping from marking.

  // Clear out whole cell store buffer entries to unreachable cells by
  // collecting the nursery first. performSweepActions asserts this has
  // happened before any sweeping work runs.
  if (storeBuffer().mayHavePointersToDeadCells()) {
    collectNurseryFromMajorGC(reason);
  }

  // Trace wrapper rooters before marking if we might start sweeping in
  // this slice.
  rt->mainContextFromOwnThread()->traceWrapperGCRooters(marker().tracer());
}
   2508 
// Ensure barriers are disabled if required when entering a sweep slice and
// re-enabled when yielding to the mutator. |disableBarriersForSweeping| is set
// in beginSweepingSweepGroup and cleared in endSweepingSweepGroup.
class js::gc::AutoUpdateBarriersForSweeping {
 public:
  explicit AutoUpdateBarriersForSweeping(GCRuntime* gc) : gc(gc) {
    MOZ_ASSERT(gc->state() == State::Sweep);
    if (gc->disableBarriersForSweeping) {
      gc->disableIncrementalBarriers();
    }
  }

  ~AutoUpdateBarriersForSweeping() {
    MOZ_ASSERT(gc->state() == State::Sweep);
    // Re-read the flag rather than caching the constructor's value: it can
    // change while this guard is live (set/cleared by the sweep group
    // begin/end actions that run during the slice).
    if (gc->disableBarriersForSweeping) {
      gc->enableIncrementalBarriers();
    }
  }

 private:
  GCRuntime* gc;  // Non-owning; must outlive this guard.
};
   2531 
   2532 IncrementalProgress GCRuntime::performSweepActions(SliceBudget& budget) {
   2533  MOZ_ASSERT_IF(storeBuffer().isEnabled(),
   2534                !storeBuffer().mayHavePointersToDeadCells());
   2535 
   2536  AutoMajorGCProfilerEntry s(this);
   2537  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);
   2538 
   2539  JS::GCContext* gcx = rt->gcContext();
   2540  AutoSetThreadIsSweeping threadIsSweeping(gcx);
   2541  AutoPoisonFreedJitCode pjc(gcx);
   2542 
   2543  // Drain the mark stack, possibly in a parallel task if we're in a part of
   2544  // sweeping that allows it.
   2545  //
   2546  // The first time we enter the sweep phase we must not yield to the mutator
   2547  // until we've starting sweeping a sweep group but in that case the stack must
   2548  // be empty already.
   2549 
   2550  MOZ_ASSERT(initialState <= State::Sweep);
   2551  bool startOfSweeping = initialState < State::Sweep;
   2552 
   2553  if (startOfSweeping) {
   2554    assertNoMarkingWork();
   2555  } else {
   2556    if (markDuringSweeping(gcx, budget) == NotFinished) {
   2557      return NotFinished;
   2558    }
   2559  }
   2560 
   2561  // Don't trigger pre-barriers when sweeping or finalizing.
   2562  AutoUpdateBarriersForSweeping updateBarriers(this);
   2563 
   2564  // Then continue running sweep actions.
   2565 
   2566  SweepAction::Args args{this, gcx, budget};
   2567  IncrementalProgress sweepProgress = sweepActions->run(args);
   2568  IncrementalProgress markProgress = joinBackgroundMarkTask();
   2569 
   2570  if (sweepProgress == Finished && markProgress == Finished) {
   2571    return Finished;
   2572  }
   2573 
   2574  MOZ_ASSERT(isIncremental);
   2575  return NotFinished;
   2576 }
   2577 
   2578 bool GCRuntime::allCCVisibleZonesWereCollected() {
   2579  // Calculate whether the gray marking state is now valid.
   2580  //
   2581  // The gray bits change from invalid to valid if we finished a full GC from
   2582  // the point of view of the cycle collector. We ignore the following:
   2583  //
   2584  //  - Empty zones.
   2585  //
   2586  // This exception ensures that when the CC requests a full GC the gray mark
   2587  // state ends up valid even if we don't collect all of the zones.
   2588 
   2589  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
   2590    if (!zone->isCollecting() && !zone->arenas.arenaListsAreEmpty()) {
   2591      return false;
   2592    }
   2593  }
   2594 
   2595  return true;
   2596 }
   2597 
void GCRuntime::endSweepPhase(bool destroyingRuntime) {
  // Foreground work performed once all sweep actions have completed.

  MOZ_ASSERT(!markOnBackgroundThreadDuringSweeping);

  // All sweep actions set up in initSweepActions must have run to completion.
  sweepActions->assertFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP);

  MOZ_ASSERT_IF(destroyingRuntime, !useBackgroundThreads);

  // Release parallel marking threads for worker runtimes now we've finished
  // marking. The main thread keeps the reservation as long as parallel marking
  // is enabled.
  if (!rt->isMainRuntime()) {
    MOZ_ASSERT_IF(useParallelMarking, reservedMarkingThreads != 0);
    releaseMarkingThreads();
  }

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::DESTROY);

    // Sweep shared script bytecode now all zones have been swept and finalizers
    // for BaseScripts have released their references.
    SweepScriptData(rt);
  }

  {
    gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::FINALIZE_END);
    // Hold the store buffer lock while running embedder finalize callbacks.
    AutoLockStoreBuffer lock(rt);
    callFinalizeCallbacks(rt->gcContext(), JSFINALIZE_COLLECTION_END);

    // The cycle collector may rely on gray bits once every CC-visible zone
    // has been collected (see allCCVisibleZonesWereCollected).
    if (allCCVisibleZonesWereCollected()) {
      grayBitsValid = true;
    }
  }

  if (isIncremental) {
    findDeadCompartments();
  }

#ifdef JS_GC_ZEAL
  finishMarkingValidation();
#endif

  AssertNoWrappersInGrayList(rt);
}