tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Compacting.cpp (33188B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 /*
      8 * Implementation of compacting GC.
      9 */
     10 
     11 #include "mozilla/Maybe.h"
     12 
     13 #include "debugger/DebugAPI.h"
     14 #include "gc/ArenaList.h"
     15 #include "gc/GCInternals.h"
     16 #include "gc/GCLock.h"
     17 #include "gc/ParallelWork.h"
     18 #include "gc/Zone.h"
     19 #include "jit/JitCode.h"
     20 #include "jit/JitRuntime.h"
     21 #include "jit/JitZone.h"
     22 #include "js/GCAPI.h"
     23 #include "vm/HelperThreads.h"
     24 #include "vm/Realm.h"
     25 #include "wasm/WasmGcObject.h"
     26 
     27 #include "gc/Heap-inl.h"
     28 #include "gc/Marking-inl.h"
     29 #include "gc/PrivateIterators-inl.h"
     30 #include "gc/StableCellHasher-inl.h"
     31 #include "gc/TraceMethods-inl.h"
     32 #include "vm/GeckoProfiler-inl.h"
     33 
     34 using namespace js;
     35 using namespace js::gc;
     36 
     37 using mozilla::Maybe;
     38 
     39 using JS::SliceBudget;
     40 
     41 bool GCRuntime::canRelocateZone(Zone* zone) const {
     42  return !zone->isAtomsZone();
     43 }
     44 
// Prepare for the compacting phase: record the set of zones that are
// candidates for compaction and mark compaction as started.
void GCRuntime::beginCompactPhase() {
  MOZ_ASSERT(!isBackgroundSweeping());
  assertBackgroundSweepingFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // Record each zone that is eligible for relocation. Each zone is still
  // subject to a cost check later (see relocateArenas), so not every zone
  // listed here necessarily gets compacted.
  MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    if (canRelocateZone(zone)) {
      zonesToMaybeCompact.ref().append(zone);
    }
  }

  startedCompacting = true;
  zonesCompacted = 0;

#ifdef DEBUG
  // Any arenas relocated by a previous compaction must already have been
  // released at this point.
  AutoLockGC lock(this);
  MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}
     66 
// Run (part of) the compacting phase: relocate cells zone by zone until the
// slice budget runs out, then fix up all pointers to the moved cells.
// Returns Finished when no candidate zones remain, NotFinished otherwise.
IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
                                            SliceBudget& sliceBudget,
                                            AutoGCSession& session) {
  assertBackgroundSweepingFinished();
  MOZ_ASSERT(startedCompacting);

  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // TODO: JSScripts can move. If the sampler interrupts the GC in the
  // middle of relocating an arena, invalid JSScript pointers may be
  // accessed. Suppress all sampling until a finer-grained solution can be
  // found. See bug 1295775.
  AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());

  ZoneList relocatedZones;
  Arena* relocatedArenas = nullptr;
  while (!zonesToMaybeCompact.ref().isEmpty()) {
    Zone* zone = zonesToMaybeCompact.ref().front();
    zonesToMaybeCompact.ref().removeFront();

    MOZ_ASSERT(nursery().isEmpty());
    zone->changeGCState(Zone::Finished, Zone::Compact);

    if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
      // Cells moved: fix up this zone's own pointers now. Runtime-wide
      // pointers into relocated zones are fixed up once, below.
      updateZonePointersToRelocatedCells(zone);
      relocatedZones.append(zone);
      zonesCompacted++;
    } else {
      // Nothing was relocated for this zone (e.g. the cost check in
      // ShouldRelocateZone failed), so it is already finished.
      zone->changeGCState(Zone::Compact, Zone::Finished);
    }

    // Stop after this zone if the slice budget is exhausted; remaining zones
    // stay in zonesToMaybeCompact for the next slice.
    if (sliceBudget.isOverBudget()) {
      break;
    }
  }

  if (!relocatedZones.isEmpty()) {
    // Update pointers from the rest of the runtime into the relocated zones,
    // then mark those zones finished.
    updateRuntimePointersToRelocatedCells(session);

    do {
      Zone* zone = relocatedZones.front();
      relocatedZones.removeFront();
      zone->changeGCState(Zone::Compact, Zone::Finished);
    } while (!relocatedZones.isEmpty());
  }

  clearRelocatedArenas(relocatedArenas, reason);

#ifdef DEBUG
  // In debug builds the old arenas may be kept protected to catch stale
  // accesses rather than released immediately.
  protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
  releaseRelocatedArenas(relocatedArenas);
#endif

  // Clear caches that can contain cell pointers.
  rt->caches().purgeForCompaction();

#ifdef DEBUG
  checkHashTablesAfterMovingGC();
#endif

  return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}
    131 
    132 void GCRuntime::endCompactPhase() { startedCompacting = false; }
    133 
    134 static bool ShouldRelocateAllArenas(JS::GCReason reason) {
    135  return reason == JS::GCReason::DEBUG_GC;
    136 }
    137 
/*
 * Choose which arenas to relocate all cells from.
 *
 * Return a pair of arena pointers indicating the arenas to relocate that can be
 * passed to removeRange(), or null pointers if nothing can be relocated.
 */
std::pair<Arena*, Arena*> ArenaList::pickArenasToRelocate(
    AllocKind kind, size_t& arenaTotalOut, size_t& relocTotalOut) {
  // Relocate the greatest number of arenas such that the number of used cells
  // in relocated arenas is less than or equal to the number of free cells in
  // unrelocated arenas. In other words we only relocate cells we can move
  // into existing arenas, and we choose the least full areans to relocate.
  //
  // This is made easier by the fact that the start of the arena list has been
  // sorted in descending order of number of used cells, so we will always
  // relocate a sublist of the arena list. All we need to do is find the points
  // at which to start and end relocating.

  if (!hasNonFullArenas()) {
    // All arenas are full so no compacting is possible.
    return {nullptr, nullptr};
  }

  // Count non-full and full arenas and total used cells, and find the last
  // non-full arena.
  size_t fullArenaCount = 0;     // Number of full arenas (not relocated).
  size_t nonFullArenaCount = 0;  // Number of non-full arenas to consider.
  size_t totalUsedCells = 0;     // Total used cells in non-full arenas.
  Arena* lastNonFullArena = nullptr;

  // Non-full arenas are at the front of the list: stop at the first full one.
  Iterator arena = iter();
  for (; !arena.done(); arena.next()) {
    if (arena->isFull()) {
      break;
    }

    MOZ_ASSERT(!arena->isFull());
    nonFullArenaCount++;
    totalUsedCells += arena->countUsedCells();
    lastNonFullArena = arena.get();
  }
  // Everything after the first full arena is counted as full.
  for (; !arena.done(); arena.next()) {
    // It's likely that the final arena is not full but we ignore that.
    fullArenaCount++;
  }

  size_t previousFreeCells = 0;  // Total free cells before arena.
  size_t followingUsedCells =
      totalUsedCells;  // Total used cells in non full arenas afterwards.
  size_t relocCount = nonFullArenaCount;  // Number of arenas to relocate.
  Arena* prev = nullptr;                  // The previous arena considered.

  const size_t cellsPerArena = Arena::thingsPerArena(kind);

  // Examine the initial part of the list containing non-full arenas.
  for (arena = iter(); prev != lastNonFullArena;
       prev = arena.get(), arena.next()) {
    if (followingUsedCells <= previousFreeCells) {
      // We have found the point where cells in the following non-full arenas
      // can be relocated into the free space in previous arenas. We're done.
      break;
    }

    // This arena will be kept rather than relocated: transfer its cell counts
    // from the "following" to the "previous" side of the split point.
    size_t freeCells = arena->countFreeCells();
    MOZ_ASSERT(freeCells != 0);
    size_t usedCells = cellsPerArena - freeCells;
    followingUsedCells -= usedCells;
    previousFreeCells += freeCells;
    MOZ_ASSERT(relocCount != 0);
    relocCount--;
  }

  MOZ_ASSERT((relocCount == 0) == (prev == lastNonFullArena));
  // Accumulate totals for the caller's zone-level cost decision (see
  // ShouldRelocateZone).
  arenaTotalOut += fullArenaCount + nonFullArenaCount;
  relocTotalOut += relocCount;

  if (relocCount == 0) {
    return {nullptr, nullptr};
  }

  // |prev| is the last arena kept; the arenas after it up to and including
  // |lastNonFullArena| are the ones to relocate.
  return {prev, lastNonFullArena};
}
    220 
    221 #ifdef DEBUG
    222 inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
    223  return uintptr_t(ptr) - uintptr_t(start) < length;
    224 }
    225 #endif
    226 
// Relocate one cell: allocate a replacement in |zone|, copy the contents
// across, fix up any pointers the new cell holds into itself, and leave a
// forwarding pointer in the old cell.
static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
                         size_t thingSize) {
  JS::AutoSuppressGCAnalysis nogc;

  // Allocate a new cell.
  MOZ_ASSERT(zone == src->zone());
  TenuredCell* dst =
      reinterpret_cast<TenuredCell*>(AllocateTenuredCellInGC(zone, thingKind));

  // Copy source cell contents to destination.
  memcpy(dst, src, thingSize);

  // Move any uid attached to the object.
  gc::TransferUniqueId(dst, src);

  if (IsObjectAllocKind(thingKind)) {
    auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));

    if (srcObj->is<NativeObject>()) {
      NativeObject* srcNative = &srcObj->as<NativeObject>();
      NativeObject* dstNative = &dstObj->as<NativeObject>();

      // Fixup the pointer to inline object elements if necessary.
      if (srcNative->hasFixedElements()) {
        uint32_t numShifted =
            srcNative->getElementsHeader()->numShiftedElements();
        dstNative->setFixedElements(numShifted);
      }
    } else if (srcObj->is<ProxyObject>()) {
      // Proxies using an inline value array similarly point into their own
      // cell and must be re-based at the new location.
      if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
        dstObj->as<ProxyObject>().setInlineValueArray();
      }
    }

    // Call object moved hook if present.
    if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
      op(dstObj, srcObj);
    }

    // The new object must not still point into the old cell, which is about
    // to be poisoned and eventually released.
    MOZ_ASSERT_IF(
        dstObj->is<NativeObject>(),
        !PtrIsInRange(
            (const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
            thingSize));
  }

  // Copy the mark bits.
  dst->copyMarkBitsFrom(src);

  // Poison the source cell contents except for the forwarding flag and pointer
  // which will be stored in the first word. We can't do this for buffer
  // allocations as these can be used as native object dynamic elements and this
  // would overwrite the elements flags which are needed when updating the
  // dynamic elements pointer.
#ifdef DEBUG
  bool poison = true;
  if (IsObjectAllocKind(thingKind)) {
    JSObject* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    // Wasm arrays with inline data are the buffer-allocation case described
    // above, so they are excluded from poisoning.
    poison = !(srcObj->is<WasmArrayObject>() &&
               srcObj->as<WasmArrayObject>().isDataInline());
  }
  if (poison) {
    AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
                 JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
                 MemCheckKind::MakeNoAccess);
  }
#endif

  // Mark source cell as forwarded and leave a pointer to the destination.
  RelocationOverlay::forwardCell(src, dst);
}
    299 
// Relocate every cell in |arena|, charging one step per cell against
// |sliceBudget|.
static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
  MOZ_ASSERT(arena->allocated());
  MOZ_ASSERT(!arena->onDelayedMarkingList());
  MOZ_ASSERT(arena->bufferedCells()->isEmpty());

  Zone* zone = arena->zone();

  AllocKind thingKind = arena->getAllocKind();
  size_t thingSize = arena->getThingSize();

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    RelocateCell(zone, cell, thingKind, thingSize);
    sliceBudget.step();
  }

#ifdef DEBUG
  // Verify that every cell was forwarded and that its mark bits were copied
  // to the new location.
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    TenuredCell* src = cell;
    MOZ_ASSERT(src->isForwarded());
    TenuredCell* dest = Forwarded(src);
    MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
    MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
  }
#endif
}
    325 
    326 /*
    327 * Relocate all arenas identified by pickArenasToRelocate: for each arena,
    328 * relocate each cell within it, then add it to a list of relocated arenas.
    329 */
    330 Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
    331                                 SliceBudget& sliceBudget,
    332                                 gcstats::Statistics& stats) {
    333  while (Arena* arena = toRelocate) {
    334    toRelocate = arena->next;
    335    RelocateArena(arena, sliceBudget);
    336    // Prepend to list of relocated arenas
    337    arena->next = relocated;
    338    relocated = arena;
    339    stats.count(gcstats::COUNT_ARENA_RELOCATED);
    340  }
    341 
    342  return relocated;
    343 }
    344 
    345 // Skip compacting zones unless we can free a certain proportion of their GC
    346 // heap memory.
    347 static const double MIN_ZONE_RECLAIM_PERCENT = 2.0;
    348 
    349 static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
    350                               JS::GCReason reason) {
    351  if (relocCount == 0) {
    352    return false;
    353  }
    354 
    355  if (IsOOMReason(reason)) {
    356    return true;
    357  }
    358 
    359  double relocFraction = double(relocCount) / double(arenaCount);
    360  return relocFraction * 100.0 >= MIN_ZONE_RECLAIM_PERCENT;
    361 }
    362 
    363 static AllocKinds CompactingAllocKinds() {
    364  AllocKinds result;
    365  for (AllocKind kind : AllAllocKinds()) {
    366    if (IsCompactingKind(kind)) {
    367      result += kind;
    368    }
    369  }
    370  return result;
    371 }
    372 
// Relocate arenas from this zone's arena lists, prepending every relocated
// arena to |relocatedListOut|. Returns false when the zone was judged not
// worth compacting (see ShouldRelocateZone), in which case nothing moved.
bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                                SliceBudget& sliceBudget,
                                gcstats::Statistics& stats) {
  // This is only called from the main thread while we are doing a GC, so
  // there is no need to lock.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
  MOZ_ASSERT(runtime()->gc.isHeapCompacting());
  MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());

  // Relocate all compatible kinds
  AllocKinds allocKindsToRelocate = CompactingAllocKinds();

  // Clear all the free lists.
  clearFreeLists();

  if (ShouldRelocateAllArenas(reason)) {
    // Debug GCs relocate every arena of every compactible kind.
    zone_->prepareForMovingGC();
    for (auto kind : allocKindsToRelocate) {
      ArenaList& al = arenaList(kind);
      Arena* allArenas = al.release();
      relocatedListOut =
          al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
    }
  } else {
    // First pick candidate ranges for every kind so we can decide whether
    // compacting this zone is worthwhile before moving anything.
    size_t arenaCount = 0;
    size_t relocCount = 0;
    AllAllocKindArray<std::pair<Arena*, Arena*>> rangeToRelocate;

    for (auto kind : allocKindsToRelocate) {
      rangeToRelocate[kind] =
          arenaList(kind).pickArenasToRelocate(kind, arenaCount, relocCount);
    }

    if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
      return false;
    }

    zone_->prepareForMovingGC();
    for (auto kind : allocKindsToRelocate) {
      // A null first pointer means nothing was picked for this kind.
      if (rangeToRelocate[kind].first) {
        ArenaList& al = arenaList(kind);
        const auto& range = rangeToRelocate[kind];
        Arena* arenas = al.removeRange(range.first, range.second);
        relocatedListOut =
            al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
      }
    }
  }

  return true;
}
    424 
// Relocate arenas for |zone|, prepending relocated arenas to
// |relocatedListOut|. Returns true if any cells were moved.
bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
                               Arena*& relocatedListOut,
                               SliceBudget& sliceBudget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);

  MOZ_ASSERT(!zone->isPreservingCode());
  MOZ_ASSERT(canRelocateZone(zone));

  // Cancel in-progress off-thread compilation before cells start moving.
  js::CancelOffThreadCompile(rt, JS::Zone::Compact);

  if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
                                   stats())) {
    return false;
  }

#ifdef DEBUG
  // Check that we did as much compaction as we should have. There
  // should always be less than one arena's worth of free cells.
  for (auto kind : CompactingAllocKinds()) {
    ArenaList& al = zone->arenas.arenaList(kind);
    size_t freeCells = 0;
    for (auto arena = al.iter(); !arena.done(); arena.next()) {
      if (arena->isFull()) {
        break;
      }
      freeCells += arena->countFreeCells();
    }
    MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
  }
#endif

  return true;
}
    458 
// Tracer that updates edges to moved cells: onEdge (below) replaces any edge
// to a forwarded cell with the forwarding pointer's target.
MovingTracer::MovingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Moving,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}
    462 
    463 template <typename T>
    464 inline void MovingTracer::onEdge(T** thingp, const char* name) {
    465  T* thing = *thingp;
    466  if (IsForwarded(thing)) {
    467    MOZ_ASSERT(thing->runtimeFromAnyThread() == runtime());
    468    *thingp = Forwarded(thing);
    469  }
    470 }
    471 
// Sweep weak references in |zone| after compacting. Sweeping with a
// MovingTracer means any surviving edges visited are updated to the cells'
// new locations (see MovingTracer::onEdge).
void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
  MOZ_ASSERT(zone->isGCCompacting());

  zone->traceWeaksMaps(trc);
  zone->sweepObjectsWithWeakPointers(trc);

  // Must happen after tracing weak maps above.
  traceWeakFinalizationObserverEdges(trc, zone);

  for (auto* cache : zone->weakCaches()) {
    cache->traceWeak(trc, JS::detail::WeakCacheBase::DontLockStoreBuffer);
  }

  if (jit::JitZone* jitZone = zone->jitZone()) {
    jitZone->traceWeak(trc, zone);
  }

  // Per-compartment and per-realm weak tables.
  for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
    c->traceWeakNativeIterators(trc);

    for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
      r->traceWeakSavedStacks(trc);
      r->traceWeakGlobalEdge(trc);
      r->traceWeakDebugEnvironmentEdges(trc);
    }
  }
}
    499 
// Fix up one (unmoved, or newly-moved copy of a) cell, then trace its
// children so edges to relocated cells get updated.
template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
  // We only update unmoved GC things or the new copy of moved GC things, never
  // the old copy. If this happened it could clear the forwarded flag which
  // could lead to pointers to the old copy not being updated.
  MOZ_ASSERT(!cell->isForwarded());

  cell->fixupAfterMovingGC();
  cell->traceChildren(trc);
}
    510 
// Update the internal pointers of every cell in |arena|, treating each cell
// as type T (selected per alloc kind by UpdateArenaPointers).
template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    UpdateCellPointers(trc, cell.as<T>());
  }
}
    517 
    518 static bool CanUpdateKindInBackground(AllocKind kind) {
    519  // We try to update as many GC things in parallel as we can, but there are
    520  // kinds for which this might not be safe:
    521  //  - we assume JSObjects that are foreground finalized are not safe to
    522  //    update in parallel
    523  //  - updating a SharedPropMap touches child maps in
    524  //    SharedPropMap::fixupAfterMovingGC
    525  return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
    526         kind != AllocKind::BASE_SHAPE;
    527 }
    528 
/*
 * Update the internal pointers for all cells in an arena.
 */
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
  AllocKind kind = arena->getAllocKind();

  // Kinds that cannot be updated in the background must only be processed on
  // the main thread.
  MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
                CurrentThreadCanAccessRuntime(trc->runtime()));

  // Dispatch on the arena's alloc kind to the correctly-typed update loop.
  switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    UpdateArenaPointersTyped<type>(trc, arena);                              \
    return;
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
  }
}
    551 
// A segment of an arena list: the arenas from |begin| up to, but not
// including, |end|. |end| may equal |begin| when the segment covers an entire
// circular list (see UpdateArenaListSegmentPointers).
struct ArenaListSegment {
  Arena* begin;
  Arena* end;
};
    556 
/*
 * Update the internal pointers for all arenas in a segment of an arena list.
 *
 * Returns the number of steps to count against the slice budget.
 */
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
                                             const ArenaListSegment& arenas) {
  MOZ_ASSERT(arenas.begin);
  MovingTracer trc(gc->rt);
  size_t count = 0;
  Arena* arena = arenas.begin;
  // The arena list is circular, so |begin == end| means the whole list; the
  // do-while ensures |begin| is processed before the end test.
  do {
    UpdateArenaPointers(&trc, arena);
    count++;
    arena = arena->next;
  } while (arena != arenas.end);
  // Each arena is weighted as 256 budget steps.
  return count * 256;
}
    575 
// Produces successive ArenaListSegments covering all arenas of the selected
// alloc kinds in a zone, in chunks of at most MaxArenasToProcess arenas, so
// that pointer updating can be split into parallelizable units of work.
class ArenasToUpdate {
  // Maximum number of arenas to update in one block.
#ifdef DEBUG
  static const unsigned MaxArenasToProcess = 16;
#else
  static const unsigned MaxArenasToProcess = 256;
#endif

 public:
  ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
      : kinds(kinds), zone(zone) {
    settle();
  }

  // True when no segments remain.
  bool done() const {
    MOZ_ASSERT_IF(!segmentBegin, endOfArenaList);
    return !segmentBegin;
  }

  // The current segment; only valid while !done().
  ArenaListSegment get() const {
    MOZ_ASSERT(!done());
    return {segmentBegin, segmentEnd};
  }

  void next();

 private:
  AllocKinds kinds;                   // Selects which thing kinds to update.
  Zone* zone;                         // Zone to process.
  AllocKind kind = AllocKind::FIRST;  // Current alloc kind to process.
  Arena* segmentBegin = nullptr;      // First arena of the current segment.
  Arena* segmentEnd = nullptr;        // First arena NOT in the segment; may
                                      // wrap back to the list head.
  bool endOfArenaList = true;         // Current segment reaches the end of
                                      // the current kind's list.

  static AllocKind nextAllocKind(AllocKind i) {
    return AllocKind(uint8_t(i) + 1);
  }

  void settle();
  void findSegmentEnd();
};
    617 
void ArenasToUpdate::settle() {
  // Called when we have set |kind| to a new kind. Sets |arena| to the next
  // arena or null if there are no more arenas to update.

  MOZ_ASSERT(!segmentBegin);
  MOZ_ASSERT(endOfArenaList);

  // Advance |kind| to the next selected kind that has any arenas. If none
  // remain, |segmentBegin| stays null and done() becomes true.
  for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
    if (!kinds.contains(kind)) {
      continue;
    }

    Arena* arena = zone->arenas.getFirstArena(kind);
    if (arena) {
      segmentBegin = arena;
      endOfArenaList = false;
      findSegmentEnd();
      break;
    }
  }
}
    639 
void ArenasToUpdate::findSegmentEnd() {
  // Take up to MaxArenasToProcess arenas from the list starting at
  // |segmentBegin| and set |segmentEnd| and |endOfArenaList|.
  MOZ_ASSERT(segmentBegin);
  MOZ_ASSERT(!endOfArenaList);

  Arena* arena = segmentBegin;
  Arena* firstArena = zone->arenas.getFirstArena(kind);
  for (size_t i = 0; i < MaxArenasToProcess; i++) {
    arena = arena->next;
    if (arena == firstArena) {
      // We have reached the end of the circular linked list.
      endOfArenaList = true;
      break;
    }
  }
  // |segmentEnd| is the first arena not in the segment; when the segment
  // reaches the end of the list this wraps back to the list head.
  segmentEnd = arena;
}
    658 
    659 void ArenasToUpdate::next() {
    660  MOZ_ASSERT(!done());
    661 
    662  if (!endOfArenaList) {
    663    segmentBegin = segmentEnd;
    664    findSegmentEnd();
    665    return;
    666  }
    667 
    668  segmentBegin = nullptr;
    669  kind = nextAllocKind(kind);
    670  settle();
    671 }
    672 
    673 static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
    674  AllocKinds result;
    675  for (AllocKind kind : kinds) {
    676    if (!CanUpdateKindInBackground(kind)) {
    677      result += kind;
    678    }
    679  }
    680  return result;
    681 }
    682 
// Update cell pointers in |zone| for the given alloc kinds. Kinds that are
// safe to update off the main thread (see CanUpdateKindInBackground) are
// handed to helper-thread tasks while the remainder is processed here.
void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
  AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
  AllocKinds bgKinds = kinds - fgKinds;

  ArenasToUpdate fgArenas(zone, fgKinds);
  ArenasToUpdate bgArenas(zone, bgKinds);

  AutoLockHelperThreadState lock;

  // Start parallel work on the background-safe kinds.
  AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
                              gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
                              GCUse::Unspecified, bgArenas,
                              SliceBudget::unlimited(), lock);

  // Drop the helper thread lock while we process the foreground kinds on
  // this thread.
  AutoUnlockHelperThreadState unlock(lock);

  for (; !fgArenas.done(); fgArenas.next()) {
    UpdateArenaListSegmentPointers(this, fgArenas.get());
  }
}
    703 
    704 // After cells have been relocated any pointers to a cell's old locations must
    705 // be updated to point to the new location. This happens by iterating through
    706 // all cells in heap and tracing their children (non-recursively) to update
    707 // them.
    708 //
    709 // This is complicated by the fact that updating a GC thing sometimes depends on
    710 // making use of other GC things. After a moving GC these things may not be in a
    711 // valid state since they may contain pointers which have not been updated yet.
    712 //
    713 // The main remaining dependency is:
    714 //
    715 //   - Updating a JSObject makes use of its shape
    716 //
    717 // This means we require at least two phases for update:
    718 //
    719 //  1) a phase including shapes
    720 //  2) a phase including all JS objects
    721 //
    722 // Also, there can be data races calling IsForwarded() on the new location of a
     723 // cell whose first word is being updated in parallel on another thread. The
    724 // easiest way to avoid this is to not store a GC pointer in the first word of a
    725 // cell. Otherwise this can be avoided by updating different kinds of cell in
    726 // different phases.
    727 //
    728 // Since we want to minimize the number of phases, arrange kinds into two
    729 // arbitrary phases.
    730 
// First update phase: shapes, scripts, strings and other non-object kinds.
// Objects are deferred to phase two because updating a JSObject makes use of
// its shape (see the comment above).
static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
                                           AllocKind::BASE_SHAPE,
                                           AllocKind::SHAPE,
                                           AllocKind::STRING,
                                           AllocKind::JITCODE,
                                           AllocKind::REGEXP_SHARED,
                                           AllocKind::SCOPE,
                                           AllocKind::GETTER_SETTER,
                                           AllocKind::COMPACT_PROP_MAP,
                                           AllocKind::NORMAL_PROP_MAP,
                                           AllocKind::DICT_PROP_MAP};
    742 
// Second update phase: functions and all JS object kinds, updated after the
// shapes they depend on have been processed in phase one.
static constexpr AllocKinds UpdatePhaseTwo{AllocKind::FUNCTION,
                                           AllocKind::FUNCTION_EXTENDED,
                                           AllocKind::OBJECT0,
                                           AllocKind::OBJECT0_FOREGROUND,
                                           AllocKind::OBJECT0_BACKGROUND,
                                           AllocKind::OBJECT2,
                                           AllocKind::OBJECT2_FOREGROUND,
                                           AllocKind::OBJECT2_BACKGROUND,
                                           AllocKind::ARRAYBUFFER4,
                                           AllocKind::OBJECT4,
                                           AllocKind::OBJECT4_FOREGROUND,
                                           AllocKind::OBJECT4_BACKGROUND,
                                           AllocKind::ARRAYBUFFER6,
                                           AllocKind::OBJECT6,
                                           AllocKind::OBJECT6_FOREGROUND,
                                           AllocKind::OBJECT6_BACKGROUND,
                                           AllocKind::ARRAYBUFFER8,
                                           AllocKind::OBJECT8,
                                           AllocKind::OBJECT8_FOREGROUND,
                                           AllocKind::OBJECT8_BACKGROUND,
                                           AllocKind::ARRAYBUFFER12,
                                           AllocKind::OBJECT12,
                                           AllocKind::OBJECT12_FOREGROUND,
                                           AllocKind::OBJECT12_BACKGROUND,
                                           AllocKind::ARRAYBUFFER16,
                                           AllocKind::OBJECT16,
                                           AllocKind::OBJECT16_FOREGROUND,
                                           AllocKind::OBJECT16_BACKGROUND};
    771 
// Update all cell pointers in |zone| in two phases; see the comment above
// UpdatePhaseOne for why two phases are required.
void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
  updateCellPointers(zone, UpdatePhaseOne);
  updateCellPointers(zone, UpdatePhaseTwo);
}
    776 
/*
 * Update pointers to relocated cells in a single zone by doing a traversal of
 * that zone's arenas and calling per-zone sweep hooks.
 *
 * The latter is necessary to update weak references which are not marked as
 * part of the traversal.
 */
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
  MOZ_ASSERT(!rt->isBeingDestroyed());
  MOZ_ASSERT(zone->isGCCompacting());

  AutoTouchingGrayThings tgt;

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  zone->fixupAfterMovingGC();
  zone->fixupScriptMapsAfterMovingGC(&trc);

  // Fixup compartment global pointers as these get accessed during marking.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    comp->fixupAfterMovingGC(&trc);
  }

  // Drop caches whose entries may refer to old cell locations rather than
  // trying to update them.
  zone->externalStringCache().purge();
  zone->functionToStringCache().purge();
  zone->shapeZone().purgeShapeCaches(rt->gcContext());
  rt->caches().stringToAtomCache.purge();

  // Iterate through all cells that can contain relocatable pointers to update
  // them. Since updating each cell is independent we try to parallelize this
  // as much as possible.
  updateAllCellPointers(&trc, zone);

  // Sweep everything to fix up weak pointers.
  sweepZoneAfterCompacting(&trc, zone);

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    callWeakPointerCompartmentCallbacks(&trc, comp);
  }
}
    820 
/*
 * Update runtime-wide pointers to relocated cells. This runs once per
 * compacting GC, after the per-zone updates.
 */
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
  MOZ_ASSERT(!rt->isBeingDestroyed());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  // Fix up tables keyed on cross-compartment wrapper cells before tracing.
  Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);

  rt->geckoProfiler().fixupStringsMapAfterMovingGC();

  // Mark roots to update them.

  traceRuntimeForMajorGC(&trc, session);

  jit::UpdateJitActivationsForCompactingGC(rt);

  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
    DebugAPI::traceAllForMovingGC(&trc);
    DebugAPI::traceCrossCompartmentEdges(&trc);

    // Mark all gray roots.
    traceEmbeddingGrayRoots(&trc);
    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        &trc, Compartment::GrayEdges);
  }

  // Sweep everything to fix up weak pointers.
  jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    cache->traceWeak(&trc, JS::detail::WeakCacheBase::DontLockStoreBuffer);
  }

  // Keep the interpreter entry map consistent with any scripts that moved.
  if (rt->hasJitRuntime() && rt->jitRuntime()->hasInterpreterEntryMap()) {
    rt->jitRuntime()->getInterpreterEntryMap()->updateScriptsAfterMovingGC();
  }

  // Type inference may put more blocks here to free.
  {
    AutoLockHelperThreadState lock;
    lifoBlocksToFree.ref().freeAll();
  }

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  callWeakPointerZonesCallbacks(&trc);
}
    871 
    872 void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
    873  AutoLockGC lock(this);
    874  clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
    875 }
    876 
    877 void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
    878                                                     JS::GCReason reason,
    879                                                     const AutoLockGC& lock) {
    880  // Clear the relocated arenas, now containing only forwarding pointers
    881  while (arenaList) {
    882    Arena* arena = arenaList;
    883    arenaList = arenaList->next;
    884 
    885    // Clear the mark bits
    886    arena->unmarkAll();
    887 
    888    // Mark arena as empty
    889    arena->setAsFullyUnused();
    890 
    891 #ifdef DEBUG
    892    // The cell contents have been partially marked no access in RelocateCell,
    893    // so we need to mark the region as undefined again so we can poison it.
    894    SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
    895                    arena->getThingsSpan(), MemCheckKind::MakeUndefined);
    896 #endif
    897 
    898    AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
    899                 JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
    900                 MemCheckKind::MakeNoAccess);
    901 
    902    // Don't count emptied arenas as being freed by the current GC:
    903    //  - if we purposely moved everything to new arenas, as that will already
    904    //    have allocated a similar number of arenas. (This only happens for
    905    //    collections triggered by GC zeal.)
    906    //  - if they were allocated since the start of the GC.
    907    bool allArenasRelocated = ShouldRelocateAllArenas(reason);
    908    bool updateRetainedSize = !allArenasRelocated && !arena->isNewlyCreated();
    909    Zone* zone = arena->zone();
    910    zone->gcHeapSize.removeBytes(ArenaSize, updateRetainedSize, heapSize);
    911 
    912    // There is no atom marking bitmap index to free.
    913    MOZ_ASSERT(!zone->isAtomsZone());
    914 
    915    // Release the arena but don't return it to the chunk yet.
    916    arena->release();
    917  }
    918 }
    919 
    920 #ifdef DEBUG
    921 
// In debug mode we don't always release relocated arenas straight away:
// sometimes we protect them instead and hold onto them until the next GC
// sweep phase, to catch any pointers to them that didn't get forwarded.
    925 
    926 static inline bool CanProtectArenas() {
    927  // On some systems the page size is larger than the size of an arena so we
    928  // can't change the mapping permissions per arena.
    929  return SystemPageSize() <= ArenaSize;
    930 }
    931 
    932 static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
    933  // For zeal mode collections we don't release the relocated arenas
    934  // immediately. Instead we protect them and keep them around until the next
    935  // collection so we can catch any stray accesses to them.
    936  return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
    937 }
    938 
    939 void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
    940                                                JS::GCReason reason) {
    941  if (ShouldProtectRelocatedArenas(reason)) {
    942    protectAndHoldArenas(arenaList);
    943    return;
    944  }
    945 
    946  releaseRelocatedArenas(arenaList);
    947 }
    948 
// Protect the pages of each arena in |arenaList| and prepend the whole list
// to relocatedArenasToRelease so stray accesses through stale pointers fault
// instead of silently reading poisoned memory.
void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
  for (Arena* arena = arenaList; arena;) {
    MOZ_ASSERT(!arena->allocated());
    Arena* next = arena->next;
    if (!next) {
      // Prepend to hold list before we protect the memory. This must happen
      // while the tail arena is still writable: linking writes the tail's
      // |next| field, which lives inside the arena we are about to protect.
      AutoLockGC lock(this);
      arena->next = relocatedArenasToRelease;
      relocatedArenasToRelease = arenaList;
    }
    ProtectPages(arena, ArenaSize);
    arena = next;
  }
}
    963 
    964 void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
    965  for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
    966    UnprotectPages(arena, ArenaSize);
    967    MOZ_ASSERT(!arena->allocated());
    968  }
    969 }
    970 
    971 void GCRuntime::releaseHeldRelocatedArenas() {
    972  AutoLockGC lock(this);
    973  unprotectHeldRelocatedArenas(lock);
    974  Arena* arenas = relocatedArenasToRelease;
    975  relocatedArenasToRelease = nullptr;
    976  releaseRelocatedArenasWithoutUnlocking(arenas, lock);
    977 }
    978 
    979 void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
    980    const AutoLockGC& lock) {
    981  unprotectHeldRelocatedArenas(lock);
    982  releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
    983  relocatedArenasToRelease = nullptr;
    984 }
    985 
    986 #endif
    987 
    988 void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
    989  AutoLockGC lock(this);
    990  releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
    991 }
    992 
    993 void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
    994                                                       const AutoLockGC& lock) {
    995  // Release relocated arenas previously cleared with clearRelocatedArenas().
    996  while (arenaList) {
    997    Arena* arena = arenaList;
    998    arenaList = arenaList->next;
    999 
   1000    // We already updated the memory accounting so just call
   1001    // Chunk::releaseArena.
   1002    arena->chunk()->releaseArena(this, arena, lock);
   1003  }
   1004 }