tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Zone.cpp (27604B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
#include "gc/Zone.h"
#include "js/shadow/Zone.h"  // JS::shadow::Zone

#include "mozilla/Sprintf.h"
#include "mozilla/TimeStamp.h"

#include <algorithm>

#include "gc/FinalizationObservers.h"
#include "gc/GCContext.h"
#include "gc/PublicIterators.h"
#include "jit/BaselineIC.h"
#include "jit/BaselineJIT.h"
#include "jit/Invalidation.h"
#include "jit/JitScript.h"
#include "jit/JitZone.h"
#include "vm/Runtime.h"
#include "vm/Time.h"
     23 
     24 #include "debugger/DebugAPI-inl.h"
     25 #include "gc/GC-inl.h"
     26 #include "gc/Marking-inl.h"
     27 #include "gc/Nursery-inl.h"
     28 #include "gc/StableCellHasher-inl.h"
     29 #include "gc/WeakMap-inl.h"
     30 #include "vm/JSScript-inl.h"
     31 #include "vm/Realm-inl.h"
     32 
     33 using namespace js;
     34 using namespace js::gc;
     35 
// Sentinel stored in Zone::listNext_ meaning "this zone is not on any
// ZoneList" (see Zone::isOnList()). A null listNext_ instead means the zone
// is the last element of a list.
Zone* const Zone::NotOnList = reinterpret_cast<Zone*>(1);
     37 
// Initialize the shadow zone and set the initial JIT heap threshold to 80% of
// the process-wide executable-code limit.
ZoneAllocator::ZoneAllocator(JSRuntime* rt, Kind kind)
    : JS::shadow::Zone(rt, rt->gc.marker().tracer(), kind),
      jitHeapThreshold(size_t(jit::MaxCodeBytesPerProcess * 0.8)) {}

ZoneAllocator::~ZoneAllocator() {
#ifdef DEBUG
  // All tracked memory must have been released before the zone is destroyed.
  mallocTracker.checkEmptyOnDestroy();
  MOZ_ASSERT(gcHeapSize.bytes() == 0);
  MOZ_ASSERT(mallocHeapSize.bytes() == 0);
  MOZ_ASSERT(jitHeapSize.bytes() == 0);
#endif
}
     50 
// Update the (debug-only) memory tracker after a compacting GC may have moved
// the cells it tracks.
void ZoneAllocator::fixupAfterMovingGC() {
#ifdef DEBUG
  mallocTracker.fixupAfterMovingGC();
#endif
}

// Snapshot the GC/malloc/JIT heap sizes at the start of a major GC and reset
// the per-zone GC time accumulator for this collection.
void js::ZoneAllocator::updateSchedulingStateOnGCStart() {
  gcHeapSize.updateOnGCStart();
  mallocHeapSize.updateOnGCStart();
  jitHeapSize.updateOnGCStart();
  perZoneGCTime = mozilla::TimeDuration::Zero();
}
     63 
// Recompute the thresholds at which the next GC is triggered for this zone,
// based on the bytes retained by the last collection and the smoothed
// allocation/collection rates.
void js::ZoneAllocator::updateGCStartThresholds(GCRuntime& gc) {
  bool isAtomsZone = JS::Zone::from(this)->isAtomsZone();
  gcHeapThreshold.updateStartThreshold(
      gcHeapSize.retainedBytes(), smoothedAllocationRate.ref(),
      smoothedCollectionRate.ref(), gc.tunables, gc.schedulingState,
      isAtomsZone);

  mallocHeapThreshold.updateStartThreshold(mallocHeapSize.retainedBytes(),
                                           gc.tunables, gc.schedulingState);
}

// Set per-heap slice thresholds used to decide whether to trigger more
// incremental GC slices while a collection is in progress.
void js::ZoneAllocator::setGCSliceThresholds(GCRuntime& gc,
                                             bool waitingOnBGTask) {
  gcHeapThreshold.setSliceThreshold(this, gcHeapSize, gc.tunables,
                                    waitingOnBGTask);
  mallocHeapThreshold.setSliceThreshold(this, mallocHeapSize, gc.tunables,
                                        waitingOnBGTask);
  jitHeapThreshold.setSliceThreshold(this, jitHeapSize, gc.tunables,
                                     waitingOnBGTask);
}

// Clear the slice thresholds set by setGCSliceThresholds().
void js::ZoneAllocator::clearGCSliceThresholds() {
  gcHeapThreshold.clearSliceThreshold();
  mallocHeapThreshold.clearSliceThreshold();
  jitHeapThreshold.clearSliceThreshold();
}
     90 
// Account a reference from this zone to a piece of shared memory (e.g. a
// SharedArrayBuffer's buffer). Shared memory is reference counted per zone;
// only size *increases* are added to the malloc heap size, so each zone
// charges itself the largest size it has seen for |mem|. Returns false on
// OOM while growing the table.
bool ZoneAllocator::addSharedMemory(void* mem, size_t nbytes, MemoryUse use) {
  // nbytes can be zero here for SharedArrayBuffers.

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));

  auto ptr = sharedMemoryUseCounts.lookupForAdd(mem);
  MOZ_ASSERT_IF(ptr, ptr->value().use == use);

  // Create a zero-sized entry on first use; |add| updates |ptr| to point at
  // the new entry on success.
  if (!ptr && !sharedMemoryUseCounts.add(ptr, mem, gc::SharedMemoryUse(use))) {
    return false;
  }

  ptr->value().count++;

  // Allocations can grow, so add any increase over the previous size and record
  // the new size.
  if (nbytes > ptr->value().nbytes) {
    mallocHeapSize.addBytes(nbytes - ptr->value().nbytes);
    ptr->value().nbytes = nbytes;
  }

  maybeTriggerGCOnMalloc();

  return true;
}
    116 
// Drop one reference from this zone to shared memory |mem|. When the last
// reference goes away the recorded (maximum) size is subtracted from the
// malloc heap size and the table entry is removed. Only called while
// finalizing, so the entry must already exist.
void ZoneAllocator::removeSharedMemory(void* mem, size_t nbytes,
                                       MemoryUse use) {
  // nbytes can be zero here for SharedArrayBuffers.

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
  MOZ_ASSERT(CurrentThreadIsGCFinalizing());

  auto ptr = sharedMemoryUseCounts.lookup(mem);

  MOZ_ASSERT(ptr);
  MOZ_ASSERT(ptr->value().count != 0);
  MOZ_ASSERT(ptr->value().use == use);
  MOZ_ASSERT(ptr->value().nbytes >= nbytes);

  ptr->value().count--;
  if (ptr->value().count == 0) {
    // Remove the recorded maximum size, not the caller's |nbytes|, since
    // that is what was added across the lifetime of the entry.
    mallocHeapSize.removeBytes(ptr->value().nbytes, true);
    sharedMemoryUseCounts.remove(ptr);
  }
}
    137 
// Subtract |nbytes| of non-GC memory from this policy's zone accounting.
// For cell-associated allocations the retained-size figure is only updated
// while the GC is finalizing (i.e. during sweeping).
template <TrackingKind kind>
void js::TrackedAllocPolicy<kind>::decMemory(size_t nbytes) {
  bool updateRetainedSize = false;
  if constexpr (kind == TrackingKind::Cell) {
    // Only subtract freed cell memory from retained size for cell associations
    // during sweeping.
    JS::GCContext* gcx = TlsGCContext.get();
    updateRetainedSize = gcx->isFinalizing();
  }

  zone_->decNonGCMemory(this, nbytes, MemoryUse::TrackedAllocPolicy,
                        updateRetainedSize);
}

// Explicit instantiations for the two tracking kinds used by the engine.
namespace js {
template class TrackedAllocPolicy<TrackingKind::Zone>;
template class TrackedAllocPolicy<TrackingKind::Cell>;
}  // namespace js
    156 
// Construct a zone. Nursery allocation is initially enabled for all cell
// kinds; the nursery is told about this zone's flags at the end. The atoms
// zone, if created, must be the first zone created in the runtime.
JS::Zone::Zone(JSRuntime* rt, Kind kind)
    : ZoneAllocator(rt, kind),
      arenas(this),
      bufferAllocator(this),
      data(nullptr),
      suppressAllocationMetadataBuilder(false),
      allocNurseryObjects_(true),
      allocNurseryStrings_(true),
      allocNurseryBigInts_(true),
      allocNurseryGetterSetters_(true),
      pretenuring(this),
      crossZoneStringWrappers_(this),
      shapeZone_(this),
      gcScheduled_(false),
      gcScheduledSaved_(false),
      gcPreserveCode_(false),
      keepPropMapTables_(false),
      wasCollected_(false),
      listNext_(NotOnList),
      keptAliveSet(this),
      objectFuses(rt) {
  /* Ensure that there are no vtables to mess us up here. */
  MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone*>(this) ==
             static_cast<JS::shadow::Zone*>(this));
  MOZ_ASSERT_IF(isAtomsZone(), rt->gc.zones().empty());

  updateGCStartThresholds(rt->gc);
  rt->gc.nursery().setAllocFlagsForZone(this);
}
    186 
// Tear down a zone. By this point all weak maps, weak-pointer registrations
// and tracked allocations must already be gone (asserted below).
Zone::~Zone() {
  MOZ_ASSERT_IF(regExps_.ref(), regExps().empty());

  MOZ_ASSERT(numRealmsWithAllocMetadataBuilder_ == 0);

  DebugAPI::deleteDebugScriptMap(debugScriptMap);
  js_delete(finalizationObservers_.ref().release());

  MOZ_ASSERT(gcWeakMapList().isEmpty());
  MOZ_ASSERT(objectsWithWeakPointers.ref().empty());

  // If this was the designated system zone, clear the runtime's pointer to it.
  JSRuntime* rt = runtimeFromAnyThread();
  if (this == rt->gc.systemZone) {
    MOZ_ASSERT(isSystemZone());
    rt->gc.systemZone = nullptr;
  }

  js_delete(jitZone_.ref());

  if (preservedWrappers_) {
    // All preserved wrappers must have been released before destruction.
    MOZ_RELEASE_ASSERT(preservedWrappersCount_ == 0);
    js_free(preservedWrappers_);
  }
}
    211 
    212 bool Zone::init() {
    213  regExps_.ref() = make_unique<RegExpZone>(this);
    214  return !!regExps_.ref();
    215 }
    216 
// Directly set whether incremental pre-barriers are required for this zone.
void Zone::setNeedsIncrementalBarrier(bool needs) {
  needsIncrementalBarrier_ = needs;
}

// Transition the zone's GC state from |prev| to |next| and keep the
// incremental-barrier flag in sync: barriers are needed exactly while
// marking or verifying pre-barriers.
void Zone::changeGCState(GCState prev, GCState next) {
  MOZ_ASSERT(RuntimeHeapIsBusy());
  MOZ_ASSERT(gcState() == prev);
  MOZ_ASSERT_IF(isGCMarkingOrVerifyingPreBarriers(), needsIncrementalBarrier_);

  gcState_ = next;
  needsIncrementalBarrier_ = isGCMarkingOrVerifyingPreBarriers();
}
    229 
    230 template <class Pred>
    231 static void EraseIf(js::gc::EphemeronEdgeVector& entries, Pred pred) {
    232  auto* begin = entries.begin();
    233  auto* const end = entries.end();
    234 
    235  auto* newEnd = begin;
    236  for (auto* p = begin; p != end; p++) {
    237    if (!pred(*p)) {
    238      *newEnd++ = *p;
    239    }
    240  }
    241 
    242  size_t removed = end - newEnd;
    243  entries.shrinkBy(removed);
    244 }
    245 
// Sweep zone- and compartment-level tables after a minor (nursery) GC.
void Zone::sweepAfterMinorGC(JSTracer* trc) {
  crossZoneStringWrappers().sweepAfterMinorGC(trc);

  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->sweepAfterMinorGC(trc);
  }
}

// Trace cross-compartment wrapper edges weakly, dropping entries whose
// referents died.
void Zone::traceWeakCCWEdges(JSTracer* trc) {
  crossZoneStringWrappers().traceWeak(trc);
  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->traceCrossCompartmentObjectWrapperEdges(trc);
  }
}

/* static */
// During compacting GC, update all cross-compartment wrapper tables in every
// zone, since keys (wrapped values) may have moved.
void Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(JSTracer* trc) {
  MOZ_ASSERT(trc->runtime()->gc.isHeapCompacting());

  for (ZonesIter zone(trc->runtime(), WithAtoms); !zone.done(); zone.next()) {
    // Trace the wrapper map to update keys (wrapped values) in other
    // compartments that may have been moved.
    zone->crossZoneStringWrappers().traceWeak(trc);

    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
      comp->fixupCrossCompartmentObjectWrappersAfterMovingGC(trc);
    }
  }
}

// Discard all string wrappers for this zone; only valid while collecting.
void Zone::dropStringWrappersOnGC() {
  MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
  crossZoneStringWrappers().clear();
}
    280 
#ifdef JSGC_HASH_TABLE_CHECKS

// Debug check: verify wrapper tables contain no stale pointers after a
// moving GC.
void Zone::checkAllCrossCompartmentWrappersAfterMovingGC() {
  checkStringWrappersAfterMovingGC();
  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->checkObjectWrappersAfterMovingGC();
  }
}

// Debug check: verify every entry of the string wrapper map still refers to
// valid GC things after a moving GC.
void Zone::checkStringWrappersAfterMovingGC() {
  CheckTableAfterMovingGC(crossZoneStringWrappers(), [this](const auto& entry) {
    JSString* key = entry.key().get();
    CheckGCThingAfterMovingGC(key);  // Keys may be in a different zone.
    CheckGCThingAfterMovingGC(entry.value().unbarrieredGet(), this);
    return key;
  });
}
#endif
    299 
// Discard this zone's JIT code unless the zone is flagged to preserve it.
void Zone::maybeDiscardJitCode(JS::GCContext* gcx) {
  if (!isPreservingCode()) {
    forceDiscardJitCode(gcx);
  }
}
    305 
// Discard all JIT code in this zone: invalidate Ion code, discard inactive
// Baseline scripts, purge IC stubs and (optionally, per |options|) release
// JitScripts and reset allocation sites. Baseline stubs that are active on
// the stack are preserved by copying them to a fresh stub space first.
void Zone::forceDiscardJitCode(JS::GCContext* gcx,
                               const JitDiscardOptions& options) {
  if (!jitZone()) {
    return;
  }

  if (options.discardJitScripts) {
    lastDiscardedCodeTime_ = mozilla::TimeStamp::Now();
  }

  // Copy Baseline IC stubs that are active on the stack to a new LifoAlloc.
  // After freeing stub memory, these chunks are then transferred to the
  // zone-wide allocator.
  jit::ICStubSpace newStubSpace;

#ifdef DEBUG
  // Assert no ICScripts are marked as active.
  jitZone()->forEachJitScript([](jit::JitScript* jitScript) {
    MOZ_ASSERT(!jitScript->hasActiveICScript());
  });
#endif

  // Mark ICScripts on the stack as active and copy active Baseline stubs.
  jit::MarkActiveICScriptsAndCopyStubs(this, newStubSpace);

  // Invalidate all Ion code in this zone.
  jit::InvalidateAll(gcx, this);

  jitZone()->forEachJitScript<jit::IncludeDyingScripts>(
      [&](jit::JitScript* jitScript) {
        JSScript* script = jitScript->owningScript();
        jit::FinishInvalidation(gcx, script);

        // Discard baseline script if it's not marked as active.
        if (jitScript->hasBaselineScript() &&
            !jitScript->icScript()->active()) {
          jit::FinishDiscardBaselineScript(gcx, script);
        }

#ifdef JS_CACHEIR_SPEW
        maybeUpdateWarmUpCount(script);
#endif

        // Warm-up counter for scripts are reset on GC. After discarding code we
        // need to let it warm back up to get information such as which
        // opcodes are setting array holes or accessing getter properties.
        script->resetWarmUpCounterForGC();

        // Try to release the script's JitScript. This should happen after
        // releasing JIT code because we can't do this when the script still has
        // JIT code.
        if (options.discardJitScripts) {
          script->maybeReleaseJitScript(gcx);
          jitScript = script->maybeJitScript();
          if (!jitScript) {
            // If we successfully discarded the JIT script, try to discard the
            // ScriptCounts too.
            if (!script->realm()->collectCoverageForDebug() &&
                !gcx->runtime()->profilingScripts) {
              script->destroyScriptCounts();
            }
            script->realm()->removeFromCompileQueue(script);
            return;  // Continue script loop.
          }
        }

        // If we did not release the JitScript, we need to purge IC stubs
        // because the ICStubSpace will be purged below. Also purge all
        // trial-inlined ICScripts that are not active on the stack.
        jitScript->purgeInactiveICScripts();
        jitScript->purgeStubs(script, newStubSpace);

        if (options.resetNurseryAllocSites ||
            options.resetPretenuredAllocSites) {
          jitScript->resetAllocSites(options.resetNurseryAllocSites,
                                     options.resetPretenuredAllocSites);
        }

        // Reset the active flag of each ICScript.
        jitScript->resetAllActiveFlags();
      });

  // Also clear references to jit code from RegExpShared cells at this point.
  // This avoids holding onto ExecutablePools.
  for (auto regExp = cellIterUnsafe<RegExpShared>(); !regExp.done();
       regExp.next()) {
    regExp->discardJitCode();
  }

  /*
   * When scripts contain pointers to nursery things, the store buffer
   * can contain entries that point into the optimized stub space. Since
   * this method can be called outside the context of a GC, this situation
   * could result in us trying to mark invalid store buffer entries.
   *
   * Defer freeing any allocated blocks until after the next minor GC.
   */
  jitZone()->stubSpace()->freeAllAfterMinorGC(this);
  jitZone()->stubSpace()->transferFrom(newStubSpace);
  jitZone()->purgeIonCacheIRStubInfo();

  // Generate a profile marker
  if (gcx->runtime()->geckoProfiler().enabled()) {
    char discardingJitScript = options.discardJitScripts ? 'Y' : 'N';
    char discardingBaseline = 'Y';
    char discardingIon = 'Y';

    char discardingRegExp = 'Y';
    char discardingNurserySites = options.resetNurseryAllocSites ? 'Y' : 'N';
    char discardingPretenuredSites =
        options.resetPretenuredAllocSites ? 'Y' : 'N';

    char buf[100];
    SprintfLiteral(buf,
                   "JitScript:%c Baseline:%c Ion:%c "
                   "RegExp:%c NurserySites:%c PretenuredSites:%c",
                   discardingJitScript, discardingBaseline, discardingIon,
                   discardingRegExp, discardingNurserySites,
                   discardingPretenuredSites);
    gcx->runtime()->geckoProfiler().markEvent("DiscardJit", buf);
  }
}
    428 
// Reset nursery and/or pretenured allocation sites for every JitScript in the
// zone, invalidating the Ion code (and cancelling off-thread compiles) of any
// script whose sites actually changed, since that code baked in the old sites.
void JS::Zone::resetAllocSitesAndInvalidate(bool resetNurserySites,
                                            bool resetPretenuredSites) {
  MOZ_ASSERT(resetNurserySites || resetPretenuredSites);

  if (!jitZone()) {
    return;
  }

  JSContext* cx = runtime_->mainContextFromOwnThread();
  jitZone()->forEachJitScript<jit::IncludeDyingScripts>(
      [&](jit::JitScript* jitScript) {
        // resetAllocSites() returns true if any site was reset.
        if (jitScript->resetAllocSites(resetNurserySites,
                                       resetPretenuredSites)) {
          JSScript* script = jitScript->owningScript();
          CancelOffThreadIonCompile(script);
          if (script->hasIonScript()) {
            jit::Invalidate(cx, script,
                            /* resetUses = */ true,
                            /* cancelOffThread = */ true);
          }
        }
      });
}
    452 
// Weakly trace every JitScript in the zone, if a JitZone exists.
void JS::Zone::traceWeakJitScripts(JSTracer* trc) {
  if (jitZone()) {
    jitZone()->forEachJitScript(
        [&](jit::JitScript* jitScript) { jitScript->traceWeak(trc); });
  }
}
    459 
// Called when |delegate| stops being the delegate of |wrapper| while
// incremental barriers are active (but not during major collection itself).
void JS::Zone::beforeClearDelegateInternal(JSObject* wrapper,
                                           JSObject* delegate) {
  // 'delegate' is no longer the delegate of 'wrapper'.
  MOZ_ASSERT(js::gc::detail::GetDelegate(wrapper) == delegate);
  MOZ_ASSERT(needsIncrementalBarrier());
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));

  // |wrapper| might be a key in a weak map, so trigger a barrier to account for
  // the removal of the automatically added edge from delegate to wrapper.
  PreWriteBarrier(wrapper);
}
    471 
#ifdef JSGC_HASH_TABLE_CHECKS
// Debug check: verify the unique-ID table's keys are valid GC things in this
// zone after a moving GC.
void JS::Zone::checkUniqueIdTableAfterMovingGC() {
  CheckTableAfterMovingGC(uniqueIds(), [this](const auto& entry) {
    js::gc::CheckGCThingAfterMovingGC(entry.key(), this);
    return entry.key();
  });
}
#endif
    480 
// Lazily create this zone's JitZone. Returns null on OOM. Must only be
// called once (asserted), and — except for the portable baseline
// interpreter — only after the runtime's JitRuntime exists.
js::jit::JitZone* Zone::createJitZone(JSContext* cx) {
  MOZ_ASSERT(!jitZone_);
#ifndef ENABLE_PORTABLE_BASELINE_INTERP
  MOZ_ASSERT(cx->runtime()->hasJitRuntime());
#endif

  auto jitZone = cx->make_unique<jit::JitZone>(cx, allocNurseryStrings());
  if (!jitZone) {
    return nullptr;
  }

  // Ownership transfers to the zone; released in ~Zone via js_delete.
  jitZone_ = jitZone.release();
  return jitZone_;
}
    495 
    496 bool Zone::hasMarkedRealms() {
    497  for (RealmsInZoneIter realm(this); !realm.done(); realm.next()) {
    498    if (realm->marked()) {
    499      return true;
    500    }
    501  }
    502  return false;
    503 }
    504 
// Tell the Debugger API, for each realm with a live global, that this zone is
// participating in the current major GC.
void Zone::notifyObservingDebuggers() {
  AutoAssertNoGC nogc;
  MOZ_ASSERT(JS::RuntimeHeapIsCollecting(),
             "This method should be called during GC.");

  JSRuntime* rt = runtimeFromMainThread();

  for (RealmsInZoneIter realms(this); !realms.done(); realms.next()) {
    // Realms being collected may have no global yet (or anymore).
    GlobalObject* global = realms->unsafeUnbarrieredMaybeGlobal();
    if (!global) {
      continue;
    }

    DebugAPI::notifyParticipatesInGC(global, rt->gc.majorGCCount());
  }
}
    521 
// A zone is on a ZoneList unless listNext_ holds the NotOnList sentinel.
bool Zone::isOnList() const { return listNext_ != NotOnList; }

// Next zone in the ZoneList this zone is on (null at the tail).
Zone* Zone::nextZone() const {
  MOZ_ASSERT(isOnList());
  return listNext_;
}
    528 
// Prepare for a moving (compacting) GC by discarding all JIT code, which may
// hold pointers into cells that are about to move.
void Zone::prepareForMovingGC() {
  JS::GCContext* gcx = runtimeFromMainThread()->gcContext();

  MOZ_ASSERT(!isPreservingCode());
  forceDiscardJitCode(gcx);
}

// Fix up zone-level data structures after cells have been moved.
void Zone::fixupAfterMovingGC() {
  ZoneAllocator::fixupAfterMovingGC();
  shapeZone().fixupPropMapShapeTableAfterMovingGC();
}
    540 
// Discard the zone's atom cache and each realm's dtoa cache.
void Zone::purgeAtomCache() {
  atomCache_.ref().reset();

  // Also purge the dtoa caches so that subsequent lookups populate atom
  // cache too.
  for (RealmsInZoneIter r(this); !r.done(); r.next()) {
    r->dtoaCache.purge();
  }
}
    550 
// Accumulate memory-reporter sizes for this zone and everything it owns into
// the caller-provided counters. Each out-parameter is added to, not assigned.
void Zone::addSizeOfIncludingThis(
    mozilla::MallocSizeOf mallocSizeOf, size_t* zoneObject, JS::CodeSizes* code,
    size_t* regexpZone, size_t* jitZone, size_t* cacheIRStubs,
    size_t* objectFusesArg, size_t* uniqueIdMap, size_t* initialPropMapTable,
    size_t* shapeTables, size_t* atomsMarkBitmaps, size_t* compartmentObjects,
    size_t* crossCompartmentWrappersTables, size_t* compartmentsPrivateData,
    size_t* scriptCountsMapArg) {
  *zoneObject += mallocSizeOf(this);
  *regexpZone += regExps().sizeOfIncludingThis(mallocSizeOf);
  if (jitZone_) {
    jitZone_->addSizeOfIncludingThis(mallocSizeOf, code, jitZone, cacheIRStubs);
  }
  *objectFusesArg += objectFuses.sizeOfExcludingThis(mallocSizeOf);
  *uniqueIdMap += uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf);
  shapeZone().addSizeOfExcludingThis(mallocSizeOf, initialPropMapTable,
                                     shapeTables);
  *atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);
  *crossCompartmentWrappersTables +=
      crossZoneStringWrappers().sizeOfExcludingThis(mallocSizeOf);

  // Compartments add their own and their wrapper-table sizes.
  for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
    comp->addSizeOfIncludingThis(mallocSizeOf, compartmentObjects,
                                 crossCompartmentWrappersTables,
                                 compartmentsPrivateData);
  }

  if (scriptCountsMap) {
    // Count both the table itself and each ScriptCounts value it owns.
    *scriptCountsMapArg +=
        scriptCountsMap->shallowSizeOfIncludingThis(mallocSizeOf);
    for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
      *scriptCountsMapArg +=
          r.front().value()->sizeOfIncludingThis(mallocSizeOf);
    }
  }
}
    586 
// Last-chance allocation hook: give the runtime a chance to purge caches and
// retry. Returns null (allocation failure) when called off the runtime's
// owning thread.
void* ZoneAllocator::onOutOfMemory(js::AllocFunction allocFunc,
                                   arena_id_t arena, size_t nbytes,
                                   void* reallocPtr) {
  if (!js::CurrentThreadCanAccessRuntime(runtime_)) {
    return nullptr;
  }
  // The analysis sees that JSRuntime::onOutOfMemory could report an error,
  // which with a JSErrorInterceptor could GC. But we're passing a null cx (to
  // a default parameter) so the error will not be reported.
  JS::AutoSuppressGCAnalysis suppress;
  return runtimeFromMainThread()->onOutOfMemory(allocFunc, arena, nbytes,
                                                reallocPtr);
}

// Report an over-large allocation request; the null context means no JS
// exception is thrown.
void ZoneAllocator::reportAllocationOverflow() const {
  js::ReportAllocationOverflow(static_cast<JSContext*>(nullptr));
}
    604 
// Construct an empty list.
ZoneList::ZoneList() : head(nullptr), tail(nullptr) {}

// Construct a singleton list; |zone| must not already be on a list.
ZoneList::ZoneList(Zone* zone) : head(zone), tail(zone) {
  MOZ_RELEASE_ASSERT(!zone->isOnList());
  zone->listNext_ = nullptr;
}

// Lists must be emptied (see clear()) before destruction.
ZoneList::~ZoneList() { MOZ_ASSERT(isEmpty()); }
    613 
// Debug-only invariant check: head/tail are both null or both set, every
// linked zone reports being on a list, and tail is reachable from head with
// a null listNext_.
void ZoneList::check() const {
#ifdef DEBUG
  MOZ_ASSERT((head == nullptr) == (tail == nullptr));
  if (!head) {
    return;
  }

  Zone* zone = head;
  for (;;) {
    MOZ_ASSERT(zone && zone->isOnList());
    if (zone == tail) break;
    zone = zone->listNext_;
  }
  MOZ_ASSERT(!zone->listNext_);
#endif
}
    630 
// An empty list has a null head (and, by invariant, a null tail).
bool ZoneList::isEmpty() const { return head == nullptr; }

// First zone on the list; the list must be non-empty.
Zone* ZoneList::front() const {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(head->isOnList());
  return head;
}

// Add a single zone at the front of the list.
void ZoneList::prepend(Zone* zone) { prependList(ZoneList(zone)); }

// Add a single zone at the back of the list.
void ZoneList::append(Zone* zone) { appendList(ZoneList(zone)); }
    642 
    643 void ZoneList::prependList(ZoneList&& other) {
    644  check();
    645  other.check();
    646 
    647  if (other.isEmpty()) {
    648    return;
    649  }
    650 
    651  MOZ_ASSERT(tail != other.tail);
    652 
    653  if (!isEmpty()) {
    654    other.tail->listNext_ = head;
    655  } else {
    656    tail = other.tail;
    657  }
    658  head = other.head;
    659 
    660  other.head = nullptr;
    661  other.tail = nullptr;
    662 }
    663 
    664 void ZoneList::appendList(ZoneList&& other) {
    665  check();
    666  other.check();
    667 
    668  if (other.isEmpty()) {
    669    return;
    670  }
    671 
    672  MOZ_ASSERT(tail != other.tail);
    673 
    674  if (!isEmpty()) {
    675    tail->listNext_ = other.head;
    676  } else {
    677    head = other.head;
    678  }
    679  tail = other.tail;
    680 
    681  other.head = nullptr;
    682  other.tail = nullptr;
    683 }
    684 
    685 Zone* ZoneList::removeFront() {
    686  MOZ_ASSERT(!isEmpty());
    687  check();
    688 
    689  Zone* front = head;
    690  head = head->listNext_;
    691  if (!head) {
    692    tail = nullptr;
    693  }
    694 
    695  front->listNext_ = Zone::NotOnList;
    696 
    697  return front;
    698 }
    699 
    700 void ZoneList::clear() {
    701  while (!isEmpty()) {
    702    removeFront();
    703  }
    704 }
    705 
// Public API shim: register a weak cache to be swept with this zone.
JS_PUBLIC_API void JS::shadow::RegisterWeakCache(
    JS::Zone* zone, detail::WeakCacheBase* cachep) {
  zone->registerWeakCache(cachep);
}
    710 
// Trace this zone's roots during a major GC. Skipped for a marking tracer
// when the zone itself is not being marked in this collection.
void Zone::traceRootsInMajorGC(JSTracer* trc) {
  if (trc->isMarkingTracer() && !isGCMarking()) {
    return;
  }

  // Trace zone script-table roots. See comment below for justification re:
  // calling this only during major (non-nursery) collections.
  traceScriptTableRoots(trc);
}
    720 
// Trace roots held by the zone's script tables (profiling script counts and
// the debugger's DebugScript map). Only needed for major GCs because the
// keys are tenured BaseScripts (static-asserted below).
void Zone::traceScriptTableRoots(JSTracer* trc) {
  static_assert(std::is_convertible_v<BaseScript*, gc::TenuredCell*>,
                "BaseScript must not be nursery-allocated for script-table "
                "tracing to work");

  // Performance optimization: the script-table keys are JSScripts, which
  // cannot be in the nursery, so we can skip this tracing if we are only in a
  // minor collection. We static-assert this fact above.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());

  // N.B.: the script-table keys are weak *except* in an exceptional case: when
  // the --dump-bytecode command line option or the PCCount JSFriend API is
  // used, then the scripts for all counts must remain alive. We only trace
  // when the `trc->runtime()->profilingScripts` flag is set. This flag is
  // cleared in JSRuntime::destroyRuntime() during shutdown to ensure that
  // scripts are collected before the runtime goes away completely.
  if (scriptCountsMap && trc->runtime()->profilingScripts) {
    for (ScriptCountsMap::Range r = scriptCountsMap->all(); !r.empty();
         r.popFront()) {
      BaseScript* script = r.front().key();
      MOZ_ASSERT(script->hasScriptCounts());
      TraceRoot(trc, &script, "profilingScripts");
    }
  }

  // Trace the debugger's DebugScript weak map.
  if (debugScriptMap) {
    DebugAPI::traceDebugScriptMap(trc, debugScriptMap);
  }
}
    751 
// After a moving GC, update the script pointers used as keys in the zone's
// script maps.
void Zone::fixupScriptMapsAfterMovingGC(JSTracer* trc) {
  // Map entries are removed by BaseScript::finalize, but we need to update the
  // script pointers here in case they are moved by the GC.

  if (scriptCountsMap) {
    scriptCountsMap->traceWeak(trc);
  }

  if (scriptLCovMap) {
    scriptLCovMap->traceWeak(trc);
  }

#ifdef MOZ_VTUNE
  if (scriptVTuneIdMap) {
    scriptVTuneIdMap->traceWeak(trc);
  }
#endif

#ifdef JS_CACHEIR_SPEW
  if (scriptFinalWarmUpCountMap) {
    scriptFinalWarmUpCountMap->traceWeak(trc);
  }
#endif
}
    776 
#ifdef JSGC_HASH_TABLE_CHECKS
// Debug check: verify that every script-map key is a valid GC thing in this
// zone after a moving GC.
void Zone::checkScriptMapsAfterMovingGC() {
  // |debugScriptMap| is checked automatically because it is a WeakMap.

  if (scriptCountsMap) {
    CheckTableAfterMovingGC(*scriptCountsMap, [this](const auto& entry) {
      BaseScript* script = entry.key();
      CheckGCThingAfterMovingGC(script, this);
      return script;
    });
  }

  if (scriptLCovMap) {
    CheckTableAfterMovingGC(*scriptLCovMap, [this](const auto& entry) {
      BaseScript* script = entry.key();
      CheckGCThingAfterMovingGC(script, this);
      return script;
    });
  }

#  ifdef MOZ_VTUNE
  if (scriptVTuneIdMap) {
    CheckTableAfterMovingGC(*scriptVTuneIdMap, [this](const auto& entry) {
      BaseScript* script = entry.key();
      CheckGCThingAfterMovingGC(script, this);
      return script;
    });
  }
#  endif  // MOZ_VTUNE

#  ifdef JS_CACHEIR_SPEW
  if (scriptFinalWarmUpCountMap) {
    CheckTableAfterMovingGC(*scriptFinalWarmUpCountMap,
                            [this](const auto& entry) {
                              BaseScript* script = entry.key();
                              CheckGCThingAfterMovingGC(script, this);
                              return script;
                            });
  }
#  endif  // JS_CACHEIR_SPEW
}
#endif
    819 
// Release the ScriptCounts entries belonging to |realm|, clearing each
// script's hasScriptCounts flag. Entries for dead scripts or scripts with
// live Baseline code are skipped (see comments below).
void Zone::clearScriptCounts(Realm* realm) {
  if (!scriptCountsMap) {
    return;
  }

  // Clear all hasScriptCounts_ flags of BaseScript, in order to release all
  // ScriptCounts entries of the given realm.
  for (auto i = scriptCountsMap->modIter(); !i.done(); i.next()) {
    const HeapPtr<BaseScript*>& script = i.get().key();
    if (IsAboutToBeFinalized(script)) {
      // Dead scripts may be present during incremental GC until script
      // finalizers have been run.
      continue;
    }

    if (script->realm() != realm) {
      continue;
    }
    // We can't destroy the ScriptCounts yet if the script has Baseline code,
    // because Baseline code bakes in pointers to the counters. The ScriptCounts
    // will be destroyed in Zone::discardJitCode when discarding the JitScript.
    if (script->hasBaselineScript()) {
      continue;
    }
    script->clearHasScriptCounts();
    i.remove();
  }
}
    848 
// Remove all LCov (code coverage) map entries belonging to |realm|.
void Zone::clearScriptLCov(Realm* realm) {
  if (!scriptLCovMap) {
    return;
  }

  for (auto i = scriptLCovMap->modIter(); !i.done(); i.next()) {
    const HeapPtr<BaseScript*>& script = i.get().key();
    if (IsAboutToBeFinalized(script)) {
      // Dead scripts may be present during incremental GC until script
      // finalizers have been run.
      continue;
    }

    if (script->realm() == realm) {
      i.remove();
    }
  }
}
    867 
// Drop zone-level roots that must not keep things alive through a shutdown GC.
void Zone::clearRootsForShutdownGC() {
  // Finalization callbacks are not called if we're shutting down.
  if (finalizationObservers()) {
    finalizationObservers()->clearRecords();
  }

  clearKeptObjects();
}
    876 
// Finish per-realm roots for every realm in the zone.
void Zone::finishRoots() {
  for (RealmsInZoneIter r(this); !r.done(); r.next()) {
    r->finishRoots();
  }
}

// Trace the set of values kept alive by WeakRef.prototype.deref (the
// "kept objects" set from the FinalizationRegistry/WeakRef spec).
void Zone::traceKeptObjects(JSTracer* trc) { keptAliveSet.ref().trace(trc); }

// Add |target| to the kept-objects set. It must be a value that can be held
// weakly, and shared permanent symbols are excluded (asserted).
bool Zone::addToKeptObjects(HandleValue target) {
  MOZ_ASSERT(CanBeHeldWeakly(target));
  MOZ_ASSERT_IF(target.isSymbol(),
                !target.toSymbol()->isPermanentAndMayBeShared());

  return keptAliveSet.ref().put(target);
}

// Empty the kept-objects set (e.g. at the end of a job or on shutdown GC).
void Zone::clearKeptObjects() { keptAliveSet.ref().clear(); }
    894 
    895 bool Zone::ensureFinalizationObservers() {
    896  if (finalizationObservers_.ref()) {
    897    return true;
    898  }
    899 
    900  finalizationObservers_ = js::MakeUnique<FinalizationObservers>(this);
    901  return bool(finalizationObservers_.ref());
    902 }
    903 
// Register a tenured object whose trace hook clears weak pointers; it will be
// swept with the zone. Returns false on OOM.
bool Zone::registerObjectWithWeakPointers(JSObject* obj) {
  MOZ_ASSERT(obj->getClass()->hasTrace());
  MOZ_ASSERT(!IsInsideNursery(obj));
  return objectsWithWeakPointers.ref().append(obj);
}