tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Verifier.cpp (36904B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "mozilla/Sprintf.h"
      8 
      9 #include <algorithm>
     10 #include <utility>
     11 
     12 #ifdef MOZ_VALGRIND
     13 #  include <valgrind/memcheck.h>
     14 #endif
     15 
     16 #include "gc/GCInternals.h"
     17 #include "gc/GCLock.h"
     18 #include "gc/PublicIterators.h"
     19 #include "gc/WeakMap.h"
     20 #include "gc/Zone.h"
     21 #include "js/friend/DumpFunctions.h"  // js::DumpObject
     22 #include "js/HashTable.h"
     23 #include "vm/JSContext.h"
     24 
     25 #include "gc/ArenaList-inl.h"
     26 #include "gc/GC-inl.h"
     27 #include "gc/Heap-inl.h"
     28 #include "gc/Marking-inl.h"
     29 #include "gc/PrivateIterators-inl.h"
     30 
     31 using namespace js;
     32 using namespace js::gc;
     33 
     34 #ifdef JS_GC_ZEAL
     35 
     36 /*
     37 * Write barrier verification
     38 *
     39 * The next few functions are for write barrier verification.
     40 *
     41 * The VerifyBarriers function is a shorthand. It checks if a verification phase
     42 * is currently running. If not, it starts one. Otherwise, it ends the current
     43 * phase and starts a new one.
     44 *
     45 * The user can adjust the frequency of verifications, which causes
     46 * VerifyBarriers to be a no-op all but one out of N calls. However, if the
     47 * |always| parameter is true, it starts a new phase no matter what.
     48 *
     49 * Pre-Barrier Verifier:
     50 *   When StartVerifyBarriers is called, a snapshot is taken of all objects in
     51 *   the GC heap and saved in an explicit graph data structure. Later,
     52 *   EndVerifyBarriers traverses the heap again. Any pointer values that were in
     53 *   the snapshot and are no longer found must be marked; otherwise an assertion
     54 *   triggers. Note that we must not GC in between starting and finishing a
     55 *   verification phase.
     56 */
     57 
// A labelled edge of the snapshot graph: the referenced GC thing plus the
// trace name under which the edge was reported.
struct EdgeValue {
  JS::GCCellPtr thing;
  const char* label;
};
     62 
// A node of the snapshot graph. Nodes live contiguously in one memory block:
// each VerifyNode header is immediately followed by |count| EdgeValue
// entries, so edges[1] acts as a variable-length trailing array (the header
// embeds space for the first edge).
struct VerifyNode {
  JS::GCCellPtr thing;
  uint32_t count = 0;
  EdgeValue edges[1];
};
     68 
// Maps a GC cell's address to the VerifyNode representing it in the snapshot.
using NodeMap =
    HashMap<Cell*, VerifyNode*, DefaultHasher<Cell*>, SystemAllocPolicy>;
     71 
     72 /*
     73 * The verifier data structures are simple. The entire graph is stored in a
     74 * single block of memory. At the beginning is a VerifyNode for the root
     75 * node. It is followed by a sequence of EdgeValues--the exact number is given
     76 * in the node. After the edges come more nodes and their edges.
     77 *
     78 * The edgeptr and term fields are used to allocate out of the block of memory
     79 * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
     80 * we just abandon the verification.
     81 *
     82 * The nodemap field is a hashtable that maps from the address of the GC thing
     83 * to the VerifyNode that represents it.
     84 */
class js::VerifyPreTracer final : public JS::CallbackTracer {
  // A nursery collection would move cells and invalidate the snapshot, so
  // generational GC is disabled for the lifetime of the tracer.
  JS::AutoDisableGenerationalGC noggc;

  void onChild(JS::GCCellPtr thing, const char* name) override;

 public:
  /* The gcNumber when the verification began. */
  uint64_t number;

  /* This counts up to gcZealFrequency to decide whether to verify. */
  int count;

  /* This graph represents the initial GC "snapshot". */
  VerifyNode* curnode;
  VerifyNode* root;
  // Bump-allocation cursor and limit for the single graph buffer. An edgeptr
  // pinned at term signals that we ran out of space and must abandon
  // verification.
  char* edgeptr;
  char* term;
  NodeMap nodemap;

  explicit VerifyPreTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt, JS::TracerKind::Callback,
                           JS::WeakEdgeTraceAction::Skip),
        noggc(rt->mainContextFromOwnThread()),
        number(rt->gc.gcNumber()),
        count(0),
        curnode(nullptr),
        root(nullptr),
        edgeptr(nullptr),
        term(nullptr) {
    // We don't care about weak edges here. Since they are not marked they
    // cannot cause the problem that the pre-write barrier protects against.
  }

  ~VerifyPreTracer() { js_free(root); }
};
    120 
    121 inline bool IgnoreForPreBarrierVerifier(JSRuntime* runtime,
    122                                        JS::GCCellPtr thing) {
    123  // Skip things in other runtimes.
    124  if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime) {
    125    return true;
    126  }
    127 
    128  return false;
    129 }
    130 
/*
 * This function builds up the heap snapshot by adding edges to the current
 * node.
 */
void VerifyPreTracer::onChild(JS::GCCellPtr thing, const char* name) {
  MOZ_ASSERT(!IsInsideNursery(thing.asCell()));

  if (IgnoreForPreBarrierVerifier(runtime(), thing)) {
    return;
  }

  // Reserve space for one more edge. On overflow, pin edgeptr at term so
  // callers can detect the OOM condition and abandon verification.
  edgeptr += sizeof(EdgeValue);
  if (edgeptr >= term) {
    edgeptr = term;
    return;
  }

  // The slot just reserved lies directly after the current node's existing
  // edges in the contiguous graph buffer.
  VerifyNode* node = curnode;
  uint32_t i = node->count;

  node->edges[i].thing = thing;
  node->edges[i].label = name;
  node->count++;
}
    155 
/*
 * Allocate a VerifyNode for |thing| out of the graph buffer and register it
 * in the node map. Returns nullptr both when the node already exists and on
 * OOM; in the OOM case edgeptr is pinned at term so callers can tell the two
 * apart.
 */
static VerifyNode* MakeNode(VerifyPreTracer* trc, JS::GCCellPtr thing) {
  NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing.asCell());
  if (!p) {
    VerifyNode* node = (VerifyNode*)trc->edgeptr;
    // A fresh node has no edges yet, so it needs only the header minus the
    // EdgeValue slot the header embeds.
    trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
    if (trc->edgeptr >= trc->term) {
      trc->edgeptr = trc->term;
      return nullptr;
    }

    node->thing = thing;
    node->count = 0;
    if (!trc->nodemap.add(p, thing.asCell(), node)) {
      // Treat hash-table OOM the same as graph-buffer OOM.
      trc->edgeptr = trc->term;
      return nullptr;
    }

    return node;
  }
  return nullptr;
}
    177 
    178 static VerifyNode* NextNode(VerifyNode* node) {
    179  if (node->count == 0) {
    180    return (VerifyNode*)((char*)node + sizeof(VerifyNode) - sizeof(EdgeValue));
    181  }
    182 
    183  return (VerifyNode*)((char*)node + sizeof(VerifyNode) +
    184                       sizeof(EdgeValue) * (node->count - 1));
    185 }
    186 
/* Clear all mark bits in every arena of every zone yielded by ZonesIterT. */
template <typename ZonesIterT>
static void ClearMarkBits(GCRuntime* gc) {
  // This does not clear the mark bits for permanent atoms, whose arenas are
  // removed from the arena lists by GCRuntime::freezePermanentAtoms.

  for (ZonesIterT zone(gc); !zone.done(); zone.next()) {
    for (auto kind : AllAllocKinds()) {
      for (ArenaIter arena(zone, kind); !arena.done(); arena.next()) {
        arena->unmarkAll();
      }
    }
  }
}
    200 
void gc::GCRuntime::startVerifyPreBarriers() {
  // Only one verification may run at a time, and it must not overlap an
  // incremental GC, which owns the same incremental marking state.
  if (verifyPreData || isIncrementalGCInProgress()) {
    return;
  }

  JSContext* cx = rt->mainContextFromOwnThread();
  MOZ_ASSERT(!cx->suppressGC);

  number++;

  VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
  if (!trc) {
    return;
  }

  AutoPrepareForTracing prep(cx);

#  ifdef DEBUG
  for (AllZonesIter zone(this); !zone.done(); zone.next()) {
    zone->bufferAllocator.checkGCStateNotInUse();
  }
#  endif

  // Start from a clean slate so that only marking done during this
  // verification phase is observed at the end.
  ClearMarkBits<AllZonesIter>(this);

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::TRACE_HEAP);

  // The entire snapshot graph is bump-allocated out of this one fixed-size
  // buffer; if it fills up we abandon verification (see the oom label).
  const size_t size = 64 * 1024 * 1024;
  trc->root = (VerifyNode*)js_malloc(size);
  if (!trc->root) {
    goto oom;
  }
  trc->edgeptr = (char*)trc->root;
  trc->term = trc->edgeptr + size;

  /* Create the root node. */
  trc->curnode = MakeNode(trc, JS::GCCellPtr());

  MOZ_ASSERT(incrementalState == State::NotActive);
  incrementalState = State::MarkRoots;

  /* Make all the roots be edges emanating from the root node. */
  traceRuntime(trc, prep);

  VerifyNode* node;
  node = trc->curnode;
  // edgeptr == term is the OOM sentinel set by onChild/MakeNode.
  if (trc->edgeptr == trc->term) {
    goto oom;
  }

  /* For each edge, make a node for it if one doesn't already exist. */
  while ((char*)node < trc->edgeptr) {
    for (uint32_t i = 0; i < node->count; i++) {
      EdgeValue& e = node->edges[i];
      VerifyNode* child = MakeNode(trc, e.thing);
      if (child) {
        trc->curnode = child;
        JS::TraceChildren(trc, e.thing);
      }
      if (trc->edgeptr == trc->term) {
        goto oom;
      }
    }

    node = NextNode(node);
  }

  // Snapshot complete. Enter a fake incremental mark phase so that pre-write
  // barriers fire and mark values that get overwritten.
  verifyPreData = trc;
  incrementalState = State::Mark;
  marker().start();

  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    zone->changeGCState(Zone::NoGC, Zone::VerifyPreBarriers);
    zone->setNeedsIncrementalBarrier(true);
    zone->arenas.clearFreeLists();
  }

  return;

oom:
  incrementalState = State::NotActive;
  js_delete(trc);
  verifyPreData = nullptr;
}
    285 
    286 static bool IsMarkedOrAllocated(TenuredCell* cell) {
    287  return cell->isMarkedAny();
    288 }
    289 
/*
 * Tracer used by endVerifyPreBarriers to cancel live edges out against the
 * snapshot node currently being checked (see CheckEdgeTracer::onChild).
 */
struct CheckEdgeTracer final : public JS::CallbackTracer {
  // The snapshot node whose recorded edges are being compared.
  VerifyNode* node;
  explicit CheckEdgeTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt), node(nullptr) {}
  void onChild(JS::GCCellPtr thing, const char* name) override;
};
    296 
    297 static const uint32_t MAX_VERIFIER_EDGES = 1000;
    298 
    299 /*
    300 * This function is called by EndVerifyBarriers for every heap edge. If the edge
    301 * already existed in the original snapshot, we "cancel it out" by overwriting
    302 * it with nullptr. EndVerifyBarriers later asserts that the remaining
    303 * non-nullptr edges (i.e., the ones from the original snapshot that must have
    304 * been modified) must point to marked objects.
    305 */
    306 void CheckEdgeTracer::onChild(JS::GCCellPtr thing, const char* name) {
    307  if (IgnoreForPreBarrierVerifier(runtime(), thing)) {
    308    return;
    309  }
    310 
    311  /* Avoid n^2 behavior. */
    312  if (node->count > MAX_VERIFIER_EDGES) {
    313    return;
    314  }
    315 
    316  for (uint32_t i = 0; i < node->count; i++) {
    317    if (node->edges[i].thing == thing) {
    318      node->edges[i].thing = JS::GCCellPtr();
    319      return;
    320    }
    321  }
    322 }
    323 
    324 static bool IsMarkedOrAllocated(const EdgeValue& edge) {
    325  if (!edge.thing || IsMarkedOrAllocated(&edge.thing.asCell()->asTenured())) {
    326    return true;
    327  }
    328 
    329  // Permanent atoms and well-known symbols aren't marked during graph
    330  // traversal.
    331  if (edge.thing.is<JSString>() &&
    332      edge.thing.as<JSString>().isPermanentAtom()) {
    333    return true;
    334  }
    335  if (edge.thing.is<JS::Symbol>() &&
    336      edge.thing.as<JS::Symbol>().isWellKnownSymbol()) {
    337    return true;
    338  }
    339 
    340  return false;
    341 }
    342 
void gc::GCRuntime::endVerifyPreBarriers() {
  VerifyPreTracer* trc = verifyPreData;

  if (!trc) {
    return;
  }

  MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));

  // Now that barrier marking has finished, prepare the heap to allow this
  // method to trace cells and discover their outgoing edges.
  AutoPrepareForTracing prep(rt->mainContextFromOwnThread());

  // A zone created mid-verification has no snapshot to compare against, so
  // if we find one the edge check below must be skipped entirely.
  bool compartmentCreated = false;

  /* We need to disable barriers before tracing, which may invoke barriers. */
  for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
    if (zone->isVerifyingPreBarriers()) {
      zone->changeGCState(Zone::VerifyPreBarriers, Zone::NoGC);
    } else {
      compartmentCreated = true;
    }

    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
  }

  verifyPreData = nullptr;
  MOZ_ASSERT(incrementalState == State::Mark);
  incrementalState = State::NotActive;

  if (!compartmentCreated) {
    CheckEdgeTracer cetrc(rt);

    /* Start after the roots. */
    VerifyNode* node = NextNode(trc->root);
    while ((char*)node < trc->edgeptr) {
      cetrc.node = node;
      // Cancel out snapshot edges that still exist; any edge left over was
      // overwritten during the phase and must have been marked by a
      // pre-write barrier.
      JS::TraceChildren(&cetrc, node->thing);

      if (node->count <= MAX_VERIFIER_EDGES) {
        for (uint32_t i = 0; i < node->count; i++) {
          EdgeValue& edge = node->edges[i];
          if (!IsMarkedOrAllocated(edge)) {
            char msgbuf[1024];
            SprintfLiteral(
                msgbuf,
                "[barrier verifier] Unmarked edge: %s %p '%s' edge to %s %p",
                JS::GCTraceKindToAscii(node->thing.kind()),
                node->thing.asCell(), edge.label,
                JS::GCTraceKindToAscii(edge.thing.kind()), edge.thing.asCell());
            MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
            MOZ_CRASH();
          }
        }
      }

      node = NextNode(node);
    }
  }

  // Tear down the fake incremental mark started by startVerifyPreBarriers.
  marker().reset();
  resetDelayedMarking();

  for (AllZonesIter zone(this); !zone.done(); zone.next()) {
    zone->bufferAllocator.clearMarkStateAfterBarrierVerification();
  }

  js_delete(trc);
}
    413 
    414 /*** Barrier Verifier Scheduling ***/
    415 
    416 void gc::VerifyBarriers(JSRuntime* rt, VerifierType type) {
    417  if (type == PreBarrierVerifier) {
    418    rt->gc.verifyPreBarriers();
    419  }
    420 
    421  if (type == PostBarrierVerifier) {
    422    rt->gc.verifyPostBarriers();
    423  }
    424 }
    425 
    426 void gc::GCRuntime::verifyPreBarriers() {
    427  if (verifyPreData) {
    428    endVerifyPreBarriers();
    429  } else {
    430    startVerifyPreBarriers();
    431  }
    432 }
    433 
    434 void gc::GCRuntime::verifyPostBarriers() {
    435  if (hasZealMode(ZealMode::VerifierPost)) {
    436    clearZealMode(ZealMode::VerifierPost);
    437  } else {
    438    setZeal(uint8_t(ZealMode::VerifierPost), JS::ShellDefaultGCZealFrequency);
    439  }
    440 }
    441 
void gc::GCRuntime::maybeVerifyPreBarriers(bool always) {
  if (!hasZealMode(ZealMode::VerifierPre)) {
    return;
  }

  if (rt->mainContextFromOwnThread()->suppressGC) {
    return;
  }

  if (verifyPreData) {
    // Throttle: only end the running phase once every zealFrequency calls,
    // unless the caller forces it. Note the counter increments even on the
    // calls that bail out here.
    if (++verifyPreData->count < zealFrequency && !always) {
      return;
    }

    endVerifyPreBarriers();
  }

  // Immediately begin the next verification phase.
  startVerifyPreBarriers();
}
    461 
    462 void js::gc::MaybeVerifyBarriers(JSContext* cx, bool always) {
    463  GCRuntime* gc = &cx->runtime()->gc;
    464  gc->maybeVerifyPreBarriers(always);
    465 }
    466 
    467 void js::gc::GCRuntime::finishVerifier() {
    468  if (verifyPreData) {
    469    js_delete(verifyPreData.ref());
    470    verifyPreData = nullptr;
    471  }
    472 }
    473 
/* Hash policy for tables keyed on a chunk's address. */
struct GCChunkHasher {
  using Lookup = gc::ArenaChunk*;

  /*
   * Strip the always-zero low bits (chunks are chunk-aligned) for better
   * distribution after multiplying by the golden ratio.
   */
  static HashNumber hash(gc::ArenaChunk* chunk) {
    MOZ_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
    return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
  }

  static bool match(gc::ArenaChunk* k, gc::ArenaChunk* l) {
    MOZ_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
    MOZ_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
    return k == l;
  }
};
    492 
/*
 * Checks incremental marking against a non-incremental re-mark of the same
 * heap: nonIncrementalMark() records per-chunk mark bitmaps and validate()
 * compares them with the incremental results.
 */
class js::gc::MarkingValidator {
 public:
  explicit MarkingValidator(GCRuntime* gc);
  void nonIncrementalMark(AutoGCSession& session);
  void validate();

 private:
  GCRuntime* gc;
  // Set only after nonIncrementalMark runs to completion; validate() is a
  // no-op otherwise.
  bool initialized;

  // Per-chunk mark bitmaps saved by nonIncrementalMark for comparison.
  using BitmapMap = HashMap<ArenaChunk*, UniquePtr<ChunkMarkBitmap>,
                            GCChunkHasher, SystemAllocPolicy>;
  BitmapMap map;
};
    507 
/* Construction is cheap; all real work happens in nonIncrementalMark. */
js::gc::MarkingValidator::MarkingValidator(GCRuntime* gc)
    : gc(gc), initialized(false) {}
    510 
void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session) {
  /*
   * Perform a non-incremental mark for all collecting zones and record
   * the results for later comparison.
   */

  GCMarker* gcmarker = &gc->marker();

  MOZ_ASSERT(!gcmarker->isWeakMarking());

#  ifdef DEBUG
  // The test mark queue can cause spurious differences if the non-incremental
  // marking for validation happens before the full queue has been processed,
  // since the later part of the queue may mark things during sweeping. Disable
  // validation if there is anything left in the queue at this point.
  if (gc->testMarkQueueRemaining() > 0) {
    return;
  }
#  endif

  /* We require that the nursery is empty at the start of collection. */
  MOZ_ASSERT(gc->nursery().isEmpty());

  /* Wait for off-thread parsing which can allocate. */
  WaitForAllHelperThreads();

  gc->waitBackgroundAllocEnd();
  gc->waitBackgroundSweepEnd();

  /* Save existing mark bits. */
  {
    AutoLockGC lock(gc);
    for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
         chunk.next()) {
      // Bug 1842582: Allocate mark bit buffer in two stages to avoid alignment
      // restriction which we currently can't support.
      void* buffer = js_malloc(sizeof(ChunkMarkBitmap));
      if (!buffer) {
        return;
      }
      UniquePtr<ChunkMarkBitmap> entry(new (buffer) ChunkMarkBitmap);
      entry->copyFrom(chunk->markBits);
      if (!map.putNew(chunk, std::move(entry))) {
        return;
      }
    }
  }

  /*
   * Temporarily clear the weakmaps' mark flags for the compartments we are
   * collecting.
   */

  WeakMapColors markedWeakMaps;

  /*
   * For saving, smush all of the keys into one big table and split them back
   * up into per-zone tables when restoring.
   */
  gc::EphemeronEdgeTable savedEphemeronEdges;

  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    if (!WeakMapBase::saveZoneMarkedWeakMaps(zone, markedWeakMaps)) {
      return;
    }

    AutoEnterOOMUnsafeRegion oomUnsafe;
    for (auto r = zone->gcEphemeronEdges().all(); !r.empty(); r.popFront()) {
      MOZ_ASSERT(r.front().key()->zone() == zone);
      if (!savedEphemeronEdges.putNew(r.front().key(),
                                      std::move(r.front().value()))) {
        // Notice the std::move -- this could consume the moved-from value even
        // on failure, so it's unsafe to continue if putNew fails.
        oomUnsafe.crash("saving weak keys table for validator");
      }
    }

    zone->gcEphemeronEdges().clearAndCompact();
  }

  /*
   * After this point, the function must run to completion, so we shouldn't do
   * anything fallible.
   */
  initialized = true;

  /* Re-do all the marking, but non-incrementally. */
  js::gc::State state = gc->incrementalState;
  gc->incrementalState = State::MarkRoots;

  {
    gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::PREPARE);

    {
      gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::UNMARK);

      for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
        WeakMapBase::unmarkZone(zone);
      }

      MOZ_ASSERT(gcmarker->isDrained());

      ClearMarkBits<GCZonesIter>(gc);
    }
  }

  {
    gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::MARK);

    gc->traceRuntimeForMajorGC(gcmarker->tracer(), session);

    gc->incrementalState = State::Mark;
    gc->drainMarkStack();
  }

  // Mimic the sweep phase's weak/gray marking so the non-incremental result
  // is comparable with a full incremental cycle.
  gc->incrementalState = State::Sweep;
  {
    gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
    gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::MARK);

    gc->markAllWeakReferences();

    /* Update zone state for gray marking. */
    for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
      zone->changeGCState(zone->initialMarkingState(), Zone::MarkBlackAndGray);
    }

    /*
     * markAllGrayReferences may mark both gray and black, so it manages the
     * mark color internally.
     */
    gc->markAllGrayReferences(gcstats::PhaseKind::MARK_GRAY);

    AutoSetMarkColor setColorGray(*gcmarker, MarkColor::Gray);
    gc->markAllWeakReferences();

    /* Restore zone state. */
    for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
      zone->changeGCState(Zone::MarkBlackAndGray, zone->initialMarkingState());
    }
    MOZ_ASSERT(gc->marker().isDrained());
  }

  /* Take a copy of the non-incremental mark state and restore the original. */
  {
    AutoLockGC lock(gc);
    for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
         chunk.next()) {
      // Swap each live bitmap with the saved one: afterwards the map entry
      // holds the non-incremental result for validate() to read, and the
      // chunk is back to its in-progress incremental state.
      ChunkMarkBitmap* bitmap = &chunk->markBits;
      auto ptr = map.lookup(chunk);
      MOZ_RELEASE_ASSERT(ptr, "Chunk not found in map");
      ChunkMarkBitmap* entry = ptr->value().get();
      ChunkMarkBitmap temp;
      temp.copyFrom(*entry);
      entry->copyFrom(*bitmap);
      bitmap->copyFrom(temp);
    }
  }

  for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
    WeakMapBase::unmarkZone(zone);
    MOZ_ASSERT(zone->gcEphemeronEdges().empty(), "unmarkZone clears the map");
  }

  WeakMapBase::restoreMarkedWeakMaps(markedWeakMaps);

  // Split the saved ephemeron edges back out into their per-zone tables.
  for (auto r = savedEphemeronEdges.all(); !r.empty(); r.popFront()) {
    AutoEnterOOMUnsafeRegion oomUnsafe;
    Zone* zone = r.front().key()->asTenured().zone();
    if (!zone->gcEphemeronEdges().putNew(r.front().key(),
                                         std::move(r.front().value()))) {
      oomUnsafe.crash("restoring weak keys table for validator");
    }
  }

#  ifdef DEBUG
  MOZ_ASSERT(gc->testMarkQueueRemaining() == 0);
  MOZ_ASSERT(gc->queueMarkColor.isNothing());
#  endif

  gc->incrementalState = state;
}
    693 
void js::gc::MarkingValidator::validate() {
  /*
   * Validates the incremental marking for a single compartment by comparing
   * the mark bits to those previously recorded for a non-incremental mark.
   */

  if (!initialized) {
    return;
  }

  MOZ_ASSERT(!gc->marker().isWeakMarking());

  gc->waitBackgroundSweepEnd();

  bool ok = true;
  AutoLockGC lock(gc->rt);
  for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
    BitmapMap::Ptr ptr = map.lookup(chunk);
    if (!ptr) {
      continue; /* Allocated after we did the non-incremental mark. */
    }

    // nonIncrementalMark left the non-incremental results in the map; the
    // chunk itself carries the incremental results.
    ChunkMarkBitmap* bitmap = ptr->value().get();
    ChunkMarkBitmap* incBitmap = &chunk->markBits;

    for (size_t i = 0; i < ArenasPerChunk; i++) {
      size_t pageIndex = ArenaChunk::arenaToPageIndex(i);
      if (chunk->decommittedPages[pageIndex]) {
        continue;
      }
      Arena* arena = &chunk->arenas[i];
      if (!arena->allocated()) {
        continue;
      }
      if (!arena->zone()->isGCSweeping()) {
        continue;
      }

      // Walk every cell slot in the arena.
      AllocKind kind = arena->getAllocKind();
      uintptr_t thing = arena->thingsStart();
      uintptr_t end = arena->thingsEnd();
      while (thing < end) {
        auto* cell = reinterpret_cast<TenuredCell*>(thing);

        /*
         * If a non-incremental GC wouldn't have collected a cell, then an
         * incremental GC should not collect it either. However incremental
         * marking is conservative and is allowed to mark things that
         * non-incremental marking would not have marked.
         *
         * Further, incremental marking should not result in a cell that is
         * "less marked" than non-incremental marking. For example where
         * non-incremental marking would have marked a cell black incremental
         * marking is not allowed to mark it gray, since the cycle collector
         * could then consider paths through it to be part of garbage
         * cycles. It's OK for a cell that would have been marked gray by
         * non-incremental marking to be marked black by incremental marking.
         *
         * It's OK for a cell that would not be marked by non-incremental
         * marking to end up gray. Since the cell is unreachable according to
         * the non-incremental marking then the cycle collector will not find
         * it. This can happen when a barrier marks a weak map key black and the
         * map is gray, resulting in the value being marked gray.
         *
         * In summary:
         *
         *   Non-incremental   Incremental:   Outcome:
         *       result:         result:
         *
         *   White              White         OK
         *                      Gray          OK, conservative
         *                      Black         OK, conservative
         *   Gray               White         Fail
         *                      Gray          OK
         *                      Black         OK, conservative
         *   Black              White         Fail
         *                      Gray          Fail
         *                      Black         OK
         */

        CellColor incColor = TenuredCell::getColor(incBitmap, cell);
        CellColor nonIncColor = TenuredCell::getColor(bitmap, cell);
        if (incColor < nonIncColor) {
          ok = false;
          fprintf(stderr,
                  "%p: cell was marked %s, but would be marked %s by "
                  "non-incremental marking\n",
                  cell, CellColorName(incColor), CellColorName(nonIncColor));
#  ifdef DEBUG
          cell->dump();
          fprintf(stderr, "\n");
#  endif
        }

        thing += Arena::thingSize(kind);
      }
    }
  }

  MOZ_RELEASE_ASSERT(ok, "Incremental marking verification failed");
}
    795 
    796 void GCRuntime::computeNonIncrementalMarkingForValidation(
    797    AutoGCSession& session) {
    798  MOZ_ASSERT(!markingValidator);
    799  if (isIncremental && hasZealMode(ZealMode::IncrementalMarkingValidator)) {
    800    markingValidator = js_new<MarkingValidator>(this);
    801  }
    802  if (markingValidator) {
    803    markingValidator->nonIncrementalMark(session);
    804  }
    805 }
    806 
    807 void GCRuntime::validateIncrementalMarking() {
    808  if (markingValidator) {
    809    markingValidator->validate();
    810  }
    811 }
    812 
/* Free the marking validator, if any (js_delete tolerates null). */
void GCRuntime::finishMarkingValidation() {
  js_delete(markingValidator.ref());
  markingValidator = nullptr;
}
    817 
    818 #endif /* JS_GC_ZEAL */
    819 
    820 #if defined(JS_GC_ZEAL) || defined(DEBUG)
    821 
/*
 * Base class for heap-consistency tracers: traverses the heap from the
 * roots, calls checkCell for every cell visited exactly once, and records
 * the path taken so failures can report a chain of referrers.
 */
class HeapCheckTracerBase : public JS::CallbackTracer {
 public:
  explicit HeapCheckTracerBase(JSRuntime* rt, JS::TraceOptions options);
  // Trace everything reachable; returns false if we ran out of memory.
  bool traceHeap(AutoHeapSession& session);
  // Subclass hook, called once per cell; returning false stops tracing
  // through that cell.
  virtual bool checkCell(Cell* cell, const char* name) = 0;

 protected:
  void dumpCellInfo(Cell* cell);
  void dumpCellPath(const char* name);

  // The cell currently being traced from, or null when tracing roots.
  Cell* parentCell() {
    return parentIndex == -1 ? nullptr : stack[parentIndex].thing.asCell();
  }

  // Failure count maintained by subclasses' checkCell implementations.
  size_t failures;

 private:
  void onChild(JS::GCCellPtr thing, const char* name) override;

  // A pending cell plus the edge name and the stack index of its referrer,
  // forming an intrusive linked path back to a root.
  struct WorkItem {
    WorkItem(JS::GCCellPtr thing, const char* name, int parentIndex)
        : thing(thing),
          name(name),
          parentIndex(parentIndex),
          processed(false) {}

    JS::GCCellPtr thing;
    const char* name;
    int parentIndex;
    bool processed;
  };

  JSRuntime* rt;
  bool oom;
  HashSet<Cell*, DefaultHasher<Cell*>, SystemAllocPolicy> visited;
  Vector<WorkItem, 0, SystemAllocPolicy> stack;
  int parentIndex;
};
    860 
// Start with an empty traversal: nothing visited, no failures, and
// parentIndex of -1 meaning "at the roots".
HeapCheckTracerBase::HeapCheckTracerBase(JSRuntime* rt,
                                         JS::TraceOptions options)
    : CallbackTracer(rt, JS::TracerKind::HeapCheck, options),
      failures(0),
      rt(rt),
      oom(false),
      parentIndex(-1) {}
    868 
    869 void HeapCheckTracerBase::onChild(JS::GCCellPtr thing, const char* name) {
    870  Cell* cell = thing.asCell();
    871  if (visited.lookup(cell)) {
    872    return;
    873  }
    874 
    875  if (!visited.put(cell)) {
    876    oom = true;
    877    return;
    878  }
    879 
    880  if (!checkCell(cell, name)) {
    881    // Don't trace through known bad cell.
    882    return;
    883  }
    884 
    885  // Don't trace into GC things owned by another runtime.
    886  if (cell->runtimeFromAnyThread() != rt) {
    887    return;
    888  }
    889 
    890  WorkItem item(thing, name, parentIndex);
    891  if (!stack.append(item)) {
    892    oom = true;
    893  }
    894 }
    895 
// Trace the runtime's roots and then perform a depth-first traversal of the
// reachable heap. Returns false if an allocation failed during the walk.
bool HeapCheckTracerBase::traceHeap(AutoHeapSession& session) {
  // The analysis thinks that traceRuntime might GC by calling a GC callback.
  JS::AutoSuppressGCAnalysis nogc;
  if (!rt->isBeingDestroyed()) {
    rt->gc.traceRuntime(this, session);
  }

  // Each stack item is visited twice: on the first visit it is marked
  // processed, parentIndex is pointed at it (so dumpCellPath() can
  // reconstruct the path through the heap), and its children are pushed via
  // onChild(); on the second visit it is popped.
  while (!stack.empty() && !oom) {
    WorkItem item = stack.back();
    if (item.processed) {
      stack.popBack();
    } else {
      MOZ_ASSERT(stack.length() <= INT_MAX);
      parentIndex = int(stack.length()) - 1;
      stack.back().processed = true;
      TraceChildren(this, item.thing);
    }
  }

  return !oom;
}
    917 
    918 void HeapCheckTracerBase::dumpCellInfo(Cell* cell) {
    919  auto kind = cell->getTraceKind();
    920  JSObject* obj =
    921      kind == JS::TraceKind::Object ? static_cast<JSObject*>(cell) : nullptr;
    922 
    923  fprintf(stderr, "%s %s", CellColorName(cell->color()),
    924          GCTraceKindToAscii(kind));
    925  if (obj) {
    926    fprintf(stderr, " %s", obj->getClass()->name);
    927  }
    928  fprintf(stderr, " %p", cell);
    929  if (obj) {
    930    fprintf(stderr, " in compartment %p", obj->compartment());
    931  }
    932  fprintf(stderr, " in zone %p", cell->zone());
    933 }
    934 
    935 void HeapCheckTracerBase::dumpCellPath(const char* name) {
    936  for (int index = parentIndex; index != -1; index = stack[index].parentIndex) {
    937    const WorkItem& parent = stack[index];
    938    Cell* cell = parent.thing.asCell();
    939    fprintf(stderr, "  from ");
    940    dumpCellInfo(cell);
    941    fprintf(stderr, " %s edge\n", name);
    942    name = parent.name;
    943  }
    944  fprintf(stderr, "  from root %s\n", name);
    945 }
    946 
// Tracer that walks the whole heap and checks every cell pointer according
// to the kind of check requested (see cellIsValid()).
class CheckHeapTracer final : public HeapCheckTracerBase {
 public:
  // Moving: check cells with IsGCThingValidAfterMovingGC.
  // NonMoving: check cells are not forwarded.
  // VerifyPostBarriers: check no reachable cell (or linear string chars) is
  //   in the collected region of the nursery.
  enum GCType { Moving, NonMoving, VerifyPostBarriers };

  explicit CheckHeapTracer(JSRuntime* rt, GCType type);
  void check(AutoHeapSession& session);

 private:
  bool checkCell(Cell* cell, const char* name) override;
  bool cellIsValid(Cell* cell);
  GCType gcType;
};
    959 
// Trace both weak map keys and values so the entire heap is covered.
CheckHeapTracer::CheckHeapTracer(JSRuntime* rt, GCType type)
    : HeapCheckTracerBase(rt, JS::WeakMapTraceAction::TraceKeysAndValues),
      gcType(type) {}
    963 
    964 inline static bool IsValidGCThingPointer(Cell* cell) {
    965  return (uintptr_t(cell) & CellAlignMask) == 0;
    966 }
    967 
    968 bool CheckHeapTracer::checkCell(Cell* cell, const char* name) {
    969  if (cellIsValid(cell)) {
    970    return true;
    971  }
    972 
    973  failures++;
    974  fprintf(stderr, "Bad pointer %p\n", cell);
    975  dumpCellPath(name);
    976  return false;
    977 }
    978 
    979 bool CheckHeapTracer::cellIsValid(Cell* cell) {
    980  if (!IsValidGCThingPointer(cell)) {
    981    return false;
    982  }
    983 
    984  if (gcType == GCType::Moving) {
    985    return IsGCThingValidAfterMovingGC(cell);
    986  }
    987 
    988  if (gcType == GCType::NonMoving) {
    989    return !cell->isForwarded();
    990  }
    991 
    992  MOZ_ASSERT(gcType == GCType::VerifyPostBarriers);
    993 
    994  // No reachable Cell* should be in the collected part of the nursery.
    995  if (runtime()->gc.nursery().inCollectedRegion(cell)) {
    996    return false;
    997  }
    998 
    999  // String data should also not be in the collected part of nursery.
   1000  if (cell->is<JSString>() && cell->as<JSString>()->isLinear()) {
   1001    if (cell->as<JSString>()->asLinear().hasCharsInCollectedNurseryRegion()) {
   1002      return false;
   1003    }
   1004  }
   1005 
   1006  return true;
   1007 }
   1008 
   1009 void CheckHeapTracer::check(AutoHeapSession& session) {
   1010  if (!traceHeap(session)) {
   1011    return;
   1012  }
   1013 
   1014  if (failures) {
   1015    fprintf(stderr, "Heap check: %zu failure(s)\n", failures);
   1016  }
   1017  MOZ_RELEASE_ASSERT(failures == 0);
   1018 }
   1019 
   1020 void js::gc::CheckHeapAfterGC(JSRuntime* rt) {
   1021  MOZ_ASSERT(!rt->gc.isBackgroundDecommitting());
   1022 
   1023  AutoTraceSession session(rt);
   1024  CheckHeapTracer::GCType gcType;
   1025 
   1026  if (rt->gc.nursery().isEmpty()) {
   1027    gcType = CheckHeapTracer::GCType::Moving;
   1028  } else {
   1029    gcType = CheckHeapTracer::GCType::NonMoving;
   1030  }
   1031 
   1032  CheckHeapTracer tracer(rt, gcType);
   1033  tracer.check(session);
   1034 }
   1035 
// Tracer that checks there are no edges from black-marked cells to
// gray-marked cells anywhere in the heap.
class CheckGrayMarkingTracer final : public HeapCheckTracerBase {
 public:
  explicit CheckGrayMarkingTracer(JSRuntime* rt);
  bool check(AutoHeapSession& session);

 private:
  bool checkCell(Cell* cell, const char* name) override;
  bool isBlackToGrayEdge(Cell* parent, Cell* child);
};
   1045 
CheckGrayMarkingTracer::CheckGrayMarkingTracer(JSRuntime* rt)
    : HeapCheckTracerBase(rt, JS::TraceOptions(JS::WeakMapTraceAction::Skip,
                                               JS::WeakEdgeTraceAction::Skip)) {
  // Weak gray->black edges are allowed, so weak maps and weak edges are
  // skipped entirely: only strong edges are checked.
}
   1051 
   1052 bool CheckGrayMarkingTracer::checkCell(Cell* cell, const char* name) {
   1053  Cell* parent = parentCell();
   1054  if (!parent) {
   1055    return true;
   1056  }
   1057 
   1058  if (!isBlackToGrayEdge(parent, cell)) {
   1059    return true;
   1060  }
   1061 
   1062  failures++;
   1063 
   1064  fprintf(stderr, "Found black to gray edge to ");
   1065  dumpCellInfo(cell);
   1066  fprintf(stderr, "\n");
   1067  dumpCellPath(name);
   1068 
   1069 #  ifdef DEBUG
   1070  if (parent->is<JSObject>()) {
   1071    fprintf(stderr, "\nSource: ");
   1072    DumpObject(parent->as<JSObject>(), stderr);
   1073  }
   1074  if (cell->is<JSObject>()) {
   1075    fprintf(stderr, "\nTarget: ");
   1076    DumpObject(cell->as<JSObject>(), stderr);
   1077  }
   1078 #  endif
   1079 
   1080  return false;
   1081 }
   1082 
   1083 bool CheckGrayMarkingTracer::isBlackToGrayEdge(Cell* parent, Cell* child) {
   1084  return parent->isMarkedBlack() && child->isMarkedGray();
   1085 }
   1086 
   1087 bool CheckGrayMarkingTracer::check(AutoHeapSession& session) {
   1088  if (!traceHeap(session)) {
   1089    return true;  // Ignore failure.
   1090  }
   1091 
   1092  return failures == 0;
   1093 }
   1094 
// Public entry point: verify that no black cell points at a gray cell.
// Returns true if the invariant holds or cannot be checked.
JS_PUBLIC_API bool js::CheckGrayMarkingState(JSRuntime* rt) {
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
  // If the gray bits aren't valid there is nothing meaningful to check.
  if (!rt->gc.areGrayBitsValid()) {
    return true;
  }

  gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
  AutoTraceSession session(rt);
  CheckGrayMarkingTracer tracer(rt);

  return tracer.check(session);
}
   1108 
   1109 static JSObject* MaybeGetDelegate(Cell* cell) {
   1110  if (!cell->is<JSObject>()) {
   1111    return nullptr;
   1112  }
   1113 
   1114  JSObject* object = cell->as<JSObject>();
   1115  return js::UncheckedUnwrapWithoutExpose(object);
   1116 }
   1117 
// Check the marking invariants for a single weak map entry while the map's
// zone is being marked. Prints diagnostics and returns false on violation.
bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key,
                                      Cell* maybeValue) {
  bool ok = true;

  Zone* zone = map->zone();
  MOZ_RELEASE_ASSERT(CurrentThreadCanAccessZone(zone));
  MOZ_RELEASE_ASSERT(zone->isGCMarking());

  // If the map is owned by an object, that object must be in the same zone.
  JSObject* object = map->memberOf;
  if (object) {
    MOZ_RELEASE_ASSERT(object->zone() == zone);
  }

  // Debugger weak maps can have keys in different zones.
  Zone* keyZone = key->zoneFromAnyThread();
  if (!map->allowKeysInOtherZones()) {
    MOZ_RELEASE_ASSERT(keyZone == zone || keyZone->isAtomsZone());
  }

  // Values must be in the map's zone or the atoms zone.
  if (maybeValue) {
    Zone* valueZone = maybeValue->zoneFromAnyThread();
    MOZ_RELEASE_ASSERT(valueZone == zone || valueZone->isAtomsZone());
  }

  // The owning object must be marked the same color as the map itself.
  if (object && object->color() != map->mapColor()) {
    fprintf(stderr, "WeakMap object is marked differently to the map\n");
    fprintf(stderr, "(map %p is %s, object %p is %s)\n", map,
            CellColorName(map->mapColor()), object,
            CellColorName(object->color()));
    ok = false;
  }

  // Values belonging to other runtimes or in uncollected zones are treated as
  // black.
  JSRuntime* mapRuntime = zone->runtimeFromAnyThread();
  auto effectiveColor = [=](Cell* cell) -> CellColor {
    if (!cell || cell->runtimeFromAnyThread() != mapRuntime) {
      return CellColor::Black;
    }
    if (cell->zoneFromAnyThread()->isGCMarkingOrSweeping()) {
      return cell->color();
    }
    return CellColor::Black;
  };

  CellColor valueColor = effectiveColor(maybeValue);
  CellColor keyColor = effectiveColor(key);

  // The value must be marked at least as much as min(map color, key color).
  if (valueColor < std::min(map->mapColor(), keyColor)) {
    fprintf(stderr, "WeakMap value is less marked than map and key\n");
    fprintf(stderr, "(map %p is %s, key %p is %s, value %p is %s)\n", map,
            CellColorName(map->mapColor()), key, CellColorName(keyColor),
            maybeValue, CellColorName(valueColor));
#  ifdef DEBUG
    // Dump the key, its delegate (if any) and the value to aid debugging.
    fprintf(stderr, "Key:\n");
    key->dump();
    if (auto* delegate = MaybeGetDelegate(key); delegate) {
      fprintf(stderr, "Delegate:\n");
      delegate->dump();
    }
    if (maybeValue) {
      fprintf(stderr, "Value:\n");
      maybeValue->dump();
    }
#  endif

    ok = false;
  }

  JSObject* delegate = MaybeGetDelegate(key);
  if (!delegate) {
    return ok;
  }

  // The key must be marked at least as much as min(map color, delegate
  // color).
  if (keyColor < std::min(map->mapColor(), delegateColor)) {
    fprintf(stderr, "WeakMap key is less marked than map or delegate\n");
    fprintf(stderr, "(map %p is %s, delegate %p is %s, key %p is %s)\n", map,
            CellColorName(map->mapColor()), delegate,
            CellColorName(delegateColor), key, CellColorName(keyColor));
    ok = false;
  }

  // Symbols key must be marked in the atom marking bitmap for the zone.
  if (key->is<JS::Symbol>()) {
    GCRuntime* gc = &mapRuntime->gc;
    CellColor bitmapColor =
        gc->atomMarking.getAtomMarkColor(zone, key->as<JS::Symbol>());
    if (bitmapColor < keyColor) {
      fprintf(stderr, "Atom marking bitmap is less marked than symbol key %p\n",
              key);
      fprintf(stderr, "(key %p is %s, bitmap is %s)\n", key,
              CellColorName(keyColor), CellColorName(bitmapColor));
      ok = false;
    }
  }

  return ok;
}
   1217 
   1218 #endif  // defined(JS_GC_ZEAL) || defined(DEBUG)
   1219 
   1220 #ifdef JS_GC_ZEAL
void GCRuntime::verifyPostBarriers(AutoHeapSession& session) {
  // Walk the entire heap to check for pointers into the nursery that should
  // have been tracked by the store buffer.
  // check() release-asserts on failure, so this crashes if any are found.
  CheckHeapTracer tracer(rt, CheckHeapTracer::GCType::VerifyPostBarriers);
  tracer.check(session);
}
   1227 
void GCRuntime::checkHeapBeforeMinorGC(AutoHeapSession& session) {
  // Similar to verifyPostBarriers but run before a minor GC. Checks for tenured
  // dependent strings pointing to nursery chars but not in the store buffer. If
  // a tenured string cell points to a nursery string cell, then it will be in
  // the store buffer and is fine. So this looks for tenured strings that point
  // to tenured strings but contain nursery data.

  for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
    if (zone->isGCFinished()) {
      continue;  // Don't access zones that are being swept off thread.
    }

    // Scan every string cell in every string arena of this zone.
    for (ArenaIter aiter(zone, gc::AllocKind::STRING); !aiter.done();
         aiter.next()) {
      for (ArenaCellIterUnderGC cell(aiter.get()); !cell.done(); cell.next()) {
        if (cell->as<JSString>()->isDependent()) {
          JSDependentString* str = &cell->as<JSString>()->asDependent();
          // Tenured -> tenured dependent strings must not reference chars in
          // the region of the nursery about to be collected.
          if (str->isTenured() && str->base()->isTenured()) {
            MOZ_RELEASE_ASSERT(!str->hasCharsInCollectedNurseryRegion());
          }
        }
      }
    }
  }
}
   1253 #endif
   1254 
// Return whether an arbitrary pointer is within a cell with the given
// traceKind. Only for assertions and js::debug::* APIs.
bool GCRuntime::isPointerWithinTenuredCell(void* ptr, JS::TraceKind traceKind) {
  AutoLockGC lock(this);
  for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
    MOZ_ASSERT(!chunk->isNurseryChunk());
    // Does the pointer fall within this chunk's arena storage?
    if (ptr >= &chunk->arenas[0] && ptr < &chunk->arenas[ArenasPerChunk]) {
      // Round the pointer down to the start of its containing arena.
      auto* arena = reinterpret_cast<Arena*>(uintptr_t(ptr) & ~ArenaMask);
      if (!arena->allocated()) {
        return false;
      }

      // TraceKind::Null acts as a wildcard: any allocated cell matches.
      return traceKind == JS::TraceKind::Null ||
             MapAllocToTraceKind(arena->getAllocKind()) == traceKind;
    }
  }

  return false;
}
   1274 
   1275 bool GCRuntime::isPointerWithinBufferAlloc(void* ptr) {
   1276  for (AllZonesIter zone(this); !zone.done(); zone.next()) {
   1277    if (zone->bufferAllocator.isPointerWithinBuffer(ptr)) {
   1278      return true;
   1279    }
   1280  }
   1281 
   1282  return false;
   1283 }