tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Heap.h (23070B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifndef gc_Heap_h
      8 #define gc_Heap_h
      9 
     10 #include "mozilla/DebugOnly.h"
     11 
     12 #include "gc/AllocKind.h"
     13 #include "gc/Memory.h"
     14 #include "gc/Pretenuring.h"
     15 #include "js/HeapAPI.h"
     16 #include "js/TypeDecls.h"
     17 #include "util/Poison.h"
     18 
     19 namespace js {
     20 
     21 class AutoLockGC;
     22 class AutoLockGCBgAlloc;
     23 class Nursery;
     24 
     25 namespace gc {
     26 
     27 class Arena;
     28 class ArenaCellSet;
     29 class ArenaList;
     30 class GCRuntime;
     31 class MarkingValidator;
     32 class SortedArenaList;
     33 class TenuredCell;
     34 
// Cells are aligned to CellAlignShift, so the largest tagged null pointer is
// the value with all CellAlignShift low (tag) bits set:
const uintptr_t LargestTaggedNullCellPointer = (1 << CellAlignShift) - 1;

// Arenas are carved into cell-aligned things, so the arena size must be an
// exact multiple of the cell alignment.
static_assert(ArenaSize % CellAlignBytes == 0,
              "Arena size must be a multiple of cell alignment");
     40 
     41 /*
     42 * A FreeSpan represents a contiguous sequence of free cells in an Arena. It
     43 * can take two forms.
     44 *
     45 * - In an empty span, |first| and |last| are both zero.
     46 *
     47 * - In a non-empty span, |first| is the address of the first free thing in the
     48 *   span, and |last| is the address of the last free thing in the span.
     49 *   Furthermore, the memory pointed to by |last| holds a FreeSpan structure
     50 *   that points to the next span (which may be empty); this works because
     51 *   sizeof(FreeSpan) is less than the smallest thingSize.
     52 */
     53 class FreeSpan {
     54  friend class Arena;
     55  friend class ArenaCellIter;
     56  friend class ArenaFreeCellIter;
     57 
     58  uint16_t first;
     59  uint16_t last;
     60 
     61 public:
     62  // This inits just |first| and |last|; if the span is non-empty it doesn't
     63  // do anything with the next span stored at |last|.
     64  void initBounds(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
     65    checkRange(firstArg, lastArg, arena);
     66    first = firstArg;
     67    last = lastArg;
     68  }
     69 
     70  void initAsEmpty() {
     71    first = 0;
     72    last = 0;
     73  }
     74 
     75  // This sets |first| and |last|, and also sets the next span stored at
     76  // |last| as empty. (As a result, |firstArg| and |lastArg| cannot represent
     77  // an empty span.)
     78  void initFinal(uintptr_t firstArg, uintptr_t lastArg, const Arena* arena) {
     79    initBounds(firstArg, lastArg, arena);
     80    FreeSpan* last = nextSpanUnchecked(arena);
     81    last->initAsEmpty();
     82    checkSpan(arena);
     83  }
     84 
     85  bool isEmpty() const { return !first; }
     86 
     87  Arena* getArenaUnchecked() { return reinterpret_cast<Arena*>(this); }
     88  inline Arena* getArena();
     89 
     90  static size_t offsetOfFirst() { return offsetof(FreeSpan, first); }
     91 
     92  static size_t offsetOfLast() { return offsetof(FreeSpan, last); }
     93 
     94  // Like nextSpan(), but no checking of the following span is done.
     95  FreeSpan* nextSpanUnchecked(const Arena* arena) const {
     96    MOZ_ASSERT(arena && !isEmpty());
     97    return reinterpret_cast<FreeSpan*>(uintptr_t(arena) + last);
     98  }
     99 
    100  const FreeSpan* nextSpan(const Arena* arena) const {
    101    checkSpan(arena);
    102    return nextSpanUnchecked(arena);
    103  }
    104 
    105  MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
    106    // Eschew the usual checks, because this might be the placeholder span.
    107    // If this is somehow an invalid, non-empty span, checkSpan() will catch it.
    108    Arena* arena = getArenaUnchecked();
    109    checkSpan(arena);
    110    uintptr_t thing = uintptr_t(arena) + first;
    111    if (first < last) {
    112      // We have space for at least two more things, so do a simple
    113      // bump-allocate.
    114      first += thingSize;
    115    } else if (MOZ_LIKELY(first)) {
    116      // The last space points to the next free span (which may be empty).
    117      const FreeSpan* next = nextSpan(arena);
    118      first = next->first;
    119      last = next->last;
    120    } else {
    121      return nullptr;  // The span is empty.
    122    }
    123    checkSpan(arena);
    124    DebugOnlyPoison(reinterpret_cast<void*>(thing),
    125                    JS_ALLOCATED_TENURED_PATTERN, thingSize,
    126                    MemCheckKind::MakeUndefined);
    127    return reinterpret_cast<TenuredCell*>(thing);
    128  }
    129 
    130  inline void checkSpan(const Arena* arena) const;
    131  inline void checkRange(uintptr_t first, uintptr_t last,
    132                         const Arena* arena) const;
    133 };
    134 
    135 /*
    136 * Arenas are the allocation units of the tenured heap in the GC. An arena
    137 * is 4kiB in size and 4kiB-aligned. It starts with several header fields
    138 * followed by some bytes of padding. The remainder of the arena is filled
    139 * with GC things of a particular AllocKind. The padding ensures that the
    140 * GC thing array ends exactly at the end of the arena:
    141 *
    142 * <----------------------------------------------> = ArenaSize bytes
    143 * +---------------+---------+----+----+-----+----+
    144 * | header fields | padding | T0 | T1 | ... | Tn |
    145 * +---------------+---------+----+----+-----+----+
    146 * <-------------------------> = first thing offset
    147 */
class alignas(ArenaSize) Arena {
  // Per-AllocKind lookup tables backing the static accessors below
  // (thingSize(), firstThingOffset(), thingsPerArena()).
  static JS_PUBLIC_DATA const uint8_t ThingSizes[];
  static JS_PUBLIC_DATA const uint8_t FirstThingOffsets[];
  static JS_PUBLIC_DATA const uint8_t ThingsPerArena[];
  /*
   * The first span of free things in the arena. Most of these spans are
   * stored as offsets in free regions of the data array, and most operations
   * on FreeSpans take an Arena pointer for safety. However, the FreeSpans
   * used for allocation are stored here, at the start of an Arena, and use
   * their own address to grab the next span within the same Arena.
   */
  FreeSpan firstFreeSpan;

  /*
   * One of the AllocKind constants or AllocKind::LIMIT when the arena does
   * not contain any GC things and is on the list of empty arenas in the GC
   * chunk.
   */
  AllocKind allocKind;

  /*
   * The zone that this Arena is contained within, when allocated. The offset
   * of this field must match the ArenaZoneOffset stored in js/HeapAPI.h,
   * as is statically asserted below.
   */
  JS::Zone* zone_;

 public:
  /*
   * Arena::next has two purposes: when unallocated, it points to the next
   * available Arena. When allocated, it points to the next Arena in the same
   * zone and with the same alloc kind.
   */
  Arena* next;

 private:
  static const size_t ARENA_FLAG_BITS = 4;
  static const size_t DELAYED_MARKING_ARENA_BITS =
      JS_BITS_PER_WORD - ArenaShift;
  static_assert(
      ARENA_FLAG_BITS + DELAYED_MARKING_ARENA_BITS <= JS_BITS_PER_WORD,
      "Not enough space to pack flags and nextDelayedMarkingArena_ pointer "
      "into a single word.");

  /*
   * True until the arena is swept for the first time.
   */
  size_t isNewlyCreated_ : 1;

  /*
   * When recursive marking uses too much stack we delay marking of arenas and
   * link them into a list for later processing. This uses the following fields.
   */
  size_t onDelayedMarkingList_ : 1;
  size_t hasDelayedBlackMarking_ : 1;
  size_t hasDelayedGrayMarking_ : 1;
  // Arena address shifted right by ArenaShift (arenas are ArenaSize-aligned,
  // so no bits are lost); 0 terminates the delayed marking list.
  size_t nextDelayedMarkingArena_ : DELAYED_MARKING_ARENA_BITS;

  union {
    /*
     * For arenas in zones other than the atoms zone, if non-null, points
     * to an ArenaCellSet that represents the set of cells in this arena
     * that are in the nursery's store buffer.
     */
    ArenaCellSet* bufferedCells_;

    /*
     * For arenas in the atoms zone, the starting index into zone atom
     * marking bitmaps (see AtomMarking.h) of the things in this zone.
     * Atoms never refer to nursery things, so no store buffer index is
     * needed.
     */
    size_t atomBitmapStart_;
  };

 public:
  /*
   * The size of data should be |ArenaSize - offsetof(data)|, but the offset
   * is not yet known to the compiler, so we do it by hand. |firstFreeSpan|
   * takes up 8 bytes on 64-bit due to alignment requirements; the rest are
   * obvious. This constant is stored in js/HeapAPI.h.
   */
  uint8_t data[ArenaSize - ArenaHeaderSize];

  // Create a free arena in uninitialized committed memory.
  void init(GCRuntime* gc, JS::Zone* zone, AllocKind kind);

  JS::Zone* zone() const { return zone_; }

  // Sets |firstFreeSpan| to the Arena's entire valid range, and
  // also sets the next span stored at |firstFreeSpan.last| as empty.
  void setAsFullyUnused() {
    AllocKind kind = getAllocKind();
    firstFreeSpan.first = firstThingOffset(kind);
    firstFreeSpan.last = lastThingOffset(kind);
    FreeSpan* last = firstFreeSpan.nextSpanUnchecked(this);
    last->initAsEmpty();
  }

  // Unregister the associated atom marking bitmap index for an arena in the
  // atoms zone.
  inline void freeAtomMarkingBitmapIndex(GCRuntime* gc, const AutoLockGC& lock);

  // Return an allocated arena to its unallocated (free) state.
  // For arenas in the atoms zone, freeAtomMarkingBitmapIndex() must be called
  // first.
  inline void release();

  uintptr_t address() const {
    checkAddress();
    return uintptr_t(this);
  }

  inline void checkAddress() const;

  inline ArenaChunk* chunk() const;

  // Return whether this arena is in the 'allocated' state, meaning that it has
  // been initialized by calling init() and has a zone and alloc kind set.
  // This is mostly used for assertions.
  bool allocated() const;

  AllocKind getAllocKind() const {
    MOZ_ASSERT(IsValidAllocKind(allocKind));
    return allocKind;
  }

  FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }

  static size_t thingSize(AllocKind kind) { return ThingSizes[size_t(kind)]; }
  static size_t thingsPerArena(AllocKind kind) {
    return ThingsPerArena[size_t(kind)];
  }
  static size_t thingsSpan(AllocKind kind) {
    return thingsPerArena(kind) * thingSize(kind);
  }

  static size_t firstThingOffset(AllocKind kind) {
    return FirstThingOffsets[size_t(kind)];
  }
  // The last thing ends exactly at the end of the arena (see the diagram
  // above), so its offset is always ArenaSize minus one thing size.
  static size_t lastThingOffset(AllocKind kind) {
    return ArenaSize - thingSize(kind);
  }

  size_t getThingSize() const { return thingSize(getAllocKind()); }
  size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
  size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
  size_t getFirstThingOffset() const {
    return firstThingOffset(getAllocKind());
  }

  uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
  uintptr_t thingsEnd() const { return address() + ArenaSize; }

  bool isEmpty() const {
    // Arena is empty if its first span covers the whole arena.
    firstFreeSpan.checkSpan(this);
    AllocKind kind = getAllocKind();
    return firstFreeSpan.first == firstThingOffset(kind) &&
           firstFreeSpan.last == lastThingOffset(kind);
  }

  bool isFull() const { return firstFreeSpan.isEmpty(); }
  bool hasFreeThings() const { return !isFull(); }

  // Count free things by walking the free span list.
  size_t numFreeThings(size_t thingSize) const {
    firstFreeSpan.checkSpan(this);
    size_t numFree = 0;
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      // |last| is the offset of the final free thing in the span, so the
      // span holds (last - first) / thingSize + 1 things.
      numFree += (span->last - span->first) / thingSize + 1;
    }
    return numFree;
  }

  size_t countFreeCells() { return numFreeThings(getThingSize()); }
  size_t countUsedCells() { return getThingsPerArena() - countFreeCells(); }

#ifdef DEBUG
  // Return whether |thing| (an address inside this arena) lies within one of
  // the free spans. Spans are visited in increasing address order, so we can
  // stop at the first span starting beyond |thing|.
  bool inFreeList(uintptr_t thing) {
    uintptr_t base = address();
    const FreeSpan* span = &firstFreeSpan;
    for (; !span->isEmpty(); span = span->nextSpan(this)) {
      // If the thing comes before the current span, it's not free.
      if (thing < base + span->first) {
        return false;
      }

      // If we find it before the end of the span, it's free.
      if (thing <= base + span->last) {
        return true;
      }
    }
    return false;
  }
#endif

  static bool isAligned(uintptr_t thing, size_t thingSize) {
    /* Things end at the arena end, so check alignment from the tail. */
    uintptr_t tailOffset = ArenaSize - (thing & ArenaMask);
    return tailOffset % thingSize == 0;
  }

  bool isNewlyCreated() const { return isNewlyCreated_; }

  bool onDelayedMarkingList() const { return onDelayedMarkingList_; }

  Arena* getNextDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return reinterpret_cast<Arena*>(nextDelayedMarkingArena_ << ArenaShift);
  }

  void setNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(!onDelayedMarkingList_);
    MOZ_ASSERT(!hasDelayedBlackMarking_);
    MOZ_ASSERT(!hasDelayedGrayMarking_);
    MOZ_ASSERT(!nextDelayedMarkingArena_);
    onDelayedMarkingList_ = 1;
    if (arena) {
      nextDelayedMarkingArena_ = arena->address() >> ArenaShift;
    }
  }

  void updateNextDelayedMarkingArena(Arena* arena) {
    MOZ_ASSERT(!(uintptr_t(arena) & ArenaMask));
    MOZ_ASSERT(onDelayedMarkingList_);
    nextDelayedMarkingArena_ = arena ? arena->address() >> ArenaShift : 0;
  }

  bool hasDelayedMarking(MarkColor color) const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return color == MarkColor::Black ? hasDelayedBlackMarking_
                                     : hasDelayedGrayMarking_;
  }

  bool hasAnyDelayedMarking() const {
    MOZ_ASSERT(onDelayedMarkingList_);
    return hasDelayedBlackMarking_ || hasDelayedGrayMarking_;
  }

  void setHasDelayedMarking(MarkColor color, bool value) {
    MOZ_ASSERT(onDelayedMarkingList_);
    if (color == MarkColor::Black) {
      hasDelayedBlackMarking_ = value;
    } else {
      hasDelayedGrayMarking_ = value;
    }
  }

  void clearDelayedMarkingState() {
    MOZ_ASSERT(onDelayedMarkingList_);
    onDelayedMarkingList_ = 0;
    hasDelayedBlackMarking_ = 0;
    hasDelayedGrayMarking_ = 0;
    nextDelayedMarkingArena_ = 0;
  }

  // Accessors for the union members above; see their comments for which
  // arenas use which member.
  inline ArenaCellSet*& bufferedCells();
  inline size_t& atomBitmapStart();

  template <typename T, FinalizeKind finalizeKind>
  size_t finalize(JS::GCContext* gcx, AllocKind thingKind, size_t thingSize);

  static void staticAsserts();
  static void checkLookupTables();

  void unmarkAll();
  void unmarkPreMarkedFreeCells();

  void arenaAllocatedDuringGC();

#ifdef DEBUG
  void checkNoMarkedFreeCells();
  void checkAllCellsMarkedBlack();
#endif

#if defined(DEBUG) || defined(JS_GC_ZEAL)
  void checkNoMarkedCells();
#endif
};
    429 
    430 inline Arena* FreeSpan::getArena() {
    431  Arena* arena = getArenaUnchecked();
    432  arena->checkAddress();
    433  return arena;
    434 }
    435 
// Debug-only sanity check of this span and, if present, the span it links to.
// A no-op in release builds.
inline void FreeSpan::checkSpan(const Arena* arena) const {
#ifdef DEBUG
  if (!first) {
    // An empty span must have both offsets zero.
    MOZ_ASSERT(!first && !last);
    return;
  }

  arena->checkAddress();
  checkRange(first, last, arena);

  // If there's a following span, it must have a higher address,
  // and the gap must be at least 2 * thingSize.
  const FreeSpan* next = nextSpanUnchecked(arena);
  if (next->first) {
    checkRange(next->first, next->last, arena);
    size_t thingSize = arena->getThingSize();
    // |last| is the offset of the last free thing in this span; at least one
    // allocated thing must separate adjacent spans, so the next span starts
    // no earlier than two thing sizes after |last|.
    MOZ_ASSERT(last + 2 * thingSize <= next->first);
  }
#endif
}
    456 
    457 inline void FreeSpan::checkRange(uintptr_t first, uintptr_t last,
    458                                 const Arena* arena) const {
    459 #ifdef DEBUG
    460  MOZ_ASSERT(arena);
    461  MOZ_ASSERT(first <= last);
    462  AllocKind thingKind = arena->getAllocKind();
    463  MOZ_ASSERT(first >= Arena::firstThingOffset(thingKind));
    464  MOZ_ASSERT(last <= Arena::lastThingOffset(thingKind));
    465  MOZ_ASSERT((last - first) % Arena::thingSize(thingKind) == 0);
    466 #endif
    467 }
    468 
    469 /*
    470 * A chunk in the tenured heap. ArenaChunks contain arenas and associated data
    471 * structures (mark bitmap, delayed marking state).
    472 */
class ArenaChunk : public ArenaChunkBase {
  Arena arenas[ArenasPerChunk];

  friend class GCRuntime;
  friend class MarkingValidator;

 public:
  // Chunks are ChunkSize-aligned, so masking off the low bits of any interior
  // address yields the owning chunk.
  static ArenaChunk* fromAddress(uintptr_t addr) {
    addr &= ~ChunkMask;
    return reinterpret_cast<ArenaChunk*>(addr);
  }

  // Return whether |addr| points past the chunk header into the usable part
  // of its chunk. Nursery chunks reserve only a ChunkBase header; tenured
  // chunks reserve the full header preceding |arenas|.
  static bool withinValidRange(uintptr_t addr) {
    uintptr_t offset = addr & ChunkMask;
    if (ArenaChunk::fromAddress(addr)->isNurseryChunk()) {
      return offset >= sizeof(ChunkBase) && offset < ChunkSize;
    }
    return offset >= offsetof(ArenaChunk, arenas) && offset < ChunkSize;
  }

  // Index of |arena| within its chunk's |arenas| array.
  static size_t arenaIndex(const Arena* arena) {
    uintptr_t addr = arena->address();
    MOZ_ASSERT(!ArenaChunk::fromAddress(addr)->isNurseryChunk());
    MOZ_ASSERT(withinValidRange(addr));
    uintptr_t offset = addr & ChunkMask;
    return (offset - offsetof(ArenaChunk, arenas)) >> ArenaShift;
  }

  static size_t pageIndex(const Arena* arena) {
    return arenaToPageIndex(arenaIndex(arena));
  }

  static size_t arenaToPageIndex(size_t arenaIndex) {
    static_assert((offsetof(ArenaChunk, arenas) % PageSize) == 0,
                  "First arena should be on a page boundary");
    return arenaIndex / ArenasPerPage;
  }

  static size_t pageToArenaIndex(size_t pageIndex) {
    return pageIndex * ArenasPerPage;
  }

  explicit ArenaChunk(JSRuntime* runtime) : ArenaChunkBase(runtime) {}

  uintptr_t address() const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    MOZ_ASSERT(!(addr & ChunkMask));
    return addr;
  }

  bool isEmpty() const { return info.numArenasFree == ArenasPerChunk; }

  bool hasAvailableArenas() const { return !isFull(); }
  bool isFull() const { return info.numArenasFree == 0; }

  // Nursery chunks are distinguished by having a non-null store buffer
  // pointer.
  bool isNurseryChunk() const { return storeBuffer; }

  Arena* allocateArena(GCRuntime* gc, JS::Zone* zone, AllocKind kind);

  void releaseArena(GCRuntime* gc, Arena* arena, const AutoLockGC& lock);

  void decommitFreeArenas(GCRuntime* gc, const bool& cancel, AutoLockGC& lock);
  [[nodiscard]] bool decommitOneFreePage(GCRuntime* gc, size_t pageIndex,
                                         const AutoLockGC& lock);
  void decommitAllArenas();

  // This will decommit each unused not-already decommitted arena. It performs a
  // system call for each arena but is only used during OOM.
  void decommitFreeArenasWithoutUnlocking(const AutoLockGC& lock);

  static void* allocate(GCRuntime* gc, StallAndRetry stallAndRetry);
  static ArenaChunk* init(void* ptr, GCRuntime* gc, bool allMemoryCommitted);

  /* Unlink and return the freeArenasHead. */
  Arena* fetchNextFreeArena(GCRuntime* gc);

  // Merge arenas freed by background sweeping into the main free arenas bitmap.
  void mergePendingFreeArenas(GCRuntime* gc, const AutoLockGC& lock);

#ifdef DEBUG
  void verify() const;
#else
  void verify() const {}
#endif

 private:
  void commitOnePage(GCRuntime* gc);

  void updateFreeCountsAfterAlloc(GCRuntime* gc, size_t numArenasAlloced,
                                  const AutoLockGC& lock);
  void updateFreeCountsAfterFree(GCRuntime* gc, size_t numArenasFreed,
                                 bool wasCommitted, const AutoLockGC& lock);

  // Like updateFreeCountsAfterFree, but operates on the GCRuntime's current
  // chunk. Does not take the lock unless the chunk is full or if we need to
  // move the chunk between pools.
  void updateCurrentChunkAfterAlloc(GCRuntime* gc);

  // Check if all arenas in a page are free.
  bool canDecommitPage(size_t pageIndex) const;

  // Check that the arena from freeArenasList is located in a free page.
  // Unlike the isPageFree(size_t) version, this isPageFree(Arena*) checks that
  // the following arenas from the freeArenasHead are also located in the same
  // page, to avoid touching arenas mprotect'ed during compaction in debug
  // builds.
  bool isPageFree(const Arena* arena) const;

  void* pageAddress(size_t pageIndex) {
    return &arenas[pageToArenaIndex(pageIndex)];
  }
};
    585 
    586 inline void Arena::checkAddress() const {
    587  mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
    588  MOZ_ASSERT(addr);
    589  MOZ_ASSERT(!(addr & ArenaMask));
    590  MOZ_ASSERT(ArenaChunk::withinValidRange(addr));
    591 }
    592 
    593 inline ArenaChunk* Arena::chunk() const {
    594  return ArenaChunk::fromAddress(address());
    595 }
    596 
// Cell header stored before all nursery cells.
struct alignas(gc::CellAlignBytes) NurseryCellHeader {
  // Stores the AllocSite pointer with the trace kind packed into the low
  // bits (see MakeValue/allocSite below; the zone is reached through the
  // alloc site).
  const uintptr_t allocSiteAndTraceKind;

  // We only need to store a subset of trace kinds so this doesn't cover the
  // full range.
  static const uintptr_t TraceKindMask = 3;

  // Pack |site| and |kind| into one word; |site|'s low bits must be clear so
  // the kind can occupy them.
  static uintptr_t MakeValue(AllocSite* const site, JS::TraceKind kind) {
    MOZ_ASSERT(uintptr_t(kind) <= TraceKindMask);
    MOZ_ASSERT((uintptr_t(site) & TraceKindMask) == 0);
    return uintptr_t(site) | uintptr_t(kind);
  }

  inline NurseryCellHeader(AllocSite* site, JS::TraceKind kind)
      : allocSiteAndTraceKind(MakeValue(site, kind)) {}

  // Recover the AllocSite pointer by masking off the trace kind bits.
  AllocSite* allocSite() const {
    return reinterpret_cast<AllocSite*>(allocSiteAndTraceKind & ~TraceKindMask);
  }

  JS::Zone* zone() const { return allocSite()->zone(); }

  JS::TraceKind traceKind() const {
    return JS::TraceKind(allocSiteAndTraceKind & TraceKindMask);
  }

  // The header is laid out immediately before the cell in memory.
  static const NurseryCellHeader* from(const Cell* cell) {
    MOZ_ASSERT(IsInsideNursery(cell));
    return reinterpret_cast<const NurseryCellHeader*>(
        uintptr_t(cell) - sizeof(NurseryCellHeader));
  }
};
    631 
// Verify that every trace kind stored in a nursery cell header fits within
// the bits reserved by TraceKindMask.
static_assert(uintptr_t(JS::TraceKind::Object) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::String) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::BigInt) <=
              NurseryCellHeader::TraceKindMask);
static_assert(uintptr_t(JS::TraceKind::GetterSetter) <=
              NurseryCellHeader::TraceKindMask);
    640 
    641 } /* namespace gc */
    642 
    643 namespace debug {
    644 
// Utility functions meant to be called from an interactive debugger.
// Result codes for GetMarkInfo() below; see its comment for full semantics.
enum class MarkInfo : int {
  BLACK = 0,     // Tenured cell marked black.
  GRAY = 1,      // Tenured cell marked gray.
  UNMARKED = -1,  // Tenured cell with no mark set.
  NURSERY_FROMSPACE = -2,  // Pointer into the nursery's from-space.
  NURSERY_TOSPACE = -3,  // Unused if semispace disabled.
  UNKNOWN = -4,  // Non-Cell pointer not in the nursery (malloced/stack/etc.).
  BUFFER = -5,   // Apparently a nursery buffer allocation -- confirm against
                 // GetMarkInfo()'s implementation.
};
    655 
    656 // For calling from gdb only: given a pointer that is either in the nursery
    657 // (possibly pointing to a buffer, not necessarily a Cell) or a tenured Cell,
    658 // return its mark color or UNMARKED if it is tenured, otherwise the region of
    659 // memory that contains it. UNKNOWN is only for non-Cell pointers, and means it
    660 // is not in the nursery (so could be malloced or stack or whatever.)
    661 MOZ_NEVER_INLINE MarkInfo GetMarkInfo(void* vp);
    662 
    663 // Sample usage from gdb:
    664 //
    665 //   (gdb) p $word = js::debug::GetMarkWordAddress(obj)
    666 //   $1 = (uintptr_t *) 0x7fa56d5fe360
    667 //   (gdb) p/x $mask = js::debug::GetMarkMask(obj, js::gc::GRAY)
    668 //   $2 = 0x200000000
    669 //   (gdb) watch *$word
    670 //   Hardware watchpoint 7: *$word
    671 //   (gdb) cond 7 *$word & $mask
    672 //   (gdb) cont
    673 //
    674 // Note that this is *not* a watchpoint on a single bit. It is a watchpoint on
    675 // the whole word, which will trigger whenever the word changes and the
    676 // selected bit is set after the change.
    677 //
    678 // So if the bit changing is the desired one, this is exactly what you want.
    679 // But if a different bit changes (either set or cleared), you may still stop
    680 // execution if the $mask bit happened to already be set. gdb does not expose
    681 // enough information to restrict the watchpoint to just a single bit.
    682 
    683 // Return the address of the word containing the mark bits for the given cell,
    684 // or nullptr if the cell is in the nursery.
    685 MOZ_NEVER_INLINE uintptr_t* GetMarkWordAddress(js::gc::Cell* cell);
    686 
    687 // Return the mask for the given cell and color bit, or 0 if the cell is in the
    688 // nursery.
    689 MOZ_NEVER_INLINE uintptr_t GetMarkMask(js::gc::Cell* cell, uint32_t colorBit);
    690 
    691 } /* namespace debug */
    692 } /* namespace js */
    693 
    694 #endif /* gc_Heap_h */