tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

HeapAPI.h (32583B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifndef js_HeapAPI_h
      8 #define js_HeapAPI_h
      9 
     10 #include "mozilla/Atomics.h"
     11 #include "mozilla/BitSet.h"
     12 
     13 #include <limits.h>
     14 #include <type_traits>
     15 
     16 #include "js/AllocPolicy.h"
     17 #include "js/GCAnnotations.h"
     18 #include "js/HashTable.h"
     19 #include "js/shadow/String.h"  // JS::shadow::String
     20 #include "js/shadow/Symbol.h"  // JS::shadow::Symbol
     21 #include "js/shadow/Zone.h"    // JS::shadow::Zone
     22 #include "js/TraceKind.h"
     23 #include "js/TypeDecls.h"
     24 
     25 /* These values are private to the JS engine. */
     26 namespace js {
     27 
     28 JS_PUBLIC_API bool CurrentThreadCanAccessZone(JS::Zone* zone);
     29 
     30 // To prevent false sharing, some data structures are aligned to a typical cache
     31 // line size.
     32 static constexpr size_t TypicalCacheLineSize = 64;
     33 
     34 namespace gc {
     35 
class Arena;
struct Cell;
class ArenaChunk;
class StoreBuffer;
class TenuredCell;

// Arenas are the unit of fixed-size GC thing allocation: 4 KiB (1 << 12).
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;

#if defined(XP_DARWIN) && defined(__aarch64__)
// Apple Silicon macOS uses 16 KiB pages.
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif
// Expected page size, so we could initialize ArenasPerPage at compile-time.
// The actual system page size should be queried by SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
const size_t PageMask = PageSize - 1;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;

// Chunks are the unit of memory reservation: 1 MiB (1 << 20), always
// ChunkSize-aligned so a chunk header can be found by masking any GC pointer.
const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

// GC cells are aligned to 8 bytes (1 << 3).
const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

// The mark bitmap has one bit per CellAlignBytes of arena memory, and each
// cell uses two mark bits (see ColorBit).
const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;

/*
* The minimum cell size ends up as twice the cell alignment because the mark
* bitmap contains one bit per CellBytesPerMarkBit bytes (which is equal to
* CellAlignBytes) and we need two mark bits per cell.
*/
const size_t MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell;

/*
* The mark bitmap has one bit per each possible cell start position. This
* wastes some space for larger GC things but allows us to avoid division by the
* cell's size when accessing the bitmap.
*/
const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);

// Kinds of GC chunk. Nursery chunks are exactly those with a store buffer
// (see ChunkBase); InCollectedNurseryRegion() distinguishes from-space.
enum class ChunkKind : uint8_t {
  Invalid = 0,
  TenuredArenas,    // tenured chunk holding fixed-size arenas
  Buffers,          // tenured chunk kind (see ChunkBase::isTenuredChunk)
  NurseryToSpace,   // nursery chunk in the active (to) semispace
  NurseryFromSpace  // nursery chunk in the region being collected
};
     91 
     92 // The base class for all GC chunks, either in the nursery or in the tenured
     93 // heap memory. This structure is locatable from any GC pointer by aligning to
     94 // the chunk size.
class ChunkBase {
 protected:
  // Initialize a tenured heap chunk. |this| must be ChunkSize-aligned.
  explicit ChunkBase(JSRuntime* rt) {
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    initBaseForArenaChunk(rt);
  }

  // Shared initialization for tenured arena chunks: no store buffer and no
  // nursery index.
  void initBaseForArenaChunk(JSRuntime* rt) {
    runtime = rt;
    storeBuffer = nullptr;
    kind = ChunkKind::TenuredArenas;
    nurseryChunkIndex = UINT8_MAX;
  }

  // Initialize a nursery chunk. |sb| must be non-null and |kind| must be one
  // of the nursery kinds.
  ChunkBase(JSRuntime* rt, StoreBuffer* sb, ChunkKind kind, uint8_t chunkIndex)
      : storeBuffer(sb),
        runtime(rt),
        kind(kind),
        nurseryChunkIndex(chunkIndex) {
    MOZ_ASSERT(isNurseryChunk());
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    MOZ_ASSERT(storeBuffer);
  }

  // Initialize a tenured chunk of the given kind (no store buffer).
  ChunkBase(JSRuntime* rt, ChunkKind kind)
      : storeBuffer(nullptr),
        runtime(rt),
        kind(kind),
        nurseryChunkIndex(UINT8_MAX) {}

 public:
  // Return this chunk's kind, asserting that the store buffer pointer is
  // consistent with it (non-null iff nursery).
  ChunkKind getKind() const {
    MOZ_ASSERT_IF(storeBuffer, isNurseryChunk());
    MOZ_ASSERT_IF(!storeBuffer, isTenuredChunk());
    return kind;
  }

  bool isNurseryChunk() const {
    return kind == ChunkKind::NurseryToSpace ||
           kind == ChunkKind::NurseryFromSpace;
  }

  bool isTenuredChunk() const {
    return kind == ChunkKind::TenuredArenas || kind == ChunkKind::Buffers;
  }

  // The store buffer for pointers from tenured things to things in this
  // chunk. Will be non-null if and only if this is a nursery chunk.
  StoreBuffer* storeBuffer;

  // Provide quick access to the runtime from absolutely anywhere.
  JSRuntime* runtime;

  ChunkKind kind;

  // Index of this chunk within the nursery; UINT8_MAX for tenured chunks.
  uint8_t nurseryChunkIndex;
};
    154 
    155 // Information about tenured heap chunks containing arenas.
// Information about tenured heap chunks containing arenas.
// NOTE: sizeof(ArenaChunkInfo) participates in the ChunkBitsAvailable layout
// calculation below, so adding fields affects ArenasPerChunk.
struct ArenaChunkInfo {
 private:
  friend class ChunkPool;
  // Doubly-linked list membership, managed by ChunkPool.
  ArenaChunk* next = nullptr;
  ArenaChunk* prev = nullptr;

 public:
  /* Number of free arenas, either committed or decommitted. */
  uint32_t numArenasFree;

  /* Number of free, committed arenas. */
  uint32_t numArenasFreeCommitted;

  /* Whether this chunk is the chunk currently being allocated from. */
  bool isCurrentChunk = false;
};
    172 
/*
* Calculating ArenasPerChunk:
*
* To figure out how many Arenas will fit in a chunk we need to know how much
* extra space is available after we allocate the header data. This is a problem
* because the header size depends on the number of arenas in the chunk.
*
* The dependent fields are markBits, decommittedPages and
* freeCommittedArenas. markBits needs ArenaBitmapBytes bytes per arena,
* decommittedPages needs one bit per page and freeCommittedArenas needs one
* bit per arena.
*
* We can calculate an approximate value by dividing the number of bits of free
* space in the chunk by the number of bits needed per arena. This is an
* approximation because it doesn't take account of the fact that the variable
* sized fields must be rounded up to a whole number of words, or any padding
* the compiler adds between fields.
*
* Fortunately, for the chunk and arena size parameters we use this
* approximation turns out to be correct. If it were not we might need to adjust
* the arena count down by one to allow more space for the padding.
*/
// Bits needed per page: the page's arenas plus their mark bitmap bytes, one
// free-committed bit per arena and one decommit bit for the page.
const size_t BitsPerPageWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * ArenasPerPage * CHAR_BIT + ArenasPerPage +
    1;
// Bits left in a chunk after the fixed-size header structures.
const size_t ChunkBitsAvailable =
    (ChunkSize - sizeof(ChunkBase) - sizeof(ArenaChunkInfo)) * CHAR_BIT;
const size_t PagesPerChunk = ChunkBitsAvailable / BitsPerPageWithHeaders;
const size_t ArenasPerChunk = PagesPerChunk * ArenasPerPage;
// Sizes, in bits, of the per-chunk free-committed and decommitted bitmaps.
const size_t FreeCommittedBits = ArenasPerChunk;
const size_t DecommitBits = PagesPerChunk;
const size_t BitsPerArenaWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * CHAR_BIT +
    (DecommitBits / ArenasPerChunk) + 1;

// Cross-check the approximation with an exact layout computation: headers,
// word-rounded bitmaps and arena payload must fit within a chunk...
const size_t CalculatedChunkSizeRequired =
    sizeof(ChunkBase) + sizeof(ArenaChunkInfo) +
    RoundUp(ArenasPerChunk * ArenaBitmapBytes, sizeof(uintptr_t)) +
    RoundUp(FreeCommittedBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    RoundUp(DecommitBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    ArenasPerChunk * ArenaSize;
static_assert(CalculatedChunkSizeRequired <= ChunkSize,
              "Calculated ArenasPerChunk is too large");

// ...and the remaining slack must be smaller than one more arena's footprint,
// i.e. ArenasPerChunk could not have been one larger.
const size_t CalculatedChunkPadSize = ChunkSize - CalculatedChunkSizeRequired;
static_assert(CalculatedChunkPadSize * CHAR_BIT < BitsPerArenaWithHeaders,
              "Calculated ArenasPerChunk is too small");

static_assert(ArenasPerChunk == 252,
              "Do not accidentally change our heap's density.");

// Offset of the first arena within a chunk; everything before it is header.
const size_t FirstArenaOffset = ChunkSize - ArenasPerChunk * ArenaSize;

// Word type used by AtomicBitmap below; accesses use relaxed ordering.
using AtomicBitmapWord = mozilla::Atomic<uintptr_t, mozilla::Relaxed>;
    227 
    228 // A bitmap backed by atomic storage.
    229 template <size_t N>
    230 class AtomicBitmap {
    231 public:
    232  static constexpr size_t BitCount = N;
    233 
    234  using Word = AtomicBitmapWord;
    235  static constexpr size_t BitsPerWord = sizeof(Word) * CHAR_BIT;
    236 
    237  static_assert(N % BitsPerWord == 0);
    238  static constexpr size_t WordCount = N / BitsPerWord;
    239 
    240 private:
    241  Word bitmap[WordCount];
    242 
    243  static uintptr_t BitMask(size_t bit) {
    244    MOZ_ASSERT(bit < N);
    245    return uintptr_t(1) << (bit % BitsPerWord);
    246  }
    247 
    248 public:
    249  bool getBit(size_t bit) const {
    250    return getWord(bit / BitsPerWord) & BitMask(bit);
    251  }
    252 
    253  void setBit(size_t bit, bool value) {
    254    Word& word = wordRef(bit / BitsPerWord);
    255    if (value) {
    256      word |= BitMask(bit);
    257    } else {
    258      word &= ~BitMask(bit);
    259    }
    260  }
    261 
    262  uintptr_t getWord(size_t index) const {
    263    MOZ_ASSERT(index < WordCount);
    264    return bitmap[index];
    265  }
    266  Word& wordRef(size_t index) {
    267    MOZ_ASSERT(index < WordCount);
    268    return bitmap[index];
    269  }
    270 
    271  inline bool isEmpty() const;
    272  inline void clear();
    273  inline void copyFrom(const AtomicBitmap& other);
    274 
    275  class Iter;
    276 };
    277 
/*
* Live objects are marked black or gray. Everything reachable from a JS root is
* marked black. Objects marked gray are eligible for cycle collection.
*
*    BlackBit:     GrayOrBlackBit:  Color:
*       0               0           white
*       0               1           gray
*       1               0           black
*       1               1           black
*/
// Index of each of a cell's two mark bits (see MarkBitsPerCell).
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

// Mark colors. Order is important here: the greater value the 'more marked' a
// cell is.
enum class MarkColor : uint8_t { Gray = 1, Black = 2 };

// Number of mark bits in a chunk's bitmap: one per CellBytesPerMarkBit bytes
// of arena payload. The chunk header itself has no mark bits.
static constexpr size_t ChunkMarkBitCount =
    (ChunkSize - FirstArenaOffset) / CellBytesPerMarkBit;
    296 
// Mark bitmap for a tenured heap chunk.
//
// Mark bitmaps are atomic because they can be written by gray unmarking on the
// main thread while read by sweeping on a background thread. The former does
// not affect the result of the latter.
//
// The bitmap only covers the arena payload of a chunk, not the chunk header,
// so bit positions are adjusted down by the header's worth of bits.
class alignas(TypicalCacheLineSize) ChunkMarkBitmap
    : protected AtomicBitmap<ChunkMarkBitCount> {
  using Bitmap = AtomicBitmap<ChunkMarkBitCount>;

 public:
  using Bitmap::BitsPerWord;
  using Bitmap::WordCount;

  // Bits (and whole words) corresponding to the chunk header, which holds no
  // cells and therefore has no entries in the bitmap.
  static constexpr size_t FirstThingAdjustmentBits =
      FirstArenaOffset / CellBytesPerMarkBit;
  static_assert(FirstThingAdjustmentBits % BitsPerWord == 0);
  static constexpr size_t FirstThingAdjustmentWords =
      FirstThingAdjustmentBits / BitsPerWord;

  // Compute the bitmap word and bit mask holding |colorBit| for |cell|.
  MOZ_ALWAYS_INLINE void getMarkWordAndMask(const void* cell, ColorBit colorBit,
                                            Word** wordp, uintptr_t* maskp) {
    // Note: the JIT inlines this code. Update MacroAssembler::loadMarkBits and
    // its callers when making changes here!

    MOZ_ASSERT(size_t(colorBit) < MarkBitsPerCell);

    // Offset of the cell within its chunk; must be in the arena payload.
    size_t offset = uintptr_t(cell) & ChunkMask;
    MOZ_ASSERT(offset >= FirstArenaOffset);

    const size_t bit = offset / CellBytesPerMarkBit + size_t(colorBit);
    size_t word = bit / BitsPerWord - FirstThingAdjustmentWords;
    MOZ_ASSERT(word < WordCount);
    *wordp = &wordRef(word);
    *maskp = uintptr_t(1) << (bit % BitsPerWord);
  }

  // The following are not exported and are defined in gc/Heap.h:
  // Read one of |cell|'s two mark bits.
  MOZ_ALWAYS_INLINE bool markBit(const void* cell, ColorBit colorBit) {
    Word* word;
    uintptr_t mask;
    getMarkWordAndMask(cell, colorBit, &word, &mask);
    return *word & mask;
  }

  // True if the cell is marked at all, i.e. either black or gray.
  MOZ_ALWAYS_INLINE bool isMarkedAny(const void* cell) {
    return markBit(cell, ColorBit::BlackBit) ||
           markBit(cell, ColorBit::GrayOrBlackBit);
  }

  MOZ_ALWAYS_INLINE bool isMarkedBlack(const void* cell) {
    // Return true if BlackBit is set.
    return markBit(cell, ColorBit::BlackBit);
  }

  MOZ_ALWAYS_INLINE bool isMarkedGray(const void* cell) {
    // Return true if GrayOrBlackBit is set and BlackBit is not set.
    return !markBit(cell, ColorBit::BlackBit) &&
           markBit(cell, ColorBit::GrayOrBlackBit);
  }

  inline bool markIfUnmarked(const void* cell, MarkColor color);
  inline bool markIfUnmarkedThreadSafe(const void* cell, MarkColor color);
  inline void markBlack(const void* cell);
  inline void markBlackAtomic(const void* cell);
  inline void copyMarkBit(TenuredCell* dst, const TenuredCell* src,
                          ColorBit colorBit);
  inline void unmark(const void* cell);
  inline void unmarkOneBit(const void* cell, ColorBit colorBit);
  inline AtomicBitmapWord* arenaBits(Arena* arena);

  inline void copyFrom(const ChunkMarkBitmap& other);
  using Bitmap::clear;
};
    370 
// Bitmap with one bit per page used for decommitted page set.
using ChunkPageBitmap = mozilla::BitSet<PagesPerChunk, uint32_t>;

// Bitmap with one bit per arena used for free committed arena set.
using ChunkArenaBitmap = mozilla::BitSet<ArenasPerChunk, uint32_t>;

// Base class for a tenured heap chunk containing fixed size arenas.
// NOTE: field order matters; offsetof(ArenaChunkBase, markBits) is exposed
// below as ChunkMarkBitmapOffset.
class ArenaChunkBase : public ChunkBase {
 public:
  ArenaChunkInfo info;
  ChunkMarkBitmap markBits;
  ChunkArenaBitmap freeCommittedArenas;
  ChunkPageBitmap decommittedPages;

 protected:
  explicit ArenaChunkBase(JSRuntime* runtime) : ChunkBase(runtime) {
    static_assert(sizeof(markBits) == ArenaBitmapBytes * ArenasPerChunk,
                  "Ensure our MarkBitmap actually covers all arenas.");
    // A new chunk starts with every arena free.
    info.numArenasFree = ArenasPerChunk;
  }

  void initAsCommitted();
  void initAsDecommitted();
};
// The arena payload must start exactly one ArenaSize-rounded header past the
// chunk start, matching the FirstArenaOffset used for bitmap adjustment.
static_assert(FirstArenaOffset ==
              RoundUp(sizeof(gc::ArenaChunkBase), ArenaSize));
    397 
/*
* We sometimes use an index to refer to a cell in an arena. The index for a
* cell is found by dividing by the cell alignment so not all indices refer to
* valid cells.
*/
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;

// Byte offsets of commonly-accessed chunk fields.
const size_t ChunkStoreBufferOffset = offsetof(ChunkBase, storeBuffer);
const size_t ChunkMarkBitmapOffset = offsetof(ArenaChunkBase, markBits);

// Hardcoded offsets into Arena class.
// ArenaZoneOffset is relied on by GetTenuredGCThingZone() below to read an
// arena's zone pointer without the Arena definition.
const size_t ArenaZoneOffset = 2 * sizeof(uint32_t);
const size_t ArenaHeaderSize = ArenaZoneOffset + 2 * sizeof(uintptr_t) +
                               sizeof(size_t) + sizeof(uintptr_t);

// The first word of a GC thing has certain requirements from the GC and is used
// to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds, so
// these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;

#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_PUBLIC_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
// In release builds the type check compiles away entirely.
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif

// Forward declarations; the definitions follow the detail helpers below.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::TenuredCell* cell);
    432 
    433 } /* namespace gc */
    434 } /* namespace js */
    435 
    436 namespace JS {
    437 
// The current activity of the GC heap, returned by RuntimeHeapState().
enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

// Return the heap state of the current runtime.
JS_PUBLIC_API HeapState RuntimeHeapState();
    448 
    449 static inline bool RuntimeHeapIsBusy() {
    450  return RuntimeHeapState() != HeapState::Idle;
    451 }
    452 
    453 static inline bool RuntimeHeapIsTracing() {
    454  return RuntimeHeapState() == HeapState::Tracing;
    455 }
    456 
    457 static inline bool RuntimeHeapIsMajorCollecting() {
    458  return RuntimeHeapState() == HeapState::MajorCollecting;
    459 }
    460 
    461 static inline bool RuntimeHeapIsMinorCollecting() {
    462  return RuntimeHeapState() == HeapState::MinorCollecting;
    463 }
    464 
    465 static inline bool RuntimeHeapIsCollecting(HeapState state) {
    466  return state == HeapState::MajorCollecting ||
    467         state == HeapState::MinorCollecting;
    468 }
    469 
    470 static inline bool RuntimeHeapIsCollecting() {
    471  return RuntimeHeapIsCollecting(RuntimeHeapState());
    472 }
    473 
    474 static inline bool RuntimeHeapIsCycleCollecting() {
    475  return RuntimeHeapState() == HeapState::CycleCollecting;
    476 }
    477 
/*
* This list enumerates the different types of conceptual stacks we have in
* SpiderMonkey. In reality, they all share the C stack, but we allow different
* stack limits depending on the type of code running.
*/
enum StackKind {
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
  StackKindCount            // Number of kinds; not itself a valid kind.
};
    489 
/*
* Default maximum size for the generational nursery in bytes. This is the
* initial value. In the browser this is configured by the
* javascript.options.mem.nursery.max_kb pref.
*/
const uint32_t DefaultNurseryMaxBytes = 64 * js::gc::ChunkSize;

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;
    499 
/**
* A GC pointer, tagged with the trace kind.
*
* In general, a GC pointer should be stored with an exact type. This class
* is for use when that is not possible because a single pointer must point
* to several kinds of GC thing.
*
* The trace kind is stored in the low bits of the pointer where it fits;
* kinds that don't fit are tagged with OutOfLineTraceKindMask and recovered
* via outOfLineKind().
*/
class JS_PUBLIC_API GCCellPtr {
 public:
  // A null pointer with TraceKind::Null.
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);

  // Return the trace kind, reading the inline tag bits when possible and
  // falling back to outOfLineKind() otherwise.
  JS::TraceKind kind() const {
    uintptr_t kindBits = ptr & OutOfLineTraceKindMask;
    if (kindBits != OutOfLineTraceKindMask) {
      return JS::TraceKind(kindBits);
    }
    return outOfLineKind();
  }

  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }

  bool operator==(const GCCellPtr other) const { return ptr == other.ptr; }
  bool operator!=(const GCCellPtr other) const { return ptr != other.ptr; }

  // Simplify checks to the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }

  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }

  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |to| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }
  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }

  // Return true for permanent atoms and well-known symbols, which the shadow
  // helpers report as potentially belonging to another runtime.
  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }

 private:
  // Tag |p| with |traceKind|, asserting that the pointer's low tag bits are
  // free and that the thing really has the claimed kind.
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    auto* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Store trace kind in the bottom bits of pointer for common kinds.
    uintptr_t kindBits = uintptr_t(traceKind);
    if (kindBits >= OutOfLineTraceKindMask) {
      kindBits = OutOfLineTraceKindMask;
    }
    return uintptr_t(p) | kindBits;
  }

  // Recover the kind of things whose kind doesn't fit in the tag bits.
  JS::TraceKind outOfLineKind() const;

  // The cell pointer, with the trace kind tagged into its low bits.
  uintptr_t ptr;
} JS_HAZ_GC_POINTER;
    608 
// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
    // Expands to one case per trace kind, invoking |f| with the concretely
    // typed pointer.
#define JS_EXPAND_DEF(name, type, _, _1) \
 case JS::TraceKind::name:              \
   return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}
    623 
// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This simply discards MapGCThingTyped's result; it is supplied for
  // symmetry with other MapGCThingTyped/ApplyGCThingTyped implementations
  // that have to wrap the functor to return a dummy value that is ignored.
  MapGCThingTyped(thing, f);
}
    633 
    634 } /* namespace JS */
    635 
    636 namespace js {
    637 namespace gc {
    638 
    639 namespace detail {
    640 
    641 // `addr` must be an address within GC-controlled memory. Note that it cannot
    642 // point just past GC-controlled memory.
    643 static MOZ_ALWAYS_INLINE ChunkBase* GetGCAddressChunkBase(const void* addr) {
    644  MOZ_ASSERT(addr);
    645  auto* chunk = reinterpret_cast<ChunkBase*>(uintptr_t(addr) & ~ChunkMask);
    646  MOZ_ASSERT(chunk->runtime);
    647  MOZ_ASSERT(chunk->kind != ChunkKind::Invalid);
    648  return chunk;
    649 }
    650 
    651 static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
    652  return GetGCAddressChunkBase(cell);
    653 }
    654 
    655 static MOZ_ALWAYS_INLINE ArenaChunkBase* GetCellChunkBase(
    656    const TenuredCell* cell) {
    657  MOZ_ASSERT(cell);
    658  auto* chunk = reinterpret_cast<ArenaChunkBase*>(uintptr_t(cell) & ~ChunkMask);
    659  MOZ_ASSERT(chunk->runtime);
    660  MOZ_ASSERT(chunk->kind == ChunkKind::TenuredArenas);
    661  return chunk;
    662 }
    663 
    664 static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const void* ptr) {
    665  // This takes a void* because the compiler can't see type relationships in
    666  // this header. |ptr| must be a pointer to a tenured GC thing.
    667  MOZ_ASSERT(ptr);
    668  const uintptr_t zone_addr = (uintptr_t(ptr) & ~ArenaMask) | ArenaZoneOffset;
    669  return *reinterpret_cast<JS::Zone**>(zone_addr);
    670 }
    671 
    672 static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedBlack(
    673    const TenuredCell* cell) {
    674  MOZ_ASSERT(cell);
    675  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
    676 
    677  ArenaChunkBase* chunk = GetCellChunkBase(cell);
    678  return chunk->markBits.isMarkedBlack(cell);
    679 }
    680 
    681 static MOZ_ALWAYS_INLINE bool NonBlackCellIsMarkedGray(
    682    const TenuredCell* cell) {
    683  // Return true if GrayOrBlackBit is set. Callers should check BlackBit first.
    684 
    685  MOZ_ASSERT(cell);
    686  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
    687  MOZ_ASSERT(!TenuredCellIsMarkedBlack(cell));
    688 
    689  ArenaChunkBase* chunk = GetCellChunkBase(cell);
    690  return chunk->markBits.markBit(cell, ColorBit::GrayOrBlackBit);
    691 }
    692 
    693 static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(const TenuredCell* cell) {
    694  MOZ_ASSERT(cell);
    695  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
    696  ArenaChunkBase* chunk = GetCellChunkBase(cell);
    697  return chunk->markBits.isMarkedGray(cell);
    698 }
    699 
    700 static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
    701  MOZ_ASSERT(cell);
    702  if (js::gc::IsInsideNursery(cell)) {
    703    return false;
    704  }
    705  return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell*>(cell));
    706 }
    707 
// Return whether |cell|'s gray bits can be inspected.
// NOTE(review): exact conditions live in the implementation — confirm there.
extern JS_PUBLIC_API bool CanCheckGrayBits(const TenuredCell* cell);

// Return whether |cell| is marked gray, or false when the gray marking state
// cannot be determined (see GCThingIsMarkedGray below).
extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const TenuredCell* cell);

#ifdef DEBUG
// Debug-only: assert that |cell| is not marked gray.
extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

// Debug-only: return whether |obj| is marked black.
extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
#endif
    717 
    718 MOZ_ALWAYS_INLINE bool ChunkPtrHasStoreBuffer(const void* ptr) {
    719  return GetGCAddressChunkBase(ptr)->storeBuffer;
    720 }
    721 
    722 } /* namespace detail */
    723 
    724 MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
    725  MOZ_ASSERT(cell);
    726  return detail::ChunkPtrHasStoreBuffer(cell);
    727 }
    728 
    729 MOZ_ALWAYS_INLINE bool IsInsideNursery(const TenuredCell* cell) {
    730  MOZ_ASSERT(cell);
    731  MOZ_ASSERT(!IsInsideNursery(reinterpret_cast<const Cell*>(cell)));
    732  return false;
    733 }
    734 
    735 // Return whether |cell| is in the region of the nursery currently being
    736 // collected.
    737 MOZ_ALWAYS_INLINE bool InCollectedNurseryRegion(const Cell* cell) {
    738  MOZ_ASSERT(cell);
    739  return detail::GetCellChunkBase(cell)->getKind() ==
    740         ChunkKind::NurseryFromSpace;
    741 }
    742 
// Allow use before the compiler knows the derivation of JSObject, JSString,
// JS::BigInt, and js::GetterSetter. Each overload casts to Cell* and defers
// to the definition above.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::GetterSetter* gs) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(gs));
}
MOZ_ALWAYS_INLINE bool InCollectedNurseryRegion(const JSObject* obj) {
  return InCollectedNurseryRegion(reinterpret_cast<const Cell*>(obj));
}
    760 
    761 MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* ptr) {
    762  auto addr = uintptr_t(ptr);
    763  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    764    return false;
    765  }
    766 
    767  auto* cell = reinterpret_cast<const Cell*>(ptr);
    768  if (!IsInsideNursery(cell)) {
    769    return detail::GetTenuredGCThingZone(cell) != nullptr;
    770  }
    771 
    772  return true;
    773 }
    774 
    775 MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
    776  if (!cell) {
    777    return true;
    778  }
    779  return IsCellPointerValid(cell);
    780 }
    781 
    782 } /* namespace gc */
    783 } /* namespace js */
    784 
    785 namespace JS {
    786 
// Return the zone of a tenured GC thing.
extern JS_PUBLIC_API Zone* GetTenuredGCThingZone(GCCellPtr thing);

// Return the zone of a cell that resides in the nursery.
extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);
    790 
    791 static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
    792  if (!js::gc::IsInsideNursery(thing.asCell())) {
    793    return js::gc::detail::GetTenuredGCThingZone(thing.asCell());
    794  }
    795 
    796  return GetNurseryCellZone(thing.asCell());
    797 }
    798 
    799 static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
    800  if (!js::gc::IsInsideNursery(str)) {
    801    return js::gc::detail::GetTenuredGCThingZone(str);
    802  }
    803 
    804  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
    805 }
    806 
    807 extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);
    808 
    809 // Check whether a GC thing is gray. If the gray marking state is unknown
    810 // (e.g. due to OOM during gray unmarking) this returns false.
    811 static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
    812  js::gc::Cell* cell = thing.asCell();
    813  if (IsInsideNursery(cell)) {
    814    return false;
    815  }
    816 
    817  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
    818  return js::gc::detail::CellIsMarkedGrayIfKnown(tenuredCell);
    819 }
    820 
    821 // Specialised gray marking check for use by the cycle collector. This is not
    822 // called during incremental GC or when the gray bits are invalid.
    823 static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGrayInCC(GCCellPtr thing) {
    824  js::gc::Cell* cell = thing.asCell();
    825  if (IsInsideNursery(cell)) {
    826    return false;
    827  }
    828 
    829  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
    830  MOZ_ASSERT(js::gc::detail::CanCheckGrayBits(tenuredCell));
    831  return js::gc::detail::TenuredCellIsMarkedGray(tenuredCell);
    832 }
    833 
// Return the trace kind of a GC thing pointer.
extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);

/*
 * Returns true when writes to GC thing pointers (and reads from weak pointers)
 * must call an incremental barrier. This is generally only true when running
 * mutator code in-between GC slices. At other times, the barrier may be elided
 * for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);

/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);

/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);

/**
 * Unsets the gray bit for anything reachable from |thing|. |kind| should not be
 * JS::TraceKind::Shape. |thing| should be non-null. The return value indicates
 * if anything was unmarked.
 */
extern JS_PUBLIC_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);
    862 
    863 }  // namespace JS
    864 
    865 namespace js {
    866 namespace gc {
    867 
    868 extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);
    869 
// Expose |thing| to active JS: apply the incremental read barrier if one is
// needed, and otherwise ensure the thing is not left marked gray.
static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // js::jit::ReadBarrier is a specialized version of this function designed to
  // be called from jitcode. If this code is changed, it should be kept in sync.

  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to tenured at the beginning of
  // each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  // Fast path: a black cell needs neither the barrier nor gray unmarking.
  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  if (detail::TenuredCellIsMarkedBlack(cell)) {
    return;
  }

  // GC things owned by other runtimes are always black.
  MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());

  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
  } else if (!zone->isGCPreparing() && detail::NonBlackCellIsMarkedGray(cell)) {
    // The thing is gray and the gray bits are valid: unmark it (and anything
    // it transitively reaches) before handing it to the mutator.
    MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
  }

  // Post-condition: unless a GC is being prepared, the cell is no longer gray.
  MOZ_ASSERT_IF(!zone->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell));
}
    903 
    904 static MOZ_ALWAYS_INLINE void IncrementalReadBarrier(JS::GCCellPtr thing) {
    905  // This is a lighter version of ExposeGCThingToActiveJS that doesn't do gray
    906  // unmarking.
    907 
    908  if (IsInsideNursery(thing.asCell())) {
    909    return;
    910  }
    911 
    912  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
    913  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
    914  if (zone->needsIncrementalBarrier() &&
    915      !detail::TenuredCellIsMarkedBlack(cell)) {
    916    // GC things owned by other runtimes are always black.
    917    MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());
    918    PerformIncrementalReadBarrier(thing);
    919  }
    920 }
    921 
// Slow path for EdgeNeedsSweepUnbarriered, called once the zone is known to be
// sweeping or compacting.
template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);
    924 
    925 static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
    926  // This function does not handle updating nursery pointers. Raw JSObject
    927  // pointers should be updated separately or replaced with
    928  // JS::Heap<JSObject*> which handles this automatically.
    929  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
    930  if (IsInsideNursery(*objp)) {
    931    return false;
    932  }
    933 
    934  auto zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(*objp));
    935  if (!zone->isGCSweepingOrCompacting()) {
    936    return false;
    937  }
    938 
    939  return EdgeNeedsSweepUnbarrieredSlow(objp);
    940 }
    941 
/*
 * Sample data about internally mapped memory. This includes all GC heap
 * allocations (and excludes js_alloc allocations).
 */
struct ProfilerMemoryCounts {
  // Bytes of internally mapped memory (see struct comment above).
  size_t bytes = 0;
  // Count of operations — presumably mapping/allocation events; confirm
  // against the GetProfilerMemoryCounts implementation.
  uint64_t operations = 0;
};
JS_PUBLIC_API ProfilerMemoryCounts GetProfilerMemoryCounts();
    951 
    952 }  // namespace gc
    953 }  // namespace js
    954 
    955 namespace JS {
    956 
    957 /*
    958 * This should be called when an object that is marked gray is exposed to the JS
    959 * engine (by handing it to running JS code or writing it into live JS
    960 * data). During incremental GC, since the gray bits haven't been computed yet,
    961 * we conservatively mark the object black.
    962 */
    963 static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
    964  MOZ_ASSERT(obj);
    965  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
    966  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
    967 }
    968 
    969 } /* namespace JS */
    970 
    971 #endif /* js_HeapAPI_h */