tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Cell.h (28285B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifndef gc_Cell_h
      8 #define gc_Cell_h
      9 
     10 #include "mozilla/EndianUtils.h"
     11 
     12 #include <type_traits>
     13 
     14 #include "gc/GCContext.h"
     15 #include "gc/Heap.h"
     16 #include "gc/TraceKind.h"
     17 #include "js/GCAnnotations.h"
     18 #include "js/shadow/Zone.h"  // JS::shadow::Zone
     19 #include "js/TypeDecls.h"
     20 
     21 namespace JS {
     22 enum class TraceKind;
     23 } /* namespace JS */
     24 
     25 namespace js {
     26 
     27 class JS_PUBLIC_API GenericPrinter;
     28 
     29 extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
     30    JS::shadow::Zone* shadowZone);
     31 
     32 #ifdef DEBUG
     33 // Barriers can't be triggered during offthread baseline or Ion
     34 // compilation, which may run on a helper thread.
     35 extern bool CurrentThreadIsBaselineCompiling();
     36 extern bool CurrentThreadIsIonCompiling();
     37 extern bool CurrentThreadIsOffThreadCompiling();
     38 #endif
     39 
     40 extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
     41                                                     gc::Cell** thingp,
     42                                                     const char* name);
     43 
     44 namespace gc {
     45 
     46 enum class AllocKind : uint8_t;
     47 class CellAllocator;  // Declared so subtypes of Cell can friend it easily.
     48 class StoreBuffer;
     49 class TenuredCell;
     50 
     51 extern void PerformIncrementalReadBarrier(TenuredCell* cell);
     52 extern void PerformIncrementalPreWriteBarrier(TenuredCell* cell);
     53 extern void PerformIncrementalBarrierDuringFlattening(JSString* str);
     54 extern void UnmarkGrayGCThingRecursively(TenuredCell* cell);
     55 
// Like gc::MarkColor but allows the possibility of the cell being unmarked.
enum class CellColor : uint8_t { White = 0, Gray = 1, Black = 2 };

// Gray/Black must share numeric values with MarkColor so that conversions
// between the two enums are simple casts.
static_assert(uint8_t(CellColor::Gray) == uint8_t(MarkColor::Gray));
static_assert(uint8_t(CellColor::Black) == uint8_t(MarkColor::Black));

// White means unmarked; any other color counts as marked.
inline bool IsMarked(CellColor color) { return color != CellColor::White; }

// Convert to MarkColor. Only valid for marked (non-white) colors.
inline MarkColor AsMarkColor(CellColor color) {
  MOZ_ASSERT(IsMarked(color));
  return MarkColor(color);
}

// Every MarkColor is a valid CellColor, so this conversion is unchecked.
inline CellColor AsCellColor(MarkColor color) { return CellColor(color); }

// Human-readable name for |color|.
extern const char* CellColorName(CellColor color);
     68 
     69 // Cell header word. Stores GC flags and derived class data.
     70 //
     71 // Loads of GC flags + all stores are marked as (relaxed) atomic operations,
     72 // to deal with the following benign data race during compacting GC:
     73 //
     74 // - Thread 1 checks isForwarded (which is always false in this situation).
     75 // - Thread 2 updates the derived class data (without changing the forwarded
     76 //   flag).
     77 //
     78 // To improve performance, we don't use atomic operations for get() because
     79 // atomic operations inhibit certain compiler optimizations: GCC and Clang are
     80 // unable to fold multiple loads even if they're both relaxed atomics. This is
     81 // especially a problem for chained loads such as obj->shape->base->clasp.
class HeaderWord {
  // Indicates whether the cell has been forwarded (moved) by generational or
  // compacting GC and is now a RelocationOverlay.
  static constexpr uintptr_t FORWARD_BIT = Bit(0);
  // Bits 1 and 2 are reserved for future use by the GC.

  // The raw word. All stores and flag loads are relaxed atomics; get() uses a
  // plain load for performance (see the class comment above).
  uintptr_t value_;

  // Relaxed atomic store; pairs with getAtomic().
  void setAtomic(uintptr_t value) {
    __atomic_store_n(&value_, value, __ATOMIC_RELAXED);
  }

 public:
  // Mask covering the low bits reserved for the GC's own flags.
  static constexpr uintptr_t RESERVED_MASK =
      BitMask(gc::CellFlagBitsReservedForGC);
  static_assert(gc::CellFlagBitsReservedForGC >= 3,
                "Not enough flag bits reserved for GC");

  // Relaxed atomic load of the whole word, including the GC flag bits.
  uintptr_t getAtomic() const {
    return __atomic_load_n(&value_, __ATOMIC_RELAXED);
  }

  // Accessors for derived class data.
  uintptr_t get() const {
    // Note: non-atomic load. See class comment.
    uintptr_t value = value_;
    MOZ_ASSERT((value & RESERVED_MASK) == 0);
    return value;
  }
  void set(uintptr_t value) {
    // Derived class data must leave the reserved GC flag bits clear.
    MOZ_ASSERT((value & RESERVED_MASK) == 0);
    setAtomic(value);
  }

  // Accessors for GC data.
  uintptr_t flags() const { return getAtomic() & RESERVED_MASK; }
  bool isForwarded() const { return flags() & FORWARD_BIT; }

  // Store a forwarding pointer and set the forwarded flag. |ptr| must be
  // aligned such that its reserved low bits are zero.
  void setForwardingAddress(uintptr_t ptr) {
    MOZ_ASSERT((ptr & RESERVED_MASK) == 0);
    setAtomic(ptr | FORWARD_BIT);
  }
  uintptr_t getForwardingAddress() const {
    MOZ_ASSERT(isForwarded());
    return getAtomic() & ~RESERVED_MASK;
  }
};
    128 
    129 // [SMDOC] GC Cell
    130 //
    131 // A GC cell is the ultimate base class for all GC things. All types allocated
    132 // on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
    133 // tenured, prefer the TenuredCell class as base.
    134 //
    135 // The first word of Cell is a HeaderWord (a uintptr_t) that reserves the low
    136 // three bits for GC purposes. The remaining bits are available to sub-classes
// and can be used to store a pointer to another gc::Cell. To make use of the
    138 // remaining space, sub-classes derive from a helper class such as
    139 // TenuredCellWithNonGCPointer.
    140 //
    141 // During moving GC operation a Cell may be marked as forwarded. This indicates
    142 // that a gc::RelocationOverlay is currently stored in the Cell's memory and
    143 // should be used to find the new location of the Cell.
struct Cell {
  // Cell header word. Stores GC flags and derived class data.
  HeaderWord header_;

 public:
  Cell() = default;

  // Cells are managed by the GC; they are neither copyable nor assignable.
  Cell(const Cell&) = delete;
  void operator=(const Cell&) = delete;

  // True if a moving GC has relocated this cell, leaving a RelocationOverlay
  // in its place.
  bool isForwarded() const { return header_.isForwarded(); }

  // The GC flag bits from the header word.
  uintptr_t flags() const { return header_.flags(); }

  MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
  MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
  MOZ_ALWAYS_INLINE TenuredCell& asTenured();

  // Mark state queries. Nursery cells report as marked black (see the
  // definitions below).
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE bool isMarked(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE bool isMarkedAtLeast(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE CellColor color() const;

  inline JSRuntime* runtimeFromMainThread() const;

  // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
  // thread can easily lead to races. Use this method very carefully.
  inline JSRuntime* runtimeFromAnyThread() const;

  // May be overridden by GC thing kinds that have a compartment pointer.
  inline JS::Compartment* maybeCompartment() const { return nullptr; }

  // The StoreBuffer used to record incoming pointers from the tenured heap.
  // This will return nullptr for a tenured cell.
  inline StoreBuffer* storeBuffer() const;

  inline JS::TraceKind getTraceKind() const;

  // True if writes into |zone| must currently run the incremental pre-write
  // barrier.
  static MOZ_ALWAYS_INLINE bool needPreWriteBarrier(JS::Zone* zone);

  // Type-checked queries/downcasts based on the cell's trace kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;

  // Get the zone for a cell known to be in the nursery.
  inline JS::Zone* nurseryZone() const;
  inline JS::Zone* nurseryZoneFromAnyThread() const;

  inline ChunkBase* chunk() const;

  // Default implementation for kinds that cannot be permanent. This may be
  // overridden by derived classes.
  MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }

#ifdef DEBUG
  static inline void assertThingIsNotGray(Cell* cell);
  inline bool isAligned() const;
  void dump(GenericPrinter& out) const;
  void dump() const;
#endif

 protected:
  // The numeric address of this cell; asserts alignment in debug builds.
  uintptr_t address() const;

 private:
  // Cells are destroyed by the GC. Do not delete them directly.
  void operator delete(void*) = delete;
} JS_HAZ_GC_THING;
    233 
    234 // A GC TenuredCell gets behaviors that are valid for things in the Tenured
    235 // heap, such as access to the arena and mark bits.
class TenuredCell : public Cell {
 public:
  // Shadows Cell::isTenured with a constant-true version; the debug assert
  // checks the cell really is outside the nursery.
  MOZ_ALWAYS_INLINE bool isTenured() const {
    MOZ_ASSERT(!IsInsideNursery(this));
    return true;
  }

  // Tenured cells live in an ArenaChunk, so narrow the return type.
  ArenaChunk* chunk() const { return static_cast<ArenaChunk*>(Cell::chunk()); }

  // Mark bit management.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE CellColor color() const;

  // The return value indicates if the cell went from unmarked to marked.
  MOZ_ALWAYS_INLINE bool markIfUnmarked(
      MarkColor color = MarkColor::Black) const;
  MOZ_ALWAYS_INLINE bool markIfUnmarkedThreadSafe(MarkColor color) const;
  MOZ_ALWAYS_INLINE void markBlack() const;
  MOZ_ALWAYS_INLINE void markBlackAtomic() const;
  MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
  MOZ_ALWAYS_INLINE void unmark();

  // Access to the arena.
  inline Arena* arena() const;
  inline AllocKind getAllocKind() const;
  inline JS::TraceKind getTraceKind() const;
  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;
  inline bool isInsideZone(JS::Zone* zone) const;

  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
    return JS::shadow::Zone::from(zone());
  }
  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
    return JS::shadow::Zone::from(zoneFromAnyThread());
  }

  // Type-checked queries/downcasts based on the cell's trace kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  // Default implementation for kinds that don't require fixup.
  void fixupAfterMovingGC() {}

  // Read |cell|'s color from the given mark bitmap.
  static inline CellColor getColor(ChunkMarkBitmap* bitmap,
                                   const TenuredCell* cell);

#ifdef DEBUG
  inline bool isAligned() const;
#endif
};
    306 
    307 MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
    308  MOZ_ASSERT(isTenured());
    309  return *static_cast<const TenuredCell*>(this);
    310 }
    311 
    312 MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
    313  MOZ_ASSERT(isTenured());
    314  return *static_cast<TenuredCell*>(this);
    315 }
    316 
    317 MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
    318  return !isTenured() || asTenured().isMarkedAny();
    319 }
    320 
    321 MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
    322  return !isTenured() || asTenured().isMarkedBlack();
    323 }
    324 
    325 MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
    326  return isTenured() && asTenured().isMarkedGray();
    327 }
    328 
    329 MOZ_ALWAYS_INLINE bool Cell::isMarked(gc::MarkColor color) const {
    330  return color == MarkColor::Gray ? isMarkedGray() : isMarkedBlack();
    331 }
    332 
    333 MOZ_ALWAYS_INLINE bool Cell::isMarkedAtLeast(gc::MarkColor color) const {
    334  return color == MarkColor::Gray ? isMarkedAny() : isMarkedBlack();
    335 }
    336 
    337 MOZ_ALWAYS_INLINE CellColor Cell::color() const {
    338  return isTenured() ? asTenured().color() : CellColor::Black;
    339 }
    340 
inline JSRuntime* Cell::runtimeFromMainThread() const {
  JSRuntime* rt = chunk()->runtime;
  // Assert the calling thread is actually allowed to touch this runtime.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
  return rt;
}

inline JSRuntime* Cell::runtimeFromAnyThread() const {
  return chunk()->runtime;
}

// The cell's numeric address, with alignment and range checks in debug builds.
inline uintptr_t Cell::address() const {
  uintptr_t addr = uintptr_t(this);
  MOZ_ASSERT(addr % CellAlignBytes == 0);
  MOZ_ASSERT(ArenaChunk::withinValidRange(addr));
  return addr;
}

// The chunk containing this cell, found by masking off the low bits of the
// cell's address (chunks are ChunkMask+1 aligned).
ChunkBase* Cell::chunk() const {
  uintptr_t addr = uintptr_t(this);
  MOZ_ASSERT(addr % CellAlignBytes == 0);
  auto* chunk = reinterpret_cast<ChunkBase*>(addr & ~ChunkMask);
  MOZ_ASSERT(chunk->isNurseryChunk() ||
             chunk->kind == ChunkKind::TenuredArenas);
  return chunk;
}

// The store buffer is read off the chunk header; per the declaration above,
// this is null for tenured cells.
inline StoreBuffer* Cell::storeBuffer() const { return chunk()->storeBuffer; }
    368 
    369 JS::Zone* Cell::zone() const {
    370  if (isTenured()) {
    371    return asTenured().zone();
    372  }
    373 
    374  return nurseryZone();
    375 }
    376 
    377 JS::Zone* Cell::zoneFromAnyThread() const {
    378  if (isTenured()) {
    379    return asTenured().zoneFromAnyThread();
    380  }
    381 
    382  return nurseryZoneFromAnyThread();
    383 }
    384 
    385 JS::Zone* Cell::nurseryZone() const {
    386  JS::Zone* zone = nurseryZoneFromAnyThread();
    387  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
    388  return zone;
    389 }
    390 
    391 JS::Zone* Cell::nurseryZoneFromAnyThread() const {
    392  return NurseryCellHeader::from(this)->zone();
    393 }
    394 
#ifdef DEBUG
// Debug-only helper used below to check that a forwarded cell's new location
// has the same trace kind as the original.
extern Cell* UninlinedForwarded(const Cell* cell);
#endif

inline JS::TraceKind Cell::getTraceKind() const {
  if (isTenured()) {
    // A forwarded cell must forward to a cell of the same trace kind.
    MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
                                     asTenured().getTraceKind());
    return asTenured().getTraceKind();
  }

  // Nursery cells carry their trace kind in the nursery cell header.
  return NurseryCellHeader::from(this)->traceKind();
}

// True if writes into |zone| must currently run the incremental pre-write
// barrier.
/* static */ MOZ_ALWAYS_INLINE bool Cell::needPreWriteBarrier(JS::Zone* zone) {
  return JS::shadow::Zone::from(zone)->needsIncrementalBarrier();
}
    412 
MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedAny() const {
  // Mark bits are only meaningful for cells in allocated arenas.
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedAny(this);
}

MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedBlack() const {
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedBlack(this);
}

MOZ_ALWAYS_INLINE bool TenuredCell::isMarkedGray() const {
  MOZ_ASSERT(arena()->allocated());
  return chunk()->markBits.isMarkedGray(this);
}

// Convenience wrapper: read this cell's color from its own chunk's bitmap.
MOZ_ALWAYS_INLINE CellColor TenuredCell::color() const {
  return getColor(&chunk()->markBits, this);
}
    431 
    432 /* static */
    433 inline CellColor TenuredCell::getColor(ChunkMarkBitmap* bitmap,
    434                                       const TenuredCell* cell) {
    435  // Note that this method isn't synchronised so may give surprising results if
    436  // the mark bitmap is being modified concurrently.
    437 
    438  if (bitmap->isMarkedBlack(cell)) {
    439    return CellColor::Black;
    440  }
    441 
    442  if (bitmap->isMarkedGray(cell)) {
    443    return CellColor::Gray;
    444  }
    445 
    446  return CellColor::White;
    447 }
    448 
    449 inline Arena* TenuredCell::arena() const {
    450  MOZ_ASSERT(isTenured());
    451  uintptr_t addr = address();
    452  addr &= ~ArenaMask;
    453  return reinterpret_cast<Arena*>(addr);
    454 }
    455 
    456 AllocKind TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
    457 
    458 JS::TraceKind TenuredCell::getTraceKind() const {
    459  return MapAllocToTraceKind(getAllocKind());
    460 }
    461 
    462 JS::Zone* TenuredCell::zone() const {
    463  JS::Zone* zone = zoneFromAnyThread();
    464  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(zone));
    465  return zone;
    466 }
    467 
    468 JS::Zone* TenuredCell::zoneFromAnyThread() const { return arena()->zone(); }
    469 
    470 bool TenuredCell::isInsideZone(JS::Zone* zone) const {
    471  return zone == zoneFromAnyThread();
    472 }
    473 
    474 // Read barrier and pre-write barrier implementation for GC cells.
    475 
// Entry point for the read barrier. T must be a concrete GC thing type, not
// one of the base cell classes.
template <typename T>
MOZ_ALWAYS_INLINE void ReadBarrier(T* thing) {
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  // Null pointers need no barrier.
  if (thing) {
    ReadBarrierImpl(thing);
  }
}

MOZ_ALWAYS_INLINE void ReadBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(CurrentThreadIsMainThread());
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(thing);

  // During incremental marking, hand the cell to the incremental read
  // barrier.
  JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
  if (shadowZone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
    return;
  }

  // Otherwise, a gray cell read by the mutator is unmarked (recursively, so
  // everything reachable from it is also un-grayed).
  if (thing->isMarkedGray()) {
    UnmarkGrayGCThingRecursively(thing);
  }
}

MOZ_ALWAYS_INLINE void ReadBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);

  // Nursery cells take no read barrier; only tenured cells do.
  if (thing->isTenured()) {
    ReadBarrierImpl(&thing->asTenured());
  }
}
    510 
#ifdef DEBUG
// Debug predicate: pre-write barriers are permitted only when the current GC
// context allows them, and only on the main thread or on a thread that is
// sweeping or finalizing.
static bool PreWriteBarrierAllowed() {
  JS::GCContext* gcx = MaybeGetGCContext();
  if (!gcx || !gcx->isPreWriteBarrierAllowed()) {
    return false;
  }

  return gcx->onMainThread() || gcx->gcUse() == gc::GCUse::Sweeping ||
         gcx->gcUse() == gc::GCUse::Finalizing;
}
#endif
    522 
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(PreWriteBarrierAllowed());
  MOZ_ASSERT(thing);

  // Barriers can be triggered on the main thread while collecting, but are
  // disabled. For example, this happens when sweeping HeapPtr wrappers. See
  // AutoDisableBarriers.

  JS::shadow::Zone* zone = thing->shadowZoneFromAnyThread();
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalPreWriteBarrier(thing);
  }
}

MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);

  // Only tenured cells are subject to the pre-write barrier.
  if (thing->isTenured()) {
    PreWriteBarrierImpl(&thing->asTenured());
  }
}

// Entry point for the pre-write barrier: called with the old value of an edge
// before it is overwritten (see e.g. CellWithTenuredGCPointer::setHeaderPtr).
// T must be a concrete GC thing type, not one of the base cell classes.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(T* thing) {
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  // Null edges need no barrier.
  if (thing) {
    PreWriteBarrierImpl(thing);
  }
}
    555 
// Pre-write barrier implementation for structures containing GC cells, taking a
// functor to trace the structure.
template <typename T, typename F>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data,
                                       const F& traceFn) {
  MOZ_ASSERT(data);
  MOZ_ASSERT(!CurrentThreadIsOffThreadCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());

  // Fast path: nothing to do unless this zone is marking incrementally.
  auto* shadowZone = JS::shadow::Zone::from(zone);
  if (!shadowZone->needsIncrementalBarrier()) {
    return;
  }

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(shadowZone->runtimeFromAnyThread()));
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));

  // Hand the structure's edges to the zone's barrier tracer.
  traceFn(shadowZone->barrierTracer(), data);
}

// Pre-write barrier implementation for structures containing GC cells. T must
// support a |trace| method.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(JS::Zone* zone, T* data) {
  MOZ_ASSERT(data);
  PreWriteBarrier(zone, data, [](JSTracer* trc, T* data) { data->trace(trc); });
}
    583 
    584 #ifdef DEBUG
    585 
    586 /* static */ void Cell::assertThingIsNotGray(Cell* cell) {
    587  JS::AssertCellIsNotGray(cell);
    588 }
    589 
    590 bool Cell::isAligned() const {
    591  if (!isTenured()) {
    592    return true;
    593  }
    594  return asTenured().isAligned();
    595 }
    596 
    597 bool TenuredCell::isAligned() const {
    598  return Arena::isAligned(address(), arena()->getThingSize());
    599 }
    600 
    601 #endif
    602 
// Base class for nursery-allocatable GC things that have 32-bit length and
    604 // 32-bit flags (currently JSString and BigInt).
    605 //
    606 // This tries to store both in Cell::header_, but if that isn't large enough the
    607 // length is stored separately.
    608 //
    609 //          32       0
    610 //  ------------------
    611 //  | Length | Flags |
    612 //  ------------------
    613 //
    614 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
    615 // for GC. Derived classes must ensure they don't use these flags for non-GC
    616 // purposes.
class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
#if JS_BITS_PER_WORD == 32
  // Additional storage for length if |header_| is too small to fit both.
  uint32_t length_;
#endif

 protected:
  // The 32-bit length: either the separate field (32-bit builds) or the high
  // half of the header word (64-bit builds).
  uint32_t headerLengthField() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    return uint32_t(header_.get() >> 32);
#endif
  }
  // As above, but reads the header word with a relaxed atomic load.
  uint32_t headerLengthFieldAtomic() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    return uint32_t(header_.getAtomic() >> 32);
#endif
  }

  // The 32-bit flags word: the low half of the header word on all platforms.
  uint32_t headerFlagsField() const { return uint32_t(header_.get()); }
  uint32_t headerFlagsFieldAtomic() const {
    return uint32_t(header_.getAtomic());
  }

  void setHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() | uintptr_t(flag));
  }
  void clearHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() & ~uintptr_t(flag));
  }
  void toggleHeaderFlagBit(uint32_t flag) {
    header_.set(header_.get() ^ uintptr_t(flag));
  }

  // Set both fields at once. On 64-bit builds they are packed into a single
  // header word; on 32-bit builds the length goes in its own field.
  void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
#if JS_BITS_PER_WORD == 32
    header_.set(flags);
    length_ = len;
#else
    header_.set((uint64_t(len) << 32) | uint64_t(flags));
#endif
  }

 public:
  // Returns the offset of header_. JIT code should use offsetOfFlags
  // below.
  static constexpr size_t offsetOfRawHeaderFlagsField() {
    return offsetof(CellWithLengthAndFlags, header_);
  }

  // Offsets for direct field from jit code. A number of places directly
  // access 32-bit length and flags fields so do endian trickery here.
#if JS_BITS_PER_WORD == 32
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, length_);
  }
#elif MOZ_LITTLE_ENDIAN()
  // 64-bit little-endian: flags are the low (first) 4 bytes of the header
  // word, length the high 4 bytes.
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
#else
  // 64-bit big-endian: byte order within the word is reversed.
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
#endif
};
    695 
    696 // Base class for non-nursery-allocatable GC things that allows storing a non-GC
    697 // thing pointer in the first word.
    698 //
    699 // The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
template <class PtrT>
class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
    : public TenuredCell {
  static_assert(!std::is_pointer_v<PtrT>,
                "PtrT should be the type of the referent, not of the pointer");
  static_assert(
      !std::is_base_of_v<Cell, PtrT>,
      "Don't use TenuredCellWithNonGCPointer for pointers to GC things");

 protected:
  TenuredCellWithNonGCPointer() = default;
  explicit TenuredCellWithNonGCPointer(PtrT* initial) {
    uintptr_t data = uintptr_t(initial);
    header_.set(data);
  }

  // Read the stored pointer. The pointer shares the header word with the GC
  // flag bits, which must all be clear here (checked by the assert).
  PtrT* headerPtr() const {
    MOZ_ASSERT(flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(header_.get()));
  }

  // Update the stored pointer. No barriers are required: the referent is not
  // a GC thing.
  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(flags() == 0);
    header_.set(data);
  }

 public:
  // Offset of the stored pointer from the cell address.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(TenuredCellWithNonGCPointer, header_);
  }
};
    733 
    734 // Base class for non-nursery-allocatable GC things that allows storing flags
    735 // in the first word.
    736 //
    737 // The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
    738 // for GC.
class alignas(gc::CellAlignBytes) TenuredCellWithFlags : public TenuredCell {
 protected:
  TenuredCellWithFlags() { header_.set(0); }
  explicit TenuredCellWithFlags(uintptr_t initial) { header_.set(initial); }

  // Read the derived-class flag bits stored in the header word. The GC's own
  // reserved bits must all be clear here (checked by the assert).
  uintptr_t headerFlagsField() const {
    MOZ_ASSERT(flags() == 0);
    return header_.get();
  }

  void setHeaderFlagBits(uintptr_t flags) {
    header_.set(header_.get() | flags);
  }
  void clearHeaderFlagBits(uintptr_t flags) {
    header_.set(header_.get() & ~flags);
  }
};
    756 
    757 // Base class for GC things that have a tenured GC pointer as their first word.
    758 //
    759 // The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
    760 // for GC.
    761 //
// This includes a pre write barrier when the pointer is updated. No post
    763 // is necessary as the pointer is always tenured.
template <class BaseCell, class PtrT>
class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
  static void staticAsserts() {
    // These static asserts are not in class scope because the PtrT may not be
    // defined when this class template is instantiated.
    static_assert(
        std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
        "BaseCell must be either Cell or TenuredCell");
    static_assert(
        !std::is_pointer_v<PtrT>,
        "PtrT should be the type of the referent, not of the pointer");
    static_assert(
        std::is_base_of_v<Cell, PtrT>,
        "Only use CellWithTenuredGCPointer for pointers to GC things");
  }

 protected:
  CellWithTenuredGCPointer() = default;
  explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }

  // Initialize the stored pointer without a pre-write barrier (there is no
  // previous value). The referent must be tenured.
  void initHeaderPtr(PtrT* initial) {
    MOZ_ASSERT_IF(initial, !IsInsideNursery(initial));
    uintptr_t data = uintptr_t(initial);
    this->header_.set(data);
  }

  // Update the stored pointer, running the pre-write barrier on the old
  // value. No post barrier is needed because the referent is always tenured.
  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    MOZ_ASSERT_IF(newValue, !IsInsideNursery(newValue));
    PreWriteBarrier(headerPtr());
    unbarrieredSetHeaderPtr(newValue);
  }

 public:
  // Read the stored pointer. The pointer shares the header word with the GC
  // flag bits, which must all be clear here (checked by the assert).
  PtrT* headerPtr() const {
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
  }
  // As above, but reads the header word with a relaxed atomic load.
  PtrT* headerPtrAtomic() const {
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_.getAtomic()));
  }

  // Update the stored pointer without any barriers. Callers must ensure this
  // is safe (e.g. during initialization or when barriers are handled
  // elsewhere).
  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(this->flags() == 0);
    this->header_.set(data);
  }

  // Offset of the stored pointer from the cell address.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(CellWithTenuredGCPointer, header_);
  }
};
    819 
    820 void CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev, JSObject* next);
    821 
// True if T is a GC type that is always allocated in the tenured heap. JSAtom
// is listed explicitly, presumably because it does not derive from TenuredCell
// yet is always tenured.
template <typename T>
constexpr inline bool GCTypeIsTenured() {
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  return std::is_base_of_v<TenuredCell, T> || std::is_base_of_v<JSAtom, T>;
}
    829 
    830 template <class PtrT>
    831 class alignas(gc::CellAlignBytes) CellWithGCPointer : public Cell {
    832  static void staticAsserts() {
    833    // These static asserts are not in class scope because the PtrT may not be
    834    // defined when this class template is instantiated.
    835    static_assert(
    836        !std::is_pointer_v<PtrT>,
    837        "PtrT should be the type of the referent, not of the pointer");
    838    static_assert(std::is_base_of_v<Cell, PtrT>,
    839                  "Only use CellWithGCPointer for pointers to GC things");
    840    static_assert(!GCTypeIsTenured<PtrT>,
    841                  "Don't use CellWithGCPointer for always-tenured GC things");
    842  }
    843 
    844 protected:
    845  CellWithGCPointer() = default;
    846  explicit CellWithGCPointer(PtrT* initial) { initHeaderPtr(initial); }
    847 
    848  void initHeaderPtr(PtrT* initial) {
    849    uintptr_t data = uintptr_t(initial);
    850    this->header_.set(data);
    851    if (initial && isTenured() && IsInsideNursery(initial)) {
    852      CellHeaderPostWriteBarrier(headerPtrAddress(), nullptr, initial);
    853    }
    854  }
    855 
    856  PtrT** headerPtrAddress() {
    857    MOZ_ASSERT(this->flags() == 0);
    858    return reinterpret_cast<PtrT**>(&this->header_);
    859  }
    860 
    861 public:
    862  PtrT* headerPtr() const {
    863    MOZ_ASSERT(this->flags() == 0);
    864    return reinterpret_cast<PtrT*>(uintptr_t(this->header_.get()));
    865  }
    866 
    867  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    868    uintptr_t data = uintptr_t(newValue);
    869    MOZ_ASSERT(this->flags() == 0);
    870    this->header_.set(data);
    871  }
    872 
    873  static constexpr size_t offsetOfHeaderPtr() {
    874    return offsetof(CellWithGCPointer, header_);
    875  }
    876 };
    877 
// Check whether a typed GC thing is marked at all. Doesn't check gray bits for
// kinds that can't be marked gray.
template <typename T>
static inline bool TenuredThingIsMarkedAny(T* thing) {
  using BaseT = typename BaseGCType<T>::type;
  TenuredCell* cell = &thing->asTenured();
  if constexpr (TraceKindCanBeGray<BaseT>::value) {
    return cell->isMarkedAny();
  } else {
    // This kind can never be gray, so checking the black bit suffices.
    MOZ_ASSERT(!cell->isMarkedGray());
    return cell->isMarkedBlack();
  }
}

// Specialization for untyped cells: the trace kind is unknown here, so the
// gray bit must always be considered.
template <>
inline bool TenuredThingIsMarkedAny<Cell>(Cell* thing) {
  return thing->asTenured().isMarkedAny();
}
    896 
    897 } /* namespace gc */
    898 } /* namespace js */
    899 
    900 #endif /* gc_Cell_h */