tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

StoreBuffer.h (21408B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifndef gc_StoreBuffer_h
      8 #define gc_StoreBuffer_h
      9 
     10 #include "mozilla/Assertions.h"
     11 #include "mozilla/Attributes.h"
     12 #include "mozilla/HashFunctions.h"
     13 #include "mozilla/ReentrancyGuard.h"
     14 
     15 #include <algorithm>
     16 
     17 #include "ds/BitArray.h"
     18 #include "ds/LifoAlloc.h"
     19 #include "gc/Cell.h"
     20 #include "gc/Nursery.h"
     21 #include "js/AllocPolicy.h"
     22 #include "js/UniquePtr.h"
     23 #include "wasm/WasmAnyRef.h"
     24 
     25 namespace JS {
     26 struct GCSizes;
     27 }  // namespace JS
     28 
     29 namespace js {
     30 
     31 class NativeObject;
     32 
     33 namespace gc {
     34 
     35 class Arena;
     36 class ArenaCellSet;
     37 
     38 /*
     39 * BufferableRef represents an abstract reference for use in the generational
     40 * GC's remembered set. Entries in the store buffer that cannot be represented
     41 * with the simple pointer-to-a-pointer scheme must derive from this class and
     42 * use the generic store buffer interface.
     43 *
     44 * A single BufferableRef entry in the generic buffer can represent many entries
     45 * in the remembered set.  For example js::OrderedHashTableRef represents all
     46 * the incoming edges corresponding to keys in an ordered hash table.
     47 */
     48 class BufferableRef {
     49 public:
     50  virtual void trace(JSTracer* trc) = 0;
     51  bool maybeInRememberedSet(const Nursery&) const { return true; }
     52 };
     53 
// A hash set of raw edge addresses, keyed on the pointer value itself.
using EdgeSet = HashSet<void*, PointerHasher<void*>, SystemAllocPolicy>;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 8 * 1024;
     58 
/*
 * The StoreBuffer observes all writes that occur in the system and performs
 * efficient filtering of them to derive a remembered set for nursery GC.
 */
class StoreBuffer {
  friend class mozilla::ReentrancyGuard;

  // Result of inserting an entry. AboutToOverflow means the receiving buffer
  // has reached its size limit and a minor GC should be requested (see
  // setAboutToOverflow below).
  enum class PutResult { OK, AboutToOverflow };

  /*
   * This buffer holds only a single type of edge. Using this buffer is more
   * efficient than the generic buffer when many writes will be to the same
   * type of edge: e.g. Value or Cell*.
   */
  template <typename T>
  struct MonoTypeBuffer {
    /* The canonical set of stores. */
    using StoreSet = HashSet<T, typename T::Hasher, SystemAllocPolicy>;
    StoreSet stores_;
    // Entry count at which sinkStore() reports AboutToOverflow.
    size_t maxEntries_ = 0;

    /*
     * A one element cache in front of the canonical set to speed up
     * temporary instances of HeapPtr.
     */
    T last_ = T();

    MonoTypeBuffer() = default;

    MonoTypeBuffer(const MonoTypeBuffer& other) = delete;
    MonoTypeBuffer& operator=(const MonoTypeBuffer& other) = delete;

    inline MonoTypeBuffer(MonoTypeBuffer&& other);
    inline MonoTypeBuffer& operator=(MonoTypeBuffer&& other);

    void setSize(size_t entryCount);

    bool isEmpty() const;
    void clear();

    /* Add one item to the buffer. */
    PutResult put(const T& t) {
      // Flush the previously cached entry into the canonical set, then cache
      // |t| in its place.
      PutResult r = sinkStore();
      last_ = t;
      return r;
    }

    /* Remove an item from the store buffer. */
    void unput(const T& v) {
      // Fast, hashless remove of last put.
      if (last_ == v) {
        last_ = T();
        return;
      }
      stores_.remove(v);
    }

    /* Move any buffered stores to the canonical store set. */
    PutResult sinkStore() {
      if (last_) {
        // Insertion must not fail here; crash rather than lose an edge.
        AutoEnterOOMUnsafeRegion oomUnsafe;
        if (!stores_.put(last_)) {
          oomUnsafe.crash("Failed to allocate for MonoTypeBuffer::put.");
        }
      }
      last_ = T();

      if (stores_.count() >= maxEntries_) {
        return PutResult::AboutToOverflow;
      }

      return PutResult::OK;
    }

    /* Trace the source of all edges in the store buffer. */
    void trace(TenuringTracer& mover, StoreBuffer* owner);

    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);
  };

  // Buffer of whole cells: cells are recorded per-arena in ArenaCellSet
  // bitmaps allocated from |storage_| (see allocateCellSet).
  struct WholeCellBuffer {
    // Backing allocator for the per-arena ArenaCellSets.
    UniquePtr<LifoAlloc> storage_;
    // Used-bytes threshold at which isAboutToOverflow() fires.
    size_t maxSize_ = 0;
    // Head of the linked list of cell sets that need sweeping.
    ArenaCellSet* sweepHead_ = nullptr;
    // Most recently buffered cell; presumably used to short-circuit repeated
    // puts of the same cell (see putDontCheckLast) — confirm in the .cpp.
    const Cell* last_ = nullptr;

    WholeCellBuffer() = default;

    WholeCellBuffer(const WholeCellBuffer& other) = delete;
    WholeCellBuffer& operator=(const WholeCellBuffer& other) = delete;

    inline WholeCellBuffer(WholeCellBuffer&& other);
    inline WholeCellBuffer& operator=(WholeCellBuffer&& other);

    [[nodiscard]] bool init();
    void setSize(size_t entryCount);

    bool isEmpty() const;
    void clear();

    bool isAboutToOverflow() const {
      return !storage_->isEmpty() && storage_->used() >= maxSize_;
    }

    void trace(TenuringTracer& mover, StoreBuffer* owner);

    inline void put(const Cell* cell);
    inline void putDontCheckLast(const Cell* cell);

    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);

    // Address of the |last_| cache, e.g. for JIT-generated code to consult.
    const Cell** lastBufferedPtr() { return &last_; }

   private:
    ArenaCellSet* allocateCellSet(Arena* arena);
  };

  // Buffer for arbitrary BufferableRef-derived entries, stored inline in a
  // LifoAlloc as a size word followed by the object itself.
  struct GenericBuffer {
    UniquePtr<LifoAlloc> storage_;
    // Used-bytes threshold at which isAboutToOverflow() fires.
    size_t maxSize_ = 0;

    GenericBuffer() = default;

    GenericBuffer(const GenericBuffer& other) = delete;
    GenericBuffer& operator=(const GenericBuffer& other) = delete;

    inline GenericBuffer(GenericBuffer&& other);
    inline GenericBuffer& operator=(GenericBuffer&& other);

    [[nodiscard]] bool init();
    void setSize(size_t entryCount);

    bool isEmpty() const;
    void clear();

    bool isAboutToOverflow() const {
      return !storage_->isEmpty() && storage_->used() >= maxSize_;
    }

    /* Trace all generic edges. */
    void trace(JSTracer* trc, StoreBuffer* owner);

    template <typename T>
    PutResult put(const T& t) {
      static_assert(std::is_base_of_v<BufferableRef, T>);
      MOZ_ASSERT(storage_);

      // Insertion must not fail; crash on OOM rather than lose an edge.
      AutoEnterOOMUnsafeRegion oomUnsafe;
      // Each entry is prefixed with its size; presumably this is what lets
      // trace() walk the heterogeneous entries — confirm against the .cpp.
      unsigned size = sizeof(T);
      unsigned* sizep = storage_->pod_malloc<unsigned>();
      if (!sizep) {
        oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
      }
      *sizep = size;

      // Copy-construct the entry directly into the LifoAlloc storage.
      T* tp = storage_->new_<T>(t);
      if (!tp) {
        oomUnsafe.crash("Failed to allocate for GenericBuffer::put.");
      }

      if (isAboutToOverflow()) {
        return PutResult::AboutToOverflow;
      }

      return PutResult::OK;
    }

    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);
  };

  // Hash policy for edge types that wrap a single pointer member |edge|.
  template <typename Edge>
  struct PointerEdgeHasher {
    using Lookup = Edge;
    static HashNumber hash(const Lookup& l) {
      return mozilla::HashGeneric(l.edge);
    }
    static bool match(const Edge& k, const Lookup& l) { return k == l; }
  };

  // A remembered-set edge that is the address of a T* slot.
  template <typename T>
  struct CellPtrEdge {
    T** edge = nullptr;

    CellPtrEdge() = default;
    explicit CellPtrEdge(T** v) : edge(v) {}
    bool operator==(const CellPtrEdge& other) const {
      return edge == other.edge;
    }
    bool operator!=(const CellPtrEdge& other) const {
      return edge != other.edge;
    }

    // Filter: the target must be in the nursery (asserted); only edges whose
    // slot lives outside the nursery need buffering.
    bool maybeInRememberedSet(const Nursery& nursery) const {
      MOZ_ASSERT(IsInsideNursery(*edge));
      return !nursery.isInside(edge);
    }

    void trace(TenuringTracer& mover) const;

    explicit operator bool() const { return edge != nullptr; }

    using Hasher = PointerEdgeHasher<CellPtrEdge<T>>;
  };

  using ObjectPtrEdge = CellPtrEdge<JSObject>;
  using StringPtrEdge = CellPtrEdge<JSString>;
  using BigIntPtrEdge = CellPtrEdge<JS::BigInt>;
  using GetterSetterPtrEdge = CellPtrEdge<js::GetterSetter>;

  // A remembered-set edge that is the address of a JS::Value slot.
  struct ValueEdge {
    JS::Value* edge;

    ValueEdge() : edge(nullptr) {}
    explicit ValueEdge(JS::Value* v) : edge(v) {}
    bool operator==(const ValueEdge& other) const { return edge == other.edge; }
    bool operator!=(const ValueEdge& other) const { return edge != other.edge; }

    bool isGCThing() const { return edge->isGCThing(); }

    // The referent cell, or nullptr for non-GC-thing values.
    Cell* deref() const {
      return isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr;
    }

    // Filter: target must be a nursery thing (asserted); only slots outside
    // the nursery need buffering.
    bool maybeInRememberedSet(const Nursery& nursery) const {
      MOZ_ASSERT(IsInsideNursery(deref()));
      return !nursery.isInside(edge);
    }

    void trace(TenuringTracer& mover) const;

    explicit operator bool() const { return edge != nullptr; }

    using Hasher = PointerEdgeHasher<ValueEdge>;
  };

  // A remembered-set edge describing a range of slots or elements of a
  // NativeObject, encoded as a tagged object pointer plus [start, start+count).
  struct SlotsEdge {
    // These definitions must match those in HeapSlot::Kind.
    const static int SlotKind = 0;
    const static int ElementKind = 1;

    uintptr_t objectAndKind_;  // NativeObject* | Kind
    uint32_t start_;
    uint32_t count_;

    SlotsEdge() : objectAndKind_(0), start_(0), count_(0) {}
    SlotsEdge(NativeObject* object, int kind, uint32_t start, uint32_t count)
        : objectAndKind_(uintptr_t(object) | kind),
          start_(start),
          count_(count) {
      // The kind is packed into the low bit, so the pointer must be aligned
      // and the kind must fit in one bit.
      MOZ_ASSERT((uintptr_t(object) & 1) == 0);
      MOZ_ASSERT(kind <= 1);
      MOZ_ASSERT(count > 0);
      // Guard against uint32_t overflow of the range's exclusive end.
      MOZ_ASSERT(start + count > start);
    }

    NativeObject* object() const {
      return reinterpret_cast<NativeObject*>(objectAndKind_ & ~1);
    }
    int kind() const { return (int)(objectAndKind_ & 1); }

    bool operator==(const SlotsEdge& other) const {
      return objectAndKind_ == other.objectAndKind_ && start_ == other.start_ &&
             count_ == other.count_;
    }
    bool operator!=(const SlotsEdge& other) const { return !(*this == other); }

    // True if this SlotsEdge range is adjacent to or overlaps with the other
    // SlotsEdge range. The adjacency case will coalesce a series of increasing
    // or decreasing single index writes 0, 1, 2, ..., N into a SlotsEdge range
    // of elements [0, N].
    bool touches(const SlotsEdge& other) const {
      if (objectAndKind_ != other.objectAndKind_) {
        return false;
      }

      if (other.start_ < start_) {
        // If the other range starts before this one, then it touches if its
        // (exclusive) end is at or after this range.
        return other.start_ + other.count_ >= start_;
      }

      // Otherwise, the other range touches if it starts before or at
      // the exclusive end of this range.
      return other.start_ <= start_ + count_;
    }

    // Destructively make this SlotsEdge range the union of the other
    // SlotsEdge range and this one. A precondition is that the ranges must
    // overlap.
    void merge(const SlotsEdge& other) {
      MOZ_ASSERT(touches(other));
      uint32_t end = std::max(start_ + count_, other.start_ + other.count_);
      start_ = std::min(start_, other.start_);
      count_ = end - start_;
    }

    // Filter: only slot ranges of tenured objects need buffering; nursery
    // objects' slots are traced wholesale during minor GC.
    bool maybeInRememberedSet(const Nursery& n) const {
      return !IsInsideNursery(reinterpret_cast<Cell*>(object()));
    }

    void trace(TenuringTracer& mover) const;

    explicit operator bool() const { return objectAndKind_ != 0; }

    struct Hasher {
      using Lookup = SlotsEdge;
      static HashNumber hash(const Lookup& l) {
        return mozilla::HashGeneric(l.objectAndKind_, l.start_, l.count_);
      }
      static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; }
    };
  };

  // A remembered-set edge that is the address of a wasm::AnyRef slot.
  struct WasmAnyRefEdge {
    wasm::AnyRef* edge;

    WasmAnyRefEdge() : edge(nullptr) {}
    explicit WasmAnyRefEdge(wasm::AnyRef* v) : edge(v) {}
    bool operator==(const WasmAnyRefEdge& other) const {
      return edge == other.edge;
    }
    bool operator!=(const WasmAnyRefEdge& other) const {
      return edge != other.edge;
    }

    bool isGCThing() const { return edge->isGCThing(); }

    // The referent cell, or nullptr for non-GC-thing AnyRefs.
    Cell* deref() const {
      return isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr;
    }

    // Filter: target must be a nursery thing (asserted); only slots outside
    // the nursery need buffering.
    bool maybeInRememberedSet(const Nursery& nursery) const {
      MOZ_ASSERT(IsInsideNursery(deref()));
      return !nursery.isInside(edge);
    }

    void trace(TenuringTracer& mover) const;

    explicit operator bool() const { return edge != nullptr; }

    using Hasher = PointerEdgeHasher<WasmAnyRefEdge>;
  };

  // Debug-only access check; presumably asserts the buffer may be touched in
  // the current context (see the .cpp for the exact conditions). No-op in
  // release builds.
#ifdef DEBUG
  void checkAccess() const;
#else
  void checkAccess() const {}
#endif

  // Remove |edge| from |buffer|, if buffering is enabled.
  template <typename Buffer, typename Edge>
  void unputEdge(Buffer& buffer, const Edge& edge) {
    checkAccess();
    if (!isEnabled()) {
      return;
    }

    mozilla::ReentrancyGuard g(*this);

    buffer.unput(edge);
  }

  // Insert |edge| into |buffer| after filtering via maybeInRememberedSet;
  // triggers a minor GC request with |overflowReason| if the buffer fills up.
  template <typename Buffer, typename Edge>
  void putEdge(Buffer& buffer, const Edge& edge, JS::GCReason overflowReason) {
    checkAccess();
    if (!isEnabled()) {
      return;
    }

    mozilla::ReentrancyGuard g(*this);

    // Skip edges that can never be part of the remembered set.
    if (!edge.maybeInRememberedSet(nursery_)) {
      return;
    }

    PutResult r = buffer.put(edge);

    if (MOZ_UNLIKELY(r == PutResult::AboutToOverflow)) {
      setAboutToOverflow(overflowReason);
    }
  }

  // Like putEdge, but the caller guarantees the edge already passes the
  // remembered-set filter (asserted) so the check is skipped.
  template <typename Buffer, typename Edge>
  void putEdgeFromTenured(Buffer& buffer, const Edge& edge,
                          JS::GCReason overflowReason) {
    MOZ_ASSERT(edge.maybeInRememberedSet(nursery_));
    checkAccess();

    if (!isEnabled()) {
      return;
    }

    mozilla::ReentrancyGuard g(*this);
    PutResult r = buffer.put(edge);
    if (MOZ_UNLIKELY(r == PutResult::AboutToOverflow)) {
      setAboutToOverflow(overflowReason);
    }
  }

  // One buffer per edge flavour, plus the whole-cell and generic buffers.
  MonoTypeBuffer<ValueEdge> bufferVal;
  MonoTypeBuffer<StringPtrEdge> bufStrCell;
  MonoTypeBuffer<BigIntPtrEdge> bufBigIntCell;
  MonoTypeBuffer<GetterSetterPtrEdge> bufGetterSetterCell;
  MonoTypeBuffer<ObjectPtrEdge> bufObjCell;
  MonoTypeBuffer<SlotsEdge> bufferSlot;
  MonoTypeBuffer<WasmAnyRefEdge> bufferWasmAnyRef;
  WholeCellBuffer bufferWholeCell;
  GenericBuffer bufferGeneric;

  JSRuntime* runtime_;
  Nursery& nursery_;
  // Buffer sizing parameters; how they combine is defined out-of-line
  // (see setSize/updateSize), not visible in this header.
  size_t entryCount_;
  double entryScaling_;

  bool aboutToOverflow_;
  bool enabled_;
  // See mayHavePointersToDeadCells() below.
  bool mayHavePointersToDeadCells_;
#ifdef DEBUG
  bool mEntered; /* For ReentrancyGuard. */
#endif

 public:
  explicit StoreBuffer(JSRuntime* rt);

  StoreBuffer(const StoreBuffer& other) = delete;
  StoreBuffer& operator=(const StoreBuffer& other) = delete;

  StoreBuffer(StoreBuffer&& other);
  StoreBuffer& operator=(StoreBuffer&& other);

  [[nodiscard]] bool enable();
  void disable();
  bool isEnabled() const { return enabled_; }

  void updateSize();

  bool isEmpty() const;
  void clear();

  const Nursery& nursery() const { return nursery_; }

  /* Get the overflowed status. */
  bool isAboutToOverflow() const { return aboutToOverflow_; }

  /*
   * Brain transplants may add whole cell buffer entries for dead cells. We must
   * evict the nursery prior to sweeping arenas if any such entries are present.
   */
  bool mayHavePointersToDeadCells() const {
    return mayHavePointersToDeadCells_;
  }

  /* Insert a single edge into the buffer/remembered set. */
  void putValue(JS::Value* vp) {
    putEdge(bufferVal, ValueEdge(vp), JS::GCReason::FULL_VALUE_BUFFER);
  }
  void unputValue(JS::Value* vp) { unputEdge(bufferVal, ValueEdge(vp)); }

  void putCell(JSString** strp) {
    putEdge(bufStrCell, StringPtrEdge(strp),
            JS::GCReason::FULL_CELL_PTR_STR_BUFFER);
  }
  void unputCell(JSString** strp) {
    unputEdge(bufStrCell, StringPtrEdge(strp));
  }

  void putCell(JS::BigInt** bip) {
    putEdge(bufBigIntCell, BigIntPtrEdge(bip),
            JS::GCReason::FULL_CELL_PTR_BIGINT_BUFFER);
  }
  void unputCell(JS::BigInt** bip) {
    unputEdge(bufBigIntCell, BigIntPtrEdge(bip));
  }

  void putCell(JSObject** strp) {
    putEdge(bufObjCell, ObjectPtrEdge(strp),
            JS::GCReason::FULL_CELL_PTR_OBJ_BUFFER);
  }
  void unputCell(JSObject** strp) {
    unputEdge(bufObjCell, ObjectPtrEdge(strp));
  }

  void putCell(js::GetterSetter** gsp) {
    putEdge(bufGetterSetterCell, GetterSetterPtrEdge(gsp),
            JS::GCReason::FULL_CELL_PTR_GETTER_SETTER_BUFFER);
  }
  void unputCell(js::GetterSetter** gsp) {
    unputEdge(bufGetterSetterCell, GetterSetterPtrEdge(gsp));
  }

  // Insert a slot range edge, coalescing with the most recently buffered
  // range when they touch (see SlotsEdge::touches).
  void putSlot(NativeObject* obj, int kind, uint32_t start, uint32_t count) {
    SlotsEdge edge(obj, kind, start, count);
    if (bufferSlot.last_.touches(edge)) {
      bufferSlot.last_.merge(edge);
    } else {
      putEdge(bufferSlot, edge, JS::GCReason::FULL_SLOT_BUFFER);
    }
  }

  void putWasmAnyRef(wasm::AnyRef* vp) {
    putEdge(bufferWasmAnyRef, WasmAnyRefEdge(vp),
            JS::GCReason::FULL_WASM_ANYREF_BUFFER);
  }
  void putWasmAnyRefEdgeFromTenured(wasm::AnyRef* vp) {
    putEdgeFromTenured(bufferWasmAnyRef, WasmAnyRefEdge(vp),
                       JS::GCReason::FULL_WASM_ANYREF_BUFFER);
  }
  void unputWasmAnyRef(wasm::AnyRef* vp) {
    unputEdge(bufferWasmAnyRef, WasmAnyRefEdge(vp));
  }

  static inline bool isInWholeCellBuffer(Cell* cell);
  inline void putWholeCell(Cell* cell);
  inline void putWholeCellDontCheckLast(Cell* cell);
  // Address of the whole-cell buffer's last-put cache, e.g. for JIT code.
  const void* addressOfLastBufferedWholeCell() {
    return bufferWholeCell.lastBufferedPtr();
  }

  /* Insert an entry into the generic buffer. */
  template <typename T>
  void putGeneric(const T& t) {
    putEdge(bufferGeneric, t, JS::GCReason::FULL_GENERIC_BUFFER);
  }

  void setMayHavePointersToDeadCells() { mayHavePointersToDeadCells_ = true; }

  /* Methods to trace the source of all edges in the store buffer. */
  void traceValues(TenuringTracer& mover);
  void traceCells(TenuringTracer& mover);
  void traceSlots(TenuringTracer& mover);
  void traceWasmAnyRefs(TenuringTracer& mover);
  void traceWholeCells(TenuringTracer& mover);
  void traceGenericEntries(JSTracer* trc);

  /* For use by our owned buffers and for testing. */
  void setAboutToOverflow(JS::GCReason);

  void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf,
                              JS::GCSizes* sizes);

  void checkEmpty() const;
};
    600 
// A set of cells in an arena used to implement the whole cell store buffer.
// Also used to store a set of cells that need to be swept.
class ArenaCellSet {
  friend class StoreBuffer;

  using ArenaCellBits = BitArray<MaxArenaCellIndex>;

  // The arena this relates to.
  Arena* arena = nullptr;

  // Pointer to next set forming a linked list. Used to form the list of cell
  // sets to sweep.
  ArenaCellSet* next = nullptr;

  // Bit vector for each possible cell start position.
  ArenaCellBits bits;

#ifdef DEBUG
  // The minor GC number when this was created. This object should not survive
  // past the next minor collection.
  const uint64_t minorGCNumberAtCreation = 0;
#endif

  // Construct the empty sentinel object.
  constexpr ArenaCellSet() = default;

 public:
  using WordT = ArenaCellBits::WordT;
  static constexpr size_t BitsPerWord = ArenaCellBits::bitsPerElement;
  static constexpr size_t NumWords = ArenaCellBits::numSlots;

  explicit ArenaCellSet(Arena* arena);

  // Membership test by cell pointer; delegates to the index-based overload.
  bool hasCell(const TenuredCell* cell) const {
    return hasCell(getCellIndex(cell));
  }

  // Record a cell by pointer; delegates to the index-based overload.
  void putCell(const TenuredCell* cell) { putCell(getCellIndex(cell)); }

  // Only the shared sentinel counts as empty (see Empty below).
  bool isEmpty() const { return this == &Empty; }

  bool hasCell(size_t cellIndex) const;

  void putCell(size_t cellIndex);

  // Debug validity check; defined out-of-line.
  void check() const;

  // Raw access to the underlying bit words, e.g. for bulk iteration.
  WordT getWord(size_t wordIndex) const { return bits.getWord(wordIndex); }

  void setWord(size_t wordIndex, WordT value) {
    bits.setWord(wordIndex, value);
  }

  // Sweep this set, returning whether it also needs to be swept later.
  bool trace(TenuringTracer& mover);

  // Sentinel object used for all empty sets.
  //
  // We use a sentinel because it simplifies the JIT code slightly as we can
  // assume all arenas have a cell set.
  static ArenaCellSet Empty;

  static size_t getCellIndex(const TenuredCell* cell);
  static void getWordIndexAndMask(size_t cellIndex, size_t* wordp,
                                  uint32_t* maskp);

  // Attempt to trigger a minor GC if free space in the nursery (where these
  // objects are allocated) falls below this threshold.
  static const size_t NurseryFreeThresholdBytes = 64 * 1024;

  // Field offsets for JIT-generated code.
  static size_t offsetOfArena() { return offsetof(ArenaCellSet, arena); }
  static size_t offsetOfBits() { return offsetof(ArenaCellSet, bits); }
};
    674 
    675 // Post-write barrier implementation for GC cells.
    676 
    677 // Implement the post-write barrier for nursery allocateable cell type |T|. Call
    678 // this from |T::postWriteBarrier|.
    679 template <typename T>
    680 MOZ_ALWAYS_INLINE void PostWriteBarrierImpl(void* cellp, T* prev, T* next) {
    681  MOZ_ASSERT(cellp);
    682 
    683  // If the target needs an entry, add it.
    684  StoreBuffer* buffer;
    685  if (next && (buffer = next->storeBuffer())) {
    686    // If we know that the prev has already inserted an entry, we can skip
    687    // doing the lookup to add the new entry. Note that we cannot safely
    688    // assert the presence of the entry because it may have been added
    689    // via a different store buffer.
    690    if (prev && prev->storeBuffer()) {
    691      return;
    692    }
    693    buffer->putCell(static_cast<T**>(cellp));
    694    return;
    695  }
    696 
    697  // Remove the prev entry if the new value does not need it. There will only
    698  // be a prev entry if the prev value was in the nursery.
    699  if (prev && (buffer = prev->storeBuffer())) {
    700    buffer->unputCell(static_cast<T**>(cellp));
    701  }
    702 }
    703 
    704 template <typename T>
    705 MOZ_ALWAYS_INLINE void PostWriteBarrier(T** vp, T* prev, T* next) {
    706  static_assert(std::is_base_of_v<Cell, T>);
    707  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);
    708 
    709  if constexpr (!GCTypeIsTenured<T>()) {
    710    using BaseT = typename BaseGCType<T>::type;
    711    PostWriteBarrierImpl<BaseT>(vp, prev, next);
    712    return;
    713  }
    714 
    715  MOZ_ASSERT_IF(next, !IsInsideNursery(next));
    716 }
    717 
    718 // Used when we don't have a specific edge to put in the store buffer.
    719 void PostWriteBarrierCell(Cell* cell, Cell* prev, Cell* next);
    720 
    721 }  // namespace gc
    722 }  // namespace js
    723 
    724 #endif /* gc_StoreBuffer_h */