tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Barrier.h (41178B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifndef gc_Barrier_h
      8 #define gc_Barrier_h
      9 
     10 #include <type_traits>  // std::true_type
     11 
     12 #include "NamespaceImports.h"
     13 
     14 #include "gc/Cell.h"
     15 #include "gc/GCContext.h"
     16 #include "gc/StoreBuffer.h"
     17 #include "js/ComparisonOperators.h"     // JS::detail::DefineComparisonOps
     18 #include "js/experimental/TypedData.h"  // js::EnableIfABOVType
     19 #include "js/HeapAPI.h"
     20 #include "js/Id.h"
     21 #include "js/RootingAPI.h"
     22 #include "js/Value.h"
     23 #include "util/Poison.h"
     24 
     25 /*
     26 * [SMDOC] GC Barriers
     27 *
     28 * Several kinds of barrier are necessary to allow the GC to function correctly.
     29 * These are triggered by reading or writing to GC pointers in the heap and
     30 * serve to tell the collector about changes to the graph of reachable GC
     31 * things.
     32 *
     33 * Since it would be awkward to change every write to memory into a function
     34 * call, this file contains a bunch of C++ classes and templates that use
     35 * operator overloading to take care of barriers automatically. In most cases,
     36 * all that's necessary is to replace:
     37 *
     38 *     Type* field;
     39 *
     40 * with:
     41 *
     42 *     HeapPtr<Type> field;
     43 *
     44 * All heap-based GC pointers and tagged pointers must use one of these classes,
     45 * except in a couple of exceptional cases.
     46 *
     47 * These classes are designed to be used by the internals of the JS engine.
     48 * Barriers designed to be used externally are provided in js/RootingAPI.h.
     49 *
     50 * Overview
     51 * ========
     52 *
     53 * This file implements the following concrete classes:
     54 *
     55 * HeapPtr       General wrapper for heap-based pointers that provides pre- and
     56 *               post-write barriers. Most clients should use this.
     57 *
     58 * GCPtr         An optimisation of HeapPtr for objects which are only destroyed
     59 *               by GC finalization (this rules out use in Vector, for example).
     60 *
     61 * PreBarriered  Provides a pre-barrier but not a post-barrier. Necessary when
     62 *               generational GC updates are handled manually, e.g. for hash
     63 *               table keys that don't use StableCellHasher.
     64 *
     65 * HeapSlot      Provides pre and post-barriers, optimised for use in JSObject
     66 *               slots and elements.
     67 *
     68 * WeakHeapPtr   Provides read and post-write barriers, for use with weak
     69 *               pointers.
     70 *
     71 * UnsafeBarePtr Provides no barriers. Don't add new uses of this, or only if
     72 *               you really know what you are doing.
     73 *
     74 * The following classes are implemented in js/RootingAPI.h (in the JS
     75 * namespace):
     76 *
     77 * Heap          General wrapper for external clients. Like HeapPtr but also
     78 *               handles cycle collector concerns. Most external clients should
     79 *               use this.
     80 *
     81 * Heap::Tenured   Like Heap but doesn't allow nursery pointers. Allows storing
     82 *               flags in unused lower bits of the pointer.
     83 *
     84 * Which class to use?
     85 * -------------------
     86 *
     87 * Answer the following questions to decide which barrier class is right for
     88 * your use case:
     89 *
     90 * Is your code part of the JS engine?
     91 *   Yes, it's internal =>
     92 *     Is your pointer weak or strong?
     93 *       Strong =>
     94 *         Do you want automatic handling of nursery pointers?
     95 *           Yes, of course =>
     96 *             Can your object be destroyed outside of a GC?
     97 *               Yes => Use HeapPtr<T>
     98 *               No => Use GCPtr<T> (optimization)
     99 *           No, I'll do this myself =>
    100 *             Do you want pre-barriers so incremental marking works?
    101 *               Yes, of course => Use PreBarriered<T>
    102 *               No, and I'll fix all the bugs myself => Use UnsafeBarePtr<T>
    103 *       Weak => Use WeakHeapPtr<T>
    104 *   No, it's external =>
    105 *     Can your pointer refer to nursery objects?
    106 *       Yes => Use JS::Heap<T>
    107 *       Never => Use JS::Heap::Tenured<T> (optimization)
    108 *
    109 * If in doubt, use HeapPtr<T>.
    110 *
    111 * Write barriers
    112 * ==============
    113 *
    114 * A write barrier is a mechanism used by incremental or generational GCs to
    115 * ensure that every value that needs to be marked is marked. In general, the
    116 * write barrier should be invoked whenever a write can cause the set of things
    117 * traced through by the GC to change. This includes:
    118 *
    119 *   - writes to object properties
    120 *   - writes to array slots
    121 *   - writes to fields like JSObject::shape_ that we trace through
    122 *   - writes to fields in private data
    123 *   - writes to non-markable fields like JSObject::private that point to
    124 *     markable data
    125 *
    126 * The last category is the trickiest. Even though the private pointer does not
    127 * point to a GC thing, changing the private pointer may change the set of
    128 * objects that are traced by the GC. Therefore it needs a write barrier.
    129 *
    130 * Every barriered write should have the following form:
    131 *
    132 *   <pre-barrier>
    133 *   obj->field = value; // do the actual write
    134 *   <post-barrier>
    135 *
    136 * The pre-barrier is used for incremental GC and the post-barrier is for
    137 * generational GC.
    138 *
    139 * Pre-write barrier
    140 * -----------------
    141 *
    142 * To understand the pre-barrier, let's consider how incremental GC works. The
    143 * GC itself is divided into "slices". Between each slice, JS code is allowed to
    144 * run. Each slice should be short so that the user doesn't notice the
    145 * interruptions. In our GC, the structure of the slices is as follows:
    146 *
    147 * 1. ... JS work, which leads to a request to do GC ...
    148 * 2. [first GC slice, which performs all root marking and (maybe) more marking]
    149 * 3. ... more JS work is allowed to run ...
    150 * 4. [GC mark slice, which runs entirely in
    151 *    GCRuntime::markUntilBudgetExhausted]
    152 * 5. ... more JS work ...
    153 * 6. [GC mark slice, which runs entirely in
    154 *    GCRuntime::markUntilBudgetExhausted]
    155 * 7. ... more JS work ...
    156 * 8. [GC marking finishes; sweeping done non-incrementally; GC is done]
    157 * 9. ... JS continues uninterrupted now that GC is finishes ...
    158 *
    159 * Of course, there may be a different number of slices depending on how much
    160 * marking is to be done.
    161 *
    162 * The danger inherent in this scheme is that the JS code in steps 3, 5, and 7
    163 * might change the heap in a way that causes the GC to collect an object that
    164 * is actually reachable. The write barrier prevents this from happening. We use
    165 * a variant of incremental GC called "snapshot at the beginning." This approach
    166 * guarantees the invariant that if an object is reachable in step 2, then we
    167 * will mark it eventually. The name comes from the idea that we take a
    168 * theoretical "snapshot" of all reachable objects in step 2; all objects in
    169 * that snapshot should eventually be marked. (Note that the write barrier
    170 * verifier code takes an actual snapshot.)
    171 *
    172 * The basic correctness invariant of a snapshot-at-the-beginning collector is
    173 * that any object reachable at the end of the GC (step 9) must either:
    174 *   (1) have been reachable at the beginning (step 2) and thus in the snapshot
    175 *   (2) or must have been newly allocated, in steps 3, 5, or 7.
    176 * To deal with case (2), any objects allocated during an incremental GC are
    177 * automatically marked black.
    178 *
    179 * This strategy is actually somewhat conservative: if an object becomes
    180 * unreachable between steps 2 and 8, it would be safe to collect it. We won't,
    181 * mainly for simplicity. (Also, note that the snapshot is entirely
    182 * theoretical. We don't actually do anything special in step 2 that we wouldn't
    183 * do in a non-incremental GC.
    184 *
    185 * It's the pre-barrier's job to maintain the snapshot invariant. Consider the
    186 * write "obj->field = value". Let the prior value of obj->field be
    187 * value0. Since it's possible that value0 may have been what obj->field
    188 * contained in step 2, when the snapshot was taken, the barrier marks
    189 * value0. Note that it only does this if we're in the middle of an incremental
    190 * GC. Since this is rare, the cost of the write barrier is usually just an
    191 * extra branch.
    192 *
    193 * In practice, we implement the pre-barrier differently based on the type of
    194 * value0. E.g., see JSObject::preWriteBarrier, which is used if obj->field is
    195 * a JSObject*. It takes value0 as a parameter.
    196 *
    197 * Post-write barrier
    198 * ------------------
    199 *
    200 * For generational GC, we want to be able to quickly collect the nursery in a
    201 * minor collection.  Part of the way this is achieved is to only mark the
    202 * nursery itself; tenured things, which may form the majority of the heap, are
    203 * not traced through or marked.  This leads to the problem of what to do about
    204 * tenured objects that have pointers into the nursery: if such things are not
    205 * marked, they may be discarded while there are still live objects which
    206 * reference them. The solution is to maintain information about these pointers,
    207 * and mark their targets when we start a minor collection.
    208 *
    209 * The pointers can be thought of as edges in an object graph, and the set of
    210 * edges from the tenured generation into the nursery is known as the remembered
    211 * set. Post barriers are used to track this remembered set.
    212 *
    213 * Whenever a slot which could contain such a pointer is written, we check
    214 * whether the pointed-to thing is in the nursery (if storeBuffer() returns a
    215 * buffer).  If so we add the cell into the store buffer, which is the
    216 * collector's representation of the remembered set.  This means that when we
    217 * come to do a minor collection we can examine the contents of the store buffer
    218 * and mark any edge targets that are in the nursery.
    219 *
    220 * Read barriers
    221 * =============
    222 *
    223 * Weak pointer read barrier
    224 * -------------------------
    225 *
    226 * Weak pointers must have a read barrier to prevent the referent from being
    227 * collected if it is read after the start of an incremental GC.
    228 *
    229 * The problem happens when, during an incremental GC, some code reads a weak
    230 * pointer and writes it somewhere on the heap that has been marked black in a
    231 * previous slice. Since the weak pointer will not otherwise be marked and will
    232 * be swept and finalized in the last slice, this will leave the pointer just
    233 * written dangling after the GC. To solve this, we immediately mark black all
    234 * weak pointers that get read between slices so that it is safe to store them
    235 * in an already marked part of the heap, e.g. in Rooted.
    236 *
    237 * Cycle collector read barrier
    238 * ----------------------------
    239 *
    240 * Heap pointers external to the engine may be marked gray. The JS API has an
    241 * invariant that no gray pointers may be passed, and this maintained by a read
    242 * barrier that calls ExposeGCThingToActiveJS on such pointers. This is
    243 * implemented by JS::Heap<T> in js/RootingAPI.h.
    244 *
    245 * Implementation Details
    246 * ======================
    247 *
    248 * One additional note: not all object writes need to be pre-barriered. Writes
    249 * to newly allocated objects do not need a pre-barrier. In these cases, we use
    250 * the "obj->field.init(value)" method instead of "obj->field = value". We use
    251 * the init naming idiom in many places to signify that a field is being
    252 * assigned for the first time.
    253 *
    254 * This file implements the following hierarchy of classes:
    255 *
    256 * BarrieredBase             base class of all barriers
    257 *  |  |
    258 *  | WriteBarriered         base class which provides common write operations
    259 *  |  |  |  |  |
    260 *  |  |  |  | PreBarriered  provides pre-barriers only
    261 *  |  |  |  |
    262 *  |  |  | GCPtr            provides pre- and post-barriers
    263 *  |  |  |
    264 *  |  | HeapPtr             provides pre- and post-barriers; is relocatable
    265 *  |  |                     and deletable for use inside C++ managed memory
    266 *  |  |
    267 *  | HeapSlot               similar to GCPtr, but tailored to slots storage
    268 *  |
    269 * ReadBarriered             base class which provides common read operations
    270 *  |
    271 * WeakHeapPtr               provides read barriers only
    272 *
    273 *
    274 * The implementation of the barrier logic is implemented in the
    275 * Cell/TenuredCell base classes, which are called via:
    276 *
    277 * WriteBarriered<T>::pre
    278 *  -> InternalBarrierMethods<T*>::preBarrier
    279 *      -> Cell::preWriteBarrier
    280 *  -> InternalBarrierMethods<Value>::preBarrier
    281 *  -> InternalBarrierMethods<jsid>::preBarrier
    282 *      -> InternalBarrierMethods<T*>::preBarrier
    283 *          -> Cell::preWriteBarrier
    284 *
    285 * GCPtr<T>::post and HeapPtr<T>::post
    286 *  -> InternalBarrierMethods<T*>::postBarrier
    287 *      -> gc::PostWriteBarrierImpl
    288 *  -> InternalBarrierMethods<Value>::postBarrier
    289 *      -> StoreBuffer::put
    290 *
    291 * Barriers for use outside of the JS engine call into the same barrier
    292 * implementations at InternalBarrierMethods<T>::post via an indirect call to
    293 * Heap(.+)WriteBarriers.
    294 *
    295 * These clases are designed to be used to wrap GC thing pointers or values that
    296 * act like them (i.e. JS::Value and jsid).  It is possible to use them for
    297 * other types by supplying the necessary barrier implementations but this
    298 * is not usually necessary and should be done with caution.
    299 */
    300 
    301 namespace js {
    302 
    303 class NativeObject;
    304 
    305 namespace gc {
    306 
    307 inline void ValueReadBarrier(const Value& v) {
    308  MOZ_ASSERT(v.isGCThing());
    309  ReadBarrierImpl(v.toGCThing());
    310 }
    311 
    312 inline void ValuePreWriteBarrier(const Value& v) {
    313  MOZ_ASSERT(v.isGCThing());
    314  PreWriteBarrierImpl(v.toGCThing());
    315 }
    316 
    317 inline void IdPreWriteBarrier(jsid id) {
    318  MOZ_ASSERT(id.isGCThing());
    319  PreWriteBarrierImpl(&id.toGCThing()->asTenured());
    320 }
    321 
    322 inline void CellPtrPreWriteBarrier(JS::GCCellPtr thing) {
    323  MOZ_ASSERT(thing);
    324  PreWriteBarrierImpl(thing.asCell());
    325 }
    326 
    327 inline void WasmAnyRefPreWriteBarrier(const wasm::AnyRef& v) {
    328  MOZ_ASSERT(v.isGCThing());
    329  PreWriteBarrierImpl(v.toGCThing());
    330 }
    331 
    332 }  // namespace gc
    333 
    334 #ifdef DEBUG
    335 
    336 bool CurrentThreadIsTouchingGrayThings();
    337 
    338 bool IsMarkedBlack(JSObject* obj);
    339 
    340 #endif
    341 
    342 template <typename T, typename Enable = void>
    343 struct InternalBarrierMethods {};
    344 
    345 template <typename T>
    346 struct InternalBarrierMethods<T*> {
    347  static_assert(std::is_base_of_v<gc::Cell, T>, "Expected a GC thing type");
    348 
    349  static bool isMarkable(const T* v) { return v != nullptr; }
    350 
    351  static void preBarrier(T* v) { gc::PreWriteBarrier(v); }
    352 
    353  static void postBarrier(T** vp, T* prev, T* next) {
    354    gc::PostWriteBarrier(vp, prev, next);
    355  }
    356 
    357  static void readBarrier(T* v) { gc::ReadBarrier(v); }
    358 
    359 #ifdef DEBUG
    360  static void assertThingIsNotGray(T* v) { return T::assertThingIsNotGray(v); }
    361 #endif
    362 };
    363 
    364 namespace gc {
    365 MOZ_ALWAYS_INLINE void ValuePostWriteBarrier(Value* vp, const Value& prev,
    366                                             const Value& next) {
    367  MOZ_ASSERT(!CurrentThreadIsOffThreadCompiling());
    368  MOZ_ASSERT(vp);
    369 
    370  // If the target needs an entry, add it.
    371  js::gc::StoreBuffer* sb;
    372  if (next.isGCThing() && (sb = next.toGCThing()->storeBuffer())) {
    373    // If we know that the prev has already inserted an entry, we can
    374    // skip doing the lookup to add the new entry. Note that we cannot
    375    // safely assert the presence of the entry because it may have been
    376    // added via a different store buffer.
    377    if (prev.isGCThing() && prev.toGCThing()->storeBuffer()) {
    378      return;
    379    }
    380    sb->putValue(vp);
    381    return;
    382  }
    383  // Remove the prev entry if the new value does not need it.
    384  if (prev.isGCThing() && (sb = prev.toGCThing()->storeBuffer())) {
    385    sb->unputValue(vp);
    386  }
    387 }
    388 }  // namespace gc
    389 
    390 template <>
    391 struct InternalBarrierMethods<Value> {
    392  static bool isMarkable(const Value& v) { return v.isGCThing(); }
    393 
    394  static void preBarrier(const Value& v) {
    395    if (v.isGCThing()) {
    396      gc::ValuePreWriteBarrier(v);
    397    }
    398  }
    399 
    400  static MOZ_ALWAYS_INLINE void postBarrier(Value* vp, const Value& prev,
    401                                            const Value& next) {
    402    gc::ValuePostWriteBarrier(vp, prev, next);
    403  }
    404 
    405  static void readBarrier(const Value& v) {
    406    if (v.isGCThing()) {
    407      gc::ValueReadBarrier(v);
    408    }
    409  }
    410 
    411 #ifdef DEBUG
    412  static void assertThingIsNotGray(const Value& v) {
    413    JS::AssertValueIsNotGray(v);
    414  }
    415 #endif
    416 };
    417 
    418 template <>
    419 struct InternalBarrierMethods<jsid> {
    420  static bool isMarkable(jsid id) { return id.isGCThing(); }
    421  static void preBarrier(jsid id) {
    422    if (id.isGCThing()) {
    423      gc::IdPreWriteBarrier(id);
    424    }
    425  }
    426  static void postBarrier(jsid* idp, jsid prev, jsid next) {}
    427 #ifdef DEBUG
    428  static void assertThingIsNotGray(jsid id) { JS::AssertIdIsNotGray(id); }
    429 #endif
    430 };
    431 
    432 // Specialization for JS::ArrayBufferOrView subclasses.
    433 template <typename T>
    434 struct InternalBarrierMethods<T, EnableIfABOVType<T>> {
    435  using BM = BarrierMethods<T>;
    436 
    437  static bool isMarkable(const T& thing) { return bool(thing); }
    438  static void preBarrier(const T& thing) {
    439    gc::PreWriteBarrier(thing.asObjectUnbarriered());
    440  }
    441  static void postBarrier(T* tp, const T& prev, const T& next) {
    442    BM::postWriteBarrier(tp, prev, next);
    443  }
    444  static void readBarrier(const T& thing) { BM::readBarrier(thing); }
    445 #ifdef DEBUG
    446  static void assertThingIsNotGray(const T& thing) {
    447    JSObject* obj = thing.asObjectUnbarriered();
    448    if (obj) {
    449      JS::AssertValueIsNotGray(JS::ObjectValue(*obj));
    450    }
    451  }
    452 #endif
    453 };
    454 
    455 template <typename T>
    456 static inline void AssertTargetIsNotGray(const T& v) {
    457 #ifdef DEBUG
    458  if (!CurrentThreadIsTouchingGrayThings()) {
    459    InternalBarrierMethods<T>::assertThingIsNotGray(v);
    460  }
    461 #endif
    462 }
    463 
    464 // Base class of all barrier types.
    465 //
    466 // This is marked non-memmovable since post barriers added by derived classes
    467 // can add pointers to class instances to the store buffer.
    468 template <typename T>
    469 class MOZ_NON_MEMMOVABLE BarrieredBase {
    470 protected:
    471  // BarrieredBase is not directly instantiable.
    472  explicit BarrieredBase(const T& v) : value(v) {}
    473 
    474  // BarrieredBase subclasses cannot be copy constructed by default.
    475  BarrieredBase(const BarrieredBase<T>& other) = default;
    476 
    477  // Storage for all barrier classes. |value| must be a GC thing reference
    478  // type: either a direct pointer to a GC thing or a supported tagged
    479  // pointer that can reference GC things, such as JS::Value or jsid. Nested
    480  // barrier types are NOT supported. See assertTypeConstraints.
    481  T value;
    482 
    483 public:
    484  using ElementType = T;
    485 
    486  // Note: this is public because C++ cannot friend to a specific template
    487  // instantiation. Friending to the generic template leads to a number of
    488  // unintended consequences, including template resolution ambiguity and a
    489  // circular dependency with Tracing.h.
    490  T* unbarrieredAddress() const { return const_cast<T*>(&value); }
    491 };
    492 
    493 // Base class for barriered pointer types that intercept only writes.
    494 template <class T>
    495 class WriteBarriered : public BarrieredBase<T>,
    496                       public WrappedPtrOperations<T, WriteBarriered<T>> {
    497 protected:
    498  using BarrieredBase<T>::value;
    499 
    500  // WriteBarriered is not directly instantiable.
    501  explicit WriteBarriered(const T& v) : BarrieredBase<T>(v) {}
    502 
    503 public:
    504  DECLARE_POINTER_CONSTREF_OPS(T);
    505 
    506  // Use this if the automatic coercion to T isn't working.
    507  const T& get() const { return this->value; }
    508 
    509  // Use this if you want to change the value without invoking barriers.
    510  // Obviously this is dangerous unless you know the barrier is not needed.
    511  void unbarrieredSet(const T& v) { this->value = v; }
    512 
    513  // For users who need to manually barrier the raw types.
    514  static void preWriteBarrier(const T& v) {
    515    InternalBarrierMethods<T>::preBarrier(v);
    516  }
    517 
    518 protected:
    519  void pre() { InternalBarrierMethods<T>::preBarrier(this->value); }
    520  MOZ_ALWAYS_INLINE void post(const T& prev, const T& next) {
    521    InternalBarrierMethods<T>::postBarrier(&this->value, prev, next);
    522  }
    523 };
    524 
    525 #define DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(Wrapper, T) \
    526  DECLARE_POINTER_ASSIGN_OPS(Wrapper, T)                \
    527  Wrapper<T>& operator=(Wrapper<T>&& other) noexcept {  \
    528    setUnchecked(other.release());                      \
    529    return *this;                                       \
    530  }
    531 
    532 /*
    533 * PreBarriered only automatically handles pre-barriers. Post-barriers must be
    534 * manually implemented when using this class. GCPtr and HeapPtr should be used
    535 * in all cases that do not require explicit low-level control of moving
    536 * behavior.
    537 *
    538 * This class is useful for example for HashMap keys where automatically
    539 * updating a moved nursery pointer would break the hash table.
    540 */
    541 template <class T>
    542 class PreBarriered : public WriteBarriered<T> {
    543 public:
    544  PreBarriered() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}
    545  /*
    546   * Allow implicit construction for use in generic contexts.
    547   */
    548  MOZ_IMPLICIT PreBarriered(const T& v) : WriteBarriered<T>(v) {}
    549 
    550  explicit PreBarriered(const PreBarriered<T>& other)
    551      : WriteBarriered<T>(other.value) {}
    552 
    553  PreBarriered(PreBarriered<T>&& other) noexcept
    554      : WriteBarriered<T>(other.release()) {}
    555 
    556  ~PreBarriered() { this->pre(); }
    557 
    558  void init(const T& v) { this->value = v; }
    559 
    560  /* Use to set the pointer to nullptr. */
    561  void clear() { set(JS::SafelyInitialized<T>::create()); }
    562 
    563  DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(PreBarriered, T);
    564 
    565  void set(const T& v) {
    566    AssertTargetIsNotGray(v);
    567    setUnchecked(v);
    568  }
    569 
    570 private:
    571  void setUnchecked(const T& v) {
    572    this->pre();
    573    this->value = v;
    574  }
    575 
    576  T release() {
    577    T tmp = this->value;
    578    this->value = JS::SafelyInitialized<T>::create();
    579    return tmp;
    580  }
    581 };
    582 
    583 }  // namespace js
    584 
    585 namespace JS::detail {
    586 template <typename T>
    587 struct DefineComparisonOps<js::PreBarriered<T>> : std::true_type {
    588  static const T& get(const js::PreBarriered<T>& v) { return v.get(); }
    589 };
    590 }  // namespace JS::detail
    591 
    592 namespace js {
    593 
    594 /*
    595 * A pre- and post-barriered heap pointer, for use inside the JS engine.
    596 *
    597 * It must only be stored in memory that has GC lifetime. GCPtr must not be
    598 * used in contexts where it may be implicitly moved or deleted, e.g. most
    599 * containers.
    600 *
    601 * The post-barriers implemented by this class are faster than those
    602 * implemented by js::HeapPtr<T> or JS::Heap<T> at the cost of not
    603 * automatically handling deletion or movement.
    604 */
    605 template <class T>
    606 class GCPtr : public WriteBarriered<T> {
    607 public:
    608  GCPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}
    609 
    610  explicit GCPtr(const T& v) : WriteBarriered<T>(v) {
    611    this->post(JS::SafelyInitialized<T>::create(), v);
    612  }
    613 
    614  explicit GCPtr(const GCPtr<T>& v) : WriteBarriered<T>(v) {
    615    this->post(JS::SafelyInitialized<T>::create(), v);
    616  }
    617 
    618 #ifdef DEBUG
    619  ~GCPtr() {
    620    // No barriers are necessary as this only happens when the GC is sweeping or
    621    // before this has been initialized (see above comment).
    622    //
    623    // If this assertion fails you may need to make the containing object use a
    624    // HeapPtr instead, as this can be deleted from outside of GC.
    625    MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing() ||
    626               this->value == JS::SafelyInitialized<T>::create());
    627 
    628    Poison(this, JS_FREED_HEAP_PTR_PATTERN, sizeof(*this),
    629           MemCheckKind::MakeNoAccess);
    630  }
    631 #endif
    632 
    633  /*
    634   * Unlike HeapPtr<T>, GCPtr<T> must be managed with GC lifetimes.
    635   * Specifically, the memory used by the pointer itself must be live until
    636   * at least the next minor GC. For that reason, move semantics are invalid
    637   * and are deleted here. Please note that not all containers support move
    638   * semantics, so this does not completely prevent invalid uses.
    639   */
    640  GCPtr(GCPtr<T>&&) = delete;
    641  GCPtr<T>& operator=(GCPtr<T>&&) = delete;
    642 
    643  void init(const T& v) {
    644    AssertTargetIsNotGray(v);
    645    this->value = v;
    646    this->post(JS::SafelyInitialized<T>::create(), v);
    647  }
    648 
    649  DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);
    650 
    651  void set(const T& v) {
    652    AssertTargetIsNotGray(v);
    653    setUnchecked(v);
    654  }
    655 
    656 private:
    657  void setUnchecked(const T& v) {
    658    this->pre();
    659    T tmp = this->value;
    660    this->value = v;
    661    this->post(tmp, this->value);
    662  }
    663 };
    664 
    665 }  // namespace js
    666 
    667 namespace JS::detail {
    668 template <typename T>
    669 struct DefineComparisonOps<js::GCPtr<T>> : std::true_type {
    670  static const T& get(const js::GCPtr<T>& v) { return v.get(); }
    671 };
    672 }  // namespace JS::detail
    673 
    674 namespace js {
    675 
    676 /*
    677 * A pre- and post-barriered heap pointer, for use inside the JS engine. These
    678 * heap pointers can be stored in C++ containers like GCVector and GCHashMap.
    679 *
    680 * The GC sometimes keeps pointers to pointers to GC things --- for example, to
    681 * track references into the nursery. However, C++ containers like GCVector and
    682 * GCHashMap usually reserve the right to relocate their elements any time
    683 * they're modified, invalidating all pointers to the elements. HeapPtr
    684 * has a move constructor which knows how to keep the GC up to date if it is
    685 * moved to a new location.
    686 *
    687 * However, because of this additional communication with the GC, HeapPtr
    688 * is somewhat slower, so it should only be used in contexts where this ability
    689 * is necessary.
    690 *
    691 * Obviously, JSObjects, JSStrings, and the like get tenured and compacted, so
    692 * whatever pointers they contain get relocated, in the sense used here.
    693 * However, since the GC itself is moving those values, it takes care of its
    694 * internal pointers to those pointers itself. HeapPtr is only necessary
    695 * when the relocation would otherwise occur without the GC's knowledge.
    696 */
template <class T>
class HeapPtr : public WriteBarriered<T> {
 public:
  // Default-construct to the safely-initialized value; no barrier is needed
  // for that state.
  HeapPtr() : WriteBarriered<T>(JS::SafelyInitialized<T>::create()) {}

  // Implicitly adding barriers is a reasonable default.
  MOZ_IMPLICIT HeapPtr(const T& v) : WriteBarriered<T>(v) {
    // Post-barrier the edge change from the initial value to |v|.
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  MOZ_IMPLICIT HeapPtr(const HeapPtr<T>& other) : WriteBarriered<T>(other) {
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  // Move takes over the source's value; release() resets the source (with
  // its own post-barrier), and the new edge is post-barriered here.
  HeapPtr(HeapPtr<T>&& other) noexcept : WriteBarriered<T>(other.release()) {
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  // Destruction removes the edge: fire the pre-barrier for the old value and
  // post-barrier the transition back to the safely-initialized value.
  ~HeapPtr() {
    this->pre();
    this->post(this->value, JS::SafelyInitialized<T>::create());
  }

  // One-time initialization: no pre-barrier, since the previous value must
  // still be the safely-initialized one (asserted below).
  void init(const T& v) {
    MOZ_ASSERT(this->value == JS::SafelyInitialized<T>::create());
    AssertTargetIsNotGray(v);
    this->value = v;
    this->post(JS::SafelyInitialized<T>::create(), this->value);
  }

  DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(HeapPtr, T);

  // Fully-barriered store (pre + post), with a gray-target assertion.
  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

  /* Make this friend so it can access pre() and post(). */
  template <class T1, class T2>
  friend inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
                                      HeapPtr<T2*>& v2, T2* val2);

 protected:
  // Store without the gray-target check: pre-barrier the old value, then do
  // the post-barriered assignment.
  void setUnchecked(const T& v) {
    this->pre();
    postBarrieredSet(v);
  }

  // Assignment with only the post-barrier; the old value is read before it is
  // overwritten so the barrier sees the correct previous edge.
  void postBarrieredSet(const T& v) {
    T tmp = this->value;
    this->value = v;
    this->post(tmp, this->value);
  }

  // Hand the current value to the caller and reset to the safely-initialized
  // value (post-barriered). Used by the move constructor. No pre-barrier.
  T release() {
    T tmp = this->value;
    postBarrieredSet(JS::SafelyInitialized<T>::create());
    return tmp;
  }
};
    757 
    758 /*
    759 * A pre-barriered heap pointer, for use inside the JS engine.
    760 *
    761 * Similar to GCPtr, but used for a pointer to a malloc-allocated structure
    762 * containing GC thing pointers.
    763 *
    764 * It must only be stored in memory that has GC lifetime. It must not be used in
    765 * contexts where it may be implicitly moved or deleted, e.g. most containers.
    766 *
    767 * A post-barrier is unnecessary since malloc-allocated structures cannot be in
    768 * the nursery.
    769 */
    770 template <class T>
    771 class GCStructPtr : public BarrieredBase<T> {
    772 public:
    773  // This is sometimes used to hold tagged pointers.
    774  static constexpr uintptr_t MaxTaggedPointer = 0x5;
    775 
    776  GCStructPtr() : BarrieredBase<T>(JS::SafelyInitialized<T>::create()) {}
    777 
    778  // Implicitly adding barriers is a reasonable default.
    779  MOZ_IMPLICIT GCStructPtr(const T& v) : BarrieredBase<T>(v) {}
    780 
    781  GCStructPtr(const GCStructPtr<T>& other) : BarrieredBase<T>(other) {}
    782 
    783  GCStructPtr(GCStructPtr<T>&& other) noexcept
    784      : BarrieredBase<T>(other.release()) {}
    785 
    786  ~GCStructPtr() {
    787    // No barriers are necessary as this only happens when the GC is sweeping.
    788    MOZ_ASSERT_IF(isTraceable(),
    789                  CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing());
    790  }
    791 
    792  void init(const T& v) {
    793    MOZ_ASSERT(this->get() == JS::SafelyInitialized<T>());
    794    AssertTargetIsNotGray(v);
    795    this->value = v;
    796  }
    797 
    798  void set(JS::Zone* zone, const T& v) {
    799    pre(zone);
    800    this->value = v;
    801  }
    802 
    803  T get() const { return this->value; }
    804  operator T() const { return get(); }
    805  T operator->() const { return get(); }
    806 
    807 protected:
    808  bool isTraceable() const { return uintptr_t(get()) > MaxTaggedPointer; }
    809 
    810  void pre(JS::Zone* zone) {
    811    if (isTraceable()) {
    812      PreWriteBarrier(zone, get());
    813    }
    814  }
    815 };
    816 
    817 }  // namespace js
    818 
namespace JS::detail {
// Opt HeapPtr<T> into the generic JS comparison operators by exposing the
// wrapped value.
template <typename T>
struct DefineComparisonOps<js::HeapPtr<T>> : std::true_type {
  static const T& get(const js::HeapPtr<T>& v) { return v.get(); }
};
}  // namespace JS::detail
    825 
    826 namespace js {
    827 
    828 // Base class for barriered pointer types that intercept reads and writes.
template <typename T>
class ReadBarriered : public BarrieredBase<T> {
 protected:
  // ReadBarriered is not directly instantiable.
  explicit ReadBarriered(const T& v) : BarrieredBase<T>(v) {}

  // Fire the read barrier for the current value (used by subclasses before
  // exposing a weakly-held value to callers; see WeakHeapPtr::get).
  void read() const { InternalBarrierMethods<T>::readBarrier(this->value); }

  // Fire the post-write barrier for the edge change from |prev| to |next|.
  void post(const T& prev, const T& next) {
    InternalBarrierMethods<T>::postBarrier(&this->value, prev, next);
  }
};
    840 
    841 // Incremental GC requires that weak pointers have read barriers. See the block
    842 // comment at the top of Barrier.h for a complete discussion of why.
    843 //
    844 // Note that this class also has post-barriers, so is safe to use with nursery
    845 // pointers. However, when used as a hashtable key, care must still be taken to
    846 // insert manual post-barriers on the table for rekeying if the key is based in
    847 // any way on the address of the object.
template <typename T>
class WeakHeapPtr : public ReadBarriered<T>,
                    public WrappedPtrOperations<T, WeakHeapPtr<T>> {
 protected:
  using ReadBarriered<T>::value;

 public:
  WeakHeapPtr() : ReadBarriered<T>(JS::SafelyInitialized<T>::create()) {}

  // It is okay to add barriers implicitly.
  MOZ_IMPLICIT WeakHeapPtr(const T& v) : ReadBarriered<T>(v) {
    this->post(JS::SafelyInitialized<T>::create(), v);
  }

  // The copy constructor creates a new weak edge but the wrapped pointer does
  // not escape, so no read barrier is necessary.
  explicit WeakHeapPtr(const WeakHeapPtr& other) : ReadBarriered<T>(other) {
    this->post(JS::SafelyInitialized<T>::create(), value);
  }

  // Move retains the lifetime status of the source edge, so does not fire
  // the read barrier of the defunct edge.
  WeakHeapPtr(WeakHeapPtr&& other) noexcept
      : ReadBarriered<T>(other.release()) {
    this->post(JS::SafelyInitialized<T>::create(), value);
  }

  // Weak edges need no pre-barrier on removal; only the post-barrier is
  // fired so the store buffer stops tracking this location.
  ~WeakHeapPtr() {
    this->post(this->value, JS::SafelyInitialized<T>::create());
  }

  WeakHeapPtr& operator=(const WeakHeapPtr& v) {
    AssertTargetIsNotGray(v.value);
    // Read the old value before overwriting so post() sees the correct
    // previous edge.
    T prior = this->value;
    this->value = v.value;
    this->post(prior, v.value);
    return *this;
  }

  // Barriered read: fires the read barrier, but only for markable values
  // (null/empty values need no barrier).
  const T& get() const {
    if (InternalBarrierMethods<T>::isMarkable(this->value)) {
      this->read();
    }
    return this->value;
  }

  // Read without the barrier. Safe only where the caller knows the value
  // does not escape to the mutator (e.g. hashing, tracing).
  const T& unbarrieredGet() const { return this->value; }

  explicit operator bool() const { return bool(this->value); }

  operator const T&() const { return get(); }

  const T& operator->() const { return get(); }

  // Post-barriered store with a gray-target assertion.
  void set(const T& v) {
    AssertTargetIsNotGray(v);
    setUnchecked(v);
  }

  // Store with no barriers at all; the caller takes responsibility for any
  // required GC bookkeeping (e.g. hashtable rekeying).
  void unbarrieredSet(const T& v) {
    AssertTargetIsNotGray(v);
    this->value = v;
  }

 private:
  void setUnchecked(const T& v) {
    T tmp = this->value;
    this->value = v;
    this->post(tmp, v);
  }

  // Hand the current value to the caller and reset (via the barriered
  // setter) to the safely-initialized value. Used by the move constructor.
  T release() {
    T tmp = value;
    set(JS::SafelyInitialized<T>::create());
    return tmp;
  }
};
    925 
    926 // A wrapper for a bare pointer, with no barriers.
    927 //
    928 // This should only be necessary in a limited number of cases. Please don't add
    929 // more uses of this if at all possible.
    930 template <typename T>
    931 class UnsafeBarePtr : public BarrieredBase<T> {
    932 public:
    933  UnsafeBarePtr() : BarrieredBase<T>(JS::SafelyInitialized<T>::create()) {}
    934  MOZ_IMPLICIT UnsafeBarePtr(T v) : BarrieredBase<T>(v) {}
    935  const T& get() const { return this->value; }
    936  void set(T newValue) { this->value = newValue; }
    937  DECLARE_POINTER_CONSTREF_OPS(T);
    938 };
    939 
    940 }  // namespace js
    941 
namespace JS::detail {
// Opt WeakHeapPtr<T> into the generic JS comparison operators. Comparison
// uses the unbarriered accessor, so comparing does not fire the read barrier.
template <typename T>
struct DefineComparisonOps<js::WeakHeapPtr<T>> : std::true_type {
  static const T& get(const js::WeakHeapPtr<T>& v) {
    return v.unbarrieredGet();
  }
};
}  // namespace JS::detail
    950 
    951 namespace js {
    952 
    953 // A pre- and post-barriered Value that is specialized to be aware that it
    954 // resides in a slots or elements vector. This allows it to be relocated in
    955 // memory, but with substantially less overhead than a HeapPtr.
class HeapSlot : public WriteBarriered<Value> {
 public:
  // Whether this slot lives in the object's fixed/dynamic slots or in its
  // elements vector; the store buffer records the distinction.
  enum Kind { Slot = 0, Element = 1 };

  // Initialize a freshly-created slot: no pre-barrier (there is no previous
  // value), but a post-barrier in case |v| needs store-buffer tracking.
  void init(NativeObject* owner, Kind kind, uint32_t slot, const Value& v) {
    value = v;
    post(owner, kind, slot, v);
  }

  // Initialize to undefined with no barriers at all.
  void initAsUndefined() { value.setUndefined(); }

  // Tear down the slot: pre-barrier only, since the slot itself goes away.
  void destroy() { pre(); }

  // Overwrite with undefined: pre-barrier the old value; undefined needs no
  // post-barrier.
  void setUndefinedUnchecked() {
    pre();
    value.setUndefined();
  }

#ifdef DEBUG
  bool preconditionForSet(NativeObject* owner, Kind kind, uint32_t slot) const;
  void assertPreconditionForPostWriteBarrier(NativeObject* obj, Kind kind,
                                             uint32_t slot,
                                             const Value& target) const;
#endif

  // Fully-barriered store: pre-barrier the old value, assign, then
  // post-barrier the new one.
  MOZ_ALWAYS_INLINE void set(NativeObject* owner, Kind kind, uint32_t slot,
                             const Value& v) {
    MOZ_ASSERT(preconditionForSet(owner, kind, slot));
    pre();
    value = v;
    post(owner, kind, slot, v);
  }

 private:
  // Post-write barrier: if the just-stored value is a GC thing whose cell has
  // a store buffer (NOTE(review): presumably only nursery-allocated cells do
  // -- confirm), record the owner/slot edge in it.
  void post(NativeObject* owner, Kind kind, uint32_t slot,
            const Value& target) {
#ifdef DEBUG
    assertPreconditionForPostWriteBarrier(owner, kind, slot, target);
#endif
    if (this->value.isGCThing()) {
      gc::Cell* cell = this->value.toGCThing();
      if (cell->storeBuffer()) {
        cell->storeBuffer()->putSlot(owner, kind, slot, 1);
      }
    }
  }
};
   1003 
   1004 }  // namespace js
   1005 
namespace JS::detail {
// Opt HeapSlot into the generic JS comparison operators by exposing the
// wrapped Value.
template <>
struct DefineComparisonOps<js::HeapSlot> : std::true_type {
  static const Value& get(const js::HeapSlot& v) { return v.get(); }
};
}  // namespace JS::detail
   1012 
   1013 namespace js {
   1014 
   1015 class HeapSlotArray {
   1016  HeapSlot* array;
   1017 
   1018 public:
   1019  explicit HeapSlotArray(HeapSlot* array) : array(array) {}
   1020 
   1021  HeapSlot* begin() const { return array; }
   1022 
   1023  operator const Value*() const {
   1024    static_assert(sizeof(GCPtr<Value>) == sizeof(Value));
   1025    static_assert(sizeof(HeapSlot) == sizeof(Value));
   1026    return reinterpret_cast<const Value*>(array);
   1027  }
   1028  operator HeapSlot*() const { return begin(); }
   1029 
   1030  HeapSlotArray operator+(int offset) const {
   1031    return HeapSlotArray(array + offset);
   1032  }
   1033  HeapSlotArray operator+(uint32_t offset) const {
   1034    return HeapSlotArray(array + offset);
   1035  }
   1036 };
   1037 
   1038 /*
   1039 * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
   1040 * barriers with only one branch to check if we're in an incremental GC.
   1041 */
template <class T1, class T2>
static inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
                                    HeapPtr<T2*>& v2, T2* val2) {
  AssertTargetIsNotGray(val1);
  AssertTargetIsNotGray(val2);
  // Single branch: one needPreWriteBarrier() check (consulted on T1 only)
  // gates the pre-barriers for both edges.
  if (T1::needPreWriteBarrier(zone)) {
    v1.pre();
    v2.pre();
  }
  // Both stores go through postBarrieredSet(), which assigns the new value
  // and fires the post-barrier.
  v1.postBarrieredSet(val1);
  v2.postBarrieredSet(val2);
}
   1054 
   1055 /*
   1056 * ImmutableTenuredPtr is designed for one very narrow case: replacing
   1057 * immutable raw pointers to GC-managed things, implicitly converting to a
   1058 * handle type for ease of use. Pointers encapsulated by this type must:
   1059 *
   1060 *   be immutable (no incremental write barriers),
   1061 *   never point into the nursery (no generational write barriers), and
   1062 *   be traced via MarkRuntime (we use fromMarkedLocation).
   1063 *
   1064 * In short: you *really* need to know what you're doing before you use this
   1065 * class!
   1066 */
   1067 template <typename T>
   1068 class MOZ_HEAP_CLASS ImmutableTenuredPtr {
   1069  T value;
   1070 
   1071 public:
   1072  operator T() const { return value; }
   1073  T operator->() const { return value; }
   1074 
   1075  // `ImmutableTenuredPtr<T>` is implicitly convertible to `Handle<T>`.
   1076  //
   1077  // In case you need to convert to `Handle<U>` where `U` is base class of `T`,
   1078  // convert this to `Handle<T>` by `toHandle()` and then use implicit
   1079  // conversion from `Handle<T>` to `Handle<U>`.
   1080  operator Handle<T>() const { return toHandle(); }
   1081  Handle<T> toHandle() const { return Handle<T>::fromMarkedLocation(&value); }
   1082 
   1083  void init(T ptr) {
   1084    MOZ_ASSERT(ptr->isTenured());
   1085    AssertTargetIsNotGray(ptr);
   1086    value = ptr;
   1087  }
   1088 
   1089  T get() const { return value; }
   1090  const T* address() { return &value; }
   1091 };
   1092 
// Template to remove any barrier wrapper and get the underlying type.
//
// Primary template: T is not a recognized barrier wrapper, so it maps to
// itself. The specializations below unwrap each wrapper type.
template <typename T>
struct RemoveBarrier {
  using Type = T;
};
template <typename T>
struct RemoveBarrier<HeapPtr<T>> {
  using Type = T;
};
template <typename T>
struct RemoveBarrier<GCPtr<T>> {
  using Type = T;
};
template <typename T>
struct RemoveBarrier<PreBarriered<T>> {
  using Type = T;
};
template <typename T>
struct RemoveBarrier<WeakHeapPtr<T>> {
  using Type = T;
};

// True iff T is one of the barrier wrappers above, i.e. iff RemoveBarrier
// changes the type.
template <typename T>
using IsBarriered =
    std::negation<std::is_same<T, typename RemoveBarrier<T>::Type>>;
   1118 
#if MOZ_IS_GCC
// NOTE(review): explicit instantiations, presumably so the JS_PUBLIC_API
// visibility attribute takes effect under GCC -- confirm against the build
// configuration before relying on this.
template struct JS_PUBLIC_API StableCellHasher<JSObject*>;
template struct JS_PUBLIC_API StableCellHasher<JSScript*>;
#endif
   1123 
// StableCellHasher for PreBarriered<T> keys: every operation forwards to the
// underlying StableCellHasher<T> on the bare value.
template <typename T>
struct StableCellHasher<PreBarriered<T>> {
  using Key = PreBarriered<T>;
  using Lookup = T;

  static bool maybeGetHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::maybeGetHash(l, hashOut);
  }
  static bool ensureHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::ensureHash(l, hashOut);
  }
  static HashNumber hash(const Lookup& l) {
    return StableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return StableCellHasher<T>::match(k, l);
  }
};
   1142 
// StableCellHasher for HeapPtr<T> keys: every operation forwards to the
// underlying StableCellHasher<T>.
template <typename T>
struct StableCellHasher<HeapPtr<T>> {
  using Key = HeapPtr<T>;
  using Lookup = T;

  static bool maybeGetHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::maybeGetHash(l, hashOut);
  }
  static bool ensureHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::ensureHash(l, hashOut);
  }
  static HashNumber hash(const Lookup& l) {
    return StableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return StableCellHasher<T>::match(k, l);
  }
};
   1161 
// StableCellHasher for WeakHeapPtr<T> keys. match() unwraps the key with
// unbarrieredGet() so that a table lookup does not fire the read barrier.
template <typename T>
struct StableCellHasher<WeakHeapPtr<T>> {
  using Key = WeakHeapPtr<T>;
  using Lookup = T;

  static bool maybeGetHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::maybeGetHash(l, hashOut);
  }
  static bool ensureHash(const Lookup& l, HashNumber* hashOut) {
    return StableCellHasher<T>::ensureHash(l, hashOut);
  }
  static HashNumber hash(const Lookup& l) {
    return StableCellHasher<T>::hash(l);
  }
  static bool match(const Key& k, const Lookup& l) {
    return StableCellHasher<T>::match(k.unbarrieredGet(), l);
  }
};
   1180 
   1181 /* Useful for hashtables with a HeapPtr as key. */
   1182 template <class T>
   1183 struct HeapPtrHasher {
   1184  using Key = HeapPtr<T>;
   1185  using Lookup = T;
   1186 
   1187  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   1188  static bool match(const Key& k, Lookup l) { return k.get() == l; }
   1189  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
   1190 };
   1191 
   1192 template <class T>
   1193 struct PreBarrieredHasher {
   1194  using Key = PreBarriered<T>;
   1195  using Lookup = T;
   1196 
   1197  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   1198  static bool match(const Key& k, Lookup l) { return k.get() == l; }
   1199  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
   1200 };
   1201 
   1202 /* Useful for hashtables with a WeakHeapPtr as key. */
   1203 template <class T>
   1204 struct WeakHeapPtrHasher {
   1205  using Key = WeakHeapPtr<T>;
   1206  using Lookup = T;
   1207 
   1208  static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   1209  static bool match(const Key& k, Lookup l) { return k.unbarrieredGet() == l; }
   1210  static void rekey(Key& k, const Key& newKey) {
   1211    k.set(newKey.unbarrieredGet());
   1212  }
   1213 };
   1214 
   1215 template <class T>
   1216 struct UnsafeBarePtrHasher {
   1217  using Key = UnsafeBarePtr<T>;
   1218  using Lookup = T;
   1219 
   1220  static HashNumber hash(const Lookup& l) { return DefaultHasher<T>::hash(l); }
   1221  static bool match(const Key& k, Lookup l) { return k.get() == l; }
   1222  static void rekey(Key& k, const Key& newKey) { k.set(newKey.get()); }
   1223 };
   1224 
// Set up descriptive type aliases.
template <class T>
using PreBarrierWrapper = PreBarriered<T>;  // pre-barrier only
template <class T>
using PreAndPostBarrierWrapper = GCPtr<T>;  // pre- and post-barriers
   1230 
   1231 }  // namespace js
   1232 
namespace mozilla {

// Make the barrier wrappers usable as hash table keys by forwarding
// mozilla::DefaultHasher to the corresponding js::*Hasher policies.

template <class T>
struct DefaultHasher<js::HeapPtr<T>> : js::HeapPtrHasher<T> {};

template <class T>
struct DefaultHasher<js::GCPtr<T>> {
  // Not implemented. GCPtr can't be used as a hash table key because it has a
  // post barrier but doesn't support relocation.
};

template <class T>
struct DefaultHasher<js::PreBarriered<T>> : js::PreBarrieredHasher<T> {};

template <class T>
struct DefaultHasher<js::WeakHeapPtr<T>> : js::WeakHeapPtrHasher<T> {};

template <class T>
struct DefaultHasher<js::UnsafeBarePtr<T>> : js::UnsafeBarePtrHasher<T> {};

}  // namespace mozilla
   1254 
   1255 #endif /* gc_Barrier_h */