// GCHashTable.h
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef GCHashTable_h
#define GCHashTable_h

#include "mozilla/Maybe.h"

#include "js/GCPolicyAPI.h"
#include "js/HashTable.h"
#include "js/RootingAPI.h"
#include "js/SweepingAPI.h"
#include "js/TypeDecls.h"

class JSTracer;

namespace JS {

// Define a reasonable default GC policy for GC-aware Maps.
template <typename Key, typename Value>
struct DefaultMapEntryGCPolicy {
  // Trace the entry's edges weakly. Returns false when the entry should be
  // dropped because the key or value is dying. Note that |&&| short-circuits,
  // so the value is not traced when the key is already dead.
  static bool traceWeak(JSTracer* trc, Key* key, Value* value) {
    return GCPolicy<Key>::traceWeak(trc, key) &&
           GCPolicy<Value>::traceWeak(trc, value);
  }
  static bool needsSweep(JSTracer* trc, const Key* key, const Value* value) {
    // This is like a const version of the |traceWeak| method. It has the sense
    // of the return value reversed and does not mutate keys/values. Used during
    // incremental sweeping by the WeakCache specializations for maps and sets.
    return GCPolicy<Key>::needsSweep(trc, key) ||
           GCPolicy<Value>::needsSweep(trc, value);
  }
};

// A GCHashMap is a GC-aware HashMap, meaning that it has additional trace
// methods that know how to visit all keys and values in the table. HashMaps
// that contain GC pointers will generally want to use this GCHashMap
// specialization instead of HashMap, because this conveniently supports tracing
// keys and values, and cleaning up weak entries.
//
// GCHashMap::trace applies GCPolicy<T>::trace to each entry's key and value.
// Most types of GC pointers already have appropriate specializations of
// GCPolicy, so they should just work as keys and values. Any struct type with a
// default constructor and trace function should work as well. If you need to
// define your own GCPolicy specialization, generic helpers can be found in
// js/public/TracingAPI.h.
//
// The MapEntryGCPolicy template parameter controls how the table drops entries
// when edges are weakly held. GCHashMap::traceWeak applies the
// MapEntryGCPolicy's traceWeak method to each table entry; if it returns false,
// the entry is dropped. The default MapEntryGCPolicy drops the entry if either
// the key or value is about to be finalized, according to its
// GCPolicy<T>::traceWeak method. (This default is almost always fine: it's hard
// to imagine keeping such an entry around anyway.)
//
// Note that this HashMap only knows *how* to trace, but it does not itself
// cause tracing to be invoked. For tracing, it must be used as
// Rooted<GCHashMap> or PersistentRooted<GCHashMap>, or barriered and traced
// manually.
template <typename Key, typename Value,
          typename HashPolicy = js::DefaultHasher<Key>,
          typename AllocPolicy = js::TempAllocPolicy,
          typename MapEntryGCPolicy = DefaultMapEntryGCPolicy<Key, Value>>
class GCHashMap : public js::HashMap<Key, Value, HashPolicy, AllocPolicy> {
  using Base = js::HashMap<Key, Value, HashPolicy, AllocPolicy>;

 public:
  // Expose the entry policy so wrappers (e.g. WeakCache) can reuse it.
  using EntryGCPolicy = MapEntryGCPolicy;

  explicit GCHashMap() : Base(AllocPolicy()) {}
  explicit GCHashMap(AllocPolicy a) : Base(std::move(a)) {}
  explicit GCHashMap(size_t length) : Base(length) {}
  GCHashMap(AllocPolicy a, size_t length) : Base(std::move(a), length) {}

  // Strongly trace every key and value in the table.
  void trace(JSTracer* trc) {
    for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
      GCPolicy<Value>::trace(trc, &e.front().value(), "hashmap value");
      GCPolicy<Key>::trace(trc, &e.front().mutableKey(), "hashmap key");
    }
  }

  // Trace the table weakly, dropping any entry whose key or value is dying
  // (per MapEntryGCPolicy). Returns false if the table is left empty.
  bool traceWeak(JSTracer* trc) {
    typename Base::Enum e(*this);
    traceWeakEntries(trc, e);
    // NOTE(review): compact() is called while |e| is still alive; confirm the
    // underlying HashTable permits compaction during active enumeration.
    Base::compact();
    return !this->empty();
  }

  // Sweep the entries reachable through an existing Enum. Split out from
  // traceWeak so WeakCache can drive the enumeration itself.
  void traceWeakEntries(JSTracer* trc, typename Base::Enum& e) {
    for (; !e.empty(); e.popFront()) {
      if (!MapEntryGCPolicy::traceWeak(trc, &e.front().mutableKey(),
                                       &e.front().value())) {
        e.removeFront();
      }
    }
  }

  // Const query: does any entry need to be dropped? Does not mutate the table.
  bool needsSweep(JSTracer* trc) const {
    for (auto r = this->all(); !r.empty(); r.popFront()) {
      if (MapEntryGCPolicy::needsSweep(trc, &r.front().key(),
                                       &r.front().value())) {
        return true;
      }
    }
    return false;
  }

  // GCHashMap is movable
  GCHashMap(GCHashMap&& rhs) : Base(std::move(rhs)) {}
  void operator=(GCHashMap&& rhs) {
    MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
    Base::operator=(std::move(rhs));
  }

 private:
  // GCHashMap is not copyable or assignable
  GCHashMap(const GCHashMap& hm) = delete;
  GCHashMap& operator=(const GCHashMap& hm) = delete;
} MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS;

}  // namespace JS

namespace js {

// HashMap that supports rekeying.
//
// If your keys are pointers to something like JSObject that can be tenured or
// compacted, prefer to use GCHashMap with StableCellHasher, which takes
// advantage of the Zone's stable id table to make rekeying unnecessary.
133 template <typename Key, typename Value, 134 typename HashPolicy = DefaultHasher<Key>, 135 typename AllocPolicy = TempAllocPolicy, 136 typename MapEntryGCPolicy = JS::DefaultMapEntryGCPolicy<Key, Value>> 137 class GCRekeyableHashMap : public JS::GCHashMap<Key, Value, HashPolicy, 138 AllocPolicy, MapEntryGCPolicy> { 139 using Base = JS::GCHashMap<Key, Value, HashPolicy, AllocPolicy>; 140 141 public: 142 explicit GCRekeyableHashMap(AllocPolicy a = AllocPolicy()) 143 : Base(std::move(a)) {} 144 explicit GCRekeyableHashMap(size_t length) : Base(length) {} 145 GCRekeyableHashMap(AllocPolicy a, size_t length) 146 : Base(std::move(a), length) {} 147 148 bool traceWeak(JSTracer* trc) { 149 for (typename Base::Enum e(*this); !e.empty(); e.popFront()) { 150 Key key(e.front().key()); 151 if (!MapEntryGCPolicy::traceWeak(trc, &key, &e.front().value())) { 152 e.removeFront(); 153 } else if (!HashPolicy::match(key, e.front().key())) { 154 e.rekeyFront(key); 155 } 156 } 157 return !this->empty(); 158 } 159 160 // GCRekeyableHashMap is movable 161 GCRekeyableHashMap(GCRekeyableHashMap&& rhs) : Base(std::move(rhs)) {} 162 void operator=(GCRekeyableHashMap&& rhs) { 163 MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited"); 164 Base::operator=(std::move(rhs)); 165 } 166 } MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS; 167 168 template <typename Wrapper, typename... 
Args> 169 class WrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper> { 170 using Map = JS::GCHashMap<Args...>; 171 using Lookup = typename Map::Lookup; 172 173 const Map& map() const { return static_cast<const Wrapper*>(this)->get(); } 174 175 public: 176 using AddPtr = typename Map::AddPtr; 177 using Ptr = typename Map::Ptr; 178 using Range = typename Map::Range; 179 180 Ptr lookup(const Lookup& l) const { return map().lookup(l); } 181 Range all() const { return map().all(); } 182 bool empty() const { return map().empty(); } 183 uint32_t count() const { return map().count(); } 184 size_t capacity() const { return map().capacity(); } 185 bool has(const Lookup& l) const { return map().lookup(l).found(); } 186 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const { 187 return map().sizeOfExcludingThis(mallocSizeOf); 188 } 189 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const { 190 return mallocSizeOf(this) + map().sizeOfExcludingThis(mallocSizeOf); 191 } 192 }; 193 194 template <typename Wrapper, typename... 
Args> 195 class MutableWrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper> 196 : public WrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper> { 197 using Map = JS::GCHashMap<Args...>; 198 using Lookup = typename Map::Lookup; 199 200 Map& map() { return static_cast<Wrapper*>(this)->get(); } 201 202 public: 203 using AddPtr = typename Map::AddPtr; 204 struct Enum : public Map::Enum { 205 explicit Enum(Wrapper& o) : Map::Enum(o.map()) {} 206 }; 207 using Ptr = typename Map::Ptr; 208 using Range = typename Map::Range; 209 210 void clear() { map().clear(); } 211 void clearAndCompact() { map().clearAndCompact(); } 212 void remove(Ptr p) { map().remove(p); } 213 AddPtr lookupForAdd(const Lookup& l) { return map().lookupForAdd(l); } 214 215 template <typename KeyInput, typename ValueInput> 216 bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) { 217 return map().add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v)); 218 } 219 220 template <typename KeyInput> 221 bool add(AddPtr& p, KeyInput&& k) { 222 return map().add(p, std::forward<KeyInput>(k), Map::Value()); 223 } 224 225 template <typename KeyInput, typename ValueInput> 226 bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) { 227 return map().relookupOrAdd(p, k, std::forward<KeyInput>(k), 228 std::forward<ValueInput>(v)); 229 } 230 231 template <typename KeyInput, typename ValueInput> 232 bool put(KeyInput&& k, ValueInput&& v) { 233 return map().put(std::forward<KeyInput>(k), std::forward<ValueInput>(v)); 234 } 235 236 template <typename KeyInput, typename ValueInput> 237 bool putNew(KeyInput&& k, ValueInput&& v) { 238 return map().putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v)); 239 } 240 }; 241 242 } // namespace js 243 244 namespace JS { 245 246 // A GCHashSet is a HashSet with an additional trace method that knows 247 // be traced to be kept alive will generally want to use this GCHashSet 248 // specialization in lieu of HashSet. 
249 // 250 // Most types of GC pointers can be traced with no extra infrastructure. For 251 // structs and non-gc-pointer members, ensure that there is a specialization of 252 // GCPolicy<T> with an appropriate trace method available to handle the custom 253 // type. Generic helpers can be found in js/public/TracingAPI.h. 254 // 255 // Note that although this HashSet's trace will deal correctly with moved 256 // elements, it does not itself know when to barrier or trace elements. To 257 // function properly it must either be used with Rooted or barriered and traced 258 // manually. 259 template <typename T, typename HashPolicy = js::DefaultHasher<T>, 260 typename AllocPolicy = js::TempAllocPolicy> 261 class GCHashSet : public js::HashSet<T, HashPolicy, AllocPolicy> { 262 using Base = js::HashSet<T, HashPolicy, AllocPolicy>; 263 264 public: 265 explicit GCHashSet(AllocPolicy a = AllocPolicy()) : Base(std::move(a)) {} 266 explicit GCHashSet(size_t length) : Base(length) {} 267 GCHashSet(AllocPolicy a, size_t length) : Base(std::move(a), length) {} 268 269 void trace(JSTracer* trc) { 270 for (typename Base::Enum e(*this); !e.empty(); e.popFront()) { 271 GCPolicy<T>::trace(trc, &e.mutableFront(), "hashset element"); 272 } 273 } 274 275 bool traceWeak(JSTracer* trc) { 276 typename Base::Enum e(*this); 277 traceWeakEntries(trc, e); 278 Base::compact(); 279 return !this->empty(); 280 } 281 282 void traceWeakEntries(JSTracer* trc, typename Base::Enum& e) { 283 for (; !e.empty(); e.popFront()) { 284 if (!GCPolicy<T>::traceWeak(trc, &e.mutableFront())) { 285 e.removeFront(); 286 } 287 } 288 } 289 290 bool needsSweep(JSTracer* trc) const { 291 for (auto r = this->all(); !r.empty(); r.popFront()) { 292 if (GCPolicy<T>::needsSweep(trc, &r.front())) { 293 return true; 294 } 295 } 296 return false; 297 } 298 299 // GCHashSet is movable 300 GCHashSet(GCHashSet&& rhs) : Base(std::move(rhs)) {} 301 void operator=(GCHashSet&& rhs) { 302 MOZ_ASSERT(this != &rhs, "self-move assignment 
is prohibited"); 303 Base::operator=(std::move(rhs)); 304 } 305 306 private: 307 // GCHashSet is not copyable or assignable 308 GCHashSet(const GCHashSet& hs) = delete; 309 GCHashSet& operator=(const GCHashSet& hs) = delete; 310 } MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS; 311 312 } // namespace JS 313 314 namespace js { 315 316 template <typename Wrapper, typename... Args> 317 class WrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper> { 318 using Set = JS::GCHashSet<Args...>; 319 320 const Set& set() const { return static_cast<const Wrapper*>(this)->get(); } 321 322 public: 323 using Lookup = typename Set::Lookup; 324 using AddPtr = typename Set::AddPtr; 325 using Entry = typename Set::Entry; 326 using Ptr = typename Set::Ptr; 327 using Range = typename Set::Range; 328 329 Ptr lookup(const Lookup& l) const { return set().lookup(l); } 330 Range all() const { return set().all(); } 331 bool empty() const { return set().empty(); } 332 uint32_t count() const { return set().count(); } 333 size_t capacity() const { return set().capacity(); } 334 bool has(const Lookup& l) const { return set().lookup(l).found(); } 335 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const { 336 return set().sizeOfExcludingThis(mallocSizeOf); 337 } 338 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const { 339 return mallocSizeOf(this) + set().sizeOfExcludingThis(mallocSizeOf); 340 } 341 }; 342 343 template <typename Wrapper, typename... 
Args> 344 class MutableWrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper> 345 : public WrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper> { 346 using Set = JS::GCHashSet<Args...>; 347 using Lookup = typename Set::Lookup; 348 349 Set& set() { return static_cast<Wrapper*>(this)->get(); } 350 351 public: 352 using AddPtr = typename Set::AddPtr; 353 using Entry = typename Set::Entry; 354 struct Enum : public Set::Enum { 355 explicit Enum(Wrapper& o) : Set::Enum(o.set()) {} 356 }; 357 using Ptr = typename Set::Ptr; 358 using Range = typename Set::Range; 359 360 void clear() { set().clear(); } 361 void clearAndCompact() { set().clearAndCompact(); } 362 [[nodiscard]] bool reserve(uint32_t len) { return set().reserve(len); } 363 void remove(Ptr p) { set().remove(p); } 364 void remove(const Lookup& l) { set().remove(l); } 365 AddPtr lookupForAdd(const Lookup& l) { return set().lookupForAdd(l); } 366 367 template <typename TInput> 368 void replaceKey(Ptr p, const Lookup& l, TInput&& newValue) { 369 set().replaceKey(p, l, std::forward<TInput>(newValue)); 370 } 371 372 template <typename TInput> 373 bool add(AddPtr& p, TInput&& t) { 374 return set().add(p, std::forward<TInput>(t)); 375 } 376 377 template <typename TInput> 378 bool relookupOrAdd(AddPtr& p, const Lookup& l, TInput&& t) { 379 return set().relookupOrAdd(p, l, std::forward<TInput>(t)); 380 } 381 382 template <typename TInput> 383 bool put(TInput&& t) { 384 return set().put(std::forward<TInput>(t)); 385 } 386 387 template <typename TInput> 388 bool putNew(TInput&& t) { 389 return set().putNew(std::forward<TInput>(t)); 390 } 391 392 template <typename TInput> 393 bool putNew(const Lookup& l, TInput&& t) { 394 return set().putNew(l, std::forward<TInput>(t)); 395 } 396 }; 397 398 } /* namespace js */ 399 400 namespace JS { 401 402 // Specialize WeakCache for GCHashMap to provide a barriered map that does not 403 // need to be swept immediately. 
template <typename Key, typename Value, typename HashPolicy,
          typename AllocPolicy, typename MapEntryGCPolicy>
class WeakCache<
    GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>>
    final : protected detail::WeakCacheBase {
  using Map = GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>;
  using Self = WeakCache<Map>;

  Map map;
  // Non-null while an incremental sweep is in progress; read paths then filter
  // out (and eagerly remove) entries that would be swept.
  JSTracer* barrierTracer = nullptr;

 public:
  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), map(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), map(std::forward<Args>(args)...) {}
  ~WeakCache() { MOZ_ASSERT(!barrierTracer); }

  bool empty() override { return map.empty(); }

  // Sweep the whole table; returns the number of entries visited (used for
  // incremental-sweeping budget accounting).
  size_t traceWeak(JSTracer* trc, NeedsLock needsLock) override {
    size_t steps = map.count();

    // Create an Enum and sweep the table entries.
    mozilla::Maybe<typename Map::Enum> e;
    e.emplace(map);
    map.traceWeakEntries(trc, e.ref());

    // Potentially take a lock while the Enum's destructor is called as this can
    // rehash/resize the table and access the store buffer.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (needsLock) {
      lock.emplace(trc->runtime());
    }
    e.reset();

    return steps;
  }

  // Install (trc non-null) or remove (trc null) the incremental barrier
  // tracer; exactly one of the two states must change.
  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

 private:
  using Entry = typename Map::Entry;

  // Const check used while barriers are active: would this entry be swept?
  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& entry) {
    return MapEntryGCPolicy::needsSweep(barrierTracer, &entry.key(),
                                        &entry.value());
  }

 public:
  using Lookup = typename Map::Lookup;
  using Ptr = typename Map::Ptr;
  using AddPtr = typename Map::AddPtr;

  // Iterator over the whole collection. While barriers are active it skips
  // entries that are about to be swept, so callers never observe dying
  // entries.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.map.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Map::Range range;

    // Advance past entries that would be swept by the pending sweep.
    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Map::Enum {
    explicit Enum(Self& cache) : Map::Enum(cache.map) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  // Lookup that respects the barrier: a found-but-dying entry is removed
  // eagerly and reported as not found.
  Ptr lookup(const Lookup& l) const {
    Ptr ptr = map.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Map&>(map).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = map.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Map&>(map).remove(ptr);
      // Re-lookup after removal so the AddPtr reflects the mutated table.
      return map.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the map and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.count();
  }

  size_t capacity() const { return map.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return map.shallowSizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + map.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    map.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename KeyInput, typename ValueInput>
  bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.relookupOrAdd(p, std::forward<KeyInput>(k),
                             std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool put(KeyInput&& k, ValueInput&& v) {
    return map.put(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool putNew(KeyInput&& k, ValueInput&& v) {
    return map.putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }
} JS_HAZ_NON_GC_POINTER;

// Specialize WeakCache for GCHashSet to provide a barriered set that does not
// need to be swept immediately.
//
// NOTE(review): unlike the map specialization above, this one declares no
// destructor asserting !barrierTracer — confirm whether that asymmetry is
// intentional.
template <typename T, typename HashPolicy, typename AllocPolicy>
class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>> final
    : protected detail::WeakCacheBase {
  using Set = GCHashSet<T, HashPolicy, AllocPolicy>;
  using Self = WeakCache<Set>;

  Set set;
  // Non-null while an incremental sweep is in progress; see the map
  // specialization for the filtering protocol.
  JSTracer* barrierTracer = nullptr;

 public:
  using Entry = typename Set::Entry;

  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), set(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), set(std::forward<Args>(args)...) {}

  // Sweep the whole table; returns the number of entries visited (used for
  // incremental-sweeping budget accounting).
  size_t traceWeak(JSTracer* trc, NeedsLock needsLock) override {
    size_t steps = set.count();

    // Create an Enum and sweep the table entries. It's not necessary to take
    // the store buffer lock yet.
    mozilla::Maybe<typename Set::Enum> e;
    e.emplace(set);
    set.traceWeakEntries(trc, e.ref());

    // Destroy the Enum, potentially rehashing or resizing the table. Since this
    // can access the store buffer, we need to take a lock for this if we're
    // called off main thread.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (needsLock) {
      lock.emplace(trc->runtime());
    }
    e.reset();

    return steps;
  }

  bool empty() override { return set.empty(); }

  // Install (trc non-null) or remove (trc null) the incremental barrier
  // tracer; exactly one of the two states must change.
  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

  // Steal the contents of this weak cache.
  Set stealContents() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to steal the contents while we are
    // sweeping.
    MOZ_ASSERT(!barrierTracer);

    auto rval = std::move(set);
    // Ensure set is in a specified (empty) state after the move
    set.clear();

    // Return set; no move to avoid invalidating NRVO.
    return rval;
  }

 private:
  // Const check used while barriers are active: would this entry be swept?
  // Works on a copy because GCPolicy<T>::traceWeak may update the element;
  // the assertion checks that a surviving element was not actually updated.
  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& prior) {
    Entry entry(prior);
    bool needsSweep = !GCPolicy<T>::traceWeak(barrierTracer, &entry);
    MOZ_ASSERT_IF(!needsSweep, prior == entry);  // We shouldn't update here.
    return needsSweep;
  }

 public:
  using Lookup = typename Set::Lookup;
  using Ptr = typename Set::Ptr;
  using AddPtr = typename Set::AddPtr;

  // Iterator over the whole collection. While barriers are active it skips
  // elements that are about to be swept, so callers never observe dying
  // elements.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.set.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Set::Range range;

    // Advance past elements that would be swept by the pending sweep.
    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Set::Enum {
    explicit Enum(Self& cache) : Set::Enum(cache.set) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  // Lookup that respects the barrier: a found-but-dying element is removed
  // eagerly and reported as not found.
  Ptr lookup(const Lookup& l) const {
    Ptr ptr = set.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Set&>(set).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = set.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Set&>(set).remove(ptr);
      // Re-lookup after removal so the AddPtr reflects the mutated table.
      return set.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.count();
  }

  size_t capacity() const { return set.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return set.shallowSizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + set.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    set.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename TInput>
  void replaceKey(Ptr p, const Lookup& l, TInput&& newValue) {
    set.replaceKey(p, l, std::forward<TInput>(newValue));
  }

  template <typename TInput>
  bool add(AddPtr& p, TInput&& t) {
    return set.add(p, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool relookupOrAdd(AddPtr& p, const Lookup& l, TInput&& t) {
    return set.relookupOrAdd(p, l, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool put(TInput&& t) {
    return set.put(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(TInput&& t) {
    return set.putNew(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(const Lookup& l, TInput&& t) {
    return set.putNew(l, std::forward<TInput>(t));
  }
} JS_HAZ_NON_GC_POINTER;

}  // namespace JS

#endif /* GCHashTable_h */