WeakMap.h (18812B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #ifndef gc_WeakMap_h 8 #define gc_WeakMap_h 9 10 #include "mozilla/Atomics.h" 11 #include "mozilla/LinkedList.h" 12 13 #include "gc/AllocKind.h" 14 #include "gc/Barrier.h" 15 #include "gc/Marking.h" 16 #include "gc/Tracer.h" 17 #include "gc/ZoneAllocator.h" 18 #include "js/GCVector.h" 19 #include "js/HashTable.h" 20 #include "js/HeapAPI.h" 21 #include "vm/JSObject.h" 22 23 namespace JS { 24 class Zone; 25 } 26 27 namespace js { 28 29 class GCMarker; 30 class WeakMapBase; 31 struct WeakMapTracer; 32 33 template <typename T> 34 struct WeakMapKeyHasher; 35 36 extern void DumpWeakMapLog(JSRuntime* rt); 37 38 namespace gc { 39 40 #if defined(JS_GC_ZEAL) || defined(DEBUG) 41 // Check whether a weak map entry is marked correctly. 42 bool CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key, Cell* value); 43 #endif 44 45 template <typename PtrT> 46 struct MightBeInNursery { 47 using T = std::remove_pointer_t<PtrT>; 48 static_assert(std::is_base_of_v<Cell, T>); 49 static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>); 50 51 #define CAN_NURSERY_ALLOC_KIND_OR(_1, _2, Type, _3, _4, canNurseryAlloc, _5) \ 52 std::is_base_of_v<Type, T> ? canNurseryAlloc: 53 54 // FOR_EACH_ALLOCKIND doesn't cover every possible type: make sure 55 // to default to `true` for unknown types. 56 static constexpr bool value = 57 FOR_EACH_ALLOCKIND(CAN_NURSERY_ALLOC_KIND_OR) true; 58 #undef CAN_NURSERY_ALLOC_KIND_OR 59 }; 60 template <> 61 struct MightBeInNursery<JS::Value> { 62 static constexpr bool value = true; 63 }; 64 65 } // namespace gc 66 67 // A subclass template of js::HashMap whose keys and values may be 68 // garbage-collected. 
When a key is collected, the table entry disappears, 69 // dropping its reference to the value. 70 // 71 // More precisely: 72 // 73 // A WeakMap entry is live if and only if both the WeakMap and the entry's 74 // key are live. An entry holds a strong reference to its value. 75 // 76 // You must call this table's 'trace' method when its owning object is reached 77 // by the garbage collection tracer. Once a table is known to be live, the 78 // implementation takes care of the special weak marking (ie, marking through 79 // the implicit edges stored in the map) and of removing (sweeping) table 80 // entries when collection is complete. 81 82 // WeakMaps are marked with an incremental linear-time algorithm that handles 83 // all orderings of map and key marking. The basic algorithm is: 84 // 85 // At first while marking, do nothing special when marking WeakMap keys (there 86 // is no straightforward way to know whether a particular object is being used 87 // as a key in some weakmap.) When a WeakMap is marked, scan through it to mark 88 // all entries with live keys, and collect all unmarked keys into a "weak keys" 89 // table. 90 // 91 // At some point, everything reachable has been marked. At this point, enter 92 // "weak marking mode". In this mode, whenever any object is marked, look it up 93 // in the weak keys table to see if it is the key for any WeakMap entry and if 94 // so, mark the value. When entering weak marking mode, scan the weak key table 95 // to find all keys that have been marked since we added them to the table, and 96 // mark those entries. 97 // 98 // In addition, we want weakmap marking to work incrementally. So WeakMap 99 // mutations are barriered to keep the weak keys table up to date: entries are 100 // removed if their key is removed from the table, etc. 101 // 102 // You can break down various ways that WeakMap values get marked based on the 103 // order that the map and key are marked. 
// All of these assume the map and key
// get marked at some point:
//
// key marked, then map marked:
//  - value was marked with map in `markEntries()`
// map marked, key already in map, key marked before weak marking mode:
//  - key added to gcEphemeronEdges when map marked in `markEntries()`
//  - value marked during `enterWeakMarkingMode`
// map marked, key already in map, key marked after weak marking mode:
//  - when key is marked, gcEphemeronEdges[key] triggers marking of value in
//    `markImplicitEdges()`
// map marked, key inserted into map, key marked:
//  - value was live when inserted and must get marked at some point
//

// Map from a weak map to the color it was marked, used to save and restore
// marking state across an aborted collection.
using WeakMapColors = HashMap<WeakMapBase*, js::gc::CellColor,
                              DefaultHasher<WeakMapBase*>, SystemAllocPolicy>;

// Common base class for all WeakMap specializations, used for calling
// subclasses' GC-related methods.
class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> {
  friend class js::GCMarker;

 public:
  using CellColor = js::gc::CellColor;

  WeakMapBase(JSObject* memOf, JS::Zone* zone);
  virtual ~WeakMapBase() {}

  JS::Zone* zone() const { return zone_; }

  // Garbage collector entry points.

  // Unmark all weak maps in a zone.
  static void unmarkZone(JS::Zone* zone);

  // Check all weak maps in a zone that have been marked as live in this
  // garbage collection, and mark the values of all entries that have become
  // strong references to them. Return true if we marked any new values,
  // indicating that we need to make another pass. In other words, mark my
  // marked maps' marked members' mid-collection.
  static bool markZoneIteratively(JS::Zone* zone, GCMarker* marker);

  // Add zone edges for weakmaps in zone |mapZone| with key delegates in a
  // different zone.
  [[nodiscard]] static bool findSweepGroupEdgesForZone(JS::Zone* atomsZone,
                                                       JS::Zone* mapZone);

  // Trace all weak map bindings. Used by the cycle collector.
  static void traceAllMappings(WeakMapTracer* tracer);

  // Save information about which weak maps are marked for a zone.
  static bool saveZoneMarkedWeakMaps(JS::Zone* zone,
                                     WeakMapColors& markedWeakMaps);

  // Restore information about which weak maps are marked for many zones.
  static void restoreMarkedWeakMaps(WeakMapColors& markedWeakMaps);

#if defined(JS_GC_ZEAL) || defined(DEBUG)
  static bool checkMarkingForZone(JS::Zone* zone);
#endif

#ifdef JSGC_HASH_TABLE_CHECKS
  static void checkWeakMapsAfterMovingGC(JS::Zone* zone);
#endif

 protected:
  // Instance member functions called by the above. Instantiations of WeakMap
  // override these with definitions appropriate for their Key and Value types.
  virtual void trace(JSTracer* tracer) = 0;
  virtual bool findSweepGroupEdges(Zone* atomsZone) = 0;
  virtual void traceWeakEdges(JSTracer* trc) = 0;
  virtual void traceMappings(WeakMapTracer* tracer) = 0;
  virtual void clearAndCompact() = 0;

  virtual bool markEntries(GCMarker* marker) = 0;

  // Trace any keys and values that are in the nursery. Return false if any
  // remain in the nursery.
  virtual bool traceNurseryEntriesOnMinorGC(JSTracer* trc) = 0;
  virtual bool sweepAfterMinorGC() = 0;

  // We have a key that, if it or its delegate is marked, may lead to a WeakMap
  // value getting marked. Insert the necessary edges into the appropriate
  // zone's gcEphemeronEdges or gcNurseryEphemeronEdges tables.
  [[nodiscard]] bool addEphemeronEdgesForEntry(gc::MarkColor mapColor,
                                               gc::TenuredCell* key,
                                               gc::Cell* delegate,
                                               gc::TenuredCell* value);
  [[nodiscard]] bool addEphemeronEdge(gc::MarkColor color, gc::TenuredCell* src,
                                      gc::TenuredCell* dst);

  // Accessors for the map's mark color, stored as a relaxed atomic (see
  // |mapColor_| below).
  gc::CellColor mapColor() const { return gc::CellColor(uint32_t(mapColor_)); }
  void setMapColor(gc::CellColor newColor) { mapColor_ = uint32_t(newColor); }
  bool markMap(gc::MarkColor markColor);

  void setHasNurseryEntries();

#ifdef JS_GC_ZEAL
  virtual bool checkMarking() const = 0;
  virtual bool allowKeysInOtherZones() const { return false; }
  friend bool gc::CheckWeakMapEntryMarking(const WeakMapBase*, gc::Cell*,
                                           gc::Cell*);
#endif

#ifdef JSGC_HASH_TABLE_CHECKS
  virtual void checkAfterMovingGC() const = 0;
#endif

  // Object that this weak map is part of, if any.
  HeapPtr<JSObject*> memberOf;

  // Zone containing this weak map.
  JS::Zone* zone_;

  // Whether this object has been marked during garbage collection and which
  // color it was marked.
  mozilla::Atomic<uint32_t, mozilla::Relaxed> mapColor_;

  // Cached information about keys to speed up findSweepGroupEdges.
  bool mayHaveKeyDelegates = false;
  bool mayHaveSymbolKeys = false;

  // Whether this map contains entries with nursery keys or values.
  bool hasNurseryEntries = false;

  // Whether the |nurseryKeys| vector contains the keys of all entries with
  // nursery keys or values. This can be false if it gets too large or on OOM.
  bool nurseryKeysValid = true;

  friend class JS::Zone;
  friend class js::Nursery;
};

// Get the hash from a Symbol.
HashNumber GetSymbolHash(JS::Symbol* sym);

// By default weak maps use the default hasher for the key type, which hashes
// the pointer itself for pointer types.
242 template <typename T> 243 struct WeakMapKeyHasher : public DefaultHasher<T> {}; 244 245 // We only support JS::Value keys that contain objects or symbols. For objects 246 // we hash the pointer and for symbols we use its stored hash, which is randomly 247 // generated on creation. 248 // 249 // Equality is based on a bitwise test not on JS Value semantics. 250 // 251 // Take care when modifying this code! Previously there have been security 252 // issues around using pointer hashing for maps (e.g. bug 1312001). 253 // 254 // Although this does use pointer hashing for objects, we don't think those 255 // concerns apply here because: 256 // 257 // 1) This uses an open addressed hash table rather than a chained one which 258 // makes the attack much more difficult. 259 // 260 // 2) The allowed key types are restricted to objects and non-registered 261 // symbols, so it's not possible to use int32 keys as were used in the 262 // attack. 263 // 264 // 3) Symbols use their own random hash codes which can't be predicted. 265 // 266 // 4) Registered symbols are not allowed, which means it's not possible to leak 267 // information about such symbols used by another zone. 268 // 269 // 5) Although sequentially allocated objects will have similar pointers, 270 // ScrambleHashCode should work well enough to distribute these keys and 271 // make predicting the hash code from the pointer difficult. 
272 template <> 273 struct WeakMapKeyHasher<JS::Value> { 274 using Key = JS::Value; 275 using Lookup = JS::Value; 276 277 static HashNumber hash(const Lookup& l) { 278 checkValueType(l); 279 if (l.isSymbol()) { 280 return GetSymbolHash(l.toSymbol()); 281 } 282 return mozilla::HashGeneric(l.asRawBits()); 283 } 284 285 static bool match(const Key& k, const Lookup& l) { 286 checkValueType(k); 287 return k == l; 288 } 289 290 static void rekey(Key& k, const Key& newKey) { k = newKey; } 291 292 private: 293 static void checkValueType(const Value& value); 294 }; 295 296 template <> 297 struct WeakMapKeyHasher<PreBarriered<JS::Value>> { 298 using Key = PreBarriered<JS::Value>; 299 using Lookup = JS::Value; 300 301 static HashNumber hash(const Lookup& l) { 302 return WeakMapKeyHasher<JS::Value>::hash(l); 303 } 304 static bool match(const Key& k, const Lookup& l) { 305 return WeakMapKeyHasher<JS::Value>::match(k, l); 306 } 307 static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); } 308 }; 309 310 template <class Key, class Value, class AllocPolicy> 311 class WeakMap : public WeakMapBase { 312 using BarrieredKey = PreBarriered<Key>; 313 using BarrieredValue = PreBarriered<Value>; 314 315 using Map = HashMap<BarrieredKey, BarrieredValue, 316 WeakMapKeyHasher<BarrieredKey>, AllocPolicy>; 317 using UnbarrieredMap = 318 HashMap<Key, Value, WeakMapKeyHasher<Key>, AllocPolicy>; 319 320 UnbarrieredMap map_; // Barriers are added by |map()| accessor. 321 322 // The keys of entries where either the key or value is allocated in the 323 // nursery. 324 GCVector<Key, 0, SystemAllocPolicy> nurseryKeys; 325 326 public: 327 using Lookup = typename Map::Lookup; 328 using Entry = typename Map::Entry; 329 using Range = typename Map::Range; 330 331 // Restrict the interface of HashMap::Ptr and AddPtr to remove mutable access 332 // to the hash table entry which could otherwise bypass our barriers. 
333 334 using MutablePtr = typename Map::Ptr; 335 class Ptr { 336 MutablePtr ptr; 337 friend class WeakMap; 338 339 public: 340 explicit Ptr(const MutablePtr& ptr) : ptr(ptr) {} 341 bool found() const { return ptr.found(); } 342 explicit operator bool() const { return found(); } 343 const Entry& operator*() const { return *ptr; } 344 const Entry* operator->() const { return &*ptr; } 345 }; 346 347 using MutableAddPtr = typename Map::AddPtr; 348 class AddPtr { 349 MutableAddPtr ptr; 350 friend class WeakMap; 351 352 public: 353 explicit AddPtr(const MutableAddPtr& ptr) : ptr(ptr) {} 354 bool found() const { return ptr.found(); } 355 explicit operator bool() const { return found(); } 356 const Entry& operator*() const { return *ptr; } 357 const Entry* operator->() const { return &*ptr; } 358 }; 359 360 struct Enum : public Map::Enum { 361 explicit Enum(WeakMap& map) : Map::Enum(map.map()) {} 362 }; 363 364 explicit WeakMap(JSContext* cx, JSObject* memOf = nullptr); 365 explicit WeakMap(JS::Zone* zone, JSObject* memOf = nullptr); 366 ~WeakMap() override; 367 368 Range all() const { return map().all(); } 369 uint32_t count() const { return map().count(); } 370 bool empty() const { return map().empty(); } 371 bool has(const Lookup& lookup) const { return map().has(lookup); } 372 void remove(const Lookup& lookup) { return map().remove(lookup); } 373 void remove(Ptr ptr) { return map().remove(ptr.ptr); } 374 375 size_t shallowSizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const { 376 return map().shallowSizeOfExcludingThis(aMallocSizeOf); 377 } 378 size_t shallowSizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const { 379 return aMallocSizeOf(this) + shallowSizeOfExcludingThis(aMallocSizeOf); 380 } 381 382 // Get the value associated with a key, or a default constructed Value if the 383 // key is not present in the map. 
384 Value get(const Lookup& l) const { 385 Ptr ptr = lookup(l); 386 if (!ptr) { 387 return Value(); 388 } 389 return ptr->value(); 390 } 391 392 // Add a read barrier to prevent a gray value from escaping the weak map. This 393 // is necessary because we don't unmark gray through weak maps. 394 Ptr lookup(const Lookup& l) const { 395 Ptr p = lookupUnbarriered(l); 396 if (p) { 397 valueReadBarrier(p->value()); 398 } 399 return p; 400 } 401 402 Ptr lookupUnbarriered(const Lookup& l) const { return Ptr(map().lookup(l)); } 403 404 AddPtr lookupForAdd(const Lookup& l) { 405 AddPtr p(map().lookupForAdd(l)); 406 if (p) { 407 valueReadBarrier(p->value()); 408 } 409 return p; 410 } 411 412 [[nodiscard]] bool add(AddPtr& p, const Key& k, const Value& v) { 413 MOZ_ASSERT(gc::ToMarkable(k)); 414 writeBarrier(k, v); 415 return map().add(p.ptr, k, v); 416 } 417 418 [[nodiscard]] bool relookupOrAdd(AddPtr& p, const Key& k, const Value& v) { 419 MOZ_ASSERT(gc::ToMarkable(k)); 420 writeBarrier(k, v); 421 return map().relookupOrAdd(p.ptr, k, v); 422 } 423 424 [[nodiscard]] bool put(const Key& k, const Value& v) { 425 MOZ_ASSERT(gc::ToMarkable(k)); 426 writeBarrier(k, v); 427 return map().put(k, v); 428 } 429 430 [[nodiscard]] bool putNew(const Key& k, const Value& v) { 431 MOZ_ASSERT(gc::ToMarkable(k)); 432 writeBarrier(k, v); 433 return map().putNew(k, v); 434 } 435 436 void clear() { 437 map().clear(); 438 nurseryKeys.clear(); 439 nurseryKeysValid = true; 440 mayHaveSymbolKeys = false; 441 mayHaveKeyDelegates = false; 442 } 443 444 #ifdef DEBUG 445 bool hasEntry(const Key& key, const Value& value) const { 446 Ptr p = lookupUnbarriered(key); 447 return p && p->value() == value; 448 } 449 #endif 450 451 bool markEntry(GCMarker* marker, gc::CellColor mapColor, Enum& iter, 452 bool populateWeakKeysTable); 453 454 void trace(JSTracer* trc) override; 455 456 // Used by the debugger to trace cross-compartment edges. 
457 void traceKeys(JSTracer* trc); 458 void traceKey(JSTracer* trc, Enum& iter); 459 460 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf); 461 462 static size_t offsetOfHashShift() { 463 return offsetof(WeakMap, map_) + UnbarrieredMap::offsetOfHashShift(); 464 } 465 static size_t offsetOfTable() { 466 return offsetof(WeakMap, map_) + UnbarrieredMap::offsetOfTable(); 467 } 468 static size_t offsetOfEntryCount() { 469 return offsetof(WeakMap, map_) + UnbarrieredMap::offsetOfEntryCount(); 470 } 471 472 protected: 473 inline void assertMapIsSameZoneWithValue(const BarrieredValue& v); 474 475 bool markEntries(GCMarker* marker) override; 476 477 // Find sweep group edges for delegates, if the key type has delegates. (If 478 // not, the optimizer should make this a nop.) 479 bool findSweepGroupEdges(Zone* atomsZone) override; 480 481 #if DEBUG 482 void assertEntriesNotAboutToBeFinalized(); 483 #endif 484 485 #ifdef JS_GC_ZEAL 486 bool checkMarking() const override; 487 #endif 488 489 #ifdef JSGC_HASH_TABLE_CHECKS 490 void checkAfterMovingGC() const override; 491 #endif 492 493 private: 494 // Map accessor uses a cast to add barriers. 
495 Map& map() { return reinterpret_cast<Map&>(map_); } 496 const Map& map() const { return reinterpret_cast<const Map&>(map_); } 497 498 MutablePtr lookupMutableUnbarriered(const Lookup& l) { 499 return map().lookup(l); 500 } 501 502 static void valueReadBarrier(const JS::Value& v) { 503 JS::ExposeValueToActiveJS(v); 504 } 505 static void valueReadBarrier(JSObject* obj) { 506 JS::ExposeObjectToActiveJS(obj); 507 } 508 509 void writeBarrier(const Key& key, const Value& value) { 510 keyKindBarrier(key); 511 nurseryEntryBarrier(key, value); 512 } 513 514 void keyKindBarrier(const JS::Value& key) { 515 if (key.isSymbol()) { 516 mayHaveSymbolKeys = true; 517 } 518 if (key.isObject()) { 519 keyKindBarrier(&key.toObject()); 520 } 521 } 522 void keyKindBarrier(JSObject* key) { 523 JSObject* delegate = UncheckedUnwrapWithoutExpose(key); 524 if (delegate != key || ObjectMayBeSwapped(key)) { 525 mayHaveKeyDelegates = true; 526 } 527 } 528 void keyKindBarrier(BaseScript* key) {} 529 530 void nurseryEntryBarrier(const Key& key, const Value& value) { 531 if ((gc::MightBeInNursery<Key>::value && 532 !JS::GCPolicy<Key>::isTenured(key)) || 533 (gc::MightBeInNursery<Value>::value && 534 !JS::GCPolicy<Value>::isTenured(value))) { 535 if (!hasNurseryEntries) { 536 setHasNurseryEntries(); 537 } 538 539 addNurseryKey(key); 540 } 541 } 542 543 void addNurseryKey(const Key& key); 544 545 void traceWeakEdges(JSTracer* trc) override; 546 547 void clearAndCompact() override { 548 clear(); 549 map().compact(); 550 nurseryKeys.clearAndFree(); 551 } 552 553 // memberOf can be nullptr, which means that the map is not part of a 554 // JSObject. 555 void traceMappings(WeakMapTracer* tracer) override; 556 557 bool traceNurseryEntriesOnMinorGC(JSTracer* trc) override; 558 bool sweepAfterMinorGC() override; 559 }; 560 561 } /* namespace js */ 562 563 #endif /* gc_WeakMap_h */