WeakMap-inl.h (24598B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_WeakMap_inl_h
#define gc_WeakMap_inl_h

#include "gc/WeakMap.h"

#include "mozilla/Maybe.h"

#include <algorithm>
#include <type_traits>

#include "gc/GCLock.h"
#include "gc/Marking.h"
#include "gc/Zone.h"
#include "js/Prefs.h"
#include "js/TraceKind.h"
#include "vm/JSContext.h"
#include "vm/SymbolType.h"

#include "gc/AtomMarking-inl.h"
#include "gc/Marking-inl.h"
#include "gc/StableCellHasher-inl.h"

namespace js {

namespace gc::detail {

// Overload sets that classify the supported weakmap key types. Pointer key
// types are classified statically; only JS::Value keys can dynamically hold
// either an object or a symbol, so those are inspected at runtime.
static inline bool IsObject(JSObject* obj) { return true; }
static inline bool IsObject(BaseScript* script) { return false; }
static inline bool IsObject(const JS::Value& value) { return value.isObject(); }

static inline bool IsSymbol(JSObject* obj) { return false; }
static inline bool IsSymbol(BaseScript* script) { return false; }
static inline bool IsSymbol(const JS::Value& value) { return value.isSymbol(); }

// Return the effective cell color given the current marking state.
// This must be kept in sync with ShouldMark in Marking.cpp.
43 template <typename T> 44 static CellColor GetEffectiveColor(GCMarker* marker, const T& item) { 45 static_assert(!IsBarriered<T>::value, "Don't pass wrapper types"); 46 47 Cell* cell = ToMarkable(item); 48 if (!cell->isTenured()) { 49 return CellColor::Black; 50 } 51 52 const TenuredCell& t = cell->asTenured(); 53 if (!t.zoneFromAnyThread()->shouldMarkInZone(marker->markColor())) { 54 return CellColor::Black; 55 } 56 MOZ_ASSERT(t.runtimeFromAnyThread() == marker->runtime()); 57 58 return t.color(); 59 } 60 61 // If a wrapper is used as a key in a weakmap, the garbage collector should 62 // keep that object around longer than it otherwise would. We want to avoid 63 // collecting the wrapper (and removing the weakmap entry) as long as the 64 // wrapped object is alive (because the object can be rewrapped and looked up 65 // again). As long as the wrapper is used as a weakmap key, it will not be 66 // collected (and remain in the weakmap) until the wrapped object is 67 // collected. 68 template <typename T> 69 static inline JSObject* GetDelegate(const T& key) { 70 static_assert(!IsBarriered<T>::value, "Don't pass wrapper types"); 71 static_assert(!std::is_same_v<T, gc::Cell*>, "Don't pass Cell*"); 72 73 // Only objects have delegates. 74 if (!IsObject(key)) { 75 return nullptr; 76 } 77 78 auto* obj = static_cast<JSObject*>(ToMarkable(key)); 79 JSObject* delegate = UncheckedUnwrapWithoutExpose(obj); 80 if (delegate == obj) { 81 return nullptr; 82 } 83 84 return delegate; 85 } 86 87 } // namespace gc::detail 88 89 // Weakmap entry -> value edges are only visible if the map is traced, which 90 // only happens if the map zone is being collected. If the map and the value 91 // were in different zones, then we could have a case where the map zone is not 92 // collecting but the value zone is, and incorrectly free a value that is 93 // reachable solely through weakmaps. 
94 template <class K, class V, class AP> 95 void WeakMap<K, V, AP>::assertMapIsSameZoneWithValue(const BarrieredValue& v) { 96 #ifdef DEBUG 97 gc::Cell* cell = gc::ToMarkable(v); 98 if (cell) { 99 Zone* cellZone = cell->zoneFromAnyThread(); 100 MOZ_ASSERT(zone() == cellZone || cellZone->isAtomsZone()); 101 } 102 #endif 103 } 104 105 // Initial length chosen to give minimum table capacity on creation. 106 // 107 // Using the default initial length instead means we will often reallocate the 108 // table on sweep because it's too big for the number of entries. 109 static constexpr size_t InitialWeakMapLength = 0; 110 111 template <class K, class V, class AP> 112 WeakMap<K, V, AP>::WeakMap(JSContext* cx, JSObject* memOf) 113 : WeakMap(cx->zone(), memOf) {} 114 115 template <class K, class V, class AP> 116 WeakMap<K, V, AP>::WeakMap(JS::Zone* zone, JSObject* memOf) 117 : WeakMapBase(memOf, zone), map_(zone, InitialWeakMapLength) { 118 static_assert(std::is_same_v<typename RemoveBarrier<K>::Type, K>); 119 static_assert(std::is_same_v<typename RemoveBarrier<V>::Type, V>); 120 121 // The object's TraceKind needs to be added to CC graph if this object is 122 // used as a WeakMap key, otherwise the key is considered to be pointed from 123 // somewhere unknown, and results in leaking the subgraph which contains the 124 // key. See the comments in NoteWeakMapsTracer::trace for more details. 125 if constexpr (std::is_pointer_v<K>) { 126 using NonPtrType = std::remove_pointer_t<K>; 127 static_assert(JS::IsCCTraceKind(NonPtrType::TraceKind), 128 "Object's TraceKind should be added to CC graph."); 129 } 130 131 zone->gcWeakMapList().insertFront(this); 132 if (zone->gcState() > Zone::Prepare) { 133 setMapColor(CellColor::Black); 134 } 135 } 136 137 template <class K, class V, class AP> 138 WeakMap<K, V, AP>::~WeakMap() { 139 #ifdef DEBUG 140 // Weak maps store their data in an unbarriered map (|map_|) meaning that no 141 // barriers are run on destruction. 
This is safe because: 142 143 // 1. Weak maps have GC lifetime except on construction failure, therefore no 144 // prebarrier is required. 145 MOZ_ASSERT_IF(!empty(), 146 CurrentThreadIsGCSweeping() || CurrentThreadIsGCFinalizing()); 147 148 // 2. If we're finalizing a weak map due to GC then it cannot contain nursery 149 // things, because we evicted the nursery at the start of collection and 150 // writing a nursery thing into the table would require the map to be 151 // live. Therefore no postbarrier is required. 152 size_t i = 0; 153 for (auto r = all(); !r.empty() && i < 1000; r.popFront(), i++) { 154 K key = r.front().key(); 155 MOZ_ASSERT_IF(gc::ToMarkable(key), !IsInsideNursery(gc::ToMarkable(key))); 156 V value = r.front().value(); 157 MOZ_ASSERT_IF(gc::ToMarkable(value), 158 !IsInsideNursery(gc::ToMarkable(value))); 159 } 160 #endif 161 } 162 163 // If the entry is live, ensure its key and value are marked. Also make sure the 164 // key is at least as marked as min(map, delegate), so it cannot get discarded 165 // and then recreated by rewrapping the delegate. 166 // 167 // Optionally adds edges to the ephemeron edges table for any keys (or 168 // delegates) where future changes to their mark color would require marking the 169 // value (or the key). 
template <class K, class V, class AP>
bool WeakMap<K, V, AP>::markEntry(GCMarker* marker, gc::CellColor mapColor,
                                  Enum& iter, bool populateWeakKeysTable) {
#ifdef DEBUG
  MOZ_ASSERT(IsMarked(mapColor));
  if (marker->isParallelMarking()) {
    // Parallel markers must hold the GC lock before touching the shared
    // ephemeron edges table below.
    marker->runtime()->gc.assertCurrentThreadHasLockedGC();
  }
#endif

  BarrieredKey& key = iter.front().mutableKey();
  BarrieredValue& value = iter.front().value();

  JSTracer* trc = marker->tracer();
  gc::Cell* keyCell = gc::ToMarkable(key);
  MOZ_ASSERT(keyCell);

  bool keyIsSymbol = gc::detail::IsSymbol(key.get());
  MOZ_ASSERT(keyIsSymbol == (keyCell->getTraceKind() == JS::TraceKind::Symbol));
  if (keyIsSymbol) {
    // For symbols, also check whether it is referenced by an uncollected
    // zone, and if so mark it now. There's no need to set |marked| as this
    // would have been marked later anyway.
    auto* sym = static_cast<JS::Symbol*>(keyCell);
    if (marker->runtime()->gc.isSymbolReferencedByUncollectedZone(
            sym, marker->markColor())) {
      TraceEdge(trc, &key, "WeakMap symbol key");
    }
  }

  bool marked = false;
  CellColor markColor = AsCellColor(marker->markColor());
  CellColor keyColor = gc::detail::GetEffectiveColor(marker, key.get());
  JSObject* delegate = gc::detail::GetDelegate(key.get());

  if (delegate) {
    CellColor delegateColor = gc::detail::GetEffectiveColor(marker, delegate);
    // The key needs to stay alive while both the delegate and map are live.
    CellColor proxyPreserveColor = std::min(delegateColor, mapColor);
    if (keyColor < proxyPreserveColor) {
      MOZ_ASSERT(markColor >= proxyPreserveColor);
      if (markColor == proxyPreserveColor) {
        traceKey(trc, iter);
        MOZ_ASSERT(keyCell->color() >= proxyPreserveColor);
        marked = true;
        keyColor = proxyPreserveColor;
      }
    }
  }

  gc::Cell* cellValue = gc::ToMarkable(value);
  if (IsMarked(keyColor)) {
    if (cellValue) {
      // The value must be kept at least as marked as min(map, key).
      CellColor targetColor = std::min(mapColor, keyColor);
      CellColor valueColor = gc::detail::GetEffectiveColor(marker, value.get());
      if (valueColor < targetColor) {
        MOZ_ASSERT(markColor >= targetColor);
        if (markColor == targetColor) {
          TraceEdge(trc, &value, "WeakMap entry value");
          MOZ_ASSERT(cellValue->color() >= targetColor);
          marked = true;
        }
      }
    }
  }

  if (populateWeakKeysTable) {
    MOZ_ASSERT(trc->weakMapAction() == JS::WeakMapTraceAction::Expand);

    // Note that delegateColor >= keyColor because marking a key marks its
    // delegate, so we only need to check whether keyColor < mapColor to tell
    // this.
    if (keyColor < mapColor) {
      // The final color of the key is not yet known. Add an edge to the
      // relevant ephemerons table to ensure that the value will be marked if
      // the key is marked. If the key has a delegate, also add an edge to
      // ensure the key is marked if the delegate is marked.

      // Nursery values are added to the store buffer when writing them into
      // the entry (via HeapPtr), so they will always get tenured. There's no
      // need for a key->value ephemeron to keep them alive via the WeakMap.
      gc::TenuredCell* tenuredValue = nullptr;
      if (cellValue && cellValue->isTenured()) {
        tenuredValue = &cellValue->asTenured();
      }

      // Nursery key is treated as black, so cannot be less marked than the map.
      MOZ_ASSERT(keyCell->isTenured());

      if (!this->addEphemeronEdgesForEntry(AsMarkColor(mapColor),
                                           &keyCell->asTenured(), delegate,
                                           tenuredValue)) {
        // On allocation failure, fall back to non-linear weak marking.
        marker->abortLinearWeakMarking();
      }
    }
  }

  return marked;
}

// Trace the map itself (owner edge, map color, and entries as dictated by the
// tracer's WeakMapTraceAction).
template <class K, class V, class AP>
void WeakMap<K, V, AP>::trace(JSTracer* trc) {
  MOZ_ASSERT(isInList());

  TraceNullableEdge(trc, &memberOf, "WeakMap owner");

  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(trc->weakMapAction() == JS::WeakMapTraceAction::Expand);
    GCMarker* marker = GCMarker::fromTracer(trc);
    // Only mark entries if this raised the map's color.
    if (markMap(marker->markColor())) {
      (void)markEntries(marker);
    }
    return;
  }

  if (trc->weakMapAction() == JS::WeakMapTraceAction::Skip) {
    return;
  }

  for (Enum e(*this); !e.empty(); e.popFront()) {
    // Always trace all values (unless weakMapAction() is Skip).
    TraceEdge(trc, &e.front().value(), "WeakMap entry value");

    // Trace keys only if weakMapAction() says to.
    if (trc->weakMapAction() == JS::WeakMapTraceAction::TraceKeysAndValues) {
      traceKey(trc, e);
    }
  }
}

// Trace the key of the entry under |iter|, rekeying the table if tracing
// moved the key.
template <class K, class V, class AP>
void WeakMap<K, V, AP>::traceKey(JSTracer* trc, Enum& iter) {
  PreBarriered<K> key = iter.front().key();
  TraceWeakMapKeyEdge(trc, zone(), &key, "WeakMap entry key");
  if (key != iter.front().key()) {
    iter.rekeyFront(key);
  }

  // TODO: This is a work around to prevent the pre-barrier firing. The
  // rekeyFront() method requires passing in an instance of the key which in
  // this case has a barrier. It should be possible to create the key in place
  // by passing in a pointer as happens for other hash table methods that create
  // entries.
  key.unbarrieredSet(JS::SafelyInitialized<K>::create());
}

template <class K, class V, class AP>
bool WeakMap<K, V, AP>::markEntries(GCMarker* marker) {
  // This method is called whenever the map's mark color changes. Mark values
  // (and keys with delegates) as required for the new color and populate the
  // ephemeron edges if we're in incremental marking mode.

  // Lock during parallel marking to synchronize updates to the ephemeron edges
  // table.
  mozilla::Maybe<AutoLockGC> lock;
  if (marker->isParallelMarking()) {
    lock.emplace(marker->runtime());
  }

  MOZ_ASSERT(IsMarked(mapColor()));
  bool markedAny = false;

  // If we don't populate the weak keys table now then we do it when we enter
  // weak marking mode.
  bool populateWeakKeysTable =
      marker->incrementalWeakMapMarkingEnabled || marker->isWeakMarking();

  // Read the atomic color into a local variable so the compiler doesn't load it
  // every time.
  gc::CellColor mapColor = this->mapColor();

  for (Enum e(*this); !e.empty(); e.popFront()) {
    if (markEntry(marker, mapColor, e, populateWeakKeysTable)) {
      markedAny = true;
    }
  }

  return markedAny;
}

template <class K, class V, class AP>
void WeakMap<K, V, AP>::traceWeakEdges(JSTracer* trc) {
  // This is used for sweeping but not for anything that can move GC things.
  MOZ_ASSERT(!trc->isTenuringTracer() && trc->kind() != JS::TracerKind::Moving);

  // Scan the map, removing all entries whose keys remain unmarked. Rebuild
  // cached key state at the same time.
  mayHaveSymbolKeys = false;
  mayHaveKeyDelegates = false;
  for (Enum e(*this); !e.empty(); e.popFront()) {
#ifdef DEBUG
    K prior = e.front().key();
#endif
    if (TraceWeakEdge(trc, &e.front().mutableKey(), "WeakMap key")) {
      // Surviving key: this tracer cannot move it, so it must be unchanged.
      MOZ_ASSERT(e.front().key() == prior);
      keyKindBarrier(e.front().key());
    } else {
      // Dead key: drop the entry.
      e.removeFront();
    }
  }

  // TODO: Shrink nurseryKeys storage?

#if DEBUG
  // Once we've swept, all remaining edges should stay within the known-live
  // part of the graph.
  assertEntriesNotAboutToBeFinalized();
#endif
}

// Record |key| in the list of nursery keys, or invalidate the list if it has
// grown too large (in which case the whole map is scanned on minor GC).
template <class K, class V, class AP>
void WeakMap<K, V, AP>::addNurseryKey(const K& key) {
  MOZ_ASSERT(hasNurseryEntries);  // Must be set before calling this.

  if (!nurseryKeysValid) {
    return;
  }

  // Don't bother recording every key if there a lot of them. We will scan the
  // map instead.
  bool tooManyKeys = nurseryKeys.length() >= map().count() / 2;

  if (tooManyKeys || !nurseryKeys.append(key)) {
    nurseryKeys.clear();
    nurseryKeysValid = false;
  }
}

template <class K, class V, class AP>
bool WeakMap<K, V, AP>::traceNurseryEntriesOnMinorGC(JSTracer* trc) {
  // Called on minor GC to trace nursery keys that have delegates and nursery
  // values. Nursery keys without delegates are swept at the end of minor GC if
  // they do not survive.

  MOZ_ASSERT(hasNurseryEntries);

  // Trace one entry's value (and its key if it has a delegate). Returns
  // {key was moved by tracing, entry still refers to a nursery thing}.
  using Entry = typename Map::Entry;
  auto traceEntry = [trc](K& key,
                          const Entry& entry) -> std::tuple<bool, bool> {
    TraceEdge(trc, &entry.value(), "WeakMap nursery value");
    bool hasNurseryValue = !JS::GCPolicy<V>::isTenured(entry.value());

    MOZ_ASSERT(key == entry.key());
    JSObject* delegate = gc::detail::GetDelegate(gc::MaybeForwarded(key));
    if (delegate) {
      TraceManuallyBarrieredEdge(trc, &key, "WeakMap nursery key");
    }
    bool hasNurseryKey = !JS::GCPolicy<K>::isTenured(key);
    bool keyUpdated = key != entry.key();

    return {keyUpdated, hasNurseryKey || hasNurseryValue};
  };

  if (nurseryKeysValid) {
    // Fast path: only visit the recorded nursery keys, dropping list entries
    // that no longer refer to nursery things.
    nurseryKeys.mutableEraseIf([&](K& key) {
      auto ptr = lookupUnbarriered(key);
      if (!ptr) {
        if (!gc::IsForwarded(key)) {
          return true;
        }

        // WeakMap::trace might have marked the key in the table already so if
        // the key was forwarded try looking up the forwarded key too.
        //
        // TODO: Try to update cached nursery information there instead.
        key = gc::Forwarded(key);
        ptr = lookupUnbarriered(key);
        if (!ptr) {
          return true;
        }
      }

      auto [keyUpdated, hasNurseryKeyOrValue] = traceEntry(key, *ptr);

      if (keyUpdated) {
        map().rekeyAs(ptr->key(), key, key);
      }

      return !hasNurseryKeyOrValue;
    });
  } else {
    // Slow path: the key list overflowed, so scan the whole map and rebuild
    // the list as we go.
    MOZ_ASSERT(nurseryKeys.empty());
    nurseryKeysValid = true;

    for (Enum e(*this); !e.empty(); e.popFront()) {
      Entry& entry = e.front();

      K key = entry.key();
      auto [keyUpdated, hasNurseryKeyOrValue] = traceEntry(key, entry);

      if (keyUpdated) {
        entry.mutableKey() = key;
        e.rekeyFront(key);
      }

      if (hasNurseryKeyOrValue) {
        addNurseryKey(key);
      }
    }
  }

  hasNurseryEntries = !nurseryKeysValid || !nurseryKeys.empty();

#ifdef DEBUG
  bool foundNurseryEntries = false;
  for (Enum e(*this); !e.empty(); e.popFront()) {
    if (!JS::GCPolicy<K>::isTenured(e.front().key()) ||
        !JS::GCPolicy<V>::isTenured(e.front().value())) {
      foundNurseryEntries = true;
    }
  }
  MOZ_ASSERT_IF(foundNurseryEntries, hasNurseryEntries);
#endif

  return !hasNurseryEntries;
}

// Called after a minor GC: remove entries whose nursery keys died, rekey
// entries whose keys were tenured, and rebuild the cached nursery state.
template <class K, class V, class AP>
bool WeakMap<K, V, AP>::sweepAfterMinorGC() {
#ifdef DEBUG
  MOZ_ASSERT(hasNurseryEntries);
  bool foundNurseryEntries = false;
  for (Enum e(*this); !e.empty(); e.popFront()) {
    if (!JS::GCPolicy<K>::isTenured(e.front().key()) ||
        !JS::GCPolicy<V>::isTenured(e.front().value())) {
      foundNurseryEntries = true;
    }
  }
  MOZ_ASSERT(foundNurseryEntries);
#endif

  // Sweep one entry. If the key was in the collected nursery region it either
  // died (remove the entry) or was tenured (rekey to the forwarded location).
  using Entry = typename Map::Entry;
  using Result = std::tuple<bool /* shouldRemove */, bool /* keyUpdated */,
                            bool /* hasNurseryKeyOrValue */>;
  auto sweepEntry = [](K& key, const Entry& entry) -> Result {
    bool hasNurseryValue = !JS::GCPolicy<V>::isTenured(entry.value());
    MOZ_ASSERT(!gc::IsForwarded(entry.value().get()));

    gc::Cell* keyCell = gc::ToMarkable(key);
    if (!gc::InCollectedNurseryRegion(keyCell)) {
      bool hasNurseryKey = !JS::GCPolicy<K>::isTenured(key);
      return {false, false, hasNurseryKey || hasNurseryValue};
    }

    if (!gc::IsForwarded(key)) {
      // The key did not survive the minor GC.
      return {true, false, false};
    }

    key = gc::Forwarded(key);
    MOZ_ASSERT(key != entry.key());

    bool hasNurseryKey = !JS::GCPolicy<K>::isTenured(key);

    return {false, true, hasNurseryKey || hasNurseryValue};
  };

  if (nurseryKeysValid) {
    // Fast path: only the recorded nursery keys need sweeping.
    nurseryKeys.mutableEraseIf([&](K& key) {
      auto ptr = lookupMutableUnbarriered(key);
      if (!ptr) {
        if (!gc::IsForwarded(key)) {
          return true;
        }

        // WeakMap::trace might have marked the key in the table already so if
        // the key was forwarded try looking up the forwarded key too.
        //
        // TODO: Try to update cached nursery information there instead.
        key = gc::Forwarded(key);
        ptr = lookupMutableUnbarriered(key);
        if (!ptr) {
          return true;
        }
      }

      auto [shouldRemove, keyUpdated, hasNurseryKeyOrValue] =
          sweepEntry(key, *ptr);
      if (shouldRemove) {
        map().remove(ptr);
        return true;
      }

      if (keyUpdated) {
        map().rekeyAs(ptr->key(), key, key);
      }

      return !hasNurseryKeyOrValue;
    });
  } else {
    // Slow path: scan the whole map and rebuild the nursery key list.
    MOZ_ASSERT(nurseryKeys.empty());
    nurseryKeysValid = true;

    for (Enum e(*this); !e.empty(); e.popFront()) {
      Entry& entry = e.front();

      K key = entry.key();
      auto [shouldRemove, keyUpdated, hasNurseryKeyOrValue] =
          sweepEntry(key, entry);

      if (shouldRemove) {
        e.removeFront();
        continue;
      }

      if (keyUpdated) {
        entry.mutableKey() = key;
        e.rekeyFront(key);
      }

      if (hasNurseryKeyOrValue) {
        addNurseryKey(key);
      }
    }
  }

  hasNurseryEntries = !nurseryKeysValid || !nurseryKeys.empty();

#ifdef DEBUG
  foundNurseryEntries = false;
  for (Enum e(*this); !e.empty(); e.popFront()) {
    if (!JS::GCPolicy<K>::isTenured(e.front().key()) ||
        !JS::GCPolicy<V>::isTenured(e.front().value())) {
      foundNurseryEntries = true;
    }
  }
  MOZ_ASSERT_IF(foundNurseryEntries, hasNurseryEntries);
#endif

  return !hasNurseryEntries;
}

// Report all key/value pairs to a cycle-collector style WeakMapTracer.
// memberOf can be nullptr, which means that the map is not part of a JSObject.
template <class K, class V, class AP>
void WeakMap<K, V, AP>::traceMappings(WeakMapTracer* tracer) {
  for (Range r = all(); !r.empty(); r.popFront()) {
    gc::Cell* key = gc::ToMarkable(r.front().key());
    gc::Cell* value = gc::ToMarkable(r.front().value());
    if (key && value) {
      tracer->trace(memberOf, JS::GCCellPtr(r.front().key().get()),
                    JS::GCCellPtr(r.front().value().get()));
    }
  }
}

template <class K, class V, class AP>
bool WeakMap<K, V, AP>::findSweepGroupEdges(Zone* atomsZone) {
  // For weakmap keys with delegates in a different zone, add a zone edge to
  // ensure that the delegate zone finishes marking before the key zone.

#ifdef DEBUG
  // Verify that the cached mayHaveSymbolKeys/mayHaveKeyDelegates flags are
  // conservative over the map's actual contents.
  if (!mayHaveSymbolKeys || !mayHaveKeyDelegates) {
    for (Range r = all(); !r.empty(); r.popFront()) {
      const K& key = r.front().key();
      MOZ_ASSERT_IF(!mayHaveKeyDelegates, !gc::detail::GetDelegate(key));
      MOZ_ASSERT_IF(!mayHaveSymbolKeys, !gc::detail::IsSymbol(key));
    }
  }
#endif

  if (mayHaveSymbolKeys) {
    MOZ_ASSERT(JS::Prefs::experimental_symbols_as_weakmap_keys());
    if (atomsZone->isGCMarking()) {
      if (!atomsZone->addSweepGroupEdgeTo(zone())) {
        return false;
      }
    }
  }

  if (mayHaveKeyDelegates) {
    for (Range r = all(); !r.empty(); r.popFront()) {
      const K& key = r.front().key();

      JSObject* delegate = gc::detail::GetDelegate(key);
      if (delegate) {
        // Marking a WeakMap key's delegate will mark the key, so process the
        // delegate zone no later than the key zone.
        Zone* delegateZone = delegate->zone();
        gc::Cell* keyCell = gc::ToMarkable(key);
        MOZ_ASSERT(keyCell);
        Zone* keyZone = keyCell->zone();
        if (delegateZone != keyZone && delegateZone->isGCMarking() &&
            keyZone->isGCMarking()) {
          if (!delegateZone->addSweepGroupEdgeTo(keyZone)) {
            return false;
          }
        }
      }
    }
  }

  return true;
}

// Memory reporter hook: the map object itself plus its table storage.
template <class K, class V, class AP>
size_t WeakMap<K, V, AP>::sizeOfIncludingThis(
    mozilla::MallocSizeOf mallocSizeOf) {
  return mallocSizeOf(this) + shallowSizeOfExcludingThis(mallocSizeOf);
}

#if DEBUG
// Debug check, used after sweeping: no surviving entry may reference a
// dead key, delegate or value.
template <class K, class V, class AP>
void WeakMap<K, V, AP>::assertEntriesNotAboutToBeFinalized() {
  for (Range r = all(); !r.empty(); r.popFront()) {
    K k = r.front().key();
    MOZ_ASSERT(!gc::IsAboutToBeFinalizedUnbarriered(k));
    JSObject* delegate = gc::detail::GetDelegate(k);
    if (delegate) {
      MOZ_ASSERT(!gc::IsAboutToBeFinalizedUnbarriered(delegate),
                 "weakmap marking depends on a key tracing its delegate");
    }
    MOZ_ASSERT(!gc::IsAboutToBeFinalized(r.front().value()));
  }
}
#endif

#ifdef JS_GC_ZEAL
// GC zeal check: verify the marking invariants for every entry, returning
// false if any entry fails.
template <class K, class V, class AP>
bool WeakMap<K, V, AP>::checkMarking() const {
  bool ok = true;
  for (Range r = all(); !r.empty(); r.popFront()) {
    gc::Cell* key = gc::ToMarkable(r.front().key());
    MOZ_RELEASE_ASSERT(key);
    gc::Cell* value = gc::ToMarkable(r.front().value());
    if (!gc::CheckWeakMapEntryMarking(this, key, value)) {
      ok = false;
    }
  }
  return ok;
}
#endif

#ifdef JSGC_HASH_TABLE_CHECKS
// Post-compacting-GC check: all entries are valid tenured cells in the
// expected zones and every key still hashes to its entry.
template <class K, class V, class AP>
void WeakMap<K, V, AP>::checkAfterMovingGC() const {
  MOZ_RELEASE_ASSERT(!hasNurseryEntries);
  MOZ_RELEASE_ASSERT(nurseryKeysValid);
  MOZ_RELEASE_ASSERT(nurseryKeys.empty());

  for (Range r = all(); !r.empty(); r.popFront()) {
    gc::Cell* key = gc::ToMarkable(r.front().key());
    gc::Cell* value = gc::ToMarkable(r.front().value());
    CheckGCThingAfterMovingGC(key);
    if (!allowKeysInOtherZones()) {
      Zone* keyZone = key->zoneFromAnyThread();
      MOZ_RELEASE_ASSERT(keyZone == zone() || keyZone->isAtomsZone());
    }
    CheckGCThingAfterMovingGC(value, zone());
    auto ptr = lookupUnbarriered(r.front().key());
    MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
  }
}
#endif  // JSGC_HASH_TABLE_CHECKS

// https://tc39.es/ecma262/#sec-canbeheldweakly
static MOZ_ALWAYS_INLINE bool CanBeHeldWeakly(Value value) {
  // 1. If v is an Object, return true.
  if (value.isObject()) {
    return true;
  }

  bool symbolsAsWeakMapKeysEnabled =
      JS::Prefs::experimental_symbols_as_weakmap_keys();

  // 2. If v is a Symbol and KeyForSymbol(v) is undefined, return true.
  if (symbolsAsWeakMapKeysEnabled && value.isSymbol() &&
      value.toSymbol()->code() != JS::SymbolCode::InSymbolRegistry) {
    return true;
  }

  // 3. Return false.
  return false;
}

// Hash function used for symbol weakmap keys.
inline HashNumber GetSymbolHash(JS::Symbol* sym) { return sym->hash(); }

/* static */
// Debug-only check that a Value used as a weakmap key satisfies
// CanBeHeldWeakly above.
inline void WeakMapKeyHasher<JS::Value>::checkValueType(const Value& value) {
  MOZ_ASSERT(CanBeHeldWeakly(value));
}

}  // namespace js

#endif /* gc_WeakMap_inl_h */