Zone.h (34692B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Zone_h
#define gc_Zone_h

#include "mozilla/Array.h"
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/LinkedList.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/PodOperations.h"
#include "mozilla/TimeStamp.h"

#include <array>

#include "jstypes.h"

#include "ds/Bitmap.h"
#include "gc/ArenaList.h"
#include "gc/Barrier.h"
#include "gc/BufferAllocator.h"
#include "gc/FinalizationObservers.h"
#include "gc/FindSCCs.h"
#include "gc/GCMarker.h"
#include "gc/NurseryAwareHashMap.h"
#include "gc/Policy.h"
#include "gc/Pretenuring.h"
#include "gc/Statistics.h"
#include "gc/WeakMap.h"
#include "gc/ZoneAllocator.h"
#include "js/GCHashTable.h"
#include "js/Vector.h"
#include "vm/AtomsTable.h"
#include "vm/InvalidatingFuse.h"
#include "vm/JSObject.h"
#include "vm/JSScript.h"
#include "vm/ObjectFuse.h"
#include "vm/ShapeZone.h"

namespace js {

class DebugScriptMap;
class RegExpZone;
class WeakRefObject;

namespace jit {
class JitZone;
}  // namespace jit

namespace gc {

class FinalizationObservers;
class ZoneList;

using ZoneComponentFinder = ComponentFinder<JS::Zone>;

// Weak-tracing policy used by UniqueIdMap below.
struct UniqueIdGCPolicy {
  static bool traceWeak(JSTracer* trc, Cell** keyp, uint64_t* valuep);
};

// Maps a Cell* to a unique, 64bit id.
using UniqueIdMap = GCHashMap<Cell*, uint64_t, PointerHasher<Cell*>,
                              SystemAllocPolicy, UniqueIdGCPolicy>;

template <typename T>
class ZoneAllCellIter;

template <typename T>
class ZoneCellIter;

#ifdef JS_GC_ZEAL

// Per-zone side table, only compiled in GC-zeal builds, mapping scripts to
// per-script AllocSite tables. Scripts are held weakly (WeakHeapPtr inside a
// WeakCache), so entries for dead scripts are swept.
class MissingAllocSites {
 public:
  using SiteMap = JS::GCHashMap<uint32_t, UniquePtr<AllocSite>,
                                DefaultHasher<uint32_t>, SystemAllocPolicy>;

  using ScriptMap = JS::GCHashMap<WeakHeapPtr<JSScript*>, SiteMap,
                                  StableCellHasher<WeakHeapPtr<JSScript*>>,
                                  SystemAllocPolicy>;
  JS::WeakCache<ScriptMap> scriptMap;

  explicit MissingAllocSites(JS::Zone* zone) : scriptMap(zone) {}
};

#endif  // JS_GC_ZEAL

}  // namespace gc

// If two different nursery strings are wrapped into the same zone, and have
// the same contents, then deduplication may make them duplicates.
// `DuplicatesPossible` will allow this and map both wrappers to the same (now
// tenured) source string.
using StringWrapperMap =
    NurseryAwareHashMap<JSString*, JSString*, ZoneAllocPolicy,
                        DuplicatesPossible>;

// Cache for NewMaybeExternalString and NewStringFromBuffer. It has separate
// cache entries for the Latin1 JSThinInlineString fast path and for the generic
// path where we allocate either a JSExternalString, an inline string, or a
// string with a StringBuffer.
class MOZ_NON_TEMPORARY_CLASS ExternalStringCache {
  static const size_t NumEntries = 4;
  mozilla::Array<JSInlineString*, NumEntries> inlineLatin1Entries_;
  mozilla::Array<JSLinearString*, NumEntries> entries_;

 public:
  ExternalStringCache() { purge(); }

  ExternalStringCache(const ExternalStringCache&) = delete;
  void operator=(const ExternalStringCache&) = delete;

  // Reset all cache entries to null.
  void purge() {
    inlineLatin1Entries_ = {};
    entries_ = {};
  }

  MOZ_ALWAYS_INLINE JSLinearString* lookup(const JS::Latin1Char* chars,
                                           size_t len) const;
  MOZ_ALWAYS_INLINE JSLinearString* lookup(const char16_t* chars,
                                           size_t len) const;
  MOZ_ALWAYS_INLINE void put(JSLinearString* s);

  MOZ_ALWAYS_INLINE JSInlineString* lookupInlineLatin1(
      const JS::Latin1Char* chars, size_t len) const;
  MOZ_ALWAYS_INLINE JSInlineString* lookupInlineLatin1(const char16_t* chars,
                                                       size_t len) const;
  MOZ_ALWAYS_INLINE void putInlineLatin1(JSInlineString* s);

 private:
  template <typename CharT>
  MOZ_ALWAYS_INLINE JSLinearString* lookupImpl(const CharT* chars,
                                               size_t len) const;
  template <typename CharT>
  MOZ_ALWAYS_INLINE JSInlineString* lookupInlineLatin1Impl(const CharT* chars,
                                                           size_t len) const;
};

// Small fixed-size cache mapping a BaseScript to its function-to-string
// result. Purged on GC (see Zone below).
class MOZ_NON_TEMPORARY_CLASS FunctionToStringCache {
  struct Entry {
    BaseScript* script;
    JSString* string;

    void set(BaseScript* scriptArg, JSString* stringArg) {
      script = scriptArg;
      string = stringArg;
    }
  };
  static const size_t NumEntries = 2;
  mozilla::Array<Entry, NumEntries> entries_;

 public:
  FunctionToStringCache() { purge(); }

  FunctionToStringCache(const FunctionToStringCache&) = delete;
  void operator=(const FunctionToStringCache&) = delete;

  void purge() { mozilla::PodArrayZero(entries_); }

  MOZ_ALWAYS_INLINE JSString* lookup(BaseScript* script) const;
  MOZ_ALWAYS_INLINE void put(BaseScript* script, JSString* string);
};

// HashAndLength is a simple class encapsulating the combination of a HashNumber
// and a (string) length into a single 64-bit value. Having them bundled
// together like this enables us to compare pairs of hashes and lengths with a
// single 64-bit comparison.
class HashAndLength {
 public:
  MOZ_ALWAYS_INLINE explicit HashAndLength(uint64_t initialValue = unsetValue())
      : mHashAndLength(initialValue) {}
  MOZ_ALWAYS_INLINE HashAndLength(HashNumber hash, uint32_t length)
      : mHashAndLength(uint64FromHashAndLength(hash, length)) {}

  void MOZ_ALWAYS_INLINE set(HashNumber hash, uint32_t length) {
    mHashAndLength = uint64FromHashAndLength(hash, length);
  }

  constexpr MOZ_ALWAYS_INLINE HashNumber hash() const {
    return hashFromUint64(mHashAndLength);
  }
  constexpr MOZ_ALWAYS_INLINE uint32_t length() const {
    return lengthFromUint64(mHashAndLength);
  }

  constexpr MOZ_ALWAYS_INLINE bool isEqual(HashNumber hash,
                                           uint32_t length) const {
    return mHashAndLength == uint64FromHashAndLength(hash, length);
  }

  // This function is used at compile-time to verify that we pack and unpack
  // hash and length values consistently.
  static constexpr bool staticChecks() {
    std::array<HashNumber, 5> hashes{0x00000000, 0xffffffff, 0xf0f0f0f0,
                                     0x0f0f0f0f, 0x73737373};
    std::array<uint32_t, 6> lengths{0, 1, 2, 3, 11, 56};

    for (const HashNumber hash : hashes) {
      for (const uint32_t length : lengths) {
        const uint64_t lengthAndHash = uint64FromHashAndLength(hash, length);
        if (hashFromUint64(lengthAndHash) != hash) {
          return false;
        }
        if (lengthFromUint64(lengthAndHash) != length) {
          return false;
        }
      }
    }

    return true;
  }

  static constexpr MOZ_ALWAYS_INLINE uint64_t unsetValue() {
    // This needs to be a combination of hash and length that would never occur
    // together. There is only one string of length zero, and its hash is zero,
    // so the hash here can be anything except zero.
    return uint64FromHashAndLength(0xffffffff, 0);
  }

 private:
  uint64_t mHashAndLength;

  // Layout: length in the high 32 bits, hash in the low 32 bits.
  static constexpr MOZ_ALWAYS_INLINE uint64_t
  uint64FromHashAndLength(HashNumber hash, uint32_t length) {
    return (static_cast<uint64_t>(length) << 32) | hash;
  }

  static constexpr MOZ_ALWAYS_INLINE uint32_t
  lengthFromUint64(uint64_t hashAndLength) {
    return static_cast<uint32_t>(hashAndLength >> 32);
  }

  static constexpr MOZ_ALWAYS_INLINE HashNumber
  hashFromUint64(uint64_t hashAndLength) {
    return hashAndLength & 0xffffffff;
  }
};

static_assert(HashAndLength::staticChecks());

// AtomCacheHashTable is a medium-capacity, low-overhead cache for matching
// strings to previously-added JSAtoms.
// This cache is very similar to a typical CPU memory cache. We use the low bits
// of the hash as an index into a table of sets of entries. Cache eviction
// follows a "least recently added" policy.
// All of the operations here are designed to be low-cost and efficient for
// modern CPU architectures. Failed lookups should incur at most one CPU memory
// cache miss and successful lookups should incur at most three (depending on
// whether or not the underlying character buffers are already in the cache).
class AtomCacheHashTable {
 public:
  static MOZ_ALWAYS_INLINE constexpr uint32_t computeIndexFromHash(
      const HashNumber hash) {
    // Simply use the low bits of the hash value as the cache index.
    return hash & (sSize - 1);
  }

  // Return the cached atom matching `lookup`, or nullptr on a cache miss.
  MOZ_ALWAYS_INLINE JSAtom* lookupForAdd(
      const AtomHasher::Lookup& lookup) const {
    MOZ_ASSERT(lookup.atom == nullptr, "Lookup by atom is not supported");

    const uint32_t index = computeIndexFromHash(lookup.hash);

    const EntrySet& entrySet = mEntrySets[index];
    for (const Entry& entry : entrySet.mEntries) {
      JSAtom* const atom = entry.mAtom;

      if (!entry.mHashAndLength.isEqual(lookup.hash, lookup.length)) {
        continue;
      }

      // This is annotated with MOZ_UNLIKELY because it virtually never happens
      // that, after matching the hash and the length, the string isn't a match.
      if (MOZ_UNLIKELY(!lookup.StringsMatch(*atom))) {
        continue;
      }

      return atom;
    }

    return nullptr;
  }

  MOZ_ALWAYS_INLINE void add(const HashNumber hash, JSAtom* atom) {
    const uint32_t index = computeIndexFromHash(hash);

    mEntrySets[index].add(hash, atom->length(), atom);
  }

 private:
  struct Entry {
    MOZ_ALWAYS_INLINE Entry()
        : mHashAndLength(HashAndLength::unsetValue()), mAtom(nullptr) {}

    MOZ_ALWAYS_INLINE void set(const HashNumber hash, const uint32_t length,
                               JSAtom* const atom) {
      mHashAndLength.set(hash, length);
      mAtom = atom;
    }

    // Hash and length are also available, from JSAtom and JSString
    // respectively, but are cached here to avoid likely cache misses in the
    // frequent case of a missed lookup.
    HashAndLength mHashAndLength;
    // No read barrier is required here because the table is cleared at the
    // start of GC.
    JSAtom* mAtom;
  };

  static_assert(sizeof(Entry) <= 16);

  // EntrySet represents a bundling of all of the Entry's that are mapped to the
  // same index.
  // NOTE/TODO: Since we have a tendency to use the entirety of this structure
  // together, it would be really nice to mark this class with alignas(64) to
  // ensure that the entire thing ends up on a single (hardware) cache line but
  // we can't do that because AtomCacheHashTable is allocated with js::UniquePtr
  // which doesn't support alignments greater than 8. In practice, on my Windows
  // machine at least, I am seeing that these objects *are* 64-byte aligned, but
  // it would be nice to guarantee that this will be the case.
  struct EntrySet {
    // Insert at slot 0, shifting the existing entries down; the entry in the
    // last slot is evicted ("least recently added" policy).
    MOZ_ALWAYS_INLINE void add(const HashNumber hash, const uint32_t length,
                               JSAtom* const atom) {
      MOZ_ASSERT(mEntries[0].mAtom != atom);
      MOZ_ASSERT(mEntries[1].mAtom != atom);
      MOZ_ASSERT(mEntries[2].mAtom != atom);
      MOZ_ASSERT(mEntries[3].mAtom != atom);
      mEntries[3] = mEntries[2];
      mEntries[2] = mEntries[1];
      mEntries[1] = mEntries[0];
      mEntries[0].set(hash, length, atom);
    }

    std::array<Entry, 4> mEntries;
  };

  static_assert(sizeof(EntrySet) <= 64,
                "EntrySet will not fit in a cache line");

  // This value was picked empirically based on performance testing using SP2
  // and SP3. 2k was better than 1k but 4k was not much better than 2k.
  static constexpr uint32_t sSize = 2 * 1024;
  static_assert(mozilla::IsPowerOfTwo(sSize));
  std::array<EntrySet, sSize> mEntrySets;
};

}  // namespace js

namespace JS {

// [SMDOC] GC Zones
//
// A zone is a collection of compartments. Every compartment belongs to exactly
// one zone. In Firefox, there is roughly one zone per tab along with a system
// zone for everything else. Zones mainly serve as boundaries for garbage
// collection. Unlike compartments, they have no special security properties.
//
// Every GC thing belongs to exactly one zone. GC things from the same zone but
// different compartments can share an arena (4k page). GC things from different
// zones cannot be stored in the same arena. The garbage collector is capable of
// collecting one zone at a time; it cannot collect at the granularity of
// compartments.
//
// GC things are tied to zones and compartments as follows:
//
// - JSObjects belong to a compartment and cannot be shared between
//   compartments. If an object needs to point to a JSObject in a different
//   compartment, regardless of zone, it must go through a cross-compartment
//   wrapper. Each compartment keeps track of its outgoing wrappers in a table.
//   JSObjects find their compartment via their Realm, which is found by
//   following their shape and base shape pointers.
//
// - JSStrings do not belong to any particular compartment, but they do belong
//   to a zone. Thus, two different compartments in the same zone can point to a
//   JSString. When a string needs to be wrapped, we copy it if it's in a
//   different zone and do nothing if it's in the same zone. Thus, transferring
//   strings within a zone is very efficient.
//
// - Shapes and base shapes belong to a zone and are shared between compartments
//   in that zone where possible. Accessor shapes store getter and setter
//   JSObjects which belong to a single compartment, so these shapes and all
//   their descendants can't be shared with other compartments.
//
// - Scripts are also compartment-local and cannot be shared. A script points to
//   its compartment.
//
// - JitCode objects belong to a compartment and cannot be shared. There is no
//   mechanism to obtain the compartment from a JitCode object.
//
// A zone remains alive as long as any GC things in the zone are alive. A
// compartment remains alive as long as any JSObjects, scripts, shapes, or base
// shapes within it are alive.
//
// We always guarantee that a zone has at least one live compartment by refusing
// to delete the last compartment in a live zone.
class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
 public:
  js::gc::ArenaLists arenas;

  js::gc::BufferAllocator bufferAllocator;

  // Per-zone data for use by an embedder.
  js::MainThreadData<void*> data;

  // When true, skip calling the metadata callback. We use this:
  // - to avoid invoking the callback recursively;
  // - to avoid observing lazy prototype setup (which confuses callbacks that
  //   want to use the types being set up!);
  // - to avoid attaching allocation stacks to allocation stack nodes, which
  //   is silly
  // And so on.
  js::MainThreadData<bool> suppressAllocationMetadataBuilder;

  // Flags permanently set when nursery allocation is disabled for this zone.
  js::MainThreadData<bool> nurseryStringsDisabled;
  js::MainThreadData<bool> nurseryBigIntsDisabled;

 private:
  // Flags dynamically updated based on more than one condition, including the
  // flags above.
  js::MainThreadOrIonCompileData<bool> allocNurseryObjects_;
  js::MainThreadOrIonCompileData<bool> allocNurseryStrings_;
  js::MainThreadOrIonCompileData<bool> allocNurseryBigInts_;
  js::MainThreadOrIonCompileData<bool> allocNurseryGetterSetters_;

  // Minimum Heap value which results in tenured allocation.
  js::MainThreadData<js::gc::Heap> minObjectHeapToTenure_;
  js::MainThreadData<js::gc::Heap> minStringHeapToTenure_;
  js::MainThreadData<js::gc::Heap> minBigintHeapToTenure_;
  js::MainThreadData<js::gc::Heap> minGetterSetterHeapToTenure_;

 public:
  // Script side-tables. These used to be held by Realm, but are now placed
  // here in order to allow JSScript to access them during finalize (see bug
  // 1568245; this change in 1575350). The tables are initialized lazily by
  // JSScript.
  js::UniquePtr<js::ScriptCountsMap> scriptCountsMap;
  js::UniquePtr<js::ScriptLCovMap> scriptLCovMap;
  js::MainThreadData<js::DebugScriptMap*> debugScriptMap;
#ifdef MOZ_VTUNE
  js::UniquePtr<js::ScriptVTuneIdMap> scriptVTuneIdMap;
#endif
#ifdef JS_CACHEIR_SPEW
  js::UniquePtr<js::ScriptFinalWarmUpCountMap> scriptFinalWarmUpCountMap;
#endif

  js::MainThreadData<js::StringStats> previousGCStringStats;
  js::MainThreadData<js::StringStats> stringStats;

  js::gc::PretenuringZone pretenuring;

 private:
  // Side map for storing unique ids for cells, independent of address.
  js::MainThreadOrGCTaskData<js::gc::UniqueIdMap> uniqueIds_;

  // Number of allocations since the most recent minor GC for this thread.
  uint32_t tenuredAllocsSinceMinorGC_ = 0;

  // Live weakmaps in this zone.
  js::MainThreadOrGCTaskData<mozilla::LinkedList<js::WeakMapBase>>
      gcWeakMapList_;

  // The set of compartments in this zone.
  using CompartmentVector =
      js::Vector<JS::Compartment*, 1, js::SystemAllocPolicy>;
  js::MainThreadOrGCTaskData<CompartmentVector> compartments_;

  // All cross-zone string wrappers in the zone.
  js::MainThreadOrGCTaskData<js::StringWrapperMap> crossZoneStringWrappers_;

  // List of non-ephemeron weak containers to sweep during
  // beginSweepingSweepGroup.
  js::MainThreadOrGCTaskData<mozilla::LinkedList<detail::WeakCacheBase>>
      weakCaches_;

  // Mapping from not yet marked keys to a vector of all values that the key
  // maps to in any live weak map.
  js::MainThreadOrGCTaskData<js::gc::EphemeronEdgeTable> gcEphemeronEdges_;

  js::MainThreadData<js::UniquePtr<js::RegExpZone>> regExps_;

  // Bitmap of atoms marked by this zone.
  js::MainThreadOrGCTaskData<js::SparseBitmap> markedAtoms_;

  // Set of atoms recently used by this Zone. Purged on GC.
  js::MainThreadOrGCTaskData<js::UniquePtr<js::AtomCacheHashTable>> atomCache_;

  // Cache storing allocated external strings. Purged on GC.
  js::MainThreadOrGCTaskData<js::ExternalStringCache> externalStringCache_;

  // Cache for Function.prototype.toString. Purged on GC.
  js::MainThreadOrGCTaskData<js::FunctionToStringCache> functionToStringCache_;

  // Cache for Function.prototype.bind mapping an atom `name` to atom
  // `"bound " + name`. Purged on GC.
  using BoundPrefixCache =
      js::HashMap<JSAtom*, JSAtom*, js::PointerHasher<JSAtom*>,
                  js::SystemAllocPolicy>;
  js::MainThreadData<BoundPrefixCache> boundPrefixCache_;

  // Information about Shapes and BaseShapes.
  js::MainThreadData<js::ShapeZone> shapeZone_;

  // Information about finalization registries, created on demand.
  js::MainThreadOrGCTaskData<js::UniquePtr<js::gc::FinalizationObservers>>
      finalizationObservers_;

  js::MainThreadOrGCTaskOrIonCompileData<js::jit::JitZone*> jitZone_;

  // Number of realms in this zone that have a non-null object allocation
  // metadata builder.
  js::MainThreadOrIonCompileData<size_t> numRealmsWithAllocMetadataBuilder_{0};

  // Last time at which JIT code was discarded for this zone. This is only set
  // when JitScripts and Baseline code are discarded as well.
  js::MainThreadData<mozilla::TimeStamp> lastDiscardedCodeTime_;

  // GC scheduling / state flags; see the accessors below.
  js::MainThreadData<bool> gcScheduled_;
  js::MainThreadData<bool> gcScheduledSaved_;
  js::MainThreadData<bool> gcPreserveCode_;
  js::MainThreadData<bool> keepPropMapTables_;
  js::MainThreadData<bool> wasCollected_;

  // Growable buffer of wrappers pending preservation; managed by
  // preserveWrapper() and friends below.
  js::MainThreadOrIonCompileData<JSObject**> preservedWrappers_;
  js::MainThreadOrIonCompileData<size_t> preservedWrappersCount_;
  js::MainThreadOrIonCompileData<size_t> preservedWrappersCapacity_;

  // Allow zones to be linked into a list
  js::MainThreadOrGCTaskData<Zone*> listNext_;
  static Zone* const NotOnList;
  friend class js::gc::ZoneList;

  using KeptAliveSet =
      JS::GCHashSet<js::HeapPtr<Value>, js::gc::WeakTargetHasher,
                    js::ZoneAllocPolicy>;
  friend class js::WeakRefObject;
  js::MainThreadOrGCTaskData<KeptAliveSet> keptAliveSet;

  // To support weak pointers in some special cases we keep a list of objects
  // that need to be traced weakly on GC. This is currently only used for the
  // JIT's ShapeListObject. It's assumed that there will not be many of these
  // objects.
  using ObjectVector = js::GCVector<JSObject*, 0, js::SystemAllocPolicy>;
  js::MainThreadOrGCTaskData<ObjectVector> objectsWithWeakPointers;

#ifdef DEBUG
  js::MainThreadData<unsigned> gcSweepGroupIndex;

  // During gray marking, delay AssertCellIsNotGray checks by
  // recording the cell pointers here and checking after marking has
  // finished.
  js::MainThreadData<js::Vector<const js::gc::Cell*, 0, js::SystemAllocPolicy>>
      cellsToAssertNotGray_;
#endif

 public:
#ifdef JS_GC_ZEAL
  // Must come after weakCaches_ above.
  js::UniquePtr<js::gc::MissingAllocSites> missingSites;
#endif  // JS_GC_ZEAL

  static JS::Zone* from(ZoneAllocator* zoneAlloc) {
    return static_cast<Zone*>(zoneAlloc);
  }

  explicit Zone(JSRuntime* rt, Kind kind = NormalZone);
  ~Zone();

  [[nodiscard]] bool init();

  void destroy(JS::GCContext* gcx);

  [[nodiscard]] bool findSweepGroupEdges(Zone* atomsZone);

  struct JitDiscardOptions {
    JitDiscardOptions() {}
    bool discardJitScripts = false;
    bool resetNurseryAllocSites = false;
    bool resetPretenuredAllocSites = false;
  };

  void maybeDiscardJitCode(JS::GCContext* gcx);

  // Discard JIT code regardless of isPreservingCode().
  void forceDiscardJitCode(
      JS::GCContext* gcx,
      const JitDiscardOptions& options = JitDiscardOptions());

  void resetAllocSitesAndInvalidate(bool resetNurserySites,
                                    bool resetPretenuredSites);

  void traceWeakJitScripts(JSTracer* trc);

  bool registerObjectWithWeakPointers(JSObject* obj);
  void sweepObjectsWithWeakPointers(JSTracer* trc);

  // Memory-reporting hook: accumulates the sizes of this zone's sub-parts
  // into the given out-parameters.
  void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                              size_t* zoneObject, JS::CodeSizes* code,
                              size_t* regexpZone, size_t* jitZone,
                              size_t* cacheIRStubs, size_t* objectFusesArg,
                              size_t* uniqueIdMap, size_t* initialPropMapTable,
                              size_t* shapeTables, size_t* atomsMarkBitmaps,
                              size_t* compartmentObjects,
                              size_t* crossCompartmentWrappersTables,
                              size_t* compartmentsPrivateData,
                              size_t* scriptCountsMapArg);

  // Iterate over all cells in the zone. See the definition of ZoneCellIter
  // in gc/GC-inl.h for the possible arguments and documentation.
  template <typename T, typename... Args>
  js::gc::ZoneCellIter<T> cellIter(Args&&... args) {
    return js::gc::ZoneCellIter<T>(const_cast<Zone*>(this),
                                   std::forward<Args>(args)...);
  }

  // As above, but can return about-to-be-finalised things.
  template <typename T, typename... Args>
  js::gc::ZoneAllCellIter<T> cellIterUnsafe(Args&&... args) {
    return js::gc::ZoneAllCellIter<T>(const_cast<Zone*>(this),
                                      std::forward<Args>(args)...);
  }

  bool hasMarkedRealms();

  void scheduleGC() {
    MOZ_ASSERT(!RuntimeHeapIsBusy());
    gcScheduled_ = true;
  }
  void unscheduleGC() { gcScheduled_ = false; }
  bool isGCScheduled() { return gcScheduled_; }

  void setPreservingCode(bool preserving) { gcPreserveCode_ = preserving; }
  bool isPreservingCode() const { return gcPreserveCode_; }

  mozilla::TimeStamp lastDiscardedCodeTime() const {
    return lastDiscardedCodeTime_;
  }

  void changeGCState(GCState prev, GCState next);

  bool isCollecting() const {
    MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtimeFromMainThread()));
    return isCollectingFromAnyThread();
  }

  inline bool isCollectingFromAnyThread() const {
    return needsIncrementalBarrier() || wasGCStarted();
  }

  GCState initialMarkingState() const;

  bool shouldMarkInZone(js::gc::MarkColor color) const {
    // Check whether the zone is in one or both of the MarkBlackOnly and
    // MarkBlackAndGray states, depending on the mark color. Also check for
    // VerifyPreBarriers when the mark color is black (we don't do any gray
    // marking when verifying pre-barriers).
    if (color == js::gc::MarkColor::Black) {
      return isGCMarkingOrVerifyingPreBarriers();
    }

    return isGCMarkingBlackAndGray();
  }

  // Was this zone collected in the last GC.
  bool wasCollected() const { return wasCollected_; }
  void setWasCollected(bool v) { wasCollected_ = v; }

  void setNeedsIncrementalBarrier(bool needs);
  const BarrierState* addressOfNeedsIncrementalBarrier() const {
    return &needsIncrementalBarrier_;
  }

  // Offsets used by JIT-generated code to access these members directly.
  static constexpr size_t offsetOfNeedsIncrementalBarrier() {
    return offsetof(Zone, needsIncrementalBarrier_);
  }
  static constexpr size_t offsetOfJitZone() { return offsetof(Zone, jitZone_); }

  // Return the JitZone, creating it on first use. Returns nullptr on failure.
  js::jit::JitZone* getJitZone(JSContext* cx) {
    return jitZone_ ? jitZone_ : createJitZone(cx);
  }
  js::jit::JitZone* jitZone() { return jitZone_; }

  bool ensureJitZoneExists(JSContext* cx) { return !!getJitZone(cx); }

  // Append |obj| to the pending wrapper-preservation buffer, growing it
  // geometrically up to maxCapacity. Returns false on allocation failure or
  // when the capacity cap would be exceeded.
  bool preserveWrapper(JSObject* obj) {
    MOZ_ASSERT(preservedWrappersCount_ <= preservedWrappersCapacity_);
    if (preservedWrappersCount_ >= preservedWrappersCapacity_) {
      const size_t initialCapacity = 8;
      const size_t maxCapacity = 8192;
      size_t newCapacity =
          std::max(size_t(initialCapacity), preservedWrappersCapacity_ * 2);
      if (newCapacity > maxCapacity) {
        return false;
      }
      JSObject** oldPtr = preservedWrappers_.ref();
      JSObject** newPtr = js_pod_arena_realloc<JSObject*>(
          js::MallocArena, oldPtr, preservedWrappersCapacity_, newCapacity);
      if (!newPtr) {
        return false;
      }
      preservedWrappersCapacity_ = newCapacity;
      preservedWrappers_ = newPtr;
    }
    preservedWrappers_[preservedWrappersCount_++] = obj;
    return true;
  }

  // Free the (already drained) wrapper-preservation buffer.
  void purgePendingWrapperPreservationBuffer() {
    MOZ_RELEASE_ASSERT(preservedWrappersCount_ == 0);
    js_free(preservedWrappers_);
    preservedWrappers_ = nullptr;
    preservedWrappersCapacity_ = 0;
  }

  const void* addressOfPreservedWrappers() const {
    return &preservedWrappers_.ref();
  }

  const size_t* addressOfPreservedWrappersCount() const {
    return &preservedWrappersCount_.ref();
  }

  const size_t* addressOfPreservedWrappersCapacity() const {
    return &preservedWrappersCapacity_.ref();
  }

  // Take all pending wrappers, resetting the count to zero. The returned span
  // still aliases the internal buffer.
  mozilla::Span<JSObject*> slurpPendingWrapperPreservations() {
    size_t count = preservedWrappersCount_;
    preservedWrappersCount_ = 0;
    return mozilla::Span<JSObject*>(preservedWrappers_.ref(), count);
  }

  void incNumRealmsWithAllocMetadataBuilder() {
    numRealmsWithAllocMetadataBuilder_++;
  }
  void decNumRealmsWithAllocMetadataBuilder() {
    MOZ_ASSERT(numRealmsWithAllocMetadataBuilder_ > 0);
    numRealmsWithAllocMetadataBuilder_--;
  }
  bool hasRealmWithAllocMetadataBuilder() const {
    return numRealmsWithAllocMetadataBuilder_ > 0;
  }

  void traceRootsInMajorGC(JSTracer* trc);

  void sweepAfterMinorGC(JSTracer* trc);
  void sweepUniqueIds();
  void sweepCompartments(JS::GCContext* gcx, bool keepAtleastOne,
                         bool destroyingRuntime);

  // Remove dead weak maps from gcWeakMapList_ and remove entries from the
  // remaining weak maps whose keys are dead.
  void sweepWeakMaps(JSTracer* trc);

  // Trace all weak maps in this zone. Used to update edges after a moving GC.
  void traceWeakMaps(JSTracer* trc);

  js::gc::UniqueIdMap& uniqueIds() { return uniqueIds_.ref(); }

  void notifyObservingDebuggers();

  void noteTenuredAlloc() { tenuredAllocsSinceMinorGC_++; }

  uint32_t* addressOfTenuredAllocCount() { return &tenuredAllocsSinceMinorGC_; }

  uint32_t getAndResetTenuredAllocsSinceMinorGC() {
    uint32_t res = tenuredAllocsSinceMinorGC_;
    tenuredAllocsSinceMinorGC_ = 0;
    return res;
  }

  mozilla::LinkedList<js::WeakMapBase>& gcWeakMapList() {
    return gcWeakMapList_.ref();
  }

  CompartmentVector& compartments() { return compartments_.ref(); }

  js::StringWrapperMap& crossZoneStringWrappers() {
    return crossZoneStringWrappers_.ref();
  }
  const js::StringWrapperMap& crossZoneStringWrappers() const {
    return crossZoneStringWrappers_.ref();
  }

  void dropStringWrappersOnGC();

  void traceWeakCCWEdges(JSTracer* trc);
  static void fixupAllCrossCompartmentWrappersAfterMovingGC(JSTracer* trc);

  void prepareForMovingGC();
  void fixupAfterMovingGC();

  void fixupScriptMapsAfterMovingGC(JSTracer* trc);

  void setNurseryAllocFlags(bool allocObjects, bool allocStrings,
                            bool allocBigInts, bool allocGetterSetters);

  // Whether cells of the given trace kind may be nursery-allocated in this
  // zone.
  bool allocKindInNursery(JS::TraceKind kind) const {
    switch (kind) {
      case JS::TraceKind::Object:
        return allocNurseryObjects_;
      case JS::TraceKind::String:
        return allocNurseryStrings_;
      case JS::TraceKind::BigInt:
        return allocNurseryBigInts_;
      case JS::TraceKind::GetterSetter:
        return allocNurseryGetterSetters_;
      default:
        MOZ_CRASH("Unsupported kind for nursery allocation");
    }
  }
  bool allocNurseryObjects() const { return allocNurseryObjects_; }

  // Note that this covers both allocating JSStrings themselves in the nursery,
  // as well as (possibly) the character data.
  bool allocNurseryStrings() const { return allocNurseryStrings_; }

  bool allocNurseryBigInts() const { return allocNurseryBigInts_; }

  bool allocNurseryGetterSetters() const { return allocNurseryGetterSetters_; }

  js::gc::Heap minHeapToTenure(JS::TraceKind kind) const {
    switch (kind) {
      case JS::TraceKind::Object:
        return minObjectHeapToTenure_;
      case JS::TraceKind::String:
        return minStringHeapToTenure_;
      case JS::TraceKind::BigInt:
        return minBigintHeapToTenure_;
      case JS::TraceKind::GetterSetter:
        return minGetterSetterHeapToTenure_;
      default:
        MOZ_CRASH("Unsupported kind for nursery allocation");
    }
  }

  mozilla::LinkedList<detail::WeakCacheBase>& weakCaches() {
    return weakCaches_.ref();
  }
  void registerWeakCache(detail::WeakCacheBase* cachep) {
    weakCaches().insertBack(cachep);
  }

  void beforeClearDelegate(JSObject* wrapper, JSObject* delegate) {
    if (needsIncrementalBarrier()) {
      beforeClearDelegateInternal(wrapper, delegate);
    }
  }

  void beforeClearDelegateInternal(JSObject* wrapper, JSObject* delegate);
  js::gc::EphemeronEdgeTable& gcEphemeronEdges() {
    return gcEphemeronEdges_.ref();
  }

  // Perform all pending weakmap entry marking for this zone after
  // transitioning to weak marking mode.
  js::gc::IncrementalProgress enterWeakMarkingMode(js::GCMarker* marker,
                                                   JS::SliceBudget& budget);

  // A set of edges from this zone to other zones used during GC to calculate
  // sweep groups.
  NodeSet& gcSweepGroupEdges() {
    return gcGraphEdges;  // Defined in GraphNodeBase base class.
  }
  bool hasSweepGroupEdgeTo(Zone* otherZone) const {
    return gcGraphEdges.has(otherZone);
  }
  [[nodiscard]] bool addSweepGroupEdgeTo(Zone* otherZone) {
    MOZ_ASSERT(otherZone->isGCMarking());
    return gcSweepGroupEdges().put(otherZone);
  }
  void clearSweepGroupEdges() { gcSweepGroupEdges().clear(); }

  js::RegExpZone& regExps() { return *regExps_.ref(); }

  js::SparseBitmap& markedAtoms() { return markedAtoms_.ref(); }

  // The atom cache is "allocate-on-demand". This function can return nullptr if
  // the allocation failed.
  js::AtomCacheHashTable* atomCache() {
    if (atomCache_.ref()) {
      return atomCache_.ref().get();
    }

    atomCache_ = js::MakeUnique<js::AtomCacheHashTable>();
    return atomCache_.ref().get();
  }

  void purgeAtomCache();

  js::ExternalStringCache& externalStringCache() {
    return externalStringCache_.ref();
  };

  js::FunctionToStringCache& functionToStringCache() {
    return functionToStringCache_.ref();
  }

  BoundPrefixCache& boundPrefixCache() { return boundPrefixCache_.ref(); }

  js::ShapeZone& shapeZone() { return shapeZone_.ref(); }

  bool keepPropMapTables() const { return keepPropMapTables_; }
  void setKeepPropMapTables(bool b) { keepPropMapTables_ = b; }

  void clearRootsForShutdownGC();
  void finishRoots();

  void traceScriptTableRoots(JSTracer* trc);

  void clearScriptCounts(Realm* realm);
  void clearScriptLCov(Realm* realm);

  // Add the target of JS WeakRef to a kept-alive set maintained by GC.
  // https://tc39.es/ecma262/#sec-addtokeptobjects
  bool addToKeptObjects(HandleValue target);

  void traceKeptObjects(JSTracer* trc);

  // Clear the kept-alive set.
  // See: https://tc39.es/proposal-weakrefs/#sec-clear-kept-objects
  void clearKeptObjects();

  js::gc::AllocSite* unknownAllocSite(JS::TraceKind kind) {
    return &pretenuring.unknownAllocSite(kind);
  }
  js::gc::AllocSite* optimizedAllocSite() {
    return &pretenuring.optimizedAllocSite;
  }
  js::gc::AllocSite* tenuringAllocSite() {
    return &pretenuring.tenuringAllocSite;
  }
  uint32_t nurseryPromotedCount(JS::TraceKind kind) const {
    return pretenuring.nurseryPromotedCount(kind);
  }

#ifdef JSGC_HASH_TABLE_CHECKS
  void checkAllCrossCompartmentWrappersAfterMovingGC();
  void checkStringWrappersAfterMovingGC();

  // Assert that the UniqueId table has been redirected successfully.
  void checkUniqueIdTableAfterMovingGC();

  void checkScriptMapsAfterMovingGC();
#endif

#ifdef DEBUG
  // For testing purposes, return the index of the sweep group which this zone
  // was swept in in the last GC.
  unsigned lastSweepGroupIndex() { return gcSweepGroupIndex; }

  auto& cellsToAssertNotGray() { return cellsToAssertNotGray_.ref(); }
#endif

  // Support for invalidating fuses
  js::DependentIonScriptGroup fuseDependencies;

  // JSObject* => ObjectFuse* map for objects in this zone.
  js::ObjectFuseMap objectFuses;

 private:
  js::jit::JitZone* createJitZone(JSContext* cx);

  bool isQueuedForBackgroundSweep() { return isOnList(); }

  js::gc::FinalizationObservers* finalizationObservers() {
    return finalizationObservers_.ref().get();
  }
  bool ensureFinalizationObservers();

  bool isOnList() const;
  Zone* nextZone() const;

  friend bool js::CurrentThreadCanAccessZone(Zone* zone);
  friend class js::gc::GCRuntime;
};

}  // namespace JS

namespace js::gc {
const char* StateName(JS::Zone::GCState state);
}  // namespace js::gc

#endif  // gc_Zone_h