MemoryMetrics.cpp (29064B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "js/MemoryMetrics.h"

#include <algorithm>

#include "gc/BufferAllocator.h"
#include "gc/GC.h"
#include "gc/Memory.h"
#include "gc/Nursery.h"
#include "gc/PublicIterators.h"
#include "jit/BaselineJIT.h"
#include "jit/Ion.h"
#include "js/HeapAPI.h"
#include "util/Text.h"
#include "vm/BigIntType.h"
#include "vm/HelperThreadState.h"
#include "vm/JSObject.h"
#include "vm/JSScript.h"
#include "vm/PropMap.h"
#include "vm/Realm.h"
#include "vm/Runtime.h"
#include "vm/Shape.h"
#include "vm/StringType.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmJS.h"
#include "wasm/WasmModule.h"

#include "wasm/WasmInstance-inl.h"

using mozilla::MallocSizeOf;

using namespace js;

using JS::ObjectPrivateVisitor;
using JS::RealmStats;
using JS::RuntimeStats;
using JS::ZoneStats;

namespace js {

// Measurements at or below this threshold may be lumped together into a
// single "sundries" bucket by memory reporters.
JS_PUBLIC_API size_t MemoryReportingSundriesThreshold() { return 8 * 1024; }

// Hash a string WITHOUT flattening it. A memory reporter must not mutate the
// heap, and flattening a rope would do exactly that -- hence "inefficient".
/* static */
HashNumber InefficientNonFlatteningStringHashPolicy::hash(const Lookup& l) {
  if (l->isLinear()) {
    return HashStringChars(&l->asLinear());
  }

  // Use rope's non-copying hash function.
  uint32_t hash = 0;
  if (!l->asRope().hash(&hash)) {
    MOZ_CRASH("oom");
  }
  return hash;
}

// Compare two strings for character-wise equality without flattening either
// one; rope contents are copied into temporary malloc'd buffers instead.
// Char1/Char2 are the per-string character types (Latin1Char or char16_t).
template <typename Char1, typename Char2>
static bool EqualStringsPure(JSString* s1, JSString* s2) {
  if (s1->length() != s2->length()) {
    return false;
  }

  const Char1* c1;
  UniquePtr<Char1[], JS::FreePolicy> ownedChars1;
  JS::AutoCheckCannotGC nogc;
  if (s1->isLinear()) {
    c1 = s1->asLinear().chars<Char1>(nogc);
  } else {
    ownedChars1 =
        s1->asRope().copyChars<Char1>(/* tcx */ nullptr, js::MallocArena);
    if (!ownedChars1) {
      MOZ_CRASH("oom");
    }
    c1 = ownedChars1.get();
  }

  const Char2* c2;
  UniquePtr<Char2[], JS::FreePolicy> ownedChars2;
  if (s2->isLinear()) {
    c2 = s2->asLinear().chars<Char2>(nogc);
  } else {
    ownedChars2 =
        s2->asRope().copyChars<Char2>(/* tcx */ nullptr, js::MallocArena);
    if (!ownedChars2) {
      MOZ_CRASH("oom");
    }
    c2 = ownedChars2.get();
  }

  return EqualChars(c1, c2, s1->length());
}

// Equality for the notable-strings hash table; dispatches on the Latin1 /
// two-byte encoding of each operand.
/* static */
bool InefficientNonFlatteningStringHashPolicy::match(const JSString* const& k,
                                                     const Lookup& l) {
  // We can't use js::EqualStrings, because that flattens our strings.
  JSString* s1 = const_cast<JSString*>(k);
  if (k->hasLatin1Chars()) {
    return l->hasLatin1Chars() ? EqualStringsPure<Latin1Char, Latin1Char>(s1, l)
                               : EqualStringsPure<Latin1Char, char16_t>(s1, l);
  }

  return l->hasLatin1Chars() ? EqualStringsPure<char16_t, Latin1Char>(s1, l)
                             : EqualStringsPure<char16_t, char16_t>(s1, l);
}

}  // namespace js

namespace JS {

// Copy (a prefix of) |str|'s characters into |buffer| as an escaped,
// NUL-terminated C string, without flattening |str|.
template <typename CharT>
static void StoreStringChars(char* buffer, size_t bufferSize, JSString* str) {
  const CharT* chars;
  UniquePtr<CharT[], JS::FreePolicy> ownedChars;
  JS::AutoCheckCannotGC nogc;
  if (str->isLinear()) {
    chars = str->asLinear().chars<CharT>(nogc);
  } else {
    ownedChars =
        str->asRope().copyChars<CharT>(/* tcx */ nullptr, js::MallocArena);
    if (!ownedChars) {
      MOZ_CRASH("oom");
    }
    chars = ownedChars.get();
  }

  // We might truncate |str| even if it's much shorter than 1024 chars, if
  // |str| contains unicode chars.  Since this is just for a memory reporter,
  // we don't care.
  PutEscapedString(buffer, bufferSize, chars, str->length(), /* quote */ 0);
}

// Snapshot a notable string: copy its tallies and save up to MAX_SAVED_CHARS
// of its contents for display in memory reports.
NotableStringInfo::NotableStringInfo(JSString* str, const StringInfo& info)
    : StringInfo(info), length(str->length()) {
  size_t bufferSize = std::min(str->length() + 1, size_t(MAX_SAVED_CHARS));
  buffer.reset(js_pod_malloc<char>(bufferSize));
  if (!buffer) {
    MOZ_CRASH("oom");
  }

  if (str->hasLatin1Chars()) {
    StoreStringChars<Latin1Char>(buffer.get(), bufferSize, str);
  } else {
    StoreStringChars<char16_t>(buffer.get(), bufferSize, str);
  }
}

// Snapshot a notable class: copy its tallies and duplicate the class name so
// the report doesn't depend on the JSClass outliving it.
NotableClassInfo::NotableClassInfo(const char* className, const ClassInfo& info)
    : ClassInfo(info) {
  className_ = DuplicateString(className);
  if (!className_) {
    MOZ_CRASH("oom");
  }
}

// Snapshot a notable script source: copy its tallies and duplicate the
// filename string.
NotableScriptSourceInfo::NotableScriptSourceInfo(const char* filename,
                                                 const ScriptSourceInfo& info)
    : ScriptSourceInfo(info) {
  filename_ = DuplicateString(filename);
  if (!filename_) {
    MOZ_CRASH("oom");
  }
}

}  // namespace JS

// Set of ScriptSources already measured, so a source shared by multiple
// scripts/modules is only counted once.
using SourceSet =
    HashSet<ScriptSource*, DefaultHasher<ScriptSource*>, SystemAllocPolicy>;
// State threaded (as |void* data|) through the heap-iteration callbacks
// below: the stats being accumulated, the embedding's private-data visitor,
// and "seen sets" used to avoid double-counting resources shared between
// wasm objects.
struct StatsClosure {
  RuntimeStats* rtStats;
  ObjectPrivateVisitor* opv;
  SourceSet seenSources;
  wasm::CodeMetadata::SeenSet wasmSeenCodeMetadata;
  js::CodeMetadataForAsmJS::SeenSet wasmSeenCodeMetadataForAsmJS;
  wasm::Code::SeenSet wasmSeenCode;
  wasm::Table::SeenSet wasmSeenTables;
  bool anonymize;  // If true, skip notable-string detection (see below).

  StatsClosure(RuntimeStats* rt, ObjectPrivateVisitor* v, bool anon)
      : rtStats(rt), opv(v), anonymize(anon) {}
};

// Per-chunk callback: accumulate the total size of each chunk's decommitted
// pages into |*data| (a size_t*).
static void DecommittedPagesChunkCallback(JSRuntime* rt, void* data,
                                          gc::ArenaChunk* chunk,
                                          const JS::AutoRequireNoGC& nogc) {
  auto* gcHeapDecommittedPages = static_cast<size_t*>(data);
  *gcHeapDecommittedPages += chunk->decommittedPages.Count() * gc::PageSize;
}

// Per-zone callback: start a new ZoneStats entry and measure the zone's own
// data structures. Sets |currZoneStats| for later arena/cell callbacks.
static void StatsZoneCallback(JSRuntime* rt, void* data, Zone* zone,
                              const JS::AutoRequireNoGC& nogc) {
  // Append a new ZoneStats to the vector.
  RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

  // CollectRuntimeStats reserves enough space.
  MOZ_ALWAYS_TRUE(rtStats->zoneStatsVector.growBy(1));
  ZoneStats& zStats = rtStats->zoneStatsVector.back();
  zStats.initStrings();
  rtStats->initExtraZoneStats(zone, &zStats, nogc);
  rtStats->currZoneStats = &zStats;

  zone->addSizeOfIncludingThis(
      rtStats->mallocSizeOf_, &zStats.zoneObject, &zStats.code,
      &zStats.regexpZone, &zStats.jitZone, &zStats.cacheIRStubs,
      &zStats.objectFuses, &zStats.uniqueIdMap, &zStats.initialPropMapTable,
      &zStats.shapeTables, &rtStats->runtime.atomsMarkBitmaps,
      &zStats.compartmentObjects, &zStats.crossCompartmentWrappersTables,
      &zStats.compartmentsPrivateData, &zStats.scriptCountsMap);
  zone->bufferAllocator.addSizeOfExcludingThis(&zStats.gcBuffers.usedBytes,
                                               &zStats.gcBuffers.freeBytes,
                                               &zStats.gcBuffers.adminBytes);
}

// Per-realm callback: start a new RealmStats entry, attach it to the realm
// (detached again at the end of collection), and measure the realm's data.
static void StatsRealmCallback(JSContext* cx, void* data, Realm* realm,
                               const JS::AutoRequireNoGC& nogc) {
  // Append a new RealmStats to the vector.
  RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

  // CollectRuntimeStats reserves enough space.
  MOZ_ALWAYS_TRUE(rtStats->realmStatsVector.growBy(1));
  RealmStats& realmStats = rtStats->realmStatsVector.back();
  realmStats.initClasses();
  rtStats->initExtraRealmStats(realm, &realmStats, nogc);

  realm->setRealmStats(&realmStats);

  // Measure the realm object itself, and things hanging off it.
  realm->addSizeOfIncludingThis(
      rtStats->mallocSizeOf_, &realmStats.realmObject, &realmStats.realmTables,
      &realmStats.innerViewsTable, &realmStats.objectMetadataTable,
      &realmStats.savedStacksSet, &realmStats.nonSyntacticLexicalScopesTable);
}

// Per-arena callback: account for arena header/padding overhead and
// pre-charge the arena's whole allocatable span as "unused" (used cells are
// subtracted back out in StatsCellCallback).
static void StatsArenaCallback(JSRuntime* rt, void* data, gc::Arena* arena,
                               JS::TraceKind traceKind, size_t thingSize,
                               const JS::AutoRequireNoGC& nogc) {
  RuntimeStats* rtStats = static_cast<StatsClosure*>(data)->rtStats;

  // The admin space includes (a) the header fields and (b) the padding
  // between the end of the header fields and the first GC thing.
  size_t allocationSpace = gc::Arena::thingsSpan(arena->getAllocKind());
  rtStats->currZoneStats->gcHeapArenaAdmin += gc::ArenaSize - allocationSpace;

  // We don't call the callback on unused things.  So we compute the
  // unused space like this:  arenaUnused = maxArenaUnused - arenaUsed.
  // We do this by setting arenaUnused to maxArenaUnused here, and then
  // subtracting thingSize for every used cell, in StatsCellCallback().
  rtStats->currZoneStats->unusedGCThings.addToKind(traceKind, allocationSpace);
}

// FineGrained is used for normal memory reporting.  CoarseGrained is used by
// AddSizeOfTab(), which aggregates all the measurements into a handful of
// high-level numbers, which means that fine-grained reporting would be a
// waste of effort.
261 enum Granularity { FineGrained, CoarseGrained }; 262 263 static void AddClassInfo(Granularity granularity, RealmStats& realmStats, 264 const char* className, JS::ClassInfo& info) { 265 if (granularity == FineGrained) { 266 if (!className) { 267 className = "<no class name>"; 268 } 269 RealmStats::ClassesHashMap::AddPtr p = 270 realmStats.allClasses->lookupForAdd(className); 271 if (!p) { 272 bool ok = realmStats.allClasses->add(p, className, info); 273 // Ignore failure -- we just won't record the 274 // object/shape/base-shape as notable. 275 (void)ok; 276 } else { 277 p->value().add(info); 278 } 279 } 280 } 281 282 template <Granularity granularity> 283 static void CollectScriptSourceStats(StatsClosure* closure, ScriptSource* ss) { 284 RuntimeStats* rtStats = closure->rtStats; 285 286 SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss); 287 if (entry) { 288 return; 289 } 290 291 bool ok = closure->seenSources.add(entry, ss); 292 (void)ok; // Not much to be done on failure. 293 294 JS::ScriptSourceInfo info; // This zeroes all the sizes. 295 ss->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &info); 296 297 rtStats->runtime.scriptSourceInfo.add(info); 298 299 if (granularity == FineGrained) { 300 const char* filename = ss->filename(); 301 if (!filename) { 302 filename = "<no filename>"; 303 } 304 305 JS::RuntimeSizes::ScriptSourcesHashMap::AddPtr p = 306 rtStats->runtime.allScriptSources->lookupForAdd(filename); 307 if (!p) { 308 bool ok = rtStats->runtime.allScriptSources->add(p, filename, info); 309 // Ignore failure -- we just won't record the script source as notable. 310 (void)ok; 311 } else { 312 p->value().add(info); 313 } 314 } 315 } 316 317 // The various kinds of hashing are expensive, and the results are unused when 318 // doing coarse-grained measurements. Skipping them more than doubles the 319 // profile speed for complex pages such as gmail.com. 
// Per-cell callback: attribute one GC cell's GC-heap and malloc-heap sizes to
// the appropriate ZoneStats/RealmStats buckets, dispatching on its trace
// kind.  Also subtracts the cell's size from the "unused" tally pre-charged
// by StatsArenaCallback().
template <Granularity granularity>
static void StatsCellCallback(JSRuntime* rt, void* data, JS::GCCellPtr cellptr,
                              size_t thingSize,
                              const JS::AutoRequireNoGC& nogc) {
  StatsClosure* closure = static_cast<StatsClosure*>(data);
  RuntimeStats* rtStats = closure->rtStats;
  ZoneStats* zStats = rtStats->currZoneStats;
  JS::TraceKind kind = cellptr.kind();
  switch (kind) {
    case JS::TraceKind::Object: {
      JSObject* obj = &cellptr.as<JSObject>();
      RealmStats& realmStats = obj->maybeCCWRealm()->realmStats();
      JS::ClassInfo info;  // This zeroes all the sizes.
      info.objectsGCHeap += thingSize;

      // Nursery cells carry an extra header that tenured cells don't.
      if (!obj->isTenured()) {
        info.objectsGCHeap += Nursery::nurseryCellHeaderSize();
      }

      obj->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info,
                                  &rtStats->runtime);

      // These classes require special handling due to shared resources which
      // we must be careful not to report twice.
      if (obj->is<WasmModuleObject>()) {
        const wasm::Module& module = obj->as<WasmModuleObject>().module();
        ScriptSource* ss = module.codeMetaForAsmJS()
                               ? module.codeMetaForAsmJS()->maybeScriptSource()
                               : nullptr;
        if (ss) {
          CollectScriptSourceStats<granularity>(closure, ss);
        }
        module.addSizeOfMisc(
            rtStats->mallocSizeOf_, &closure->wasmSeenCodeMetadata,
            &closure->wasmSeenCodeMetadataForAsmJS, &closure->wasmSeenCode,
            &info.objectsNonHeapCodeWasm, &info.objectsMallocHeapMisc);
      } else if (obj->is<WasmInstanceObject>()) {
        wasm::Instance& instance = obj->as<WasmInstanceObject>().instance();
        ScriptSource* ss =
            instance.codeMetaForAsmJS()
                ? instance.codeMetaForAsmJS()->maybeScriptSource()
                : nullptr;
        if (ss) {
          CollectScriptSourceStats<granularity>(closure, ss);
        }
        instance.addSizeOfMisc(
            rtStats->mallocSizeOf_, &closure->wasmSeenCodeMetadata,
            &closure->wasmSeenCodeMetadataForAsmJS, &closure->wasmSeenCode,
            &closure->wasmSeenTables, &info.objectsNonHeapCodeWasm,
            &info.objectsMallocHeapMisc);
      }

      realmStats.classInfo.add(info);

      const JSClass* clasp = obj->getClass();
      const char* className = clasp->name;
      AddClassInfo(granularity, realmStats, className, info);

      // Let the embedding measure any private data hanging off the object.
      if (ObjectPrivateVisitor* opv = closure->opv) {
        nsISupports* iface;
        if (opv->getISupports_(obj, &iface) && iface) {
          realmStats.objectsPrivate += opv->sizeOfIncludingThis(iface);
        }
      }
      break;
    }

    case JS::TraceKind::Script: {
      BaseScript* base = &cellptr.as<BaseScript>();
      RealmStats& realmStats = base->realm()->realmStats();
      realmStats.scriptsGCHeap += thingSize;
      realmStats.scriptsMallocHeapData +=
          base->sizeOfExcludingThis(rtStats->mallocSizeOf_);
      if (base->hasJitScript()) {
        JSScript* script = static_cast<JSScript*>(base);
        script->addSizeOfJitScript(rtStats->mallocSizeOf_,
                                   &realmStats.jitScripts,
                                   &realmStats.allocSites);
        jit::AddSizeOfBaselineData(script, rtStats->mallocSizeOf_,
                                   &realmStats.baselineData);
        realmStats.ionData +=
            jit::SizeOfIonData(script, rtStats->mallocSizeOf_);
      }
      CollectScriptSourceStats<granularity>(closure, base->scriptSource());
      break;
    }

    case JS::TraceKind::String: {
      JSString* str = &cellptr.as<JSString>();
      size_t size = thingSize;
      if (!str->isTenured()) {
        size += Nursery::nurseryCellHeaderSize();
      }

      JS::StringInfo info;
      if (str->hasLatin1Chars()) {
        info.gcHeapLatin1 = size;
        info.mallocHeapLatin1 =
            str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
      } else {
        info.gcHeapTwoByte = size;
        info.mallocHeapTwoByte =
            str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
      }
      info.numCopies = 1;

      zStats->stringInfo.add(info);

      // The primary use case for anonymization is automated crash submission
      // (to help detect OOM crashes).  In that case, we don't want to pay the
      // memory cost required to do notable string detection.
      if (granularity == FineGrained && !closure->anonymize) {
        ZoneStats::StringsHashMap::AddPtr p =
            zStats->allStrings->lookupForAdd(str);
        if (!p) {
          bool ok = zStats->allStrings->add(p, str, info);
          // Ignore failure -- we just won't record the string as notable.
          (void)ok;
        } else {
          p->value().add(info);
        }
      }
      break;
    }

    case JS::TraceKind::Symbol:
      zStats->symbolsGCHeap += thingSize;
      break;

    case JS::TraceKind::BigInt: {
      JS::BigInt* bi = &cellptr.as<BigInt>();
      size_t size = thingSize;
      if (!bi->isTenured()) {
        size += Nursery::nurseryCellHeaderSize();
      }
      zStats->bigIntsGCHeap += size;
      zStats->bigIntsMallocHeap +=
          bi->sizeOfExcludingThis(rtStats->mallocSizeOf_);
      break;
    }

    case JS::TraceKind::BaseShape: {
      JS::ShapeInfo info;  // This zeroes all the sizes.
      info.shapesGCHeapBase += thingSize;
      // No malloc-heap measurements.

      zStats->shapeInfo.add(info);
      break;
    }

    case JS::TraceKind::GetterSetter: {
      GetterSetter* gs = &cellptr.as<GetterSetter>();
      size_t size = thingSize;
      if (!gs->isTenured()) {
        size += Nursery::nurseryCellHeaderSize();
      }
      zStats->getterSettersGCHeap += size;
      break;
    }

    case JS::TraceKind::PropMap: {
      PropMap* map = &cellptr.as<PropMap>();
      if (map->isDictionary()) {
        zStats->dictPropMapsGCHeap += thingSize;
      } else if (map->isCompact()) {
        zStats->compactPropMapsGCHeap += thingSize;
      } else {
        MOZ_ASSERT(map->isNormal());
        zStats->normalPropMapsGCHeap += thingSize;
      }
      map->addSizeOfExcludingThis(rtStats->mallocSizeOf_,
                                  &zStats->propMapChildren,
                                  &zStats->propMapTables);
      break;
    }

    case JS::TraceKind::JitCode: {
      zStats->jitCodesGCHeap += thingSize;
      // The code for a script is counted in ExecutableAllocator::sizeOfCode().
      break;
    }

    case JS::TraceKind::Shape: {
      Shape* shape = &cellptr.as<Shape>();

      JS::ShapeInfo info;  // This zeroes all the sizes.
      if (shape->isDictionary()) {
        info.shapesGCHeapDict += thingSize;
      } else {
        info.shapesGCHeapShared += thingSize;
      }
      shape->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);
      zStats->shapeInfo.add(info);
      break;
    }

    case JS::TraceKind::Scope: {
      Scope* scope = &cellptr.as<Scope>();
      zStats->scopesGCHeap += thingSize;
      zStats->scopesMallocHeap += scope->sizeOfExcludingThis();
      break;
    }

    case JS::TraceKind::RegExpShared: {
      auto regexp = &cellptr.as<RegExpShared>();
      zStats->regExpSharedsGCHeap += thingSize;
      zStats->regExpSharedsMallocHeap +=
          regexp->sizeOfExcludingThis(rtStats->mallocSizeOf_);
      break;
    }

    default:
      MOZ_CRASH("invalid traceKind in StatsCellCallback");
  }

  // Yes, this is a subtraction:  see StatsArenaCallback() for details.
  zStats->unusedGCThings.addToKind(kind, -thingSize);
}

// Switch this ZoneStats from "totals" mode to per-zone mode with a live
// notable-strings table.
void ZoneStats::initStrings() {
  isTotals = false;
  allStrings.emplace();
}

// Switch this RealmStats from "totals" mode to per-realm mode with a live
// per-class table.
void RealmStats::initClasses() {
  isTotals = false;
  allClasses.emplace();
}

// Move strings whose tallies qualify as "notable" out of the aggregate
// stringInfo and into zStats.notableStrings.  Returns false on OOM.
static bool FindNotableStrings(ZoneStats& zStats) {
  using namespace JS;

  // We should only run FindNotableStrings once per ZoneStats object.
  MOZ_ASSERT(zStats.notableStrings.empty());

  for (ZoneStats::StringsHashMap::Range r = zStats.allStrings->all();
       !r.empty(); r.popFront()) {
    JSString* str = r.front().key();
    StringInfo& info = r.front().value();

    if (!info.isNotable()) {
      continue;
    }

    if (!zStats.notableStrings.emplaceBack(str, info)) {
      return false;
    }

    // We're moving this string from a non-notable to a notable bucket, so
    // subtract it out of the non-notable tallies.
    zStats.stringInfo.subtract(info);
  }
  // Release |allStrings| now, rather than waiting for zStats's destruction,
  // to reduce peak memory consumption during reporting.
  zStats.allStrings.reset();
  return true;
}

// Move classes whose tallies qualify as "notable" out of the aggregate
// classInfo and into realmStats.notableClasses.  Returns false on OOM.
static bool FindNotableClasses(RealmStats& realmStats) {
  using namespace JS;

  // We should only run FindNotableClasses once per RealmStats object.
  MOZ_ASSERT(realmStats.notableClasses.empty());

  for (RealmStats::ClassesHashMap::Range r = realmStats.allClasses->all();
       !r.empty(); r.popFront()) {
    const char* className = r.front().key();
    ClassInfo& info = r.front().value();

    // If this class isn't notable, skip it.
    if (!info.isNotable()) {
      continue;
    }

    if (!realmStats.notableClasses.emplaceBack(className, info)) {
      return false;
    }

    // We're moving this class from a non-notable to a notable bucket, so
    // subtract it out of the non-notable tallies.
    realmStats.classInfo.subtract(info);
  }
  // Release |allClasses| now, rather than waiting for realmStats's
  // destruction, to reduce peak memory consumption during reporting.
  realmStats.allClasses.reset();
  return true;
}

// Move script sources whose tallies qualify as "notable" out of the
// aggregate scriptSourceInfo and into runtime.notableScriptSources.
// Returns false on OOM.
static bool FindNotableScriptSources(JS::RuntimeSizes& runtime) {
  using namespace JS;

  // We should only run FindNotableScriptSources once per RuntimeSizes.
  MOZ_ASSERT(runtime.notableScriptSources.empty());

  for (RuntimeSizes::ScriptSourcesHashMap::Range r =
           runtime.allScriptSources->all();
       !r.empty(); r.popFront()) {
    const char* filename = r.front().key();
    ScriptSourceInfo& info = r.front().value();

    if (!info.isNotable()) {
      continue;
    }

    if (!runtime.notableScriptSources.emplaceBack(filename, info)) {
      return false;
    }

    // We're moving this script source from a non-notable to a notable
    // bucket, so subtract its sizes from the non-notable tallies.
    runtime.scriptSourceInfo.subtract(info);
  }
  // Release |allScriptSources| now, rather than waiting for its owner's
  // destruction, to reduce peak memory consumption during reporting.
  runtime.allScriptSources.reset();
  return true;
}

// Shared implementation of CollectRuntimeStats: iterate the whole GC heap
// (zones, realms, arenas, cells) and the runtime, then post-process the raw
// tallies (totals, notable entries, derived chunk/arena numbers).
// Returns false on OOM.
static bool CollectRuntimeStatsHelper(JSContext* cx, RuntimeStats* rtStats,
                                      ObjectPrivateVisitor* opv, bool anonymize,
                                      IterateCellCallback statsCellCallback) {
  // Finish any ongoing incremental GC that may change the data we're
  // gathering and start a trace session.  Ensure that we don't do anything
  // that could start another GC.
  js::gc::AutoPrepareForTracing session(cx);
  JS::AutoAssertNoGC nogc(cx);

  // Wait for any background tasks to finish.
  WaitForAllHelperThreads();

  JSRuntime* rt = cx->runtime();
  if (!rtStats->realmStatsVector.reserve(rt->numRealms)) {
    return false;
  }

  size_t totalZones = rt->gc.zones().length();
  if (!rtStats->zoneStatsVector.reserve(totalZones)) {
    return false;
  }

  rtStats->gcHeapChunkTotal =
      size_t(JS_GetGCParameter(cx, JSGC_TOTAL_CHUNKS)) * gc::ChunkSize;

  rtStats->gcHeapUnusedChunks =
      size_t(JS_GetGCParameter(cx, JSGC_UNUSED_CHUNKS)) * gc::ChunkSize;

  if (js::gc::DecommitEnabled()) {
    IterateChunks(cx, &rtStats->gcHeapDecommittedPages,
                  DecommittedPagesChunkCallback, session);
  }

  // Take the per-compartment measurements.
  StatsClosure closure(rtStats, opv, anonymize);
  IterateHeapUnbarriered(cx, &closure, StatsZoneCallback, StatsRealmCallback,
                         StatsArenaCallback, statsCellCallback, session);

  // Take the "explicit/js/runtime/" measurements.
  rt->addSizeOfIncludingThis(rtStats->mallocSizeOf_, &rtStats->runtime);

  if (!FindNotableScriptSources(rtStats->runtime)) {
    return false;
  }

  JS::ZoneStatsVector& zs = rtStats->zoneStatsVector;
  ZoneStats& zTotals = rtStats->zTotals;

  // We don't look for notable strings for zTotals.  So we first sum all the
  // zones' measurements to get the totals.  Then we find the notable strings
  // within each zone.
  for (size_t i = 0; i < zs.length(); i++) {
    zTotals.addSizes(zs[i]);
  }

  for (size_t i = 0; i < zs.length(); i++) {
    if (!FindNotableStrings(zs[i])) {
      return false;
    }
  }

  MOZ_ASSERT(!zTotals.allStrings);

  JS::RealmStatsVector& realmStats = rtStats->realmStatsVector;
  RealmStats& realmTotals = rtStats->realmTotals;

  // As with the zones, we sum all realms first, and then get the
  // notable classes within each zone.
  for (size_t i = 0; i < realmStats.length(); i++) {
    realmTotals.addSizes(realmStats[i]);
  }

  for (size_t i = 0; i < realmStats.length(); i++) {
    if (!FindNotableClasses(realmStats[i])) {
      return false;
    }
  }

  MOZ_ASSERT(!realmTotals.allClasses);

  rtStats->gcHeapGCThings = rtStats->zTotals.sizeOfLiveGCThings() +
                            rtStats->realmTotals.sizeOfLiveGCThings();

#ifdef DEBUG
  // Check that the in-arena measurements look ok.
  size_t totalArenaSize = rtStats->zTotals.gcHeapArenaAdmin +
                          rtStats->zTotals.unusedGCThings.totalSize() +
                          rtStats->gcHeapGCThings;
  MOZ_ASSERT(totalArenaSize % gc::ArenaSize == 0);
#endif

  // Detach the RealmStats pointers installed by StatsRealmCallback.
  for (RealmsIter realm(rt); !realm.done(); realm.next()) {
    realm->nullRealmStats();
  }

  size_t numDirtyChunks =
      (rtStats->gcHeapChunkTotal - rtStats->gcHeapUnusedChunks) / gc::ChunkSize;
  size_t perChunkAdmin =
      sizeof(gc::ArenaChunk) - (sizeof(gc::Arena) * gc::ArenasPerChunk);
  rtStats->gcHeapChunkAdmin = numDirtyChunks * perChunkAdmin;

  // |gcHeapUnusedArenas| is the only thing left.  Compute it in terms of
  // all the others.  See the comment in RuntimeStats for explanation.
  rtStats->gcHeapUnusedArenas =
      rtStats->gcHeapChunkTotal - rtStats->gcHeapDecommittedPages -
      rtStats->gcHeapUnusedChunks -
      rtStats->zTotals.unusedGCThings.totalSize() - rtStats->gcHeapChunkAdmin -
      rtStats->zTotals.gcHeapArenaAdmin - rtStats->gcHeapGCThings;
  return true;
}

JS_PUBLIC_API bool JS::CollectGlobalStats(GlobalStats* gStats) {
  AutoLockHelperThreadState lock;

  // HelperThreadState holds data that is not part of a Runtime.  This does
  // not include data that is currently being processed by a HelperThread.
  if (IsHelperThreadStateInitialized()) {
    HelperThreadState().addSizeOfIncludingThis(gStats, lock);
  }

  return true;
}

JS_PUBLIC_API bool JS::CollectRuntimeStats(JSContext* cx, RuntimeStats* rtStats,
                                           ObjectPrivateVisitor* opv,
                                           bool anonymize) {
  return CollectRuntimeStatsHelper(cx, rtStats, opv, anonymize,
                                   StatsCellCallback<FineGrained>);
}

// Count compartments for which IsSystemCompartment() is true.
JS_PUBLIC_API size_t JS::SystemCompartmentCount(JSContext* cx) {
  size_t n = 0;
  for (CompartmentsIter comp(cx->runtime()); !comp.done(); comp.next()) {
    if (IsSystemCompartment(comp)) {
      ++n;
    }
  }
  return n;
}

// Count compartments for which IsSystemCompartment() is false.
JS_PUBLIC_API size_t JS::UserCompartmentCount(JSContext* cx) {
  size_t n = 0;
  for (CompartmentsIter comp(cx->runtime()); !comp.done(); comp.next()) {
    if (!IsSystemCompartment(comp)) {
      ++n;
    }
  }
  return n;
}

// Count realms for which isSystem() is true.
JS_PUBLIC_API size_t JS::SystemRealmCount(JSContext* cx) {
  size_t n = 0;
  for (RealmsIter realm(cx->runtime()); !realm.done(); realm.next()) {
    if (realm->isSystem()) {
      ++n;
    }
  }
  return n;
}

// Count realms for which isSystem() is false.
JS_PUBLIC_API size_t JS::UserRealmCount(JSContext* cx) {
  size_t n = 0;
  for (RealmsIter realm(cx->runtime()); !realm.done(); realm.next()) {
    if (!realm->isSystem()) {
      ++n;
    }
  }
  return n;
}

// Peak size of the context's temporary LIFO allocator (excluding the
// allocator object itself).
JS_PUBLIC_API size_t JS::PeakSizeOfTemporary(const JSContext* cx) {
  return cx->tempLifoAlloc().peakSizeOfExcludingThis();
}

namespace JS {

// Minimal RuntimeStats subclass used by AddSizeOfTab(): no extra per-zone or
// per-realm embedder data is collected.
class SimpleJSRuntimeStats : public JS::RuntimeStats {
 public:
  explicit SimpleJSRuntimeStats(MallocSizeOf mallocSizeOf)
      : JS::RuntimeStats(mallocSizeOf) {}

  virtual void initExtraZoneStats(JS::Zone* zone, JS::ZoneStats* zStats,
                                  const JS::AutoRequireNoGC& nogc) override {}

  virtual void initExtraRealmStats(Realm* realm, JS::RealmStats* realmStats,
                                   const JS::AutoRequireNoGC& nogc) override {}
};

// Measure a single zone (a "tab") with coarse granularity and aggregate the
// result into |sizes|.  Returns false on OOM.
JS_PUBLIC_API bool AddSizeOfTab(JSContext* cx, JS::Zone* zone,
                                MallocSizeOf mallocSizeOf,
                                ObjectPrivateVisitor* opv, TabSizes* sizes,
                                const JS::AutoRequireNoGC& nogc) {
  SimpleJSRuntimeStats rtStats(mallocSizeOf);

  size_t numRealms = 0;
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    numRealms += comp->realms().length();
  }

  if (!rtStats.realmStatsVector.reserve(numRealms)) {
    return false;
  }

  if (!rtStats.zoneStatsVector.reserve(1)) {
    return false;
  }

  // Take the per-compartment measurements.  No need to anonymize because
  // these measurements will be aggregated.
  StatsClosure closure(&rtStats, opv, /* anonymize = */ false);
  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(cx));
  js::gc::AutoTraceSession session(cx->runtime());
  IterateHeapUnbarrieredForZone(cx, zone, &closure, StatsZoneCallback,
                                StatsRealmCallback, StatsArenaCallback,
                                StatsCellCallback<CoarseGrained>, session);

  MOZ_ASSERT(rtStats.zoneStatsVector.length() == 1);
  rtStats.zTotals.addSizes(rtStats.zoneStatsVector[0]);

  for (size_t i = 0; i < rtStats.realmStatsVector.length(); i++) {
    rtStats.realmTotals.addSizes(rtStats.realmStatsVector[i]);
  }

  // Detach the RealmStats pointers installed by StatsRealmCallback.
  for (RealmsInZoneIter realm(zone); !realm.done(); realm.next()) {
    realm->nullRealmStats();
  }

  rtStats.zTotals.addToTabSizes(sizes);
  rtStats.realmTotals.addToTabSizes(sizes);

  return true;
}

}  // namespace JS