BaselineJIT.cpp (43250B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #include "jit/BaselineJIT.h" 8 9 #include "mozilla/BinarySearch.h" 10 #include "mozilla/CheckedInt.h" 11 #include "mozilla/MemoryReporting.h" 12 #include "mozilla/ScopeExit.h" 13 14 #include <algorithm> 15 16 #include "debugger/DebugAPI.h" 17 #include "gc/GCContext.h" 18 #include "gc/PublicIterators.h" 19 #include "jit/AutoWritableJitCode.h" 20 #include "jit/BaselineCodeGen.h" 21 #include "jit/BaselineCompileTask.h" 22 #include "jit/BaselineDebugModeOSR.h" 23 #include "jit/BaselineIC.h" 24 #include "jit/CalleeToken.h" 25 #include "jit/Ion.h" 26 #include "jit/IonOptimizationLevels.h" 27 #include "jit/JitCommon.h" 28 #include "jit/JitRuntime.h" 29 #include "jit/JitSpewer.h" 30 #include "jit/MacroAssembler.h" 31 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit 32 #include "vm/Interpreter.h" 33 34 #include "debugger/DebugAPI-inl.h" 35 #include "gc/GC-inl.h" 36 #include "jit/JitHints-inl.h" 37 #include "jit/JitScript-inl.h" 38 #include "vm/GeckoProfiler-inl.h" 39 #include "vm/JSScript-inl.h" 40 #include "vm/Stack-inl.h" 41 42 using mozilla::BinarySearchIf; 43 using mozilla::CheckedInt; 44 45 using namespace js; 46 using namespace js::jit; 47 48 void ICStubSpace::freeAllAfterMinorGC(Zone* zone) { 49 if (zone->isAtomsZone()) { 50 MOZ_ASSERT(allocator_.isEmpty()); 51 } else { 52 JSRuntime* rt = zone->runtimeFromMainThread(); 53 rt->gc.queueAllLifoBlocksForFreeAfterMinorGC(&allocator_); 54 } 55 } 56 57 static bool CheckFrame(InterpreterFrame* fp) { 58 if (fp->isDebuggerEvalFrame()) { 59 // Debugger eval-in-frame. These are likely short-running scripts so 60 // don't bother compiling them for now. 
61 JitSpew(JitSpew_BaselineAbort, "debugger frame"); 62 return false; 63 } 64 65 if (fp->isFunctionFrame() && TooManyActualArguments(fp->numActualArgs())) { 66 // Fall back to the interpreter to avoid running out of stack space. 67 JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", 68 fp->numActualArgs()); 69 return false; 70 } 71 72 return true; 73 } 74 75 struct EnterJitData { 76 explicit EnterJitData(JSContext* cx) 77 : jitcode(nullptr), 78 osrFrame(nullptr), 79 calleeToken(nullptr), 80 maxArgv(nullptr), 81 maxArgc(0), 82 numActualArgs(0), 83 osrNumStackValues(0), 84 envChain(cx), 85 result(cx), 86 constructing(false) {} 87 88 uint8_t* jitcode; 89 InterpreterFrame* osrFrame; 90 91 void* calleeToken; 92 93 Value* maxArgv; 94 unsigned maxArgc; 95 unsigned numActualArgs; 96 unsigned osrNumStackValues; 97 98 RootedObject envChain; 99 RootedValue result; 100 101 bool constructing; 102 103 Value& thisv() const { return maxArgv[-1]; } 104 }; 105 106 static JitExecStatus EnterBaseline(JSContext* cx, EnterJitData& data) { 107 MOZ_ASSERT(data.osrFrame); 108 109 // Check for potential stack overflow before OSR-ing. 110 uint32_t extra = 111 BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value)); 112 AutoCheckRecursionLimit recursion(cx); 113 if (!recursion.checkWithExtra(cx, extra)) { 114 return JitExec_Aborted; 115 } 116 117 #ifdef DEBUG 118 // Assert we don't GC before entering JIT code. A GC could discard JIT code 119 // or move the function stored in the CalleeToken (it won't be traced at 120 // this point). We use Maybe<> here so we can call reset() to call the 121 // AutoAssertNoGC destructor before we enter JIT code. 122 mozilla::Maybe<JS::AutoAssertNoGC> nogc; 123 nogc.emplace(cx); 124 #endif 125 126 MOZ_ASSERT(IsBaselineInterpreterEnabled()); 127 MOZ_ASSERT(CheckFrame(data.osrFrame)); 128 129 EnterJitCode enter = cx->runtime()->jitRuntime()->enterJit(); 130 131 // Caller must construct |this| before invoking the function. 
132 MOZ_ASSERT_IF(data.constructing, 133 data.thisv().isObject() || 134 data.thisv().isMagic(JS_UNINITIALIZED_LEXICAL)); 135 136 data.result.setInt32(data.numActualArgs); 137 { 138 AssertRealmUnchanged aru(cx); 139 JitActivation activation(cx); 140 141 data.osrFrame->setRunningInJit(); 142 143 #ifdef DEBUG 144 nogc.reset(); 145 #endif 146 // Single transition point from Interpreter to Baseline. 147 CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, 148 data.osrFrame, data.calleeToken, data.envChain.get(), 149 data.osrNumStackValues, data.result.address()); 150 151 data.osrFrame->clearRunningInJit(); 152 } 153 154 // Jit callers wrap primitive constructor return, except for derived 155 // class constructors, which are forced to do it themselves. 156 if (!data.result.isMagic() && data.constructing && 157 data.result.isPrimitive()) { 158 MOZ_ASSERT(data.thisv().isObject()); 159 data.result = data.thisv(); 160 } 161 162 // Release temporary buffer used for OSR into Ion. 163 cx->runtime()->jitRuntime()->freeIonOsrTempData(); 164 165 MOZ_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR)); 166 return data.result.isMagic() ? JitExec_Error : JitExec_Ok; 167 } 168 169 JitExecStatus jit::EnterBaselineInterpreterAtBranch(JSContext* cx, 170 InterpreterFrame* fp, 171 jsbytecode* pc) { 172 MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead); 173 174 EnterJitData data(cx); 175 176 // Use the entry point that skips the debug trap because the C++ interpreter 177 // already handled this for the current op. 
178 const BaselineInterpreter& interp = 179 cx->runtime()->jitRuntime()->baselineInterpreter(); 180 data.jitcode = interp.interpretOpNoDebugTrapAddr().value; 181 182 data.osrFrame = fp; 183 data.osrNumStackValues = 184 fp->script()->nfixed() + cx->interpreterRegs().stackDepth(); 185 186 if (fp->isFunctionFrame()) { 187 data.constructing = fp->isConstructing(); 188 data.numActualArgs = fp->numActualArgs(); 189 data.maxArgc = std::max(fp->numActualArgs(), fp->numFormalArgs()); 190 data.maxArgv = fp->argv(); 191 data.envChain = nullptr; 192 data.calleeToken = CalleeToToken(&fp->callee(), data.constructing); 193 } else { 194 data.constructing = false; 195 data.numActualArgs = 0; 196 data.maxArgc = 0; 197 data.maxArgv = nullptr; 198 data.envChain = fp->environmentChain(); 199 data.calleeToken = CalleeToToken(fp->script()); 200 } 201 202 JitExecStatus status = EnterBaseline(cx, data); 203 if (status != JitExec_Ok) { 204 return status; 205 } 206 207 fp->setReturnValue(data.result); 208 return JitExec_Ok; 209 } 210 211 bool BaselineCompileTask::OffThreadBaselineCompilationAvailable( 212 JSContext* cx, JSScript* script, bool isEager) { 213 if (!isEager && !cx->runtime()->canUseOffthreadBaselineCompilation()) { 214 return false; 215 } 216 // TODO: Support off-thread scriptcounts? 
217 if (cx->runtime()->profilingScripts) { 218 return false; 219 } 220 if (script->hasScriptCounts() || cx->realm()->collectCoverageForDebug()) { 221 return false; 222 } 223 if (script->isDebuggee()) { 224 return false; 225 } 226 if (JS::Prefs::experimental_self_hosted_cache() && script->selfHosted()) { 227 return false; 228 } 229 return CanUseExtraThreads(); 230 } 231 232 static bool DispatchOffThreadBaselineCompile(JSContext* cx, 233 BaselineSnapshot& snapshot) { 234 JSScript* script = snapshot.script(); 235 MOZ_ASSERT( 236 BaselineCompileTask::OffThreadBaselineCompilationAvailable(cx, script)); 237 238 auto alloc = cx->make_unique<LifoAlloc>(TempAllocator::PreferredLifoChunkSize, 239 js::BackgroundMallocArena); 240 if (!alloc) { 241 ReportOutOfMemory(cx); 242 return false; 243 } 244 auto* snapshotCopy = alloc->new_<OffThreadBaselineSnapshot>(snapshot); 245 if (!snapshotCopy) { 246 ReportOutOfMemory(cx); 247 return false; 248 } 249 250 BaselineSnapshotList snapshots; 251 snapshots.insertFront(snapshotCopy); 252 CompileRealm* realm = CompileRealm::get(cx->realm()); 253 BaselineCompileTask* task = alloc->new_<BaselineCompileTask>( 254 realm, alloc.get(), std::move(snapshots)); 255 if (!task) { 256 snapshots.clear(); 257 ReportOutOfMemory(cx); 258 return false; 259 } 260 261 AutoLockHelperThreadState lock; 262 if (!StartOffThreadBaselineCompile(task, lock)) { 263 ReportOutOfMemory(cx); 264 return false; 265 } 266 267 script->jitScript()->setIsBaselineCompiling(script); 268 269 // The allocator and associated data will be destroyed after being 270 // processed in the finishedOffThreadCompilations list. 271 (void)alloc.release(); 272 273 return true; 274 } 275 276 // Off thread baseline batching can be called from two different locations. 277 // Either through stencil instantiation where we perform eager baseline 278 // compilations speculatively based on Jit Hints, or on demand through the JIT. 
279 static bool DispatchOffThreadBaselineBatchImpl(JSContext* cx, bool isEager) { 280 BaselineCompileQueue& queue = cx->realm()->baselineCompileQueue(); 281 MOZ_ASSERT(queue.numQueued() > 0); 282 283 auto alloc = cx->make_unique<LifoAlloc>(TempAllocator::PreferredLifoChunkSize, 284 js::BackgroundMallocArena); 285 if (!alloc) { 286 ReportOutOfMemory(cx); 287 return false; 288 } 289 290 gc::AutoSuppressGC suppressGC(cx); 291 292 BaselineSnapshotList snapshots; 293 auto clearSnapshotList = mozilla::MakeScopeExit([&]() { snapshots.clear(); }); 294 295 GlobalLexicalEnvironmentObject* globalLexical = 296 &cx->global()->lexicalEnvironment(); 297 JSObject* globalThis = globalLexical->thisObject(); 298 299 Rooted<JSScript*> script(cx); 300 while (!queue.isEmpty()) { 301 script = queue.pop(); 302 if (script->hasJitScript()) { 303 script->jitScript()->clearIsBaselineQueued(script); 304 } 305 306 MOZ_ASSERT(cx->realm() == script->realm()); 307 308 if (!IsBaselineJitEnabled(cx)) { 309 script->disableBaselineCompile(); 310 continue; 311 } 312 313 if (!BaselineCompileTask::OffThreadBaselineCompilationAvailable(cx, script, 314 isEager)) { 315 BaselineOptions options({BaselineOption::ForceMainThreadCompilation}); 316 MethodStatus status = BaselineCompile(cx, script, options); 317 if (status != Method_Compiled) { 318 return false; 319 } 320 continue; 321 } 322 323 bool compileDebugInstrumentation = false; 324 if (!BaselineCompiler::PrepareToCompile(cx, script, 325 compileDebugInstrumentation)) { 326 return false; 327 } 328 329 uint32_t baseWarmUpThreshold = 330 OptimizationInfo::baseWarmUpThresholdForScript(cx, script); 331 bool isIonCompileable = IsIonEnabled(cx) && CanIonCompileScript(cx, script); 332 333 MOZ_ASSERT(!script->isDebuggee()); 334 335 auto* offThreadSnapshot = alloc->new_<OffThreadBaselineSnapshot>( 336 script, globalLexical, globalThis, baseWarmUpThreshold, 337 isIonCompileable, compileDebugInstrumentation); 338 if (!offThreadSnapshot) { 339 ReportOutOfMemory(cx); 
340 return false; 341 } 342 snapshots.insertFront(offThreadSnapshot); 343 } 344 345 if (snapshots.isEmpty()) { 346 return true; 347 } 348 349 CompileRealm* realm = CompileRealm::get(cx->realm()); 350 BaselineCompileTask* task = alloc->new_<BaselineCompileTask>( 351 realm, alloc.get(), std::move(snapshots)); 352 if (!task) { 353 ReportOutOfMemory(cx); 354 return false; 355 } 356 task->markScriptsAsCompiling(); 357 clearSnapshotList.release(); 358 359 AutoLockHelperThreadState lock; 360 if (!StartOffThreadBaselineCompile(task, lock)) { 361 ReportOutOfMemory(cx); 362 return false; 363 } 364 365 (void)alloc.release(); 366 367 return true; 368 } 369 370 bool jit::DispatchOffThreadBaselineBatchEager(JSContext* cx) { 371 return DispatchOffThreadBaselineBatchImpl(cx, /* isEager = */ true); 372 } 373 374 bool jit::DispatchOffThreadBaselineBatch(JSContext* cx) { 375 return DispatchOffThreadBaselineBatchImpl(cx, /* isEager = */ false); 376 } 377 378 MethodStatus jit::BaselineCompile(JSContext* cx, JSScript* script, 379 BaselineOptions options) { 380 cx->check(script); 381 MOZ_ASSERT(!script->hasBaselineScript()); 382 MOZ_ASSERT(script->canBaselineCompile()); 383 MOZ_ASSERT(IsBaselineJitEnabled(cx)); 384 AutoGeckoProfilerEntry pseudoFrame( 385 cx, "Baseline script compilation", 386 JS::ProfilingCategoryPair::JS_BaselineCompilation); 387 388 bool compileDebugInstrumentation = 389 script->isDebuggee() || 390 options.hasFlag(BaselineOption::ForceDebugInstrumentation); 391 bool forceMainThread = 392 compileDebugInstrumentation || 393 options.hasFlag(BaselineOption::ForceMainThreadCompilation); 394 395 JitContext jctx(cx); 396 397 // Suppress GC during compilation. 
398 gc::AutoSuppressGC suppressGC(cx); 399 400 Rooted<JSScript*> rooted(cx, script); 401 if (!BaselineCompiler::PrepareToCompile(cx, rooted, 402 compileDebugInstrumentation)) { 403 return Method_Error; 404 } 405 406 GlobalLexicalEnvironmentObject* globalLexical = 407 &cx->global()->lexicalEnvironment(); 408 JSObject* globalThis = globalLexical->thisObject(); 409 uint32_t baseWarmUpThreshold = 410 OptimizationInfo::baseWarmUpThresholdForScript(cx, script); 411 bool isIonCompileable = IsIonEnabled(cx) && CanIonCompileScript(cx, script); 412 413 BaselineSnapshot snapshot(script, globalLexical, globalThis, 414 baseWarmUpThreshold, isIonCompileable, 415 compileDebugInstrumentation); 416 417 if (BaselineCompileTask::OffThreadBaselineCompilationAvailable(cx, script) && 418 !forceMainThread) { 419 if (!DispatchOffThreadBaselineCompile(cx, snapshot)) { 420 ReportOutOfMemory(cx); 421 return Method_Error; 422 } 423 return Method_Skipped; 424 } 425 426 TempAllocator temp(&cx->tempLifoAlloc()); 427 428 mozilla::Maybe<JSAutoNullableRealm> ar; 429 if (JS::Prefs::experimental_self_hosted_cache() && script->selfHosted()) { 430 // realm-independent scripts should not have a realm set 431 ar.emplace(cx, nullptr); 432 } 433 StackMacroAssembler masm(cx, temp); 434 435 BaselineCompiler compiler(temp, CompileRuntime::get(cx->runtime()), masm, 436 &snapshot); 437 if (!compiler.init()) { 438 ReportOutOfMemory(cx); 439 return Method_Error; 440 } 441 442 MethodStatus status = compiler.compile(cx); 443 444 MOZ_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript()); 445 MOZ_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript()); 446 447 if (status == Method_CantCompile) { 448 script->disableBaselineCompile(); 449 } 450 451 return status; 452 } 453 454 static MethodStatus CanEnterBaselineJIT(JSContext* cx, HandleScript script, 455 AbstractFramePtr osrSourceFrame) { 456 // Skip if the script has been disabled. 
457 if (!script->canBaselineCompile()) { 458 return Method_Skipped; 459 } 460 461 if (!IsBaselineJitEnabled(cx)) { 462 script->disableBaselineCompile(); 463 return Method_CantCompile; 464 } 465 466 // This check is needed in the following corner case. Consider a function h, 467 // 468 // function h(x) { 469 // if (!x) 470 // return; 471 // h(false); 472 // for (var i = 0; i < N; i++) 473 // /* do stuff */ 474 // } 475 // 476 // Suppose h is not yet compiled in baseline and is executing in the 477 // interpreter. Let this interpreter frame be f_older. The debugger marks 478 // f_older as isDebuggee. At the point of the recursive call h(false), h is 479 // compiled in baseline without debug instrumentation, pushing a baseline 480 // frame f_newer. The debugger never flags f_newer as isDebuggee, and never 481 // recompiles h. When the recursive call returns and execution proceeds to 482 // the loop, the interpreter attempts to OSR into baseline. Since h is 483 // already compiled in baseline, execution jumps directly into baseline 484 // code. This is incorrect as h's baseline script does not have debug 485 // instrumentation. 486 if (osrSourceFrame && osrSourceFrame.isDebuggee() && 487 !DebugAPI::ensureExecutionObservabilityOfOsrFrame(cx, osrSourceFrame)) { 488 return Method_Error; 489 } 490 491 if (script->length() > BaselineMaxScriptLength) { 492 script->disableBaselineCompile(); 493 return Method_CantCompile; 494 } 495 496 if (script->nslots() > BaselineMaxScriptSlots) { 497 script->disableBaselineCompile(); 498 return Method_CantCompile; 499 } 500 501 if (script->hasBaselineScript()) { 502 return Method_Compiled; 503 } 504 505 if (script->isBaselineCompilingOffThread()) { 506 return Method_Skipped; 507 } 508 509 // If a hint is available, skip the warmup count threshold. 
510 bool mightHaveEagerBaselineHint = false; 511 if (!JitOptions.disableJitHints && !script->noEagerBaselineHint() && 512 cx->runtime()->jitRuntime()->hasJitHintsMap()) { 513 JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap(); 514 // If this lookup fails, the NoEagerBaselineHint script flag is set 515 // to true to prevent any further lookups for this script. 516 if (jitHints->mightHaveEagerBaselineHint(script)) { 517 mightHaveEagerBaselineHint = true; 518 } 519 } 520 // Check script warm-up counter if no hint. 521 if (!mightHaveEagerBaselineHint) { 522 if (script->getWarmUpCount() <= JitOptions.baselineJitWarmUpThreshold) { 523 return Method_Skipped; 524 } 525 } 526 527 // Check this before calling ensureJitZoneExists, so we're less 528 // likely to report OOM in JSRuntime::createJitRuntime. 529 if (!CanLikelyAllocateMoreExecutableMemory()) { 530 return Method_Skipped; 531 } 532 533 if (!cx->zone()->ensureJitZoneExists(cx)) { 534 return Method_Error; 535 } 536 537 if (script->hasForceInterpreterOp()) { 538 script->disableBaselineCompile(); 539 return Method_CantCompile; 540 } 541 542 // Frames can be marked as debuggee frames independently of its underlying 543 // script being a debuggee script, e.g., when performing 544 // Debugger.Frame.prototype.eval. 545 BaselineOptions options; 546 if (osrSourceFrame && osrSourceFrame.isDebuggee()) { 547 options.setFlag(BaselineOption::ForceDebugInstrumentation); 548 } 549 return BaselineCompile(cx, script, options); 550 } 551 552 bool jit::CanBaselineInterpretScript(JSScript* script) { 553 MOZ_ASSERT(IsBaselineInterpreterEnabled()); 554 555 if (script->hasForceInterpreterOp()) { 556 return false; 557 } 558 559 if (script->nslots() > BaselineMaxScriptSlots) { 560 // Avoid overrecursion exceptions when the script has a ton of stack slots 561 // by forcing such scripts to run in the C++ interpreter with heap-allocated 562 // stack frames. 
563 return false; 564 } 565 566 return true; 567 } 568 569 static bool MaybeCreateBaselineInterpreterEntryScript(JSContext* cx, 570 JSScript* script) { 571 MOZ_ASSERT(script->hasJitScript()); 572 573 JitRuntime* jitRuntime = cx->runtime()->jitRuntime(); 574 if (script->jitCodeRaw() != jitRuntime->baselineInterpreter().codeRaw()) { 575 // script already has an updated interpreter trampoline. 576 #ifdef DEBUG 577 auto p = jitRuntime->getInterpreterEntryMap()->lookup(script); 578 MOZ_ASSERT(p); 579 MOZ_ASSERT(p->value().raw() == script->jitCodeRaw()); 580 #endif 581 return true; 582 } 583 584 auto p = jitRuntime->getInterpreterEntryMap()->lookupForAdd(script); 585 if (!p) { 586 Rooted<JitCode*> code( 587 cx, jitRuntime->generateEntryTrampolineForScript(cx, script)); 588 if (!code) { 589 return false; 590 } 591 592 EntryTrampoline entry(cx, code); 593 if (!jitRuntime->getInterpreterEntryMap()->add(p, script, entry)) { 594 return false; 595 } 596 } 597 598 script->updateJitCodeRaw(cx->runtime()); 599 return true; 600 } 601 602 static MethodStatus CanEnterBaselineInterpreter(JSContext* cx, 603 JSScript* script) { 604 MOZ_ASSERT(IsBaselineInterpreterEnabled()); 605 606 if (script->hasJitScript()) { 607 return Method_Compiled; 608 } 609 610 if (!CanBaselineInterpretScript(script)) { 611 return Method_CantCompile; 612 } 613 614 // Check script warm-up counter. 
615 if (script->getWarmUpCount() <= 616 JitOptions.baselineInterpreterWarmUpThreshold) { 617 return Method_Skipped; 618 } 619 620 if (!cx->zone()->ensureJitZoneExists(cx)) { 621 return Method_Error; 622 } 623 624 AutoKeepJitScripts keepJitScript(cx); 625 if (!script->ensureHasJitScript(cx, keepJitScript)) { 626 return Method_Error; 627 } 628 629 if (JitOptions.emitInterpreterEntryTrampoline) { 630 if (!MaybeCreateBaselineInterpreterEntryScript(cx, script)) { 631 return Method_Error; 632 } 633 } 634 return Method_Compiled; 635 } 636 637 MethodStatus jit::CanEnterBaselineInterpreterAtBranch(JSContext* cx, 638 InterpreterFrame* fp) { 639 if (!CheckFrame(fp)) { 640 return Method_CantCompile; 641 } 642 643 // JITs do not respect the debugger's OnNativeCall hook, so JIT execution is 644 // disabled if this hook might need to be called. 645 if (cx->realm()->debuggerObservesNativeCall()) { 646 return Method_CantCompile; 647 } 648 649 return CanEnterBaselineInterpreter(cx, fp->script()); 650 } 651 652 template <BaselineTier Tier> 653 MethodStatus jit::CanEnterBaselineMethod(JSContext* cx, RunState& state) { 654 if (state.isInvoke()) { 655 InvokeState& invoke = *state.asInvoke(); 656 if (TooManyActualArguments(invoke.args().length())) { 657 JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", 658 invoke.args().length()); 659 return Method_CantCompile; 660 } 661 } else { 662 if (state.asExecute()->isDebuggerEval()) { 663 JitSpew(JitSpew_BaselineAbort, "debugger frame"); 664 return Method_CantCompile; 665 } 666 } 667 668 RootedScript script(cx, state.script()); 669 switch (Tier) { 670 case BaselineTier::Interpreter: 671 return CanEnterBaselineInterpreter(cx, script); 672 673 case BaselineTier::Compiler: 674 return CanEnterBaselineJIT(cx, script, 675 /* osrSourceFrame = */ NullFramePtr()); 676 } 677 678 MOZ_CRASH("Unexpected tier"); 679 } 680 681 template MethodStatus jit::CanEnterBaselineMethod<BaselineTier::Interpreter>( 682 JSContext* cx, RunState& state); 683 template 
MethodStatus jit::CanEnterBaselineMethod<BaselineTier::Compiler>( 684 JSContext* cx, RunState& state); 685 686 bool jit::BaselineCompileFromBaselineInterpreter(JSContext* cx, 687 BaselineFrame* frame, 688 uint8_t** res) { 689 MOZ_ASSERT(frame->runningInInterpreter()); 690 691 RootedScript script(cx, frame->script()); 692 jsbytecode* pc = frame->interpreterPC(); 693 MOZ_ASSERT(pc == script->code() || JSOp(*pc) == JSOp::LoopHead); 694 695 MethodStatus status = CanEnterBaselineJIT(cx, script, 696 /* osrSourceFrame = */ frame); 697 switch (status) { 698 case Method_Error: 699 return false; 700 701 case Method_CantCompile: 702 case Method_Skipped: 703 *res = nullptr; 704 return true; 705 706 case Method_Compiled: { 707 if (JSOp(*pc) == JSOp::LoopHead) { 708 MOZ_ASSERT(pc > script->code(), 709 "Prologue vs OSR cases must not be ambiguous"); 710 BaselineScript* baselineScript = script->baselineScript(); 711 uint32_t pcOffset = script->pcToOffset(pc); 712 *res = baselineScript->nativeCodeForOSREntry(pcOffset); 713 } else { 714 *res = script->baselineScript()->warmUpCheckPrologueAddr(); 715 } 716 frame->prepareForBaselineInterpreterToJitOSR(); 717 return true; 718 } 719 } 720 721 MOZ_CRASH("Unexpected status"); 722 } 723 724 void BaselineCompileQueue::trace(JSTracer* trc) { 725 assertInvariants(); 726 for (uint32_t i = 0; i < numQueued_; i++) { 727 TraceEdge(trc, &queue_[i], "baseline_compile_queue"); 728 } 729 } 730 731 void BaselineCompileQueue::remove(JSScript* script) { 732 assertInvariants(); 733 for (uint32_t i = 0; i < numQueued_; i++) { 734 if (queue_[i] == script) { 735 std::swap(queue_[i], queue_[numQueued_ - 1]); 736 pop(); 737 break; 738 } 739 } 740 assertInvariants(); 741 } 742 743 BaselineScript* BaselineScript::New(JSContext* cx, 744 uint32_t warmUpCheckPrologueOffset, 745 uint32_t profilerEnterToggleOffset, 746 uint32_t profilerExitToggleOffset, 747 size_t retAddrEntries, size_t osrEntries, 748 size_t debugTrapEntries, 749 size_t resumeEntries) { 750 // 
Compute size including trailing arrays. 751 CheckedInt<Offset> size = sizeof(BaselineScript); 752 size += CheckedInt<Offset>(resumeEntries) * sizeof(uintptr_t); 753 size += CheckedInt<Offset>(retAddrEntries) * sizeof(RetAddrEntry); 754 size += CheckedInt<Offset>(osrEntries) * sizeof(OSREntry); 755 size += CheckedInt<Offset>(debugTrapEntries) * sizeof(DebugTrapEntry); 756 757 if (!size.isValid()) { 758 ReportAllocationOverflow(cx); 759 return nullptr; 760 } 761 762 // Allocate contiguous raw buffer. 763 void* raw = cx->pod_malloc<uint8_t>(size.value()); 764 MOZ_ASSERT(uintptr_t(raw) % alignof(BaselineScript) == 0); 765 if (!raw) { 766 return nullptr; 767 } 768 BaselineScript* script = new (raw) 769 BaselineScript(warmUpCheckPrologueOffset, profilerEnterToggleOffset, 770 profilerExitToggleOffset); 771 772 Offset cursor = sizeof(BaselineScript); 773 774 MOZ_ASSERT(isAlignedOffset<uintptr_t>(cursor)); 775 script->resumeEntriesOffset_ = cursor; 776 cursor += resumeEntries * sizeof(uintptr_t); 777 778 MOZ_ASSERT(isAlignedOffset<RetAddrEntry>(cursor)); 779 script->retAddrEntriesOffset_ = cursor; 780 cursor += retAddrEntries * sizeof(RetAddrEntry); 781 782 MOZ_ASSERT(isAlignedOffset<OSREntry>(cursor)); 783 script->osrEntriesOffset_ = cursor; 784 cursor += osrEntries * sizeof(OSREntry); 785 786 MOZ_ASSERT(isAlignedOffset<DebugTrapEntry>(cursor)); 787 script->debugTrapEntriesOffset_ = cursor; 788 cursor += debugTrapEntries * sizeof(DebugTrapEntry); 789 790 MOZ_ASSERT(isAlignedOffset<uint32_t>(cursor)); 791 792 script->allocBytes_ = cursor; 793 794 MOZ_ASSERT(script->endOffset() == size.value()); 795 796 return script; 797 } 798 799 BaselineScript* BaselineScript::Copy(JSContext* cx, BaselineScript* bs) { 800 BaselineScript* script = jit::BaselineScript::New( 801 cx, bs->warmUpCheckPrologueOffset_, bs->profilerEnterToggleOffset_, 802 bs->profilerExitToggleOffset_, bs->retAddrEntries().size(), 803 bs->osrEntries().size(), bs->debugTrapEntries().size(), 804 
bs->resumeEntryList().size()); 805 if (!script) { 806 return nullptr; 807 } 808 809 script->setMethod(bs->method()); 810 script->copyRetAddrEntries(bs->retAddrEntries().data()); 811 script->copyOSREntries(bs->osrEntries().data()); 812 script->copyDebugTrapEntries(bs->debugTrapEntries().data()); 813 814 script->flags_ = bs->flags_; 815 816 // copyResumeNativeOffsets() 817 std::copy_n(bs->resumeEntryList().begin(), script->resumeEntryList().size(), 818 script->resumeEntryList().data()); 819 820 if (bs->hasDebugInstrumentation()) { 821 script->setHasDebugInstrumentation(); 822 } 823 MOZ_ASSERT(script->method_ == bs->method_); 824 MOZ_ASSERT(script->pendingIonCompileTask_ == bs->pendingIonCompileTask_); 825 MOZ_ASSERT(script->warmUpCheckPrologueOffset_ == 826 bs->warmUpCheckPrologueOffset_); 827 MOZ_ASSERT(script->profilerEnterToggleOffset_ == 828 bs->profilerEnterToggleOffset_); 829 MOZ_ASSERT(script->profilerExitToggleOffset_ == 830 bs->profilerExitToggleOffset_); 831 MOZ_ASSERT(script->resumeEntriesOffset_ == bs->resumeEntriesOffset_); 832 MOZ_ASSERT(script->retAddrEntriesOffset_ == bs->retAddrEntriesOffset_); 833 MOZ_ASSERT(script->osrEntriesOffset_ == bs->osrEntriesOffset_); 834 MOZ_ASSERT(script->debugTrapEntriesOffset_ == bs->debugTrapEntriesOffset_); 835 MOZ_ASSERT(script->allocBytes_ == bs->allocBytes_); 836 MOZ_ASSERT(script->flags_ == bs->flags_); 837 return script; 838 } 839 840 void BaselineScript::trace(JSTracer* trc) { 841 TraceEdge(trc, &method_, "baseline-method"); 842 } 843 844 void BaselineScript::Destroy(JS::GCContext* gcx, BaselineScript* script) { 845 MOZ_ASSERT(!script->hasPendingIonCompileTask()); 846 847 // This allocation is tracked by JSScript::setBaselineScriptImpl. 
848 gcx->deleteUntracked(script); 849 } 850 851 void JS::DeletePolicy<js::jit::BaselineScript>::operator()( 852 const js::jit::BaselineScript* script) { 853 BaselineScript::Destroy(rt_->gcContext(), 854 const_cast<BaselineScript*>(script)); 855 } 856 857 const RetAddrEntry& BaselineScript::retAddrEntryFromReturnOffset( 858 CodeOffset returnOffset) { 859 mozilla::Span<RetAddrEntry> entries = retAddrEntries(); 860 size_t loc; 861 #ifdef DEBUG 862 bool found = 863 #endif 864 BinarySearchIf( 865 entries.data(), 0, entries.size(), 866 [&returnOffset](const RetAddrEntry& entry) { 867 size_t roffset = returnOffset.offset(); 868 size_t entryRoffset = entry.returnOffset().offset(); 869 if (roffset < entryRoffset) { 870 return -1; 871 } 872 if (entryRoffset < roffset) { 873 return 1; 874 } 875 return 0; 876 }, 877 &loc); 878 879 MOZ_ASSERT(found); 880 MOZ_ASSERT(entries[loc].returnOffset().offset() == returnOffset.offset()); 881 return entries[loc]; 882 } 883 884 template <typename Entry> 885 static bool ComputeBinarySearchMid(mozilla::Span<Entry> entries, 886 uint32_t pcOffset, size_t* loc) { 887 return BinarySearchIf( 888 entries.data(), 0, entries.size(), 889 [pcOffset](const Entry& entry) { 890 uint32_t entryOffset = entry.pcOffset(); 891 if (pcOffset < entryOffset) { 892 return -1; 893 } 894 if (entryOffset < pcOffset) { 895 return 1; 896 } 897 return 0; 898 }, 899 loc); 900 } 901 902 uint8_t* BaselineScript::returnAddressForEntry(const RetAddrEntry& ent) { 903 return method()->raw() + ent.returnOffset().offset(); 904 } 905 906 const RetAddrEntry& BaselineScript::retAddrEntryFromPCOffset( 907 uint32_t pcOffset, RetAddrEntry::Kind kind) { 908 mozilla::Span<RetAddrEntry> entries = retAddrEntries(); 909 size_t mid; 910 MOZ_ALWAYS_TRUE(ComputeBinarySearchMid(entries, pcOffset, &mid)); 911 MOZ_ASSERT(mid < entries.size()); 912 913 // Search for the first entry for this pc. 
914 size_t first = mid; 915 while (first > 0 && entries[first - 1].pcOffset() == pcOffset) { 916 first--; 917 } 918 919 // Search for the last entry for this pc. 920 size_t last = mid; 921 while (last + 1 < entries.size() && 922 entries[last + 1].pcOffset() == pcOffset) { 923 last++; 924 } 925 926 MOZ_ASSERT(first <= last); 927 MOZ_ASSERT(entries[first].pcOffset() == pcOffset); 928 MOZ_ASSERT(entries[last].pcOffset() == pcOffset); 929 930 for (size_t i = first; i <= last; i++) { 931 const RetAddrEntry& entry = entries[i]; 932 if (entry.kind() != kind) { 933 continue; 934 } 935 936 #ifdef DEBUG 937 // There must be a unique entry for this pcOffset and Kind to ensure our 938 // return value is well-defined. 939 for (size_t j = i + 1; j <= last; j++) { 940 MOZ_ASSERT(entries[j].kind() != kind); 941 } 942 #endif 943 944 return entry; 945 } 946 947 MOZ_CRASH("Didn't find RetAddrEntry."); 948 } 949 950 const RetAddrEntry& BaselineScript::prologueRetAddrEntry( 951 RetAddrEntry::Kind kind) { 952 MOZ_ASSERT(kind == RetAddrEntry::Kind::StackCheck); 953 954 // The prologue entries will always be at a very low offset, so just do a 955 // linear search from the beginning. 
  // (Tail of a prologue-RetAddrEntry lookup whose signature is above this
  // chunk.) Prologue entries are recorded with a pcOffset of 0, so we can
  // stop scanning as soon as we hit an entry for a real bytecode offset.
  for (const RetAddrEntry& entry : retAddrEntries()) {
    if (entry.pcOffset() != 0) {
      break;
    }
    if (entry.kind() == kind) {
      return entry;
    }
  }
  MOZ_CRASH("Didn't find prologue RetAddrEntry.");
}

// Map a native return address that lies inside this script's jit code back to
// its RetAddrEntry, by converting the raw pointer into a CodeOffset relative
// to the start of the method's code.
const RetAddrEntry& BaselineScript::retAddrEntryFromReturnAddress(
    const uint8_t* returnAddr) {
  MOZ_ASSERT(returnAddr > method_->raw());
  MOZ_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
  CodeOffset offset(returnAddr - method_->raw());
  return retAddrEntryFromReturnOffset(offset);
}

// Return the native code address of the OSR entry for the given bytecode
// offset, or nullptr if this script has no OSR entry at that pc. The OSR
// entry list is binary-searchable (sorted by pc offset — assumed from the
// use of ComputeBinarySearchMid here).
uint8_t* BaselineScript::nativeCodeForOSREntry(uint32_t pcOffset) {
  mozilla::Span<OSREntry> entries = osrEntries();
  size_t mid;
  if (!ComputeBinarySearchMid(entries, pcOffset, &mid)) {
    return nullptr;
  }

  uint32_t nativeOffset = entries[mid].nativeOffset();
  return method_->raw() + nativeOffset;
}

// Fill this script's resume-entry list with the native address for each of
// the script's resume offsets, using the compiler-produced (pcOffset ->
// nativeOffset) entries.
void BaselineScript::computeResumeNativeOffsets(
    JSScript* script, const ResumeOffsetEntryVector& entries) {
  // Translate pcOffset to BaselineScript native address. This may return
  // nullptr if compiler decided code was unreachable.
  auto computeNative = [this, &entries](uint32_t pcOffset) -> uint8_t* {
    mozilla::Span<const ResumeOffsetEntry> entriesSpan =
        mozilla::Span(entries.begin(), entries.length());
    size_t mid;
    if (!ComputeBinarySearchMid(entriesSpan, pcOffset, &mid)) {
      return nullptr;
    }

    uint32_t nativeOffset = entries[mid].nativeOffset();
    return method_->raw() + nativeOffset;
  };

  // One output slot per resume offset, in the same order as
  // script->resumeOffsets().
  mozilla::Span<const uint32_t> pcOffsets = script->resumeOffsets();
  mozilla::Span<uint8_t*> nativeOffsets = resumeEntryList();
  std::transform(pcOffsets.begin(), pcOffsets.end(), nativeOffsets.begin(),
                 computeNative);
}

// Compute the native entry point to use when OSR-ing a baseline-interpreter
// frame into compiled baseline code, storing it in |*entry|. Returns false on
// failure (e.g. if debugger observability could not be ensured).
bool BaselineScript::OSREntryForFrame(JSContext* cx, BaselineFrame* frame,
                                      uint8_t** entry) {
  MOZ_ASSERT(frame->runningInInterpreter());

  JSScript* script = frame->script();
  BaselineScript* baselineScript = script->baselineScript();
  jsbytecode* pc = frame->interpreterPC();
  size_t pcOffset = script->pcToOffset(pc);

  if (MOZ_UNLIKELY(frame->isDebuggee() &&
                   !baselineScript->hasDebugInstrumentation())) {
    // This check is needed in the following corner case. Consider a function h,
    //
    //   function h(x) {
    //      if (!x)
    //        return;
    //      h(false);
    //      for (var i = 0; i < N; i++)
    //         /* do stuff */
    //   }
    //
    // Suppose h is not yet compiled in baseline and is executing in the
    // baseline interpreter. Let this interpreter frame be f_older. The debugger
    // marks f_older as isDebuggee. At the point of the recursive call h(false),
    // h is compiled in baseline without debug instrumentation, pushing a
    // baseline frame f_newer. The debugger never flags f_newer as isDebuggee,
    // and never recompiles h. When the recursive call returns and execution
    // proceeds to the loop, the baseline interpreter attempts to OSR into
    // baseline. Since h is already compiled in baseline, execution jumps
    // directly into baseline code. This is incorrect as h's baseline script
    // does not have debug instrumentation.
    if (!DebugAPI::ensureExecutionObservabilityOfOsrFrame(cx, frame)) {
      return false;
    }
    // The call above may have replaced the BaselineScript; re-fetch it.
    baselineScript = script->baselineScript();
  }

  if (JSOp(*pc) == JSOp::LoopHead) {
    // OSR at a loop head: jump straight to the loop's OSR entry.
    MOZ_ASSERT(pc > script->code(),
               "Prologue vs OSR cases must not be ambiguous");
    *entry = baselineScript->nativeCodeForOSREntry(pcOffset);
  } else {
    // Not at a loop head: enter through the warm-up check in the prologue.
    *entry = baselineScript->warmUpCheckPrologueAddr();
  }

  frame->prepareForBaselineInterpreterToJitOSR();
  return true;
}

// The three copy* methods below copy compiler-produced entry arrays into the
// trailing storage of this BaselineScript. std::copy_n takes the count from
// the destination span, so |entries| must hold at least that many elements.

void BaselineScript::copyRetAddrEntries(const RetAddrEntry* entries) {
  std::copy_n(entries, retAddrEntries().size(), retAddrEntries().data());
}

void BaselineScript::copyOSREntries(const OSREntry* entries) {
  std::copy_n(entries, osrEntries().size(), osrEntries().data());
}

void BaselineScript::copyDebugTrapEntries(const DebugTrapEntry* entries) {
  std::copy_n(entries, debugTrapEntries().size(), debugTrapEntries().data());
}

// Best-effort mapping from a native code address inside this script back to a
// bytecode pc, for profiling.
jsbytecode* BaselineScript::approximatePcForNativeAddress(
    JSScript* script, uint8_t* nativeAddress) {
  MOZ_ASSERT(script->baselineScript() == this);
  MOZ_ASSERT(containsCodeAddress(nativeAddress));

  uint32_t nativeOffset = nativeAddress - method_->raw();

  // Use the RetAddrEntry list (sorted on pc and return address) to look for the
  // first pc that has a return address >= nativeOffset. This isn't perfect but
  // it's a reasonable approximation for the profiler because most non-trivial
  // bytecode ops have a RetAddrEntry.

  for (const RetAddrEntry& entry : retAddrEntries()) {
    uint32_t retOffset = entry.returnOffset().offset();
    if (retOffset >= nativeOffset) {
      return script->offsetToPC(entry.pcOffset());
    }
  }

  // Return the last entry's pc. Every BaselineScript has at least one
  // RetAddrEntry for the prologue stack overflow check.
  MOZ_ASSERT(!retAddrEntries().empty());
  return script->offsetToPC(retAddrEntries().crbegin()->pcOffset());
}

// Enable or disable the patchable debug-trap call sites in this script's
// code. If |pc| is non-null, only the trap at that bytecode op is toggled;
// otherwise all traps are re-evaluated.
void BaselineScript::toggleDebugTraps(JSScript* script, jsbytecode* pc) {
  MOZ_ASSERT(script->baselineScript() == this);

  // Only scripts compiled for debug mode have toggled calls.
  if (!hasDebugInstrumentation()) {
    return;
  }

  // Make the jit code writable for the duration of the patching below.
  AutoWritableJitCode awjc(method());

  for (const DebugTrapEntry& entry : debugTrapEntries()) {
    jsbytecode* entryPC = script->offsetToPC(entry.pcOffset());

    // If the |pc| argument is non-null we can skip all other bytecode ops.
    if (pc && pc != entryPC) {
      continue;
    }

    // A trap is live if we're single-stepping or there's a breakpoint here.
    bool enabled = DebugAPI::stepModeEnabled(script) ||
                   DebugAPI::hasBreakpointsAt(script, entryPC);

    // Patch the trap.
    CodeLocationLabel label(method(), CodeOffset(entry.nativeOffset()));
    Assembler::ToggleCall(label, enabled);
  }
}

// Record an off-thread Ion compilation task for this script. Also refreshes
// the script's jit code pointer (presumably so callers observe the pending
// state — confirm against updateJitCodeRaw).
void BaselineScript::setPendingIonCompileTask(JSRuntime* rt, JSScript* script,
                                              IonCompileTask* task) {
  MOZ_ASSERT(script->baselineScript() == this);
  MOZ_ASSERT(task);
  MOZ_ASSERT(!hasPendingIonCompileTask());

  // Clear the "Ion compiling off-thread" flag before taking ownership of the
  // task here.
  if (script->isIonCompilingOffThread()) {
    script->jitScript()->clearIsIonCompilingOffThread(script);
  }

  pendingIonCompileTask_ = task;
  script->updateJitCodeRaw(rt);
}

// Clear the pending Ion compile task set by setPendingIonCompileTask and
// refresh the script's jit code pointer.
void BaselineScript::removePendingIonCompileTask(JSRuntime* rt,
                                                 JSScript* script) {
  MOZ_ASSERT(script->baselineScript() == this);
  MOZ_ASSERT(hasPendingIonCompileTask());

  pendingIonCompileTask_ = nullptr;
  script->updateJitCodeRaw(rt);
}

// Patch the profiler enter/exit toggle sites in |code|: toggled to a cmp when
// profiling is enabled, and back to a jmp (skipping the instrumentation) when
// disabled. Callers are responsible for making |code| writable.
static void ToggleProfilerInstrumentation(JitCode* code,
                                          uint32_t profilerEnterToggleOffset,
                                          uint32_t profilerExitToggleOffset,
                                          bool enable) {
  CodeLocationLabel enterToggleLocation(code,
                                        CodeOffset(profilerEnterToggleOffset));
  CodeLocationLabel exitToggleLocation(code,
                                       CodeOffset(profilerExitToggleOffset));
  if (enable) {
    Assembler::ToggleToCmp(enterToggleLocation);
    Assembler::ToggleToCmp(exitToggleLocation);
  } else {
    Assembler::ToggleToJmp(enterToggleLocation);
    Assembler::ToggleToJmp(exitToggleLocation);
  }
}

// Toggle profiler instrumentation for this compiled script and track the new
// state in flags_. No-op if already in the requested state.
void BaselineScript::toggleProfilerInstrumentation(bool enable) {
  if (enable == isProfilerInstrumentationOn()) {
    return;
  }

  JitSpew(JitSpew_BaselineIC, " toggling profiling %s for BaselineScript %p",
          enable ? "on" : "off", this);

  ToggleProfilerInstrumentation(method_, profilerEnterToggleOffset_,
                                profilerExitToggleOffset_, enable);

  // Keep the flag bit in sync with the patched code.
  if (enable) {
    flags_ |= uint32_t(PROFILER_INSTRUMENTATION_ON);
  } else {
    flags_ &= ~uint32_t(PROFILER_INSTRUMENTATION_ON);
  }
}

// Toggle profiler instrumentation in the shared baseline-interpreter code.
void BaselineInterpreter::toggleProfilerInstrumentation(bool enable) {
  if (!IsBaselineInterpreterEnabled()) {
    return;
  }

  AutoWritableJitCode awjc(code_);
  ToggleProfilerInstrumentation(code_, profilerEnterToggleOffset_,
                                profilerExitToggleOffset_, enable);
}

// Toggle debugger instrumentation in the shared baseline-interpreter code:
// both the toggled jumps and the nop/call sites for the debug trap handler.
void BaselineInterpreter::toggleDebuggerInstrumentation(bool enable) {
  if (!IsBaselineInterpreterEnabled()) {
    return;
  }

  AutoWritableJitCode awjc(code_);

  // Toggle jumps for debugger instrumentation.
  for (uint32_t offset : debugInstrumentationOffsets_) {
    CodeLocationLabel label(code_, CodeOffset(offset));
    if (enable) {
      Assembler::ToggleToCmp(label);
    } else {
      Assembler::ToggleToJmp(label);
    }
  }

  // Toggle DebugTrapHandler calls.

  uint8_t* debugTrapHandler = codeAtOffset(debugTrapHandlerOffset_);

  for (uint32_t offset : debugTrapOffsets_) {
    // Each trap site is a patchable nop that becomes a call to the handler.
    uint8_t* trap = codeAtOffset(offset);
    if (enable) {
      MacroAssembler::patchNopToCall(trap, debugTrapHandler);
    } else {
      MacroAssembler::patchCallToNop(trap);
    }
  }
}

// Toggle code-coverage instrumentation sites unconditionally (no LCov check;
// see toggleCodeCoverageInstrumentation below for the checked version).
void BaselineInterpreter::toggleCodeCoverageInstrumentationUnchecked(
    bool enable) {
  if (!IsBaselineInterpreterEnabled()) {
    return;
  }

  AutoWritableJitCode awjc(code_);

  for (uint32_t offset : codeCoverageOffsets_) {
    CodeLocationLabel label(code_, CodeOffset(offset));
    if (enable) {
      Assembler::ToggleToCmp(label);
    } else {
      Assembler::ToggleToJmp(label);
    }
  }
}

// Toggle code-coverage instrumentation, unless LCov is enabled, in which case
// instrumentation must stay on permanently.
void BaselineInterpreter::toggleCodeCoverageInstrumentation(bool enable) {
  if (coverage::IsLCovEnabled()) {
    // Instrumentation is enabled no matter what.
    return;
  }

  toggleCodeCoverageInstrumentationUnchecked(enable);
}

// Detach and destroy a script's BaselineScript. The caller must ensure the
// script actually has one and that its ICScript is no longer active.
void jit::FinishDiscardBaselineScript(JS::GCContext* gcx, JSScript* script) {
  MOZ_ASSERT(script->hasBaselineScript());
  MOZ_ASSERT(!script->jitScript()->icScript()->active());

  BaselineScript* baseline =
      script->jitScript()->clearBaselineScript(gcx, script);
  BaselineScript::Destroy(gcx, baseline);
}

// Memory-reporter helper: add the size of the script's baseline data (if any)
// to |*data|.
void jit::AddSizeOfBaselineData(JSScript* script,
                                mozilla::MallocSizeOf mallocSizeOf,
                                size_t* data) {
  if (script->hasBaselineScript()) {
    script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data);
  }
}

// Toggle profiler instrumentation runtime-wide: the shared baseline
// interpreter plus every compiled BaselineScript in every (non-atoms) zone.
void jit::ToggleBaselineProfiling(JSContext* cx, bool enable) {
  JitRuntime* jrt = cx->runtime()->jitRuntime();
  if (!jrt) {
    return;
  }

  jrt->baselineInterpreter().toggleProfilerInstrumentation(enable);

  for (ZonesIter zone(cx->runtime(), SkipAtoms); !zone.done(); zone.next()) {
    if (!zone->jitZone()) {
      continue;
    }
    zone->jitZone()->forEachJitScript([&](jit::JitScript* jitScript) {
      JSScript* script = jitScript->owningScript();
      if (enable) {
        // Make sure profiler metadata exists before instrumentation fires.
        jitScript->ensureProfileString(cx, script);
        jitScript->ensureProfilerScriptSource(cx, script);
      }
      if (script->hasBaselineScript()) {
        AutoWritableJitCode awjc(script->baselineScript()->method());
        script->baselineScript()->toggleProfilerInstrumentation(enable);
      }
    });
  }
}

// One-time initialization of the shared baseline interpreter: stores the
// generated code and all the patchable-offset tables used by the toggle
// methods above.
void BaselineInterpreter::init(JitCode* code, uint32_t interpretOpOffset,
                               uint32_t interpretOpNoDebugTrapOffset,
                               uint32_t bailoutPrologueOffset,
                               uint32_t profilerEnterToggleOffset,
                               uint32_t profilerExitToggleOffset,
                               uint32_t debugTrapHandlerOffset,
                               CodeOffsetVector&& debugInstrumentationOffsets,
                               CodeOffsetVector&& debugTrapOffsets,
                               CodeOffsetVector&& codeCoverageOffsets,
                               ICReturnOffsetVector&& icReturnOffsets,
                               const CallVMOffsets& callVMOffsets) {
  code_ = code;
  interpretOpOffset_ = interpretOpOffset;
  interpretOpNoDebugTrapOffset_ = interpretOpNoDebugTrapOffset;
  bailoutPrologueOffset_ = bailoutPrologueOffset;
  profilerEnterToggleOffset_ = profilerEnterToggleOffset;
  profilerExitToggleOffset_ = profilerExitToggleOffset;
  debugTrapHandlerOffset_ = debugTrapHandlerOffset;
  debugInstrumentationOffsets_ = std::move(debugInstrumentationOffsets);
  debugTrapOffsets_ = std::move(debugTrapOffsets);
  codeCoverageOffsets_ = std::move(codeCoverageOffsets);
  icReturnOffsets_ = std::move(icReturnOffsets);
  callVMOffsets_ = callVMOffsets;
}

// Return the native return address for the IC of bytecode op |op| in the
// baseline interpreter. Linear scan; crashes if there is no entry for |op|.
uint8_t* BaselineInterpreter::retAddrForIC(JSOp op) const {
  for (const ICReturnOffset& entry : icReturnOffsets_) {
    if (entry.op == op) {
      return codeAtOffset(entry.offset);
    }
  }
  MOZ_CRASH("Unexpected op");
}

// Generate the shared baseline-interpreter code. Succeeds trivially (without
// generating anything) when the baseline interpreter is disabled.
bool jit::GenerateBaselineInterpreter(JSContext* cx,
                                      BaselineInterpreter& interpreter) {
  if (IsBaselineInterpreterEnabled()) {
    TempAllocator temp(&cx->tempLifoAlloc());
    StackMacroAssembler masm(cx, temp);
    BaselineInterpreterGenerator generator(cx, temp, masm);
    return generator.generate(cx, interpreter);
  }

  return true;
}