BaselineCodeGen.cpp (214433B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #include "jit/BaselineCodeGen.h" 8 9 #include "mozilla/Casting.h" 10 11 #include "gc/GC.h" 12 #include "jit/BaselineCompileQueue.h" 13 #include "jit/BaselineCompileTask.h" 14 #include "jit/BaselineIC.h" 15 #include "jit/BaselineJIT.h" 16 #include "jit/CacheIRCompiler.h" 17 #include "jit/CacheIRGenerator.h" 18 #include "jit/CalleeToken.h" 19 #include "jit/FixedList.h" 20 #include "jit/IonOptimizationLevels.h" 21 #include "jit/JitcodeMap.h" 22 #include "jit/JitFrames.h" 23 #include "jit/JitRuntime.h" 24 #include "jit/JitSpewer.h" 25 #include "jit/Linker.h" 26 #include "jit/PerfSpewer.h" 27 #include "jit/SharedICHelpers.h" 28 #include "jit/TemplateObject.h" 29 #include "jit/TrialInlining.h" 30 #include "jit/VMFunctions.h" 31 #include "js/friend/ErrorMessages.h" // JSMSG_* 32 #include "js/UniquePtr.h" 33 #include "vm/AsyncFunction.h" 34 #include "vm/AsyncIteration.h" 35 #include "vm/BuiltinObjectKind.h" 36 #include "vm/ConstantCompareOperand.h" 37 #include "vm/EnvironmentObject.h" 38 #include "vm/FunctionFlags.h" // js::FunctionFlags 39 #include "vm/Interpreter.h" 40 #include "vm/JSFunction.h" 41 #include "vm/Logging.h" 42 #include "vm/Time.h" 43 #ifdef MOZ_VTUNE 44 # include "vtune/VTuneWrapper.h" 45 #endif 46 47 #include "debugger/DebugAPI-inl.h" 48 #include "jit/BaselineFrameInfo-inl.h" 49 #include "jit/JitHints-inl.h" 50 #include "jit/JitScript-inl.h" 51 #include "jit/MacroAssembler-inl.h" 52 #include "jit/SharedICHelpers-inl.h" 53 #include "jit/TemplateObject-inl.h" 54 #include "jit/VMFunctionList-inl.h" 55 #include "vm/Interpreter-inl.h" 56 #include "vm/JSScript-inl.h" 57 58 using namespace js; 59 using namespace js::jit; 60 61 using 
JS::TraceKind;

using mozilla::AssertedCast;
using mozilla::Maybe;

namespace js {

class PlainObject;

namespace jit {

// Initialize compiler state from the BaselineSnapshot. All script-derived
// inputs (template objects, warm-up threshold, debug flags, ...) are read
// from the snapshot here rather than from a JSContext, so the handler holds
// everything the compiler needs up front.
BaselineCompilerHandler::BaselineCompilerHandler(MacroAssembler& masm,
                                                 TempAllocator& alloc,
                                                 BaselineSnapshot* snapshot)
    : frame_(snapshot->script(), masm),
      alloc_(alloc),
      analysis_(alloc, snapshot->script()),
#ifdef DEBUG
      masm_(masm),
#endif
      script_(snapshot->script()),
      pc_(snapshot->script()->code()),
      nargs_(snapshot->nargs()),
      globalLexicalEnvironment_(snapshot->globalLexical()),
      globalThis_(snapshot->globalThis()),
      callObjectTemplate_(snapshot->callObjectTemplate()),
      namedLambdaTemplate_(snapshot->namedLambdaTemplate()),
      icEntryIndex_(0),
      baseWarmUpThreshold_(snapshot->baseWarmUpThreshold()),
      compileDebugInstrumentation_(snapshot->compileDebugInstrumentation()),
      ionCompileable_(snapshot->isIonCompileable()) {
}

// The interpreter generator is script-independent, so its handler only needs
// the frame bookkeeping.
BaselineInterpreterHandler::BaselineInterpreterHandler(MacroAssembler& masm)
    : frame_(masm) {}

// Shared base constructor: the extra HandlerArgs are forwarded to the
// concrete handler (compiler or interpreter) constructor.
template <typename Handler>
template <typename... HandlerArgs>
BaselineCodeGen<Handler>::BaselineCodeGen(TempAllocator& alloc,
                                          MacroAssembler& masmArg,
                                          CompileRuntime* runtimeArg,
                                          HandlerArgs&&... args)
    : handler(masmArg, std::forward<HandlerArgs>(args)...),
      runtime(runtimeArg),
      masm(masmArg),
      frame(handler.frame()) {}

BaselineCompiler::BaselineCompiler(TempAllocator& alloc,
                                   CompileRuntime* runtime,
                                   MacroAssembler& masm,
                                   BaselineSnapshot* snapshot)
    : BaselineCodeGen(alloc, masm, runtime,
                      /* HandlerArgs = */ alloc, snapshot) {
#ifdef JS_CODEGEN_NONE
  // No code generation backend on this platform; constructing a compiler is
  // a bug.
  MOZ_CRASH();
#endif
}

BaselineInterpreterGenerator::BaselineInterpreterGenerator(JSContext* cx,
                                                           TempAllocator& alloc,
                                                           MacroAssembler& masm)
    : BaselineCodeGen(alloc, masm, CompileRuntime::get(cx->runtime())
                      /* no handlerArgs */) {}

// Allocate the per-bytecode label array and initialize the bytecode analysis
// and frame bookkeeping. Returns false on allocation failure.
bool BaselineCompilerHandler::init() {
  if (!analysis_.init(alloc_)) {
    return false;
  }

  uint32_t len = script_->length();

  if (!labels_.init(alloc_, len)) {
    return false;
  }

  // Placement-new each label; labels_ provides raw storage only.
  for (size_t i = 0; i < len; i++) {
    new (&labels_[i]) Label();
  }

  if (!frame_.init(alloc_)) {
    return false;
  }

  return true;
}

bool BaselineCompiler::init() {
  if (!handler.init()) {
    return false;
  }

  return true;
}

// Record the return address of a VM/IC call so the frame iterator and
// bailout machinery can map a return address back to a bytecode pc.
bool BaselineCompilerHandler::recordCallRetAddr(RetAddrEntry::Kind kind,
                                                uint32_t retOffset) {
  uint32_t pcOffset = script_->pcToOffset(pc_);

  // Entries must be sorted by pcOffset for binary search to work.
  // See BaselineScript::retAddrEntryFromPCOffset.
  MOZ_ASSERT_IF(!retAddrEntries_.empty(),
                retAddrEntries_.back().pcOffset() <= pcOffset);

  // Similarly, entries must be sorted by return offset and this offset must be
  // unique. See BaselineScript::retAddrEntryFromReturnOffset.
MOZ_ASSERT_IF(!retAddrEntries_.empty() && !masm_.oom(),
                retAddrEntries_.back().returnOffset().offset() < retOffset);

  if (!retAddrEntries_.emplaceBack(pcOffset, kind, CodeOffset(retOffset))) {
    return false;
  }

  return true;
}

// Interpreter variant: only the debugger-related VM call offsets are
// recorded (each is expected to be emitted exactly once); all other call
// kinds need no per-call bookkeeping here.
bool BaselineInterpreterHandler::recordCallRetAddr(RetAddrEntry::Kind kind,
                                                   uint32_t retOffset) {
  switch (kind) {
    case RetAddrEntry::Kind::DebugPrologue:
      MOZ_ASSERT(callVMOffsets_.debugPrologueOffset == 0,
                 "expected single DebugPrologue call");
      callVMOffsets_.debugPrologueOffset = retOffset;
      break;
    case RetAddrEntry::Kind::DebugEpilogue:
      MOZ_ASSERT(callVMOffsets_.debugEpilogueOffset == 0,
                 "expected single DebugEpilogue call");
      callVMOffsets_.debugEpilogueOffset = retOffset;
      break;
    case RetAddrEntry::Kind::DebugAfterYield:
      MOZ_ASSERT(callVMOffsets_.debugAfterYieldOffset == 0,
                 "expected single DebugAfterYield call");
      callVMOffsets_.debugAfterYieldOffset = retOffset;
      break;
    default:
      break;
  }

  return true;
}

bool BaselineInterpreterHandler::addDebugInstrumentationOffset(
    CodeOffset offset) {
  return debugInstrumentationOffsets_.append(offset.offset());
}

// Main-thread setup that must happen before compilation: ensure the script
// has a JitScript, script counts (for coverage), JIT hints, cached baseline
// JIT data, and — when debug instrumentation is requested — the debug trap
// handler trampoline.
/*static*/
bool BaselineCompiler::PrepareToCompile(JSContext* cx, Handle<JSScript*> script,
                                        bool compileDebugInstrumentation) {
  JitSpew(JitSpew_BaselineScripts, "Baseline compiling script %s:%u:%u (%p)",
          script->filename(), script->lineno(),
          script->column().oneOriginValue(), script.get());

  AutoKeepJitScripts keepJitScript(cx);
  if (!script->ensureHasJitScript(cx, keepJitScript)) {
    return false;
  }

  // When code coverage is enabled, we have to create the ScriptCounts if they
  // do not exist.
  if (!script->hasScriptCounts() && cx->realm()->collectCoverageForDebug()) {
    if (!script->initScriptCounts(cx)) {
      return false;
    }
  }

  if (!JitOptions.disableJitHints &&
      cx->runtime()->jitRuntime()->hasJitHintsMap()) {
    JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap();
    jitHints->setEagerBaselineHint(script);
  }

  if (!script->jitScript()->ensureHasCachedBaselineJitData(cx, script)) {
    return false;
  }

  if (MOZ_UNLIKELY(compileDebugInstrumentation) &&
      !cx->runtime()->jitRuntime()->ensureDebugTrapHandler(
          cx, DebugTrapHandlerKind::Compiler)) {
    return false;
  }

  return true;
}

// Synchronous (main-thread) compilation: emit the code, then link it and
// attach the BaselineScript to the script.
MethodStatus BaselineCompiler::compile(JSContext* cx) {
  Rooted<JSScript*> script(cx, handler.script());

  JitSpew(JitSpew_Codegen, "# Emitting baseline code for script %s:%u:%u",
          script->filename(), script->lineno(),
          script->column().oneOriginValue());

  MOZ_ASSERT(!script->hasBaselineScript());

  if (!compileImpl()) {
    // compileImpl only fails on OOM; report it here where we have a cx.
    ReportOutOfMemory(cx);
    return Method_Error;
  }

  if (!finishCompile(cx)) {
    return Method_Error;
  }

  return Method_Compiled;
}

// Off-thread compilation: only emission happens here; linking/finishing is
// done later on the main thread.
MethodStatus BaselineCompiler::compileOffThread() {
  handler.setCompilingOffThread();
  if (!compileImpl()) {
    return Method_Error;
  }
  return Method_Compiled;
}

// Emit the whole method: prologue, per-op body, epilogue, and the shared
// out-of-line post-barrier slot. Returns false on OOM.
bool BaselineCompiler::compileImpl() {
  AutoCreatedBy acb(masm, "BaselineCompiler::compile");

  perfSpewer_.startRecording();
  perfSpewer_.recordOffset(masm, "Prologue");
  if (!emitPrologue()) {
    return false;
  }

  if (!emitBody()) {
    return false;
  }

  perfSpewer_.recordOffset(masm, "Epilogue");
  if (!emitEpilogue()) {
    return false;
  }

  perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
  emitOutOfLinePostBarrierSlot();

  perfSpewer_.endRecording();

  return true;
}

bool
BaselineCompiler::finishCompile(JSContext* cx) {
  // Link the emitted code, create the BaselineScript with its side tables
  // (return-address entries, OSR entries, debug traps, resume offsets), and
  // register profiling/jitcode-map metadata. Runs with a JSContext, i.e. on
  // the main thread.
  Rooted<JSScript*> script(cx, handler.script());
  bool isRealmIndependentJitCodeShared =
      JS::Prefs::experimental_self_hosted_cache() && script->selfHosted();

  UniquePtr<BaselineScript> baselineScript(
      nullptr, JS::DeletePolicy<BaselineScript>(cx->runtime()));
  JitCode* code = nullptr;
  {
    // Shared self-hosted jitcode is allocated in the atoms zone so it can be
    // used across realms.
    mozilla::Maybe<AutoAllocInAtomsZone> ar;
    if (isRealmIndependentJitCodeShared) {
      ar.emplace(cx);
    }

    AutoCreatedBy acb2(masm, "exception_tail");
    Linker linker(masm);
    if (masm.oom()) {
      ReportOutOfMemory(cx);
      return false;
    }

    code = linker.newCode(cx, CodeKind::Baseline);
    if (!code) {
      return false;
    }

    baselineScript.reset(BaselineScript::New(
        cx, warmUpCheckPrologueOffset_.offset(),
        profilerEnterFrameToggleOffset_.offset(),
        profilerExitFrameToggleOffset_.offset(),
        handler.retAddrEntries().length(), handler.osrEntries().length(),
        debugTrapEntries_.length(), script->resumeOffsets().size()));
    if (!baselineScript) {
      return false;
    }

    baselineScript->setMethod(code);

    JitSpew(JitSpew_BaselineScripts,
            "Created BaselineScript %p (raw %p) for %s:%u:%u",
            (void*)baselineScript.get(), (void*)code->raw(), script->filename(),
            script->lineno(), script->column().oneOriginValue());

    // If profiler instrumentation is enabled, toggle instrumentation on.
    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
            cx->runtime())) {
      baselineScript->toggleProfilerInstrumentation(true);
    }
  }
  baselineScript->copyRetAddrEntries(handler.retAddrEntries().begin());
  baselineScript->copyOSREntries(handler.osrEntries().begin());
  baselineScript->copyDebugTrapEntries(debugTrapEntries_.begin());

  // Compute native resume addresses for the script's resume offsets.
  baselineScript->computeResumeNativeOffsets(script, resumeOffsetEntries_);

  if (compileDebugInstrumentation()) {
    baselineScript->setHasDebugInstrumentation();
  }

  // If BytecodeAnalysis indicated that we should disable Ion or inlining,
  // update the script now.
  handler.maybeDisableIon();

  // AllocSites must be allocated on the main thread.
  handler.createAllocSites();

  // Always register a native => bytecode mapping entry, since profiler can be
  // turned on with baseline jitcode on stack, and baseline jitcode cannot be
  // invalidated.
  {
    UniqueJitcodeGlobalEntry entry;
    JitSpew(JitSpew_Profiling,
            "Added JitcodeGlobalEntry for baseline %sscript %s:%u:%u (%p)",
            isRealmIndependentJitCodeShared ? "shared realm-independent " : "",
            script->filename(), script->lineno(),
            script->column().oneOriginValue(), baselineScript.get());

    // Generate profiling string.
    UniqueChars str = GeckoProfilerRuntime::allocProfileString(cx, script);
    if (!str) {
      return false;
    }

    if (isRealmIndependentJitCodeShared) {
      entry = MakeJitcodeGlobalEntry<RealmIndependentSharedEntry>(
          cx, code, code->raw(), code->rawEnd(), std::move(str));
    } else {
      uint64_t realmId = script->realm()->creationOptions().profilerRealmID();
      entry = MakeJitcodeGlobalEntry<BaselineEntry>(cx, code, code->raw(),
                                                    code->rawEnd(), script,
                                                    std::move(str), realmId);
    }
    if (!entry) {
      return false;
    }

    JitcodeGlobalTable* globalTable =
        cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
    if (!globalTable->addEntry(std::move(entry))) {
      ReportOutOfMemory(cx);
      return false;
    }

    // Mark the jitcode as having a bytecode map.
    code->setHasBytecodeMap();
  }

  // Ownership of the BaselineScript transfers to the JitScript.
  script->jitScript()->setBaselineScript(script, baselineScript.release());

  perfSpewer_.saveProfile(cx, script, code);

#ifdef MOZ_VTUNE
  vtune::MarkScript(code, script, "baseline");
#endif

  return true;
}

// Apply the results of bytecode analysis: disable Ion compilation and/or
// inlining for this script if the analysis said so.
void BaselineCompilerHandler::maybeDisableIon() {
  if (analysis_.isIonDisabled()) {
    script()->disableIon();
  }
  if (analysis_.isInliningDisabled()) {
    script()->setUninlineable();
  }
  script()->jitScript()->setRanBytecodeAnalysis();
}

// On most platforms we use a dedicated bytecode PC register to avoid many
// dependent loads and stores for sequences of simple bytecode ops. This
// register must be saved/restored around VM and IC calls.
//
// On 32-bit x86 we don't have enough registers for this (because R0-R2 require
// 6 registers) so there we always store the pc on the frame.
static constexpr bool HasInterpreterPCReg() {
  return InterpreterPCReg != InvalidReg;
}

// Return the register holding the current bytecode pc, loading it from the
// frame into |scratch| on platforms without a dedicated pc register.
static Register LoadBytecodePC(MacroAssembler& masm, Register scratch) {
  if (HasInterpreterPCReg()) {
    return InterpreterPCReg;
  }

  Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
  masm.loadPtr(pcAddr, scratch);
  return scratch;
}

// Load the signed 8-bit operand immediately following the current opcode.
static void LoadInt8Operand(MacroAssembler& masm, Register dest) {
  Register pc = LoadBytecodePC(masm, dest);
  masm.load8SignExtend(Address(pc, sizeof(jsbytecode)), dest);
}

// Load the unsigned 8-bit operand immediately following the current opcode.
static void LoadUint8Operand(MacroAssembler& masm, Register dest) {
  Register pc = LoadBytecodePC(masm, dest);
  masm.load8ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
}

// Load the unsigned 16-bit operand immediately following the current opcode.
static void LoadUint16Operand(MacroAssembler& masm, Register dest) {
  Register pc = LoadBytecodePC(masm, dest);
  masm.load16ZeroExtend(Address(pc, sizeof(jsbytecode)), dest);
}

static void LoadConstantCompareOperand(MacroAssembler& masm,
                                       Register
constantType,
                                       Register payload) {
  // Note: Baseline interpreter on x86 doesn't have a separate pc register,
  // see HasInterpreterPCReg(), so we use |payload| as a scratch register first
  // and then write the actual payload into it after loading the type.
  Register pc = LoadBytecodePC(masm, payload);
  masm.load8ZeroExtend(Address(pc, ConstantCompareOperand::OFFSET_OF_TYPE),
                       constantType);
  masm.load8SignExtend(Address(pc, ConstantCompareOperand::OFFSET_OF_VALUE),
                       payload);
}

// Load the 32-bit operand immediately following the current opcode.
static void LoadInt32Operand(MacroAssembler& masm, Register dest) {
  Register pc = LoadBytecodePC(masm, dest);
  masm.load32(Address(pc, sizeof(jsbytecode)), dest);
}

static void LoadInt32OperandSignExtendToPtr(MacroAssembler& masm, Register pc,
                                            Register dest) {
  masm.load32SignExtendToPtr(Address(pc, sizeof(jsbytecode)), dest);
}

static void LoadUint24Operand(MacroAssembler& masm, size_t offset,
                              Register dest) {
  // Load the opcode and operand, then right shift to discard the opcode.
  Register pc = LoadBytecodePC(masm, dest);
  masm.load32(Address(pc, offset), dest);
  masm.rshift32(Imm32(8), dest);
}

static void LoadInlineValueOperand(MacroAssembler& masm, ValueOperand dest) {
  // Note: the Value might be unaligned but as above we rely on all our
  // platforms having appropriate support for unaligned accesses (except for
  // floating point instructions on ARM).
  Register pc = LoadBytecodePC(masm, dest.scratchReg());
  masm.loadUnalignedValue(Address(pc, sizeof(jsbytecode)), dest);
}

// Load the JSScript* into |dest|: from the frame for realm-independent
// jitcode, or baked in as an immediate otherwise.
template <typename Handler>
void BaselineCodeGen<Handler>::loadScript(Register dest) {
  if (handler.realmIndependentJitcode()) {
    masm.loadPtr(frame.addressOfInterpreterScript(), dest);
  } else {
    masm.movePtr(ImmGCPtr(handler.maybeScript()), dest);
  }
}

// Load the script's JitScript* into |dest| (via the warm-up data word for
// realm-independent jitcode).
template <typename Handler>
void BaselineCodeGen<Handler>::loadJitScript(Register dest) {
  if (handler.realmIndependentJitcode()) {
    loadScript(dest);
    masm.loadPtr(Address(dest, JSScript::offsetOfWarmUpData()), dest);
  } else {
    masm.movePtr(ImmPtr(handler.maybeScript()->jitScript()), dest);
  }
}

// The compiler has no interpreter pc to spill; these are no-ops.
template <>
void BaselineCompilerCodeGen::saveInterpreterPCReg() {}

template <>
void BaselineInterpreterCodeGen::saveInterpreterPCReg() {
  if (HasInterpreterPCReg()) {
    masm.storePtr(InterpreterPCReg, frame.addressOfInterpreterPC());
  }
}

template <>
void BaselineCompilerCodeGen::restoreInterpreterPCReg() {}

template <>
void BaselineInterpreterCodeGen::restoreInterpreterPCReg() {
  if (HasInterpreterPCReg()) {
    masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCReg);
  }
}

template <>
void BaselineCompilerCodeGen::emitInitializeLocals() {
  // Initialize all locals to |undefined|. Lexical bindings are temporal
  // dead zoned in bytecode.

  size_t n = frame.nlocals();
  if (n == 0) {
    return;
  }

  // Use R0 to minimize code size. If the number of locals to push is <
  // LOOP_UNROLL_FACTOR, then the initialization pushes are emitted directly
  // and inline. Otherwise, they're emitted in a partially unrolled loop.
  static const size_t LOOP_UNROLL_FACTOR = 4;
  size_t toPushExtra = n % LOOP_UNROLL_FACTOR;

  masm.moveValue(UndefinedValue(), R0);

  // Handle any extra pushes left over by the optional unrolled loop below.
  for (size_t i = 0; i < toPushExtra; i++) {
    masm.pushValue(R0);
  }

  // Partially unrolled loop of pushes.
  if (n >= LOOP_UNROLL_FACTOR) {
    size_t toPush = n - toPushExtra;
    MOZ_ASSERT(toPush % LOOP_UNROLL_FACTOR == 0);
    MOZ_ASSERT(toPush >= LOOP_UNROLL_FACTOR);
    masm.move32(Imm32(toPush), R1.scratchReg());
    // Emit unrolled loop with 4 pushes per iteration.
    Label pushLoop;
    masm.bind(&pushLoop);
    for (size_t i = 0; i < LOOP_UNROLL_FACTOR; i++) {
      masm.pushValue(R0);
    }
    masm.branchSub32(Assembler::NonZero, Imm32(LOOP_UNROLL_FACTOR),
                     R1.scratchReg(), &pushLoop);
  }
}

template <>
void BaselineInterpreterCodeGen::emitInitializeLocals() {
  // Push |undefined| for all locals. The local count (nfixed) isn't known at
  // generation time here, so load it from the script and loop at runtime.

  Register scratch = R0.scratchReg();
  loadScript(scratch);
  masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
  masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
               scratch);
  masm.load32(Address(scratch, ImmutableScriptData::offsetOfNfixed()), scratch);

  Label top, done;
  masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
  masm.bind(&top);
  {
    masm.pushValue(UndefinedValue());
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
  }
  masm.bind(&done);
}

// On input:
//  R2.scratchReg() contains object being written to.
// Called with the baseline stack synced, except for R0 which is preserved.
// All other registers are usable as scratch.
// This calls:
//    void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
template <typename Handler>
void BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot() {
  AutoCreatedBy acb(masm,
                    "BaselineCodeGen<Handler>::emitOutOfLinePostBarrierSlot");

  // Nothing jumped here; don't emit dead code.
  if (!postBarrierSlot_.used()) {
    return;
  }

  masm.bind(&postBarrierSlot_);

#ifdef JS_USE_LINK_REGISTER
  masm.pushReturnAddress();
#endif

  Register objReg = R2.scratchReg();

  // Check one element cache to avoid VM call.
  Label skipBarrier;
  auto* lastCellAddr = runtime->addressOfLastBufferedWholeCell();
  masm.branchPtr(Assembler::Equal, AbsoluteAddress(lastCellAddr), objReg,
                 &skipBarrier);

  saveInterpreterPCReg();

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.take(R0);
  regs.take(objReg);
  Register scratch = regs.takeAny();

  // R0 must survive the ABI call; spill it around the call.
  masm.pushValue(R0);

  using Fn = void (*)(JSRuntime* rt, js::gc::Cell* cell);
  masm.setupUnalignedABICall(scratch);
  masm.movePtr(ImmPtr(runtime), scratch);
  masm.passABIArg(scratch);
  masm.passABIArg(objReg);
  masm.callWithABI<Fn, PostWriteBarrier>();

  restoreInterpreterPCReg();

  masm.popValue(R0);

  masm.bind(&skipBarrier);
  masm.ret();
}

// Scan a cache IR stub's fields and create an allocation site for any that
// refer to the catch-all unknown allocation site. This will be the case for
// stubs created when running in the interpreter. This happens on transition to
// baseline.
static bool CreateAllocSitesForCacheIRStub(JSScript* script, uint32_t pcOffset,
                                           ICCacheIRStub* stub) {
  const CacheIRStubInfo* stubInfo = stub->stubInfo();
  uint8_t* stubData = stub->stubDataStart();

  ICScript* icScript = script->jitScript()->icScript();

  // Walk the stub's field list until the Limit sentinel.
  uint32_t field = 0;
  size_t offset = 0;
  while (true) {
    StubField::Type fieldType = stubInfo->fieldType(field);
    if (fieldType == StubField::Type::Limit) {
      break;
    }

    if (fieldType == StubField::Type::AllocSite) {
      gc::AllocSite* site =
          stubInfo->getPtrStubField<ICCacheIRStub, gc::AllocSite>(stub, offset);
      if (site->kind() == gc::AllocSite::Kind::Unknown) {
        gc::AllocSite* newSite =
            icScript->getOrCreateAllocSite(script, pcOffset);
        if (!newSite) {
          return false;
        }

        stubInfo->replaceStubRawWord(stubData, offset, uintptr_t(site),
                                     uintptr_t(newSite));
      }
    }

    field++;
    offset += StubField::sizeInBytes(fieldType);
  }

  return true;
}

// Run CreateAllocSitesForCacheIRStub over every non-fallback stub in the IC
// chain for the given IC entry.
static void CreateAllocSitesForICChain(JSScript* script, uint32_t entryIndex) {
  JitScript* jitScript = script->jitScript();
  ICStub* stub = jitScript->icEntry(entryIndex).firstStub();
  uint32_t pcOffset = jitScript->fallbackStub(entryIndex)->pcOffset();

  while (!stub->isFallback()) {
    if (!CreateAllocSitesForCacheIRStub(script, pcOffset,
                                        stub->toCacheIRStub())) {
      // This is an optimization and safe to skip if we hit OOM or per-zone
      // limit.
      return;
    }
    stub = stub->toCacheIRStub()->next();
  }
}

// Create alloc sites for all IC entries recorded during compilation. Must run
// on the main thread (see finishCompile).
void BaselineCompilerHandler::createAllocSites() {
  for (uint32_t allocSiteIndex : allocSiteIndices_) {
    CreateAllocSitesForICChain(script(), allocSiteIndex);
  }

  if (needsEnvAllocSite_) {
    script()->jitScript()->icScript()->ensureEnvAllocSite(script());
  }
}

template <>
bool BaselineCompilerCodeGen::emitNextIC() {
  AutoCreatedBy acb(masm, "emitNextIC");

  // Emit a call to an IC stored in JitScript. Calls to this must match the
  // ICEntry order in JitScript: first the non-op IC entries for |this| and
  // formal arguments, then the for-op IC entries for JOF_IC ops.

  JSScript* script = handler.script();
  uint32_t pcOffset = script->pcToOffset(handler.pc());

  // We don't use every ICEntry and we can skip unreachable ops, so we have
  // to loop until we find an ICEntry for the current pc.
  const ICFallbackStub* stub;
  uint32_t entryIndex;
  do {
    stub = script->jitScript()->fallbackStub(handler.icEntryIndex());
    entryIndex = handler.icEntryIndex();
    handler.moveToNextICEntry();
  } while (stub->pcOffset() < pcOffset);

  MOZ_ASSERT(stub->pcOffset() == pcOffset);
  MOZ_ASSERT(BytecodeOpHasIC(JSOp(*handler.pc())));

  // Remember this entry so createAllocSites can patch its stubs later on the
  // main thread.
  if (BytecodeOpCanHaveAllocSite(JSOp(*handler.pc())) &&
      !handler.addAllocSiteIndex(entryIndex)) {
    return false;
  }

  // Load stub pointer into ICStubReg.
masm.loadPtr(frame.addressOfICScript(), ICStubReg);
  size_t firstStubOffset = ICScript::offsetOfFirstStub(entryIndex);
  masm.loadPtr(Address(ICStubReg, firstStubOffset), ICStubReg);

  CodeOffset returnOffset;
  EmitCallIC(masm, &returnOffset);

  RetAddrEntry::Kind kind = RetAddrEntry::Kind::IC;
  if (!handler.retAddrEntries().emplaceBack(pcOffset, kind, returnOffset)) {
    return false;
  }

  return true;
}

template <>
bool BaselineInterpreterCodeGen::emitNextIC() {
  // The interpreter locates the current IC entry through the frame rather
  // than by index, and must save/restore the pc register around the call.
  saveInterpreterPCReg();
  masm.loadPtr(frame.addressOfInterpreterICEntry(), ICStubReg);
  masm.loadPtr(Address(ICStubReg, ICEntry::offsetOfFirstStub()), ICStubReg);
  uint32_t returnOffset =
      masm.call(Address(ICStubReg, ICStub::offsetOfStubCode())).offset();
  restoreInterpreterPCReg();

  // If this is an IC for a bytecode op where Ion may inline scripts, we need to
  // record the return offset for Ion bailouts.
  if (handler.currentOp()) {
    JSOp op = *handler.currentOp();
    MOZ_ASSERT(BytecodeOpHasIC(op));
    if (IsIonInlinableOp(op)) {
      if (!handler.icReturnOffsets().emplaceBack(returnOffset, op)) {
        return false;
      }
    }
  }

  return true;
}

template <>
void BaselineCompilerCodeGen::computeFrameSize(Register dest) {
  // The compiler knows the frame size statically.
  MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
  masm.move32(Imm32(frame.frameSize()), dest);
}

template <>
void BaselineInterpreterCodeGen::computeFrameSize(Register dest) {
  // dest := FramePointer - StackPointer.
  MOZ_ASSERT(!inCall_, "must not be called in the middle of a VM call");
  masm.mov(FramePointer, dest);
  masm.subStackPtrFrom(dest);
}

// Must be called before pushing VM call arguments; records the current
// framePushed so callVMInternal can verify the argument count.
template <typename Handler>
void BaselineCodeGen<Handler>::prepareVMCall() {
  pushedBeforeCall_ = masm.framePushed();
#ifdef DEBUG
  inCall_ = true;
#endif

  // Ensure everything is synced.
  frame.syncStack(0);
}

template <>
void BaselineCompilerCodeGen::storeFrameSizeAndPushDescriptor(
    uint32_t argSize, Register scratch) {
#ifdef DEBUG
  masm.store32(Imm32(frame.frameSize()), frame.addressOfDebugFrameSize());
#endif

  masm.push(FrameDescriptor(FrameType::BaselineJS));
}

template <>
void BaselineInterpreterCodeGen::storeFrameSizeAndPushDescriptor(
    uint32_t argSize, Register scratch) {
#ifdef DEBUG
  // Store the frame size without VMFunction arguments in debug builds.
  // scratch := FramePointer - StackPointer - argSize.
  masm.mov(FramePointer, scratch);
  masm.subStackPtrFrom(scratch);
  masm.sub32(Imm32(argSize), scratch);
  masm.store32(scratch, frame.addressOfDebugFrameSize());
#endif

  masm.push(FrameDescriptor(FrameType::BaselineJS));
}

// Size, in bytes, of the stack slots a VM function's explicit arguments use.
static uint32_t GetVMFunctionArgSize(const VMFunctionData& fun) {
  return fun.explicitStackSlots() * sizeof(void*);
}

// Emit the actual VM call: push the frame descriptor, call the trampoline for
// |id|, pop the arguments, and record the return address.
template <typename Handler>
bool BaselineCodeGen<Handler>::callVMInternal(VMFunctionId id,
                                              RetAddrEntry::Kind kind,
                                              CallVMPhase phase) {
#ifdef DEBUG
  // Assert prepareVMCall() has been called.
  MOZ_ASSERT(inCall_);
  inCall_ = false;
#endif

  TrampolinePtr code = runtime->jitRuntime()->getVMWrapper(id);
  const VMFunctionData& fun = GetVMFunction(id);

  uint32_t argSize = GetVMFunctionArgSize(fun);

  // Assert all arguments were pushed.
  MOZ_ASSERT(masm.framePushed() - pushedBeforeCall_ == argSize);

  saveInterpreterPCReg();

  if (phase == CallVMPhase::AfterPushingLocals) {
    storeFrameSizeAndPushDescriptor(argSize, R0.scratchReg());
  } else {
    MOZ_ASSERT(phase == CallVMPhase::BeforePushingLocals);
#ifdef DEBUG
    // Before locals are pushed the frame has zero value slots.
    uint32_t frameBaseSize = BaselineFrame::frameSizeForNumValueSlots(0);
    masm.store32(Imm32(frameBaseSize), frame.addressOfDebugFrameSize());
#endif
    masm.push(FrameDescriptor(FrameType::BaselineJS));
  }
  // Perform the call.
  masm.call(code);
  uint32_t callOffset = masm.currentOffset();

  // Pop arguments from framePushed.
  masm.implicitPop(argSize);

  restoreInterpreterPCReg();

  return handler.recordCallRetAddr(kind, callOffset);
}

// Type-safe wrapper: maps the function pointer to its VMFunctionId at compile
// time and forwards to callVMInternal.
template <typename Handler>
template <typename Fn, Fn fn>
bool BaselineCodeGen<Handler>::callVM(RetAddrEntry::Kind kind,
                                      CallVMPhase phase) {
  VMFunctionId fnId = VMFunctionToId<Fn, fn>::id;
  return callVMInternal(fnId, kind, phase);
}

// Emit the over-recursion (stack overflow) check; calls into the VM only when
// the stack pointer is at or below the JIT stack limit.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitStackCheck() {
  Label skipCall;
  if (handler.mustIncludeSlotsInStackCheck()) {
    // Subtract the size of script->nslots() first.
Register scratch = R1.scratchReg();
    masm.moveStackPtrTo(scratch);
    subtractScriptSlotsSize(scratch, R2.scratchReg());
    masm.branchPtr(Assembler::BelowOrEqual,
                   AbsoluteAddress(runtime->addressOfJitStackLimit()), scratch,
                   &skipCall);
  } else {
    masm.branchStackPtrRhs(Assembler::BelowOrEqual,
                           AbsoluteAddress(runtime->addressOfJitStackLimit()),
                           &skipCall);
  }

  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
  pushArg(R1.scratchReg());

  const CallVMPhase phase = CallVMPhase::BeforePushingLocals;
  const RetAddrEntry::Kind kind = RetAddrEntry::Kind::StackCheck;

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVM<Fn, CheckOverRecursedBaseline>(kind, phase)) {
    return false;
  }

  masm.bind(&skipCall);
  return true;
}

// Emit an ABI call to FrameIsDebuggeeCheck with the baseline frame pointer as
// its only argument.
static void EmitCallFrameIsDebuggeeCheck(MacroAssembler& masm) {
  using Fn = void (*)(BaselineFrame* frame);
  masm.setupUnalignedABICall(R0.scratchReg());
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  masm.passABIArg(R0.scratchReg());
  masm.callWithABI<Fn, FrameIsDebuggeeCheck>();
}

template <>
bool BaselineCompilerCodeGen::emitIsDebuggeeCheck() {
  // The compiler only emits the check when compiling debug instrumentation.
  if (handler.compileDebugInstrumentation()) {
    EmitCallFrameIsDebuggeeCheck(masm);
  }
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emitIsDebuggeeCheck() {
  // Use a toggled jump to call FrameIsDebuggeeCheck only if the debugger is
  // enabled.
  //
  // TODO(bug 1522394): consider having a cx->realm->isDebuggee guard before the
  // call. Consider moving the callWithABI out-of-line.

  Label skipCheck;
  CodeOffset toggleOffset = masm.toggledJump(&skipCheck);
  {
    saveInterpreterPCReg();
    EmitCallFrameIsDebuggeeCheck(masm);
    restoreInterpreterPCReg();
  }
  masm.bind(&skipCheck);
  return handler.addDebugInstrumentationOffset(toggleOffset);
}

// Emit a 64-bit increment of the script's execution counter for |pc|, when
// the script collects counts.
template <typename Handler>
static void MaybeIncrementCodeCoverageCounter(MacroAssembler& masm,
                                              JSScript* script, jsbytecode* pc,
                                              const Handler& handler) {
  // Realm-independent Jitcode doesn't support code coverage until bug 1980266
  // is fixed.
  if (!script->hasScriptCounts() || handler.realmIndependentJitcode()) {
    return;
  }
  PCCounts* counts = script->maybeGetPCCounts(pc);
  uint64_t* counterAddr = &counts->numExec();
  masm.inc64(AbsoluteAddress(counterAddr));
}

template <>
bool BaselineCompilerCodeGen::emitHandleCodeCoverageAtPrologue() {
  // TSAN disapproves of accessing scriptCounts off-thread.
  // We don't compile off-thread if the script has scriptCounts.
  if (handler.compilingOffThread()) {
    return true;
  }

  // If the main instruction is not a jump target, then we emit the
  // corresponding code coverage counter.
  JSScript* script = handler.script();
  jsbytecode* main = script->main();
  if (!BytecodeIsJumpTarget(JSOp(*main))) {
    MaybeIncrementCodeCoverageCounter(masm, script, main, handler);
  }
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emitHandleCodeCoverageAtPrologue() {
  // Toggled call: only taken when code coverage is enabled at runtime.
  Label skipCoverage;
  CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
  masm.call(handler.codeCoverageAtPrologueLabel());
  masm.bind(&skipCoverage);
  return handler.codeCoverageOffsets().append(toggleOffset.offset());
}

template <>
void BaselineCompilerCodeGen::subtractScriptSlotsSize(Register reg,
                                                      Register scratch) {
  // nslots is known statically when compiling.
  uint32_t slotsSize = handler.script()->nslots() * sizeof(Value);
  masm.subPtr(Imm32(slotsSize), reg);
}

template <>
void BaselineInterpreterCodeGen::subtractScriptSlotsSize(Register reg,
                                                         Register scratch) {
  // reg = reg - script->nslots() * sizeof(Value)
  MOZ_ASSERT(reg != scratch);
  loadScript(scratch);
  masm.loadPtr(Address(scratch, JSScript::offsetOfSharedData()), scratch);
  masm.loadPtr(Address(scratch, SharedImmutableScriptData::offsetOfISD()),
               scratch);
  masm.load32(Address(scratch, ImmutableScriptData::offsetOfNslots()), scratch);
  static_assert(sizeof(Value) == 8,
                "shift by 3 below assumes Value is 8 bytes");
  masm.lshiftPtr(Imm32(3), scratch);
  masm.subPtr(scratch, reg);
}

// Load the global's lexical environment object: via the global object's data
// for realm-independent jitcode, or baked in as an immediate otherwise.
template <typename Handler>
void BaselineCodeGen<Handler>::loadGlobalLexicalEnvironment(Register dest) {
  if (handler.realmIndependentJitcode()) {
    masm.loadGlobalObjectData(dest);
    masm.loadPtr(Address(dest, GlobalObjectData::offsetOfLexicalEnvironment()),
                 dest);
  } else {
    MOZ_ASSERT(!handler.maybeScript()->hasNonSyntacticScope());
    masm.movePtr(ImmGCPtr(handler.maybeGlobalLexicalEnvironment()), dest);
  }
}

template <typename Handler>
void
BaselineCodeGen<Handler>::pushGlobalLexicalEnvironmentValue(
    ValueOperand scratch) {
  if (handler.realmIndependentJitcode()) {
    // Load at runtime and tag as an object Value before pushing.
    loadGlobalLexicalEnvironment(scratch.scratchReg());
    masm.tagValue(JSVAL_TYPE_OBJECT, scratch.scratchReg(), scratch);
    frame.push(scratch);
  } else {
    // Known at compile time; push as a constant.
    frame.push(ObjectValue(*handler.maybeGlobalLexicalEnvironment()));
  }
}

// Compiler: the global this value is a known object; move it as a constant.
template <>
void BaselineCompilerCodeGen::loadGlobalThisValue(ValueOperand dest) {
  JSObject* thisObj = handler.globalThis();
  masm.moveValue(ObjectValue(*thisObj), dest);
}

// Interpreter: read the this-value slot out of the global lexical
// environment at runtime.
template <>
void BaselineInterpreterCodeGen::loadGlobalThisValue(ValueOperand dest) {
  Register scratch = dest.scratchReg();
  loadGlobalLexicalEnvironment(scratch);
  static constexpr size_t SlotOffset =
      GlobalLexicalEnvironmentObject::offsetOfThisValueSlot();
  masm.loadValue(Address(scratch, SlotOffset), dest);
}

// Push the current JSScript* as a VM-call argument. Realm-independent code
// reads it from the frame's interpreterScript slot; otherwise it is baked in
// as an immediate GC pointer.
template <typename Handler>
void BaselineCodeGen<Handler>::pushScriptArg() {
  if (handler.realmIndependentJitcode()) {
    pushArg(frame.addressOfInterpreterScript());
  } else {
    pushArg(ImmGCPtr(handler.maybeScript()));
  }
}

// Compiler: the bytecode pc is a compile-time constant.
template <>
void BaselineCompilerCodeGen::pushBytecodePCArg() {
  pushArg(ImmPtr(handler.pc()));
}

// Interpreter: the pc lives in a dedicated register (if the platform has
// one) or in the frame's interpreterPC slot.
template <>
void BaselineInterpreterCodeGen::pushBytecodePCArg() {
  if (HasInterpreterPCReg()) {
    pushArg(InterpreterPCReg);
  } else {
    pushArg(frame.addressOfInterpreterPC());
  }
}

// Fetch the GC thing referenced by |pc| from the script, as an untyped
// gc::Cell*. |type| selects which accessor to use.
static gc::Cell* GetScriptGCThing(JSScript* script, jsbytecode* pc,
                                  ScriptGCThingType type) {
  switch (type) {
    case ScriptGCThingType::Atom:
      return script->getAtom(pc);
    case ScriptGCThingType::String:
      return script->getString(pc);
    case ScriptGCThingType::RegExp:
      return script->getRegExp(pc);
    case ScriptGCThingType::Object:
      return script->getObject(pc);
    case
ScriptGCThingType::Function:
      return script->getFunction(pc);
    case ScriptGCThingType::Scope:
      return script->getScope(pc);
    case ScriptGCThingType::BigInt:
      return script->getBigInt(pc);
  }
  MOZ_CRASH("Unexpected GCThing type");
}

// Emit code to load a GC thing from the script's private-data gcthings
// vector into |dest|. On entry |scratch| holds the gcthing index. The stored
// entries are GCCellPtrs, so the trace-kind tag bits must be cleared before
// the result can be used as a raw pointer.
template <typename Handler>
void BaselineCodeGen<Handler>::loadScriptGCThingInternal(ScriptGCThingType type,
                                                         Register dest,
                                                         Register scratch) {
  // Load the GCCellPtr.
  loadScript(dest);
  masm.loadPtr(Address(dest, JSScript::offsetOfPrivateData()), dest);
  masm.loadPtr(BaseIndex(dest, scratch, ScalePointer,
                         PrivateScriptData::offsetOfGCThings()),
               dest);

  // Clear the tag bits. Because the exact tag value for each kind is known
  // (enforced by the static_asserts), xor-ing that value clears it.
  switch (type) {
    case ScriptGCThingType::Atom:
    case ScriptGCThingType::String:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::String) == 2,
                    "Unexpected tag bits for string GCCellPtr");
      masm.xorPtr(Imm32(2), dest);
      break;
    case ScriptGCThingType::RegExp:
    case ScriptGCThingType::Object:
    case ScriptGCThingType::Function:
      // No-op because GCCellPtr tag bits are zero for objects.
      static_assert(uintptr_t(TraceKind::Object) == 0,
                    "Unexpected tag bits for object GCCellPtr");
      break;
    case ScriptGCThingType::BigInt:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::BigInt) == 1,
                    "Unexpected tag bits for BigInt GCCellPtr");
      masm.xorPtr(Imm32(1), dest);
      break;
    case ScriptGCThingType::Scope:
      // Use xorPtr with a 32-bit immediate because it's more efficient than
      // andPtr on 64-bit.
      static_assert(uintptr_t(TraceKind::Scope) >= JS::OutOfLineTraceKindMask,
                    "Expected Scopes to have OutOfLineTraceKindMask tag");
      masm.xorPtr(Imm32(JS::OutOfLineTraceKindMask), dest);
      break;
  }
}

// Compiler: bake the GC thing in as an immediate, or — for realm-independent
// jitcode, which must not embed realm-specific pointers — load it from the
// script at runtime using the gcthing index from the current pc.
template <>
void BaselineCompilerCodeGen::loadScriptGCThing(ScriptGCThingType type,
                                                Register dest,
                                                Register scratch) {
  if (handler.realmIndependentJitcode()) {
    masm.move32(Imm32(GET_GCTHING_INDEX(handler.pc())), scratch);
    loadScriptGCThingInternal(type, dest, scratch);
  } else {
    gc::Cell* thing = GetScriptGCThing(handler.script(), handler.pc(), type);
    masm.movePtr(ImmGCPtr(thing), dest);
  }
}

// Interpreter: read the gcthing index from the bytecode operand at runtime,
// then load through the script's gcthings vector.
template <>
void BaselineInterpreterCodeGen::loadScriptGCThing(ScriptGCThingType type,
                                                   Register dest,
                                                   Register scratch) {
  MOZ_ASSERT(dest != scratch);

  // Load the index in |scratch|.
  LoadInt32Operand(masm, scratch);

  loadScriptGCThingInternal(type, dest, scratch);

#ifdef DEBUG
  // Assert low bits are not set.
  Label ok;
  masm.branchTestPtr(Assembler::Zero, dest, Imm32(0b111), &ok);
  masm.assumeUnreachable("GC pointer with tag bits set");
  masm.bind(&ok);
#endif
}

// Push a script GC thing as a VM-call argument: an immediate GC pointer when
// possible, a runtime load for realm-independent jitcode.
template <typename Handler>
void BaselineCodeGen<Handler>::pushScriptGCThingArg(ScriptGCThingType type,
                                                    Register scratch1,
                                                    Register scratch2) {
  if (handler.realmIndependentJitcode()) {
    loadScriptGCThing(type, scratch1, scratch2);
    pushArg(scratch1);
  } else {
    gc::Cell* thing =
        GetScriptGCThing(handler.maybeScript(), handler.maybePC(), type);
    pushArg(ImmGCPtr(thing));
  }
}

// Convenience wrapper: script "name" arguments are atoms.
template <typename Handler>
void BaselineCodeGen<Handler>::pushScriptNameArg(Register scratch1,
                                                 Register scratch2) {
  pushScriptGCThingArg(ScriptGCThingType::Atom, scratch1, scratch2);
}

// Compiler: the uint8 operand is a compile-time constant.
template <>
void BaselineCompilerCodeGen::pushUint8BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT8);
  pushArg(Imm32(GET_UINT8(handler.pc())));
}

// Interpreter: load the uint8 operand from the bytecode at runtime.
template <>
void BaselineInterpreterCodeGen::pushUint8BytecodeOperandArg(Register scratch) {
  LoadUint8Operand(masm, scratch);
  pushArg(scratch);
}

// Compiler: the uint16 operand is a compile-time constant.
template <>
void BaselineCompilerCodeGen::pushUint16BytecodeOperandArg(Register) {
  MOZ_ASSERT(JOF_OPTYPE(JSOp(*handler.pc())) == JOF_UINT16);
  pushArg(Imm32(GET_UINT16(handler.pc())));
}

// Interpreter: load the uint16 operand from the bytecode at runtime.
template <>
void BaselineInterpreterCodeGen::pushUint16BytecodeOperandArg(
    Register scratch) {
  LoadUint16Operand(masm, scratch);
  pushArg(scratch);
}

// Compiler: the uint32 length operand is known; assert it fits in int32 and
// move it as an immediate.
template <>
void BaselineCompilerCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  uint32_t length = GET_UINT32(handler.pc());
  MOZ_ASSERT(length <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce a length exceeding int32_t range");
  masm.move32(Imm32(AssertedCast<int32_t>(length)), dest);
}

// Interpreter: load the int32 length operand from the bytecode at runtime.
template <>
void BaselineInterpreterCodeGen::loadInt32LengthBytecodeOperand(Register dest) {
  LoadInt32Operand(masm, dest);
}

// Emit the debug prologue: when the frame is a debuggee, call
// jit::DebugPrologue with the BaselineFrame*. The whole thing is wrapped in
// the debug-instrumentation toggle via emitDebugInstrumentation.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDebugPrologue() {
  auto ifDebuggee = [this]() {
    // Load pointer to BaselineFrame in R0.
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

    prepareVMCall();
    pushArg(R0.scratchReg());

    // Use a distinct RetAddrEntry::Kind so this call site can be identified.
    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugPrologue;

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVM<Fn, jit::DebugPrologue>(kind)) {
      return false;
    }

    return true;
  };
  return emitDebugInstrumentation(ifDebuggee);
}

// Compiler: initialize the fixed BaselineFrame slots — flags, environment
// chain, the interpreterScript slot (realm-independent code only), and the
// ICScript pointer.
template <>
void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  Register scratch = R0.scratchReg();
  Register scratch2 = R2.scratchReg();
  MOZ_ASSERT(nonFunctionEnv != scratch && nonFunctionEnv != scratch2);

  uint32_t flags =
      handler.realmIndependentJitcode() ?
BaselineFrame::REALM_INDEPENDENT : 0;
  masm.store32(Imm32(flags), frame.addressOfFlags());

  if (handler.isFunction()) {
    // Function frames: the environment chain starts at the callee function's
    // environment slot.
    masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
    masm.unboxObject(Address(scratch, JSFunction::offsetOfEnvironment()),
                     scratch2);
    masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
    if (handler.realmIndependentJitcode()) {
      // Stash the callee's script so realm-independent code can read it back
      // from the frame instead of baking in a GC pointer.
      masm.loadPrivate(Address(scratch, JSFunction::offsetOfJitInfoOrScript()),
                       scratch);
      masm.storePtr(scratch, frame.addressOfInterpreterScript());
    }
  } else {
    if (handler.realmIndependentJitcode()) {
      // Non-function frames carry the script in the callee token.
      masm.loadPtr(frame.addressOfCalleeToken(), scratch);
      masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch);
      masm.storePtr(scratch, frame.addressOfInterpreterScript());
    }
    masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  }

  // If the HasInlinedICScript flag is set in the frame descriptor, then load
  // the inlined ICScript from our caller's frame and store it in our own frame.
  Label notInlined, done;
  masm.branchTest32(Assembler::Zero, frame.addressOfDescriptor(),
                    Imm32(FrameDescriptor::HasInlinedICScript), &notInlined);
  masm.loadPtr(Address(FramePointer, 0), scratch);
  masm.loadPtr(
      Address(scratch, BaselineStubFrameLayout::InlinedICScriptOffsetFromFP),
      scratch);
  masm.storePtr(scratch, frame.addressOfICScript());
  masm.jump(&done);

  // Otherwise, store this script's default ICScript in the frame.
  masm.bind(&notInlined);
  if (handler.realmIndependentJitcode()) {
    // When JitCode is reused in a new realm, the frames baked into
    // the native bytecode need to refer to the IC list for the new JitScript or
    // they will execute the IC scripts using the IC stub fields from the wrong
    // script.
    loadJitScript(scratch);
    masm.addPtr(Imm32(JitScript::offsetOfICScript()), scratch);
    masm.storePtr(scratch, frame.addressOfICScript());
  } else {
    masm.storePtr(ImmPtr(handler.script()->jitScript()->icScript()),
                  frame.addressOfICScript());
  }
  masm.bind(&done);
}

// Interpreter: initialize the fixed BaselineFrame slots entirely at runtime —
// flags, environment chain, interpreterScript, icScript, interpreterICEntry,
// and the interpreter pc (register or frame slot).
template <>
void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
  MOZ_ASSERT(nonFunctionEnv == R1.scratchReg(),
             "Don't clobber nonFunctionEnv below");

  // If we have a dedicated PC register we use it as scratch1 to avoid a
  // register move below.
  Register scratch1 =
      HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();
  Register scratch2 = R2.scratchReg();

  masm.store32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER),
               frame.addressOfFlags());

  // Initialize interpreterScript.
  Label notFunction, done;
  masm.loadPtr(frame.addressOfCalleeToken(), scratch1);
  masm.branchTestPtr(Assembler::NonZero, scratch1, Imm32(CalleeTokenScriptBit),
                     &notFunction);
  {
    // CalleeToken_Function or CalleeToken_FunctionConstructing.
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
    masm.unboxObject(Address(scratch1, JSFunction::offsetOfEnvironment()),
                     scratch2);
    masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
    masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
                     scratch1);
    masm.jump(&done);
  }
  masm.bind(&notFunction);
  {
    // CalleeToken_Script.
    masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
    masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());
  }
  masm.bind(&done);
  // scratch1 now holds the JSScript* in either case.
  masm.storePtr(scratch1, frame.addressOfInterpreterScript());

  // Load the ICScript in scratch2.
  Label inlined, haveICScript;
  masm.branchTest32(Assembler::NonZero, frame.addressOfDescriptor(),
                    Imm32(FrameDescriptor::HasInlinedICScript), &inlined);
  masm.loadJitScript(scratch1, scratch2);
  masm.computeEffectiveAddress(Address(scratch2, JitScript::offsetOfICScript()),
                               scratch2);
  masm.jump(&haveICScript);
  masm.bind(&inlined);
  // Inlined case: take the ICScript from the caller's stub frame.
  masm.loadPtr(Address(FramePointer, 0), scratch2);
  masm.loadPtr(
      Address(scratch2, BaselineStubFrameLayout::InlinedICScriptOffsetFromFP),
      scratch2);
  masm.bind(&haveICScript);

  // Initialize icScript and interpreterICEntry
  masm.storePtr(scratch2, frame.addressOfICScript());
  masm.computeEffectiveAddress(Address(scratch2, ICScript::offsetOfICEntries()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());

  // Initialize interpreter pc.
  masm.loadPtr(Address(scratch1, JSScript::offsetOfSharedData()), scratch1);
  masm.loadPtr(Address(scratch1, SharedImmutableScriptData::offsetOfISD()),
               scratch1);
  masm.addPtr(Imm32(ImmutableScriptData::offsetOfCode()), scratch1);

  if (HasInterpreterPCReg()) {
    MOZ_ASSERT(scratch1 == InterpreterPCReg,
               "pc must be stored in the pc register");
  } else {
    masm.storePtr(scratch1, frame.addressOfInterpreterPC());
  }
}

// Assert we don't need a post write barrier to write sourceObj to a slot of
// destObj. See comments in WarpBuilder::buildNamedLambdaEnv.
static void AssertCanElidePostWriteBarrier(MacroAssembler& masm,
                                           Register destObj, Register sourceObj,
                                           Register temp) {
#ifdef DEBUG
  // The barrier is only needed when a tenured object stores a nursery
  // pointer: ok if destObj is in the nursery, or sourceObj is not.
  Label ok;
  masm.branchPtrInNurseryChunk(Assembler::Equal, destObj, temp, &ok);
  masm.branchPtrInNurseryChunk(Assembler::NotEqual, sourceObj, temp, &ok);
  masm.assumeUnreachable("Unexpected missing post write barrier in Baseline");
  masm.bind(&ok);
#endif
}

// Compiler: set up the function's environment objects inline when the
// templates are known, falling back to a VM call on allocation failure (or
// always, for realm-independent jitcode).
template <>
bool BaselineCompilerCodeGen::initEnvironmentChain() {
  if (!handler.isFunction()) {
    return true;
  }
  if (!handler.script()->needsFunctionEnvironmentObjects()) {
    return true;
  }

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  Register temp = regs.takeAny();
  Label done;
  if (!handler.realmIndependentJitcode()) {
    // Allocate a NamedLambdaObject and/or a CallObject. If the function needs
    // both, the NamedLambdaObject must enclose the CallObject. If one of the
    // allocations fails, we perform the whole operation in C++.

    auto callObjectTemplate = handler.callObjectTemplate();
    auto namedLambdaTemplate = handler.namedLambdaTemplate();
    MOZ_ASSERT(namedLambdaTemplate || callObjectTemplate);

    Register newEnv = regs.takeAny();
    Register enclosingEnv = regs.takeAny();
    Register callee = regs.takeAny();
    Register siteRegister;

    Label fail;
    masm.loadPtr(frame.addressOfEnvironmentChain(), enclosingEnv);
    masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), callee);

    // Optionally track the allocation site stored in the ICScript.
    AllocSiteInput site;
    if (handler.addEnvAllocSite()) {
      siteRegister = regs.takeAny();
      masm.loadPtr(frame.addressOfICScript(), temp);
      masm.loadPtr(Address(temp, ICScript::offsetOfEnvAllocSite()),
                   siteRegister);
      site = AllocSiteInput(siteRegister);
    }

    // Allocate a NamedLambdaObject if needed.
    if (namedLambdaTemplate) {
      TemplateObject templateObject(namedLambdaTemplate);
      masm.createGCObject(newEnv, temp, templateObject, gc::Heap::Default,
                          &fail, true, site);

      // Store enclosing environment.
      Address enclosingSlot(newEnv,
                            NamedLambdaObject::offsetOfEnclosingEnvironment());
      masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
      AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);

      // Store callee.
      Address lambdaSlot(newEnv, NamedLambdaObject::offsetOfLambdaSlot());
      masm.storeValue(JSVAL_TYPE_OBJECT, callee, lambdaSlot);
      AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);

      // The named-lambda env becomes the enclosing env of the CallObject.
      if (callObjectTemplate) {
        masm.movePtr(newEnv, enclosingEnv);
      }
    }

    // Allocate a CallObject if needed.
    if (callObjectTemplate) {
      TemplateObject templateObject(callObjectTemplate);
      masm.createGCObject(newEnv, temp, templateObject, gc::Heap::Default,
                          &fail, true, site);

      // Store enclosing environment.
      Address enclosingSlot(newEnv, CallObject::offsetOfEnclosingEnvironment());
      masm.storeValue(JSVAL_TYPE_OBJECT, enclosingEnv, enclosingSlot);
      AssertCanElidePostWriteBarrier(masm, newEnv, enclosingEnv, temp);

      // Store callee.
      Address calleeSlot(newEnv, CallObject::offsetOfCallee());
      masm.storeValue(JSVAL_TYPE_OBJECT, callee, calleeSlot);
      AssertCanElidePostWriteBarrier(masm, newEnv, callee, temp);
    }

    // Update the frame's environment chain and mark it initialized.
    masm.storePtr(newEnv, frame.addressOfEnvironmentChain());
    masm.or32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
    masm.jump(&done);

    masm.bind(&fail);
  }

  // Fallback (and the only path for realm-independent jitcode): do the whole
  // operation in C++.
  prepareVMCall();

  masm.loadBaselineFramePtr(FramePointer, temp);
  pushArg(temp);

  const CallVMPhase phase = CallVMPhase::BeforePushingLocals;

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase)) {
    return false;
  }

  masm.bind(&done);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::initEnvironmentChain() {
  // For function scripts, call InitFunctionEnvironmentObjects if needed. For
  // non-function scripts this is a no-op.

  Label done;
  masm.branchTestPtr(Assembler::NonZero, frame.addressOfCalleeToken(),
                     Imm32(CalleeTokenScriptBit), &done);
  {
    auto initEnv = [this]() {
      // Call into the VM to create the proper environment objects.
      prepareVMCall();

      masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
      pushArg(R0.scratchReg());

      const CallVMPhase phase = CallVMPhase::BeforePushingLocals;

      using Fn = bool (*)(JSContext*, BaselineFrame*);
      return callVMNonOp<Fn, jit::InitFunctionEnvironmentObjects>(phase);
    };
    // Only emit the VM call when the script flag says environment objects
    // are actually needed.
    if (!emitTestScriptFlag(
            JSScript::ImmutableFlags::NeedsFunctionEnvironmentObjects, true,
            initEnv, R2.scratchReg())) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}

// Emit an interrupt check: fast path tests the runtime's interrupt bits and
// falls through; the slow path calls InterruptCheck in the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInterruptCheck() {
  frame.syncStack(0);

  Label done;
  masm.branch32(Assembler::Equal,
                AbsoluteAddress(runtime->addressOfInterruptBits()), Imm32(0),
                &done);

  prepareVMCall();

  // Use a custom RetAddrEntry::Kind so DebugModeOSR can distinguish this call
  // from other callVMs that might happen at this pc.
  const RetAddrEntry::Kind kind = RetAddrEntry::Kind::InterruptCheck;

  using Fn = bool (*)(JSContext*);
  if (!callVM<Fn, InterruptCheck>(kind)) {
    return false;
  }

  masm.bind(&done);
  return true;
}

// Emit the trial-inlining trigger: when |count| hits the threshold exactly,
// call DoTrialInlining, then reload the registers the call may clobber.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitTrialInliningCheck(Register count,
                                                     Register icScript,
                                                     Register scratch) {
  if (JitOptions.disableInlining) {
    return true;
  }

  // Consider trial inlining.
  // Note: unlike other warmup thresholds, where we try to enter a
  // higher tier whenever we are higher than a given warmup count,
  // trial inlining triggers once when reaching the threshold.
  Label noTrialInlining;
  masm.branch32(Assembler::NotEqual, count,
                Imm32(JitOptions.trialInliningWarmUpThreshold),
                &noTrialInlining);
  prepareVMCall();

  masm.PushBaselineFramePtr(FramePointer, scratch);

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVMNonOp<Fn, DoTrialInlining>()) {
    return false;
  }
  // Reload registers potentially clobbered by the call.
  Address warmUpCounterAddr(icScript, ICScript::offsetOfWarmUpCount());
  masm.loadPtr(frame.addressOfICScript(), icScript);
  masm.load32(warmUpCounterAddr, count);
  masm.bind(&noTrialInlining);

  return true;
}

// Compiler: bump the warm-up counter and, when warm enough, trigger an Ion
// compilation — with OSR into Ion at loop heads, or a plain compile request
// at function entry.
template <>
bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
  frame.assertSyncedStack();

  // Record native code offset for OSR from Baseline Interpreter into Baseline
  // JIT code. This is right before the warm-up check in the Baseline JIT code,
  // to make sure we can immediately enter Ion if the script is warm enough or
  // if --ion-eager is used.
  JSScript* script = handler.script();
  jsbytecode* pc = handler.pc();
  if (JSOp(*pc) == JSOp::LoopHead) {
    uint32_t pcOffset = script->pcToOffset(pc);
    uint32_t nativeOffset = masm.currentOffset();
    if (!handler.osrEntries().emplaceBack(pcOffset, nativeOffset)) {
      return false;
    }
  }

  // Emit no warm-up counter increments if Ion is not enabled or if the script
  // will never be Ion-compileable.
  if (!handler.maybeIonCompileable()) {
    return true;
  }

  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();

  // Load the ICScript* in scriptReg.
  masm.loadPtr(frame.addressOfICScript(), scriptReg);

  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, ICScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);

  if (!emitTrialInliningCheck(countReg, scriptReg, R1.scratchReg())) {
    return false;
  }

  if (JSOp(*pc) == JSOp::LoopHead) {
    // If this is a loop where we can't OSR (for example because it's inside a
    // catch or finally block), increment the warmup counter but don't attempt
    // OSR (Ion/Warp only compiles the try block).
    if (!handler.analysis().info(pc).loopHeadCanOsr) {
      return true;
    }
  }

  Label done;

  uint32_t warmUpThreshold = OptimizationInfo::warmUpThresholdForPC(
      script, pc, handler.baseWarmUpThreshold());
  masm.branch32(Assembler::LessThan, countReg, Imm32(warmUpThreshold), &done);

  // Don't trigger Warp compilations from trial-inlined scripts.
  Address depthAddr(scriptReg, ICScript::offsetOfDepth());
  masm.branch32(Assembler::NotEqual, depthAddr, Imm32(0), &done);

  // Load the IonScript* in scriptReg. We can load this from the ICScript*
  // because it must be an outer ICScript embedded in the JitScript.
  constexpr int32_t offset = -int32_t(JitScript::offsetOfICScript()) +
                             int32_t(JitScript::offsetOfIonScript());
  masm.loadPtr(Address(scriptReg, offset), scriptReg);

  // Do nothing if Ion is already compiling this script off-thread or if Ion has
  // been disabled for this script.
  masm.branchTestPtr(Assembler::NonZero, scriptReg, Imm32(SpecialScriptBit),
                     &done);

  // Try to compile and/or finish a compilation.
  if (JSOp(*pc) == JSOp::LoopHead) {
    // Try to OSR into Ion.
    computeFrameSize(R0.scratchReg());

    prepareVMCall();

    pushBytecodePCArg();
    pushArg(R0.scratchReg());
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, uint32_t, jsbytecode*,
                        IonOsrTempData**);
    if (!callVM<Fn, IonCompileScriptForBaselineOSR>()) {
      return false;
    }

    // The return register holds the IonOsrTempData*. Perform OSR if it's not
    // nullptr.
    static_assert(ReturnReg != OsrFrameReg,
                  "Code below depends on osrDataReg != OsrFrameReg");
    Register osrDataReg = ReturnReg;
    masm.branchTestPtr(Assembler::Zero, osrDataReg, osrDataReg, &done);

    // Success! Switch from Baseline JIT code to Ion JIT code.

    // At this point, stack looks like:
    //
    //  +-> [...Calling-Frame...]
    //  |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //  |   [Descriptor]
    //  |   [Return-Addr]
    //  +---[Saved-FramePtr]
    //      [...Baseline-Frame...]

#ifdef DEBUG
    // Get a scratch register that's not osrDataReg or OsrFrameReg.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(FramePointer));
    regs.take(osrDataReg);
    regs.take(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // If profiler instrumentation is on, ensure that lastProfilingFrame is
    // the frame currently being OSR-ed
    {
      Label checkOk;
      AbsoluteAddress addressOfEnabled(
          runtime->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
      masm.loadPtr(AbsoluteAddress(runtime->addressOfJitActivation()),
                   scratchReg);
      masm.loadPtr(
          Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()),
          scratchReg);

      // It may be the case that we entered the baseline frame with
      // profiling turned off, then in a call within a loop (i.e. a
      // callee frame), turn on profiling, then return to this frame,
      // and then OSR with profiling turned on. In this case, allow for
      // lastProfilingFrame to be null.
      masm.branchPtr(Assembler::Equal, scratchReg, ImmWord(0), &checkOk);

      masm.branchPtr(Assembler::Equal, FramePointer, scratchReg, &checkOk);
      masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
      masm.bind(&checkOk);
    }
#endif

    // Restore the stack pointer so that the saved frame pointer is on top of
    // the stack.
    masm.moveToStackPtr(FramePointer);

    // Jump into Ion.
    masm.loadPtr(Address(osrDataReg, IonOsrTempData::offsetOfBaselineFrame()),
                 OsrFrameReg);
    masm.jump(Address(osrDataReg, IonOsrTempData::offsetOfJitCode()));
  } else {
    // Function entry: request an Ion compilation but keep running Baseline.
    prepareVMCall();

    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    if (!callVMNonOp<Fn, IonCompileScriptForBaselineAtEntry>()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}

// Interpreter: bump the JitScript warm-up counter and, when warm enough,
// tier up into Baseline JIT code (optionally via the batched off-thread
// compile queue).
template <>
bool BaselineInterpreterCodeGen::emitWarmUpCounterIncrement() {
  // Emit no warm-up counter increments if Baseline is disabled.
  if (!JitOptions.baselineJit) {
    return true;
  }

  Register scriptReg = R2.scratchReg();
  Register countReg = R0.scratchReg();

  // Load the JitScript* in scriptReg.
  loadScript(scriptReg);
  masm.loadJitScript(scriptReg, scriptReg);

  // Bump warm-up counter.
  Address warmUpCounterAddr(scriptReg, JitScript::offsetOfWarmUpCount());
  masm.load32(warmUpCounterAddr, countReg);
  masm.add32(Imm32(1), countReg);
  masm.store32(countReg, warmUpCounterAddr);

  if (!emitTrialInliningCheck(countReg, scriptReg, R1.scratchReg())) {
    return false;
  }

  if (JitOptions.baselineBatching) {
    Register scratch = R1.scratchReg();
    Label done, compileBatch;
    Address baselineScriptAddr(scriptReg, JitScript::offsetOfBaselineScript());

    // If the script is not warm enough to compile, we're done.
    masm.branch32(Assembler::BelowOrEqual, countReg,
                  Imm32(JitOptions.baselineJitWarmUpThreshold), &done);

    // Decide what to do based on the state of the baseline script field.
    Label notSpecial;
    masm.loadPtr(baselineScriptAddr, scratch);
    masm.branchTestPtr(Assembler::Zero, scratch, Imm32(SpecialScriptBit),
                       &notSpecial);

    // The baseline script is a special tagged value: disabled, queued, or
    // compiling. If it's queued and the warmup count is high enough,
    // trigger a batch compilation with whatever is currently queued.
    // Otherwise, we're done.
    uint32_t eagerWarmUpThreshold = JitOptions.baselineJitWarmUpThreshold * 2;
    masm.branchPtr(Assembler::NotEqual, scratch,
                   ImmPtr(BaselineQueuedScriptPtr), &done);

    masm.branch32(Assembler::Below, countReg, Imm32(eagerWarmUpThreshold),
                  &done);

    masm.jump(&compileBatch);

    masm.bind(&notSpecial);

    // If we already have a valid BaselineScript, tier up now.
    Label notCompiled;
    masm.branchPtr(Assembler::BelowOrEqual, scratch,
                   ImmPtr(BaselineCompilingScriptPtr), &notCompiled);

    // We just need to update our frame, find the OSR address, and jump to it.
    saveInterpreterPCReg();

    prepareVMCall();
    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t**);
    if (!callVMNonOp<Fn, BaselineScript::OSREntryForFrame>()) {
      return false;
    }

    // If we are a debuggee frame, and our baseline script was compiled
    // without debug instrumentation, and recompilation failed, we may
    // not have an OSR entry available.
    masm.branchTestPtr(Assembler::Zero, ReturnReg, ReturnReg, &done);

    // Otherwise: OSR!
    masm.jump(ReturnReg);

    masm.bind(&notCompiled);

    // Otherwise, add this script to the queue.
    // First, mark the jitscript as queued.
    masm.storePtr(ImmPtr(BaselineQueuedScriptPtr), baselineScriptAddr);

    Register queueReg = scratch;
    masm.loadBaselineCompileQueue(queueReg);

    Address numQueuedAddr(queueReg, BaselineCompileQueue::offsetOfNumQueued());
    masm.load32(numQueuedAddr, countReg);

    BaseIndex queueSlot(queueReg, countReg, ScalePointer,
                        BaselineCompileQueue::offsetOfQueue());

    // Store the JSScript in the compilation queue. Note that we don't need
    // a prebarrier here because we will always be overwriting a nullptr,
    // and we don't need a postbarrier because the script is always tenured.
#ifdef DEBUG
    Label queueSlotIsEmpty;
    masm.branchPtr(Assembler::Equal, queueSlot, ImmWord(0), &queueSlotIsEmpty);
    masm.assumeUnreachable(
        "Overwriting non-null slot in baseline compile queue");
    masm.bind(&queueSlotIsEmpty);
#endif
    loadScript(scriptReg);
    masm.storePtr(scriptReg, queueSlot);

    // Update `numQueued`.
    masm.add32(Imm32(1), countReg);
    masm.store32(countReg, numQueuedAddr);

    // If the queue is now full, trigger a batch compilation.
    masm.branch32(Assembler::Below, countReg,
                  Imm32(JitOptions.baselineQueueCapacity), &done);

    masm.bind(&compileBatch);
    prepareVMCall();

    using Fn2 = bool (*)(JSContext*);
    if (!callVMNonOp<Fn2, DispatchOffThreadBaselineBatch>()) {
      return false;
    }
    masm.bind(&done);
    return true;
  }

  // If the script is warm enough for Baseline compilation, call into the VM to
  // compile it.
  Label done;
  masm.branch32(Assembler::BelowOrEqual, countReg,
                Imm32(JitOptions.baselineJitWarmUpThreshold), &done);

  masm.branchTestPtr(Assembler::NonZero,
                     Address(scriptReg, JitScript::offsetOfBaselineScript()),
                     Imm32(SpecialScriptBit), &done);
  {
    prepareVMCall();

    masm.PushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, uint8_t**);
    if (!callVM<Fn, BaselineCompileFromBaselineInterpreter>()) {
      return false;
    }

    // If the function returned nullptr we either skipped compilation or were
    // unable to compile the script. Continue running in the interpreter.
    masm.branchTestPtr(Assembler::Zero, ReturnReg, ReturnReg, &done);

    // Success! Switch from interpreter to JIT code by jumping to the
    // corresponding code in the BaselineScript.
    //
    // This works because BaselineCompiler uses the same frame layout (stack is
    // synced at OSR points) and BaselineCompileFromBaselineInterpreter has
    // already cleared the RUNNING_IN_INTERPRETER flag for us.
    // See BaselineFrame::prepareForBaselineInterpreterToJitOSR.
    masm.jump(ReturnReg);
  }

  masm.bind(&done);
  return true;
}

// Emit a patchable (toggled) call to the debug trap handler at the current
// pc, recording both the toggle site and a RetAddrEntry for the return
// address -> pc mapping.
bool BaselineCompiler::emitDebugTrap() {
  MOZ_ASSERT(compileDebugInstrumentation());
  MOZ_ASSERT(frame.numUnsyncedSlots() == 0);

  JSScript* script = handler.script();
  // The trap starts enabled if stepping or a breakpoint is active here.
  bool enabled = DebugAPI::stepModeEnabled(script) ||
                 DebugAPI::hasBreakpointsAt(script, handler.pc());

  // Emit patchable call to debug trap handler.
  JitCode* handlerCode =
      runtime->jitRuntime()->debugTrapHandler(DebugTrapHandlerKind::Compiler);
  CodeOffset nativeOffset = masm.toggledCall(handlerCode, enabled);

  uint32_t pcOffset = script->pcToOffset(handler.pc());
  if (!debugTrapEntries_.emplaceBack(pcOffset, nativeOffset.offset())) {
    return false;
  }

  // Add a RetAddrEntry for the return offset -> pc mapping.
  return handler.recordCallRetAddr(RetAddrEntry::Kind::DebugTrap,
                                   masm.currentOffset());
}

template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerEnterFrame() {
  // Store stack position to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerEnterFrame(FramePointer, R0.scratchReg());
  masm.bind(&noInstrument);

  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerEnterFrameToggleOffset_.bound());
  profilerEnterFrameToggleOffset_ = toggleOffset;
}

template <typename Handler>
void BaselineCodeGen<Handler>::emitProfilerExitFrame() {
  // Store previous frame to lastProfilingFrame variable, guarded by a toggled
  // jump. Starts off initially disabled.
  Label noInstrument;
  CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
  masm.profilerExitFrame();
  masm.bind(&noInstrument);

  // Store the start offset in the appropriate location.
  MOZ_ASSERT(!profilerExitFrameToggleOffset_.bound());
  profilerExitFrameToggleOffset_ = toggleOffset;
}

// The following ops are compile-time only and emit no code.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Nop() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NopDestructuring() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NopIsAssignOp() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TryDestructuring() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pop() {
  frame.pop();
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_PopN() {
  // Operand count is a compile-time constant for the compiler.
  frame.popn(GET_UINT16(handler.pc()));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_PopN() {
  // The interpreter must load the operand count from the bytecode at runtime.
  LoadUint16Operand(masm, R0.scratchReg());
  frame.popn(R0.scratchReg());
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_DupAt() {
  frame.syncStack(0);

  // DupAt takes a value on the stack and re-pushes it on top. It's like
  // GetLocal but it addresses from the top of the stack instead of from the
  // stack frame.

  int depth = -(GET_UINT24(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_DupAt() {
  LoadUint24Operand(masm, 0, R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(R0.scratchReg()), R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup() {
  // Keep top stack value in R0, sync the rest so that we can use R1. We use
  // separate registers because every register can be used by at most one
  // StackValue.
  frame.popRegsAndSync(1);
  masm.moveValue(R0, R1);

  // inc/dec ops use Dup followed by Inc/Dec. Push R0 last to avoid a move.
  frame.push(R1);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Dup2() {
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  frame.push(R0);
  frame.push(R1);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Swap() {
  // Keep top stack values in R0 and R1.
  frame.popRegsAndSync(2);

  frame.push(R1);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Pick() {
  frame.syncStack(0);

  // Pick takes a value on the stack and moves it to the top.
  // For instance, pick 2:
  //   before: A B C D E
  //   after : A B D E C

  // First, move value at -(amount + 1) into R0.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  masm.loadValue(frame.addressOfStackValue(depth), R0);

  // Move the other values down.
  depth++;
  for (; depth < 0; depth++) {
    Address source = frame.addressOfStackValue(depth);
    Address dest = frame.addressOfStackValue(depth - 1);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  }

  // Push R0.
  frame.pop();
  frame.push(R0);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Pick() {
  // First, move the value to move up into R0.
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(scratch), R0);

  // Move the other values down.
  Label top, done;
  masm.bind(&top);
  masm.branchSub32(Assembler::Signed, Imm32(1), scratch, &done);
  {
    masm.loadValue(frame.addressOfStackValue(scratch), R1);
    masm.storeValue(R1, frame.addressOfStackValue(scratch, sizeof(Value)));
    masm.jump(&top);
  }

  masm.bind(&done);

  // Replace value on top of the stack with R0.
  masm.storeValue(R0, frame.addressOfStackValue(-1));
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Unpick() {
  frame.syncStack(0);

  // Pick takes the top of the stack value and moves it under the nth value.
  // For instance, unpick 2:
  //   before: A B C D E
  //   after : A B E C D

  // First, move value at -1 into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  MOZ_ASSERT(GET_INT8(handler.pc()) > 0,
             "Interpreter code assumes JSOp::Unpick operand > 0");

  // Move the other values up.
  int32_t depth = -(GET_INT8(handler.pc()) + 1);
  for (int32_t i = -1; i > depth; i--) {
    Address source = frame.addressOfStackValue(i - 1);
    Address dest = frame.addressOfStackValue(i);
    masm.loadValue(source, R1);
    masm.storeValue(R1, dest);
  }

  // Store R0 under the nth value.
  Address dest = frame.addressOfStackValue(depth);
  masm.storeValue(R0, dest);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Unpick() {
  Register scratch = R2.scratchReg();
  LoadUint8Operand(masm, scratch);

  // Move the top value into R0.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  // Overwrite the nth stack value with R0 but first save the old value in R1.
  masm.loadValue(frame.addressOfStackValue(scratch), R1);
  masm.storeValue(R0, frame.addressOfStackValue(scratch));

  // Now for each slot x in [n-1, 1] do the following:
  //
  // * Store the value in slot x in R0.
  // * Store the value in the previous slot (now in R1) in slot x.
  // * Move R0 to R1.

#ifdef DEBUG
  // Assert the operand > 0 so the branchSub32 below doesn't "underflow" to
  // negative values.
  {
    Label ok;
    masm.branch32(Assembler::GreaterThan, scratch, Imm32(0), &ok);
    masm.assumeUnreachable("JSOp::Unpick with operand <= 0?");
    masm.bind(&ok);
  }
#endif

  Label top, done;
  masm.bind(&top);
  masm.branchSub32(Assembler::Zero, Imm32(1), scratch, &done);
  {
    // Overwrite stack slot x with slot x + 1, saving the old value in R1.
    masm.loadValue(frame.addressOfStackValue(scratch), R0);
    masm.storeValue(R1, frame.addressOfStackValue(scratch));
    masm.moveValue(R0, R1);
    masm.jump(&top);
  }

  // Finally, replace the value on top of the stack (slot 0) with R1. This is
  // the value that used to be in slot 1.
  masm.bind(&done);
  masm.storeValue(R1, frame.addressOfStackValue(-1));
  return true;
}

template <>
void BaselineCompilerCodeGen::emitJump() {
  // The compiler knows the jump target statically, so it can jump straight
  // to the target op's label.
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();

  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.jump(handler.labelOf(target));
}

template <>
void BaselineInterpreterCodeGen::emitJump() {
  // We have to add the current pc's jump offset to the current pc. We can use
  // R0 and R1 as scratch because we jump to the "next op" label so these
  // registers aren't in use at this point.
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  Register pc = LoadBytecodePC(masm, scratch1);
  LoadInt32OperandSignExtendToPtr(masm, pc, scratch2);
  if (HasInterpreterPCReg()) {
    masm.addPtr(scratch2, InterpreterPCReg);
  } else {
    masm.addPtr(pc, scratch2);
    masm.storePtr(scratch2, frame.addressOfInterpreterPC());
  }
  masm.jump(handler.interpretOpWithPCRegLabel());
}

template <>
void BaselineCompilerCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                    ValueOperand val) {
  jsbytecode* pc = handler.pc();
  MOZ_ASSERT(IsJumpOpcode(JSOp(*pc)));
  frame.assertSyncedStack();

  jsbytecode* target = pc + GET_JUMP_OFFSET(pc);
  masm.branchTestBooleanTruthy(branchIfTrue, val, handler.labelOf(target));
}

template <>
void BaselineInterpreterCodeGen::emitTestBooleanTruthy(bool branchIfTrue,
                                                      ValueOperand val) {
  // The interpreter inverts the test and falls through to the generic
  // pc-relative jump when the branch is taken.
  Label done;
  masm.branchTestBooleanTruthy(!branchIfTrue, val, &done);
  emitJump();
  masm.bind(&done);
}

template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  // The compiler knows the script, so the flag test is a compile-time branch.
  if
(handler.script()->hasFlag(flag)) {
    return ifSet();
  }
  return ifNotSet();
}

template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, const F1& ifSet, const F2& ifNotSet,
    Register scratch) {
  // The interpreter must test the script flag at runtime and emit code for
  // both arms.
  Label flagNotSet, done;
  loadScript(scratch);
  masm.branchTest32(Assembler::Zero,
                    Address(scratch, JSScript::offsetOfImmutableFlags()),
                    Imm32(uint32_t(flag)), &flagNotSet);
  {
    if (!ifSet()) {
      return false;
    }
    masm.jump(&done);
  }
  masm.bind(&flagNotSet);
  {
    if (!ifNotSet()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}

template <>
template <typename F>
[[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, bool value, const F& emit,
    Register scratch) {
  if (handler.script()->hasFlag(flag) == value) {
    return emit();
  }
  return true;
}

template <>
template <typename F>
[[nodiscard]] bool BaselineCompilerCodeGen::emitTestScriptFlag(
    JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
  if (handler.script()->hasFlag(flag) == value) {
    return emit();
  }
  return true;
}

template <>
template <typename F>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
    JSScript::ImmutableFlags flag, bool value, const F& emit,
    Register scratch) {
  // Skip |emit| when the runtime flag test does not match |value|.
  Label done;
  loadScript(scratch);
  masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
                    Address(scratch, JSScript::offsetOfImmutableFlags()),
                    Imm32(uint32_t(flag)), &done);
  {
    if (!emit()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}

template <>
template <typename F>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitTestScriptFlag(
    JSScript::MutableFlags flag, bool value, const F& emit, Register scratch) {
  Label done;
  loadScript(scratch);
  masm.branchTest32(value ? Assembler::Zero : Assembler::NonZero,
                    Address(scratch, JSScript::offsetOfMutableFlags()),
                    Imm32(uint32_t(flag)), &done);
  {
    if (!emit()) {
      return false;
    }
  }

  masm.bind(&done);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Goto() {
  frame.syncStack(0);
  emitJump();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitTest(bool branchIfTrue) {
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  // Only call the ToBool IC when the value isn't already known to be boolean.
  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  // IC will leave a BooleanValue in R0, just need to branch on it.
  emitTestBooleanTruthy(branchIfTrue, R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_JumpIfFalse() {
  return emitTest(false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_JumpIfTrue() {
  return emitTest(true);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitAndOr(bool branchIfTrue) {
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // And and Or leave the original value on the stack.
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-1), R0);
  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  emitTestBooleanTruthy(branchIfTrue, R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_And() {
  return emitAndOr(false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Or() {
  return emitAndOr(true);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Coalesce() {
  // Coalesce leaves the original value on the stack.
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-1), R0);

  Label undefinedOrNull;

  // Jump to the target op unless the value is undefined or null.
  masm.branchTestUndefined(Assembler::Equal, R0, &undefinedOrNull);
  masm.branchTestNull(Assembler::Equal, R0, &undefinedOrNull);
  emitJump();

  masm.bind(&undefinedOrNull);
  // fall through
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Not() {
  bool knownBoolean = frame.stackValueHasKnownType(-1, JSVAL_TYPE_BOOLEAN);

  // Keep top stack value in R0.
  frame.popRegsAndSync(1);

  if (!knownBoolean && !emitNextIC()) {
    return false;
  }

  masm.notBoolean(R0);

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Pos() {
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToNumeric() {
  return emitUnaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_LoopHead() {
  if (!emit_JumpTarget()) {
    return false;
  }
  if (!emitInterruptCheck()) {
    return false;
  }
  if (!emitWarmUpCounterIncrement()) {
    return false;
  }
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Void() {
  frame.pop();
  frame.push(UndefinedValue());
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Undefined() {
  frame.push(UndefinedValue());
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Hole() {
  frame.push(MagicValue(JS_ELEMENTS_HOLE));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Null() {
  frame.push(NullValue());
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckIsObj() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  // Throw via the VM when the value is not an object.
  Label ok;
  masm.branchTestObject(Assembler::Equal, R0, &ok);

  prepareVMCall();

  pushUint8BytecodeOperandArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, CheckIsObjectKind);
  if (!callVM<Fn, ThrowCheckIsObject>()) {
    return false;
  }

  masm.bind(&ok);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckThis() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  return emitCheckThis(R0);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckThisReinit() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  return emitCheckThis(R0, /* reinit = */ true);
}

// Throw when |this| is in the wrong initialization state: for |reinit|,
// |this| must still be the uninitialized magic value; otherwise it must NOT
// be the magic value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitCheckThis(ValueOperand val, bool reinit) {
  Label thisOK;
  if (reinit) {
    masm.branchTestMagic(Assembler::Equal, val, &thisOK);
  } else {
    masm.branchTestMagic(Assembler::NotEqual, val, &thisOK);
  }

  prepareVMCall();

  if (reinit) {
    using Fn = bool (*)(JSContext*);
    if (!callVM<Fn, ThrowInitializedThis>()) {
      return false;
    }
  } else {
    using Fn = bool (*)(JSContext*);
    if (!callVM<Fn, ThrowUninitializedThis>()) {
      return false;
    }
  }

  masm.bind(&thisOK);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckReturn() {
  MOZ_ASSERT_IF(handler.maybeScript(),
                handler.maybeScript()->isDerivedClassConstructor());

  // Load |this| in R0, return value in R1.
  frame.popRegsAndSync(1);
  emitLoadReturnValue(R1);

  Label done, returnBad, checkThis;
  masm.branchTestObject(Assembler::NotEqual, R1, &checkThis);
  {
    // Object return value: use it as the result.
    masm.moveValue(R1, R0);
    masm.jump(&done);
  }
  masm.bind(&checkThis);
  // Non-object, non-undefined return values are bad; so is undefined when
  // |this| is still the uninitialized magic value.
  masm.branchTestUndefined(Assembler::NotEqual, R1, &returnBad);
  masm.branchTestMagic(Assembler::NotEqual, R0, &done);
  masm.bind(&returnBad);

  prepareVMCall();
  pushArg(R1);

  using Fn = bool (*)(JSContext*, HandleValue);
  if (!callVM<Fn, ThrowBadDerivedReturnOrUninitializedThis>()) {
    return false;
  }
  masm.assumeUnreachable("Should throw on bad derived constructor return");

  masm.bind(&done);

  // Push |rval| or |this| onto the stack.
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FunctionThis() {
  frame.pushThis();

  auto boxThis = [this]() {
    // Load |thisv| in R0. Skip the call if it's already an object.
    Label skipCall;
    frame.popRegsAndSync(1);
    masm.branchTestObject(Assembler::Equal, R0, &skipCall);

    prepareVMCall();
    masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());

    pushArg(R1.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue);
    if (!callVM<Fn, BaselineGetFunctionThis>()) {
      return false;
    }

    masm.bind(&skipCall);
    frame.push(R0);
    return true;
  };

  // In strict mode code, |this| is left alone.
  return emitTestScriptFlag(JSScript::ImmutableFlags::Strict, false, boxThis,
                            R2.scratchReg());
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GlobalThis() {
  frame.syncStack(0);

  loadGlobalThisValue(R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NonSyntacticGlobalThis() {
  frame.syncStack(0);

  prepareVMCall();

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue);
  if (!callVM<Fn, GetNonSyntacticGlobalThis>()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_True() {
  frame.push(BooleanValue(true));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_False() {
  frame.push(BooleanValue(false));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Zero() {
  frame.push(Int32Value(0));
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_One() {
  frame.push(Int32Value(1));
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Int8() {
  frame.push(Int32Value(GET_INT8(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Int8() {
  LoadInt8Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Int32() {
  frame.push(Int32Value(GET_INT32(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Int32() {
  LoadInt32Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Uint16() {
  frame.push(Int32Value(GET_UINT16(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Uint16() {
  LoadUint16Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Uint24() {
  frame.push(Int32Value(GET_UINT24(handler.pc())));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Uint24() {
  LoadUint24Operand(masm, 0, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Double() {
  frame.push(GET_INLINE_VALUE(handler.pc()));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Double() {
  LoadInlineValueOperand(masm, R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BigInt() {
  // For realm-independent jitcode the GC thing must be loaded from the
  // script at runtime; otherwise it can be baked in as a constant.
  if (handler.realmIndependentJitcode()) {
    frame.syncStack(0);
    Register scratch1 = R0.scratchReg();
    Register scratch2 = R1.scratchReg();
    loadScriptGCThing(ScriptGCThingType::BigInt, scratch1, scratch2);
    masm.tagValue(JSVAL_TYPE_BIGINT, scratch1, R0);
    frame.push(R0);
  } else {
    BigInt* bi = handler.maybeScript()->getBigInt(handler.maybePC());
    frame.push(BigIntValue(bi));
  }
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_String() {
  if (handler.realmIndependentJitcode()) {
    frame.syncStack(0);
    Register scratch1 = R0.scratchReg();
    Register scratch2 = R1.scratchReg();
    loadScriptGCThing(ScriptGCThingType::String, scratch1, scratch2);
    masm.tagValue(JSVAL_TYPE_STRING, scratch1, R0);
    frame.push(R0);
  } else {
    frame.push(
        StringValue(handler.maybeScript()->getString(handler.maybePC())));
  }
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_Symbol() {
  unsigned which = GET_UINT8(handler.pc());
  JS::Symbol* sym = runtime->wellKnownSymbols().get(which);
  frame.push(SymbolValue(sym));
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_Symbol() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();
  LoadUint8Operand(masm, scratch1);

  // Index into the runtime's well-known symbols table.
  masm.movePtr(ImmPtr(&runtime->wellKnownSymbols()), scratch2);
  masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch1);

  masm.tagValue(JSVAL_TYPE_SYMBOL, scratch1, R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Object() {
  if (handler.realmIndependentJitcode()) {
    Register scratch1 = R0.scratchReg();
    Register scratch2 = R1.scratchReg();
    loadScriptGCThing(ScriptGCThingType::Object, scratch1, scratch2);
    masm.tagValue(JSVAL_TYPE_OBJECT, scratch1, R0);
    frame.push(R0);
  } else {
    frame.push(
        ObjectValue(*handler.maybeScript()->getObject(handler.maybePC())));
  }
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallSiteObj() {
  return emit_Object();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RegExp() {
  prepareVMCall();
  pushScriptGCThingArg(ScriptGCThingType::RegExp, R0.scratchReg(),
                       R1.scratchReg());

  using Fn = JSObject* (*)(JSContext*, Handle<RegExpObject*>);
  if (!callVM<Fn, CloneRegExpObject>()) {
    return false;
  }

  // Box and push return value.
  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lambda() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetFunName() {
  frame.popRegsAndSync(2);

  frame.push(R0);
  frame.syncStack(0);

  masm.unboxObject(R0, R0.scratchReg());

  prepareVMCall();

  pushUint8BytecodeOperandArg(R2.scratchReg());
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
  return callVM<Fn, SetFunctionName>();
}

// All binary arithmetic / bitwise ops share the emitBinaryArith IC path.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitOr() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitXor() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BitAnd() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lsh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Rsh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Ursh() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Add() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Sub() {
  return emitBinaryArith();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Mul() {
  return emitBinaryArith();
}

template <typename
Handler> 2916 bool BaselineCodeGen<Handler>::emit_Div() { 2917 return emitBinaryArith(); 2918 } 2919 2920 template <typename Handler> 2921 bool BaselineCodeGen<Handler>::emit_Mod() { 2922 return emitBinaryArith(); 2923 } 2924 2925 template <typename Handler> 2926 bool BaselineCodeGen<Handler>::emit_Pow() { 2927 return emitBinaryArith(); 2928 } 2929 2930 template <typename Handler> 2931 bool BaselineCodeGen<Handler>::emitBinaryArith() { 2932 // Keep top JSStack value in R0 and R2 2933 frame.popRegsAndSync(2); 2934 2935 // Call IC 2936 if (!emitNextIC()) { 2937 return false; 2938 } 2939 2940 // Mark R0 as pushed stack value. 2941 frame.push(R0); 2942 return true; 2943 } 2944 2945 template <typename Handler> 2946 bool BaselineCodeGen<Handler>::emitUnaryArith() { 2947 // Keep top stack value in R0. 2948 frame.popRegsAndSync(1); 2949 2950 // Call IC 2951 if (!emitNextIC()) { 2952 return false; 2953 } 2954 2955 // Mark R0 as pushed stack value. 2956 frame.push(R0); 2957 return true; 2958 } 2959 2960 template <typename Handler> 2961 bool BaselineCodeGen<Handler>::emit_BitNot() { 2962 return emitUnaryArith(); 2963 } 2964 2965 template <typename Handler> 2966 bool BaselineCodeGen<Handler>::emit_Neg() { 2967 return emitUnaryArith(); 2968 } 2969 2970 template <typename Handler> 2971 bool BaselineCodeGen<Handler>::emit_Inc() { 2972 return emitUnaryArith(); 2973 } 2974 2975 template <typename Handler> 2976 bool BaselineCodeGen<Handler>::emit_Dec() { 2977 return emitUnaryArith(); 2978 } 2979 2980 template <typename Handler> 2981 bool BaselineCodeGen<Handler>::emit_Lt() { 2982 return emitCompare(); 2983 } 2984 2985 template <typename Handler> 2986 bool BaselineCodeGen<Handler>::emit_Le() { 2987 return emitCompare(); 2988 } 2989 2990 template <typename Handler> 2991 bool BaselineCodeGen<Handler>::emit_Gt() { 2992 return emitCompare(); 2993 } 2994 2995 template <typename Handler> 2996 bool BaselineCodeGen<Handler>::emit_Ge() { 2997 return emitCompare(); 2998 } 2999 3000 template 
<typename Handler> 3001 bool BaselineCodeGen<Handler>::emit_Eq() { 3002 return emitCompare(); 3003 } 3004 3005 template <typename Handler> 3006 bool BaselineCodeGen<Handler>::emit_Ne() { 3007 return emitCompare(); 3008 } 3009 3010 template <typename Handler> 3011 bool BaselineCodeGen<Handler>::emitCompare() { 3012 // Keep top JSStack value in R0 and R1. 3013 frame.popRegsAndSync(2); 3014 3015 // Call IC. 3016 if (!emitNextIC()) { 3017 return false; 3018 } 3019 3020 // Mark R0 as pushed stack value. 3021 frame.push(R0, JSVAL_TYPE_BOOLEAN); 3022 return true; 3023 } 3024 3025 template <typename Handler> 3026 bool BaselineCodeGen<Handler>::emit_StrictEq() { 3027 return emitCompare(); 3028 } 3029 3030 template <typename Handler> 3031 bool BaselineCodeGen<Handler>::emit_StrictNe() { 3032 return emitCompare(); 3033 } 3034 3035 template <> 3036 bool BaselineCompilerCodeGen::emitConstantStrictEq(JSOp op) { 3037 ConstantCompareOperand data = 3038 ConstantCompareOperand::fromRawValue(GET_UINT16(handler.pc())); 3039 3040 frame.popRegsAndSync(1); 3041 3042 ValueOperand value = R0; 3043 Label pass, done; 3044 3045 switch (data.type()) { 3046 case ConstantCompareOperand::EncodedType::Int32: { 3047 int32_t constantVal = data.toInt32(); 3048 3049 Label fail; 3050 masm.branchTestValue(Assembler::Equal, value, Int32Value(constantVal), 3051 op == JSOp::StrictEq ? &pass : &fail); 3052 if (constantVal != 0) { 3053 masm.branchTestValue(JSOpToCondition(op, false), value, 3054 DoubleValue(constantVal), &pass); 3055 } else { 3056 masm.branchTestValue(Assembler::Equal, value, DoubleValue(0.0), 3057 op == JSOp::StrictEq ? 
                             &pass : &fail);
        masm.branchTestValue(JSOpToCondition(op, false), value,
                             DoubleValue(-0.0), &pass);
      }
      masm.bind(&fail);
      break;
    }

    case ConstantCompareOperand::EncodedType::Boolean: {
      bool constantVal = data.toBoolean();

      masm.branchTestValue(JSOpToCondition(op, false), value,
                           BooleanValue(constantVal), &pass);
      break;
    }

    case ConstantCompareOperand::EncodedType::Null: {
      masm.branchTestNull(JSOpToCondition(op, false), value, &pass);
      break;
    }

    case ConstantCompareOperand::EncodedType::Undefined: {
      masm.branchTestUndefined(JSOpToCondition(op, false), value, &pass);
      break;
    }
  }

  // Fall-through: the comparison condition did not hold, so the result of the
  // op is |false|.
  {
    masm.moveValue(BooleanValue(false), R0);
    masm.jump(&done);
  }

  masm.bind(&pass);
  {
    masm.moveValue(BooleanValue(true), R0);
  }

  masm.bind(&done);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::StrictConstantEq: strict equality against a constant operand encoded
// in the bytecode.
template <>
bool BaselineCompilerCodeGen::emit_StrictConstantEq() {
  return emitConstantStrictEq(JSOp::StrictEq);
}

// JSOp::StrictConstantNe: strict inequality against a constant operand encoded
// in the bytecode.
template <>
bool BaselineCompilerCodeGen::emit_StrictConstantNe() {
  return emitConstantStrictEq(JSOp::StrictNe);
}

// Interpreter version: the constant operand isn't known when this code is
// generated, so load its type tag and payload from the bytecode and compare
// dynamically.
template <>
bool BaselineInterpreterCodeGen::emitConstantStrictEq(JSOp op) {
  frame.popRegsAndSync(1);

  ValueOperand value = R0;

#if defined(JS_NUNBOX32)
  // On 32-bit value representations R1's two registers hold the constant's
  // type tag and payload directly.
  Register constantType = R1.typeReg();
  Register payload = R1.payloadReg();
#else
  Register constantType = R1.scratchReg();
  Register payload = R2.scratchReg();
#endif

  LoadConstantCompareOperand(masm, constantType, payload);

  Label pass, fail, done;

  // Int32 constants need to check for double-valued inputs.
  Label compareValueBitwise;
  masm.branch32(Assembler::NotEqual, constantType,
                Imm32(int32_t(ConstantCompareOperand::EncodedType::Int32)),
                &compareValueBitwise);
  masm.branchTestDouble(Assembler::NotEqual, value, &compareValueBitwise);
  {
    // The input is a double and the constant is an int32: compare numerically
    // after converting the constant to double.
    FloatRegister unboxedValue = FloatReg0;
    FloatRegister floatPayload = FloatReg1;
    masm.unboxDouble(value, unboxedValue);
    masm.convertInt32ToDouble(payload, floatPayload);
    masm.branchDouble(JSOpToDoubleCondition(op), unboxedValue, floatPayload,
                      &pass);
    masm.jump(&fail);
  }
  masm.bind(&compareValueBitwise);

  // Box constant value into R1.
  masm.boxNonDouble(constantType, payload, R1);

  // Bitwise comparison for int32, boolean, null, and undefined values.
  masm.branch64(JSOpToCondition(op, false), value.toRegister64(),
                R1.toRegister64(), &pass);

  masm.bind(&fail);
  {
    masm.moveValue(BooleanValue(false), R0);
    masm.jump(&done);
  }

  masm.bind(&pass);
  {
    masm.moveValue(BooleanValue(true), R0);
  }

  masm.bind(&done);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_StrictConstantEq() {
  return emitConstantStrictEq(JSOp::StrictEq);
}

template <>
bool BaselineInterpreterCodeGen::emit_StrictConstantNe() {
  return emitConstantStrictEq(JSOp::StrictNe);
}

// JSOp::Case: pop the match condition; if it's true, also pop the switch value
// and jump to the case body, otherwise fall through to the next case.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Case() {
  frame.popRegsAndSync(1);

  Label done;
  masm.branchTestBooleanTruthy(/* branchIfTrue */ false, R0, &done);
  {
    // Pop the switch value if the case matches.
    masm.addToStackPtr(Imm32(sizeof(Value)));
    emitJump();
  }
  masm.bind(&done);
  return true;
}

// JSOp::Default: pop the switch value and jump to the default case body.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Default() {
  frame.pop();
  return emit_Goto();
}

// JSOp::Lineno carries only source-position information; no code is emitted.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Lineno() {
  return true;
}

// JSOp::NewArray: allocate the array through the next IC and push the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewArray() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// If |val| is the magic hole value, set the NON_PACKED flag on the
// ObjectElements header pointed to by |elements| so that packed-array fast
// paths know this array contains holes.
static void MarkElementsNonPackedIfHoleValue(MacroAssembler& masm,
                                             Register elements,
                                             ValueOperand val) {
  Label notHole;
  masm.branchTestMagic(Assembler::NotEqual, val, &notHole);
  {
    Address elementsFlags(elements, ObjectElements::offsetOfFlags());
    masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);
  }
  masm.bind(&notHole);
}

// JSOp::InitElemArray (interpreter): store the value on top of the stack into
// the array literal below it at the int32 index encoded in the bytecode, and
// bump the initialized length.
template <>
bool BaselineInterpreterCodeGen::emit_InitElemArray() {
  // Pop value into R0, keep the object on the stack.
  frame.popRegsAndSync(1);

  // Load object in R2.
  Register obj = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), obj);

  // Load index in R1.
  Register index = R1.scratchReg();
  LoadInt32Operand(masm, index);

  // Store the Value. No pre-barrier because this is an initialization.
  masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
  masm.storeValue(R0, BaseObjectElementIndex(obj, index));

  // Bump initialized length.
  Address initLength(obj, ObjectElements::offsetOfInitializedLength());
  masm.add32(Imm32(1), index);
  masm.store32(index, initLength);

  // Mark elements as NON_PACKED if we stored the hole value.
  MarkElementsNonPackedIfHoleValue(masm, obj, R0);

  // Post-barrier.
  Label skipBarrier;
  Register scratch = index;
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch, &skipBarrier);
  {
    // |obj| was clobbered above (it holds the elements pointer); reload the
    // object itself for the barrier call.
    masm.unboxObject(frame.addressOfStackValue(-1), obj);
    masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);
    MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
    masm.call(&postBarrierSlot_);
  }
  masm.bind(&skipBarrier);
  return true;
}

// JSOp::InitElemArray (compiler): like the interpreter version, but the index
// is a pc operand and the stored value may be known at compile time, which
// lets us statically resolve the hole check and skip the post-barrier.
template <>
bool BaselineCompilerCodeGen::emit_InitElemArray() {
  // Pop value into R0, keep the object on the stack.
  Maybe<Value> knownValue = frame.knownStackValue(-1);
  frame.popRegsAndSync(1);

  // Load object in R2.
  Register obj = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), obj);

  uint32_t index = GET_UINT32(handler.pc());
  MOZ_ASSERT(index <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce an index exceeding int32_t range");

  // Store the Value. No pre-barrier because this is an initialization.
  masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), obj);
  masm.storeValue(R0, Address(obj, index * sizeof(Value)));

  // Bump initialized length.
  Address initLength(obj, ObjectElements::offsetOfInitializedLength());
  masm.store32(Imm32(index + 1), initLength);

  // Mark elements as NON_PACKED if we stored the hole value. We know this
  // statically except when debugger instrumentation is enabled because that
  // forces a stack-sync (which discards constants and known types) for each op.
  if (knownValue && knownValue->isMagic(JS_ELEMENTS_HOLE)) {
    Address elementsFlags(obj, ObjectElements::offsetOfFlags());
    masm.or32(Imm32(ObjectElements::NON_PACKED), elementsFlags);
  } else if (handler.compileDebugInstrumentation()) {
    MarkElementsNonPackedIfHoleValue(masm, obj, R0);
  } else {
#ifdef DEBUG
    // If the value isn't a known constant it must not be the hole; check that
    // in debug builds.
    Label notHole;
    masm.branchTestMagic(Assembler::NotEqual, R0, &notHole);
    masm.assumeUnreachable("Unexpected hole value");
    masm.bind(&notHole);
#endif
  }

  // Post-barrier.
  if (knownValue) {
    // Compile-time constants are tenured, so no store buffer entry is needed.
    MOZ_ASSERT(JS::GCPolicy<Value>::isTenured(*knownValue));
  } else {
    Label skipBarrier;
    Register scratch = R1.scratchReg();
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, scratch,
                                  &skipBarrier);
    {
      // Reload the object: |obj| currently holds the elements pointer.
      masm.unboxObject(frame.addressOfStackValue(-1), obj);
      masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch,
                                   &skipBarrier);
      MOZ_ASSERT(obj == R2.scratchReg(), "post barrier expects object in R2");
      masm.call(&postBarrierSlot_);
    }
    masm.bind(&skipBarrier);
  }
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewObject() {
  return emitNewObject();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewInit() {
  return emitNewObject();
}

// Shared implementation for NewObject/NewInit: allocate through the next IC
// and push the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitNewObject() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// JSOp::InitElem: define |obj[id] = rhs| via IC, leaving the object on top of
// the stack afterwards.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElem() {
  // Store RHS in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep object and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Push the object to store the result of the IC.
  frame.push(R0);
  frame.syncStack(0);

  // Keep RHS on the stack.
  frame.pushScratchValue();

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Pop the rhs, so that the object is on the top of the stack.
  frame.pop();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElem() {
  return emit_InitElem();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLockedElem() {
  return emit_InitElem();
}

// JSOp::MutateProto: set the [[Prototype]] of an object literal from a
// |__proto__: value| property, via VM call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_MutateProto() {
  // Keep values on the stack for the decompiler.
  frame.syncStack(0);

  masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();

  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, Handle<PlainObject*>, HandleValue);
  if (!callVM<Fn, MutatePrototype>()) {
    return false;
  }

  frame.pop();
  return true;
}

// JSOp::InitProp: define a named property on the object literal via IC,
// leaving the object on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitProp() {
  // Load lhs in R0, rhs in R1.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Leave the object on the stack.
  frame.pop();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitLockedProp() {
  return emit_InitProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenProp() {
  return emit_InitProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetElem() {
  // Keep top two stack values in R0 and R1.
  frame.popRegsAndSync(2);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// JSOp::GetElemSuper: receiver and key go through the IC; the home-object
// prototype (obj) is kept on the stack across the IC call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetElemSuper() {
  // Store obj in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep receiver and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Keep obj on the stack.
  frame.pushScratchValue();

  if (!emitNextIC()) {
    return false;
  }

  frame.pop();
  frame.push(R0);
  return true;
}

// JSOp::SetElem: |obj[id] = rhs| via IC; the RHS stays on the stack as the
// result of the assignment expression.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetElem() {
  // Store RHS in the scratch slot.
  frame.storeStackValue(-1, frame.addressOfScratchValue(), R2);
  frame.pop();

  // Keep object and index in R0 and R1.
  frame.popRegsAndSync(2);

  // Keep RHS on the stack.
  frame.pushScratchValue();

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetElem() {
  return emit_SetElem();
}

// Shared implementation of (Strict)SetElemSuper via VM call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSetElemSuper(bool strict) {
  // Incoming stack is |receiver, propval, obj, rval|. We need to shuffle
  // stack to leave rval when operation is complete.

  // Pop rval into R0, then load receiver into R1 and replace with rval.
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-3), R1);
  masm.storeValue(R0, frame.addressOfStackValue(-3));

  prepareVMCall();

  pushArg(Imm32(strict));
  pushArg(R0);  // rval
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  pushArg(R0);  // propval
  pushArg(R1);  // receiver
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  pushArg(R0);  // obj

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue, HandleValue,
                      HandleValue, bool);
  if (!callVM<Fn, js::SetElementSuper>()) {
    return false;
  }

  frame.popn(2);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetElemSuper() {
  return emitSetElemSuper(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetElemSuper() {
  return emitSetElemSuper(/* strict = */ true);
}

// Shared implementation of JSOp::DelElem / JSOp::StrictDelElem via VM call;
// pushes the boolean deletion result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDelElem(bool strict) {
  // Keep values on the stack for the decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();

  pushArg(R1);
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue, bool*);
  if (strict) {
    if (!callVM<Fn, DelElemOperation<true>>()) {
      return false;
    }
  } else {
    if (!callVM<Fn, DelElemOperation<false>>()) {
      return false;
    }
  }

  masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
  frame.popn(2);
  frame.push(R1, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelElem() {
  return emitDelElem(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictDelElem() {
  return emitDelElem(/* strict = */ true);
}

// JSOp::In: |id in obj| via IC; pushes a boolean.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_In() {
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::HasOwn: hasOwnProperty-style test via IC; pushes a boolean.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_HasOwn() {
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::CheckPrivateField: brand-check for private fields via IC; pushes the
// boolean result above the key and value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckPrivateField() {
  // Keep key and val on the stack.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

// JSOp::NewPrivateName: create a fresh private-name symbol via VM call and
// push it.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewPrivateName() {
  prepareVMCall();

  pushScriptNameArg(R0.scratchReg(), R1.scratchReg());

  using Fn = JS::Symbol* (*)(JSContext*, Handle<JSAtom*>);
  if (!callVM<Fn, NewPrivateName>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_SYMBOL, ReturnReg, R0);
  frame.push(R0);
  return true;
}

// JSOp::GetGName: read a global name via IC, starting the lookup at the
// global lexical environment.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetGName() {
  frame.syncStack(0);

  loadGlobalLexicalEnvironment(R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// Try to resolve a BindUnqualifiedGName binding at compile time and push the
// binding object as a constant. Returns false when the fast path does not
// apply (realm-independent code, off-thread compilation, or an unoptimizable
// name) and the generic IC path must be used instead.
template <>
bool BaselineCompilerCodeGen::tryOptimizeBindUnqualifiedGlobalName() {
  if (handler.realmIndependentJitcode()) {
    return false;
  }
  JSScript* script = handler.script();
  MOZ_ASSERT(!script->hasNonSyntacticScope());

  if (handler.compilingOffThread()) {
    return false;
  }

  GlobalObject* global = &script->global();
  PropertyName* name = script->getName(handler.pc());
  if (JSObject* binding =
          MaybeOptimizeBindUnqualifiedGlobalName(global, name)) {
    frame.push(ObjectValue(*binding));
    return true;
  }
  return false;
}

template <>
bool BaselineInterpreterCodeGen::tryOptimizeBindUnqualifiedGlobalName() {
  // Interpreter doesn't optimize simple BindUnqualifiedGNames.
  return false;
}

// JSOp::BindUnqualifiedGName: try the compile-time constant path first, then
// fall back to an IC lookup starting at the global lexical environment.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindUnqualifiedGName() {
  if (tryOptimizeBindUnqualifiedGlobalName()) {
    return true;
  }

  frame.syncStack(0);
  loadGlobalLexicalEnvironment(R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// JSOp::BindVar: compute the environment object for unqualified |var|
// bindings via VM call and push it.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BindVar() {
  frame.syncStack(0);
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, JSObject*);
  if (!callVM<Fn, BindVarOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}

// Shared by SetProp/SetName/SetGName and their strict variants: the IC does
// the store; the RHS remains on the stack as the expression result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetProp() {
  // Keep lhs in R0, rhs in R1.
  frame.popRegsAndSync(2);

  // Keep RHS on the stack.
  frame.push(R1);
  frame.syncStack(0);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetProp() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetGName() {
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetGName() {
  return emit_SetProp();
}

// Shared implementation of (Strict)SetPropSuper via VM call.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSetPropSuper(bool strict) {
  // Incoming stack is |receiver, obj, rval|. We need to shuffle stack to
  // leave rval when operation is complete.

  // Pop rval into R0, then load receiver into R1 and replace with rval.
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-2), R1);
  masm.storeValue(R0, frame.addressOfStackValue(-2));

  prepareVMCall();

  pushArg(Imm32(strict));
  pushArg(R0);  // rval
  pushScriptNameArg(R0.scratchReg(), R2.scratchReg());
  pushArg(R1);  // receiver
  masm.loadValue(frame.addressOfStackValue(-1), R0);
  pushArg(R0);  // obj

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue,
                      Handle<PropertyName*>, HandleValue, bool);
  if (!callVM<Fn, js::SetPropertySuper>()) {
    return false;
  }

  frame.pop();
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetPropSuper() {
  return emitSetPropSuper(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSetPropSuper() {
  return emitSetPropSuper(/* strict = */ true);
}

// JSOp::GetProp: named property read via IC.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetProp() {
  // Keep object in R0.
  frame.popRegsAndSync(1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetBoundName() {
  return emit_GetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetPropSuper() {
  // Receiver -> R1, ObjectOrNull -> R0
  frame.popRegsAndSync(1);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  frame.pop();

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

// Shared implementation of JSOp::DelProp / JSOp::StrictDelProp via VM call;
// pushes the boolean deletion result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitDelProp(bool strict) {
  // Keep value on the stack for the decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();

  pushScriptNameArg(R1.scratchReg(), R2.scratchReg());
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, Handle<PropertyName*>, bool*);
  if (strict) {
    if (!callVM<Fn, DelPropOperation<true>>()) {
      return false;
    }
  } else {
    if (!callVM<Fn, DelPropOperation<false>>()) {
      return false;
    }
  }

  masm.boxNonDouble(JSVAL_TYPE_BOOLEAN, ReturnReg, R1);
  frame.pop();
  frame.push(R1, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DelProp() {
  return emitDelProp(/* strict = */ false);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictDelProp() {
  return emitDelProp(/* strict = */ true);
}

// Walk |ec.hops()| enclosing-environment links, leaving the target
// environment object in |reg|. Compiler only: hops are a pc operand.
template <>
void BaselineCompilerCodeGen::getEnvironmentCoordinateObject(Register reg) {
  EnvironmentCoordinate ec(handler.pc());

  masm.loadPtr(frame.addressOfEnvironmentChain(), reg);
  for (unsigned i = ec.hops(); i; i--) {
    masm.unboxObject(
        Address(reg, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
  }
}

template <>
void BaselineInterpreterCodeGen::getEnvironmentCoordinateObject(Register reg) {
  MOZ_CRASH("Shouldn't call this for interpreter");
}

// Compute the address of the slot named by the pc's EnvironmentCoordinate,
// given the environment object in |objReg|. Clobbers |reg| only on the
// dynamic-slots path.
template <>
Address BaselineCompilerCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  EnvironmentCoordinate ec(handler.pc());

  if (EnvironmentObject::nonExtensibleIsFixedSlot(ec)) {
    return Address(objReg, NativeObject::getFixedSlotOffset(ec.slot()));
  }

  uint32_t slot = EnvironmentObject::nonExtensibleDynamicSlotIndex(ec);
  masm.loadPtr(Address(objReg, NativeObject::offsetOfSlots()), reg);
  return Address(reg, slot * sizeof(Value));
}
template <>
Address BaselineInterpreterCodeGen::getEnvironmentCoordinateAddressFromObject(
    Register objReg, Register reg) {
  MOZ_CRASH("Shouldn't call this for interpreter");
}

// Convenience wrapper: walk to the coordinate's environment object and return
// the address of its slot, using |reg| for both steps.
template <typename Handler>
Address BaselineCodeGen<Handler>::getEnvironmentCoordinateAddress(
    Register reg) {
  getEnvironmentCoordinateObject(reg);
  return getEnvironmentCoordinateAddressFromObject(reg, reg);
}

// For a JOF_ENVCOORD op load the number of hops from the bytecode and skip this
// number of environment objects.
static void LoadAliasedVarEnv(MacroAssembler& masm, Register env,
                              Register scratch) {
  static_assert(ENVCOORD_HOPS_LEN == 2,
                "Code assumes number of hops is stored in uint16 operand");
  LoadUint16Operand(masm, scratch);

  Label top, done;
  masm.branchTest32(Assembler::Zero, scratch, scratch, &done);
  masm.bind(&top);
  {
    Address nextEnv(env, EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(nextEnv, env);
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch, &top);
  }
  masm.bind(&done);
}

// Compiler: the coordinate is a pc operand, so the slot address is computed
// statically.
template <>
void BaselineCompilerCodeGen::emitGetAliasedVar(ValueOperand dest) {
  frame.syncStack(0);

  Address address = getEnvironmentCoordinateAddress(R0.scratchReg());
  masm.loadValue(address, dest);
}

// Interpreter: hops and slot index are read from the bytecode at runtime.
template <>
void BaselineInterpreterCodeGen::emitGetAliasedVar(ValueOperand dest) {
  Register env = R0.scratchReg();
  Register scratch = R1.scratchReg();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch);

  // Load the Value from a fixed or dynamic slot.
  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), env);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    masm.loadValue(BaseValueIndex(env, scratch, offset), dest);
  }
  masm.bind(&done);
}

// Debug-mode path: load the aliased var through the LoadAliasedDebugVar VM
// call instead of inline slot access.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitGetAliasedDebugVar(ValueOperand dest) {
  frame.syncStack(0);
  Register env = R0.scratchReg();
  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);

  prepareVMCall();
  pushBytecodePCArg();
  pushArg(env);

  using Fn =
      bool (*)(JSContext*, JSObject* env, jsbytecode*, MutableHandleValue);
  return callVM<Fn, LoadAliasedDebugVar>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedDebugVar() {
  if (!emitGetAliasedDebugVar(R0)) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetAliasedVar() {
  emitGetAliasedVar(R0);

  frame.push(R0);
  return true;
}

// JSOp::SetAliasedVar (compiler): store into an environment slot with pre- and
// post-barriers; the RHS stays pushed as the expression result.
template <>
bool BaselineCompilerCodeGen::emit_SetAliasedVar() {
  // Keep rvalue in R0.
  frame.popRegsAndSync(1);
  Register objReg = R2.scratchReg();
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.take(R0);
  regs.take(R2);
  Register temp = regs.takeAny();
  Register temp2 = regs.takeAny();

  getEnvironmentCoordinateObject(objReg);
  Address address = getEnvironmentCoordinateAddressFromObject(objReg, temp);
  emitGuardedCallPreBarrierAnyZone(address, MIRType::Value, temp2);
  masm.storeValue(R0, address);
  frame.push(R0);

  // Only R0 and R2 are live at this point.
  // R2.scratchReg() has the scope coordinate object.

  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

  // Uses R2.scratchReg() as input
  masm.call(&postBarrierSlot_);  // Won't clobber R0

  masm.bind(&skipBarrier);
  return true;
}

// JSOp::SetAliasedVar (interpreter): same as above but hops/slot come from the
// bytecode, so the slot address is computed at runtime.
template <>
bool BaselineInterpreterCodeGen::emit_SetAliasedVar() {
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.take(R2);
  if (HasInterpreterPCReg()) {
    regs.take(InterpreterPCReg);
  }

  Register env = regs.takeAny();
  Register scratch1 = regs.takeAny();
  Register scratch2 = regs.takeAny();
  Register scratch3 = regs.takeAny();

  // Load the right environment object.
  masm.loadPtr(frame.addressOfEnvironmentChain(), env);
  LoadAliasedVarEnv(masm, env, scratch1);

  // Load the slot index.
  static_assert(ENVCOORD_SLOT_LEN == 3,
                "Code assumes slot is stored in uint24 operand");
  LoadUint24Operand(masm, ENVCOORD_HOPS_LEN, scratch1);

  // Store the RHS Value in R2.
  masm.loadValue(frame.addressOfStackValue(-1), R2);

  // Load a pointer to the fixed or dynamic slot into scratch2. We want to call
  // guardedCallPreBarrierAnyZone once to avoid code bloat.

  // See EnvironmentObject::nonExtensibleIsFixedSlot.
  Label isDynamic, done;
  masm.branch32(Assembler::AboveOrEqual, scratch1,
                Imm32(NativeObject::MAX_FIXED_SLOTS), &isDynamic);
  {
    uint32_t offset = NativeObject::getFixedSlotOffset(0);
    BaseValueIndex slotAddr(env, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
    masm.jump(&done);
  }
  masm.bind(&isDynamic);
  {
    masm.loadPtr(Address(env, NativeObject::offsetOfSlots()), scratch2);

    // Use an offset to subtract the number of fixed slots.
    int32_t offset = -int32_t(NativeObject::MAX_FIXED_SLOTS * sizeof(Value));
    BaseValueIndex slotAddr(scratch2, scratch1, offset);
    masm.computeEffectiveAddress(slotAddr, scratch2);
  }
  masm.bind(&done);

  // Pre-barrier and store.
  Address slotAddr(scratch2, 0);
  emitGuardedCallPreBarrierAnyZone(slotAddr, MIRType::Value, scratch3);
  masm.storeValue(R2, slotAddr);

  // Post barrier.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, env, scratch1, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R2, scratch1,
                                &skipBarrier);
  {
    // Post barrier code expects the object in R2.
    masm.movePtr(env, R2.scratchReg());
    masm.call(&postBarrierSlot_);
  }
  masm.bind(&skipBarrier);
  return true;
}

// JSOp::GetName: name lookup on the environment chain via IC.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetName() {
  frame.syncStack(0);

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
4115 frame.push(R0); 4116 return true; 4117 } 4118 4119 template <typename Handler> 4120 bool BaselineCodeGen<Handler>::emit_BindUnqualifiedName() { 4121 frame.syncStack(0); 4122 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg()); 4123 4124 // Call IC. 4125 if (!emitNextIC()) { 4126 return false; 4127 } 4128 4129 // Mark R0 as pushed stack value. 4130 frame.push(R0); 4131 return true; 4132 } 4133 4134 template <typename Handler> 4135 bool BaselineCodeGen<Handler>::emit_BindName() { 4136 frame.syncStack(0); 4137 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg()); 4138 4139 // Call IC. 4140 if (!emitNextIC()) { 4141 return false; 4142 } 4143 4144 // Mark R0 as pushed stack value. 4145 frame.push(R0); 4146 return true; 4147 } 4148 4149 template <typename Handler> 4150 bool BaselineCodeGen<Handler>::emit_DelName() { 4151 frame.syncStack(0); 4152 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg()); 4153 4154 prepareVMCall(); 4155 4156 pushArg(R0.scratchReg()); 4157 pushScriptNameArg(R1.scratchReg(), R2.scratchReg()); 4158 4159 using Fn = bool (*)(JSContext*, Handle<PropertyName*>, HandleObject, 4160 MutableHandleValue); 4161 if (!callVM<Fn, js::DeleteNameOperation>()) { 4162 return false; 4163 } 4164 4165 frame.push(R0); 4166 return true; 4167 } 4168 4169 template <typename Handler> 4170 bool BaselineCodeGen<Handler>::emit_GetImport() { 4171 frame.syncStack(0); 4172 4173 if (!emitNextIC()) { 4174 return false; 4175 } 4176 4177 frame.push(R0); 4178 return true; 4179 } 4180 4181 template <typename Handler> 4182 bool BaselineCodeGen<Handler>::emit_GetIntrinsic() { 4183 frame.syncStack(0); 4184 4185 if (!emitNextIC()) { 4186 return false; 4187 } 4188 4189 frame.push(R0); 4190 return true; 4191 } 4192 4193 template <typename Handler> 4194 bool BaselineCodeGen<Handler>::emit_SetIntrinsic() { 4195 frame.syncStack(0); 4196 masm.loadValue(frame.addressOfStackValue(-1), R0); 4197 4198 prepareVMCall(); 4199 4200 pushArg(R0); 4201 
pushBytecodePCArg(); 4202 pushScriptArg(); 4203 4204 using Fn = bool (*)(JSContext*, JSScript*, jsbytecode*, HandleValue); 4205 return callVM<Fn, SetIntrinsicOperation>(); 4206 } 4207 4208 template <typename Handler> 4209 bool BaselineCodeGen<Handler>::emit_GlobalOrEvalDeclInstantiation() { 4210 frame.syncStack(0); 4211 4212 prepareVMCall(); 4213 4214 loadInt32LengthBytecodeOperand(R0.scratchReg()); 4215 pushArg(R0.scratchReg()); 4216 pushScriptArg(); 4217 masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg()); 4218 pushArg(R0.scratchReg()); 4219 4220 using Fn = bool (*)(JSContext*, HandleObject, HandleScript, GCThingIndex); 4221 return callVM<Fn, js::GlobalOrEvalDeclInstantiation>(); 4222 } 4223 4224 template <typename Handler> 4225 bool BaselineCodeGen<Handler>::emitInitPropGetterSetter() { 4226 // Keep values on the stack for the decompiler. 4227 frame.syncStack(0); 4228 4229 prepareVMCall(); 4230 4231 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg()); 4232 masm.unboxObject(frame.addressOfStackValue(-2), R1.scratchReg()); 4233 4234 pushArg(R0.scratchReg()); 4235 pushScriptNameArg(R0.scratchReg(), R2.scratchReg()); 4236 pushArg(R1.scratchReg()); 4237 pushBytecodePCArg(); 4238 4239 using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, 4240 Handle<PropertyName*>, HandleObject); 4241 if (!callVM<Fn, InitPropGetterSetterOperation>()) { 4242 return false; 4243 } 4244 4245 frame.pop(); 4246 return true; 4247 } 4248 4249 template <typename Handler> 4250 bool BaselineCodeGen<Handler>::emit_InitPropGetter() { 4251 return emitInitPropGetterSetter(); 4252 } 4253 4254 template <typename Handler> 4255 bool BaselineCodeGen<Handler>::emit_InitHiddenPropGetter() { 4256 return emitInitPropGetterSetter(); 4257 } 4258 4259 template <typename Handler> 4260 bool BaselineCodeGen<Handler>::emit_InitPropSetter() { 4261 return emitInitPropGetterSetter(); 4262 } 4263 4264 template <typename Handler> 4265 bool 
BaselineCodeGen<Handler>::emit_InitHiddenPropSetter() {
  return emitInitPropGetterSetter();
}

// Shared implementation for JSOp::Init{Hidden,}Elem{Getter,Setter}: defines an
// accessor with a computed key. Stack layout: object at -3, key at -2,
// getter/setter object at -1.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitInitElemGetterSetter() {
  // Load index and value in R0 and R1, but keep values on the stack for the
  // decompiler.
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);
  masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg());

  prepareVMCall();

  pushArg(R1.scratchReg());
  pushArg(R0);
  masm.unboxObject(frame.addressOfStackValue(-3), R0.scratchReg());
  pushArg(R0.scratchReg());
  pushBytecodePCArg();

  using Fn = bool (*)(JSContext*, jsbytecode*, HandleObject, HandleValue,
                      HandleObject);
  if (!callVM<Fn, InitElemGetterSetterOperation>()) {
    return false;
  }

  // Pop key and accessor; the object stays on the stack as the result.
  frame.popn(2);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemGetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemSetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHiddenElemSetter() {
  return emitInitElemGetterSetter();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitElemInc() {
  // Keep the object and rhs on the stack.
  frame.syncStack(0);

  // Load object in R0, index in R1.
  masm.loadValue(frame.addressOfStackValue(-3), R0);
  masm.loadValue(frame.addressOfStackValue(-2), R1);

  // Call IC.
  if (!emitNextIC()) {
    return false;
  }

  // Pop the rhs
  frame.pop();

  // Increment index
  Address indexAddr = frame.addressOfStackValue(-1);
#ifdef DEBUG
  Label isInt32;
  masm.branchTestInt32(Assembler::Equal, indexAddr, &isInt32);
  masm.assumeUnreachable("INITELEM_INC index must be Int32");
  masm.bind(&isInt32);
#endif
  masm.incrementInt32Value(indexAddr);
  return true;
}

// Compiler specialization: the local slot number is a compile-time constant
// read from the current pc.
template <>
bool BaselineCompilerCodeGen::emit_GetLocal() {
  frame.pushLocal(GET_LOCALNO(handler.pc()));
  return true;
}

static BaseValueIndex ComputeAddressOfLocal(MacroAssembler& masm,
                                            Register indexScratch) {
  // Locals are stored in memory at a negative offset from the frame pointer. We
  // negate the index first to effectively subtract it.
  masm.negPtr(indexScratch);
  return BaseValueIndex(FramePointer, indexScratch,
                        BaselineFrame::reverseOffsetOfLocal(0));
}

// Interpreter specialization: the local slot number is decoded from the
// bytecode operand at run time.
template <>
bool BaselineInterpreterCodeGen::emit_GetLocal() {
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_SetLocal() {
  // Ensure no other StackValue refers to the old value, for instance i + (i =
  // 3). This also allows us to use R0 as scratch below.
  frame.syncStack(1);

  uint32_t local = GET_LOCALNO(handler.pc());
  frame.storeStackValue(-1, frame.addressOfLocal(local), R0);
  return true;
}

template <>
bool BaselineInterpreterCodeGen::emit_SetLocal() {
  Register scratch = R0.scratchReg();
  LoadUint24Operand(masm, 0, scratch);
  BaseValueIndex addr = ComputeAddressOfLocal(masm, scratch);
  masm.loadValue(frame.addressOfStackValue(-1), R1);
  masm.storeValue(R1, addr);
  return true;
}

// Compiler path for JSOp::GetArg/SetArg. If formals alias the arguments
// object, accesses go through the ArgumentsData vector (with pre-barrier on
// store and a nursery post-barrier afterwards); otherwise the frame's formal
// slot is used directly.
template <>
bool BaselineCompilerCodeGen::emitFormalArgAccess(JSOp op) {
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  uint32_t arg = GET_ARGNO(handler.pc());

  // Fast path: the script does not use |arguments| or formals don't
  // alias the arguments object.
  if (!handler.script()->argsObjAliasesFormals()) {
    if (op == JSOp::GetArg) {
      frame.pushArg(arg);
    } else {
      // See the comment in emit_SetLocal.
      frame.syncStack(1);
      frame.storeStackValue(-1, frame.addressOfArg(arg), R0);
    }

    return true;
  }

  // Sync so that we can use R0.
  frame.syncStack(0);

  // Load the arguments object data vector.
  Register reg = R2.scratchReg();
  masm.loadPtr(frame.addressOfArgsObj(), reg);
  masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

  // Load/store the argument.
  Address argAddr(reg, ArgumentsData::offsetOfArgs() + arg * sizeof(Value));
  if (op == JSOp::GetArg) {
    masm.loadValue(argAddr, R0);
    frame.push(R0);
  } else {
    Register temp = R1.scratchReg();
    emitGuardedCallPreBarrierAnyZone(argAddr, MIRType::Value, temp);
    masm.loadValue(frame.addressOfStackValue(-1), R0);
    masm.storeValue(R0, argAddr);

    MOZ_ASSERT(frame.numUnsyncedSlots() == 0);

    // Reload the arguments object.
    Register reg = R2.scratchReg();
    masm.loadPtr(frame.addressOfArgsObj(), reg);

    Label skipBarrier;

    // Post-barrier only needed for a tenured holder storing a nursery cell.
    masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
    masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);

    masm.call(&postBarrierSlot_);

    masm.bind(&skipBarrier);
  }

  return true;
}

// Interpreter path for JSOp::GetArg/SetArg: decides at run time whether the
// access is aliased by a (mapped) arguments object.
template <>
bool BaselineInterpreterCodeGen::emitFormalArgAccess(JSOp op) {
  MOZ_ASSERT(op == JSOp::GetArg || op == JSOp::SetArg);

  // Load the index.
  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  // If the frame has no arguments object, this must be an unaliased access.
  Label isUnaliased, done;
  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::HAS_ARGS_OBJ), &isUnaliased);
  {
    Register reg = R2.scratchReg();

    // If it's an unmapped arguments object, this is an unaliased access.
    loadScript(reg);
    masm.branchTest32(
        Assembler::Zero, Address(reg, JSScript::offsetOfImmutableFlags()),
        Imm32(uint32_t(JSScript::ImmutableFlags::HasMappedArgsObj)),
        &isUnaliased);

    // Load the arguments object data vector.
    masm.loadPtr(frame.addressOfArgsObj(), reg);
    masm.loadPrivate(Address(reg, ArgumentsObject::getDataSlotOffset()), reg);

    // Load/store the argument.
    BaseValueIndex argAddr(reg, argReg, ArgumentsData::offsetOfArgs());
    if (op == JSOp::GetArg) {
      masm.loadValue(argAddr, R0);
      frame.push(R0);
    } else {
      emitGuardedCallPreBarrierAnyZone(argAddr, MIRType::Value,
                                       R0.scratchReg());
      masm.loadValue(frame.addressOfStackValue(-1), R0);
      masm.storeValue(R0, argAddr);

      // Reload the arguments object.
      masm.loadPtr(frame.addressOfArgsObj(), reg);

      Register temp = R1.scratchReg();
      masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &done);
      masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &done);

      masm.call(&postBarrierSlot_);
    }
    masm.jump(&done);
  }
  masm.bind(&isUnaliased);
  {
    BaseValueIndex addr(FramePointer, argReg,
                        JitFrameLayout::offsetOfActualArgs());
    if (op == JSOp::GetArg) {
      masm.loadValue(addr, R0);
      frame.push(R0);
    } else {
      masm.loadValue(frame.addressOfStackValue(-1), R0);
      masm.storeValue(R0, addr);
    }
  }

  masm.bind(&done);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetArg() {
  return emitFormalArgAccess(JSOp::GetArg);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SetArg() {
  return emitFormalArgAccess(JSOp::SetArg);
}

template <>
bool BaselineInterpreterCodeGen::emit_GetFrameArg() {
  frame.syncStack(0);

  Register argReg = R1.scratchReg();
  LoadUint16Operand(masm, argReg);

  BaseValueIndex addr(FramePointer, argReg,
                      JitFrameLayout::offsetOfActualArgs());
  masm.loadValue(addr, R0);
  frame.push(R0);
  return true;
}

template <>
bool BaselineCompilerCodeGen::emit_GetFrameArg() {
  uint32_t arg = GET_ARGNO(handler.pc());
  frame.pushArg(arg);
  return true;
}

// Pushes the number of actual arguments as an Int32 value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ArgumentsLength() {
  frame.syncStack(0);

  masm.loadNumActualArgs(FramePointer, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_GetActualArg() {
  frame.popRegsAndSync(1);

#ifdef DEBUG
  {
    Label ok;
masm.branchTestInt32(Assembler::Equal, R0, &ok); 4563 masm.assumeUnreachable("GetActualArg unexpected type"); 4564 masm.bind(&ok); 4565 } 4566 #endif 4567 4568 Register index = R0.scratchReg(); 4569 masm.unboxInt32(R0, index); 4570 4571 #ifdef DEBUG 4572 { 4573 Label ok; 4574 masm.loadNumActualArgs(FramePointer, R1.scratchReg()); 4575 masm.branch32(Assembler::Above, R1.scratchReg(), index, &ok); 4576 masm.assumeUnreachable("GetActualArg invalid index"); 4577 masm.bind(&ok); 4578 } 4579 #endif 4580 4581 BaseValueIndex addr(FramePointer, index, 4582 JitFrameLayout::offsetOfActualArgs()); 4583 masm.loadValue(addr, R0); 4584 frame.push(R0); 4585 return true; 4586 } 4587 4588 template <> 4589 void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest) { 4590 masm.move32(Imm32(handler.nargs()), dest); 4591 } 4592 4593 template <> 4594 void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest) { 4595 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), dest); 4596 masm.loadFunctionArgCount(dest, dest); 4597 } 4598 4599 template <typename Handler> 4600 bool BaselineCodeGen<Handler>::emit_NewTarget() { 4601 frame.syncStack(0); 4602 4603 #ifdef DEBUG 4604 Register scratch1 = R0.scratchReg(); 4605 Register scratch2 = R1.scratchReg(); 4606 4607 Label isFunction; 4608 masm.loadPtr(frame.addressOfCalleeToken(), scratch1); 4609 masm.branchTestPtr(Assembler::Zero, scratch1, Imm32(CalleeTokenScriptBit), 4610 &isFunction); 4611 masm.assumeUnreachable("Unexpected non-function script"); 4612 masm.bind(&isFunction); 4613 4614 Label notArrow; 4615 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1); 4616 masm.branchFunctionKind(Assembler::NotEqual, 4617 FunctionFlags::FunctionKind::Arrow, scratch1, 4618 scratch2, ¬Arrow); 4619 masm.assumeUnreachable("Unexpected arrow function"); 4620 masm.bind(¬Arrow); 4621 #endif 4622 4623 // if (isConstructing()) push(argv[Max(numActualArgs, numFormalArgs)]) 4624 Label notConstructing, done; 4625 
masm.branchTestPtr(Assembler::Zero, frame.addressOfCalleeToken(), 4626 Imm32(CalleeToken_FunctionConstructing), ¬Constructing); 4627 { 4628 Register argvLen = R0.scratchReg(); 4629 Register nformals = R1.scratchReg(); 4630 masm.loadNumActualArgs(FramePointer, argvLen); 4631 4632 // If argvLen < nformals, set argvlen := nformals. 4633 loadNumFormalArguments(nformals); 4634 masm.cmp32Move32(Assembler::Below, argvLen, nformals, nformals, argvLen); 4635 4636 BaseValueIndex newTarget(FramePointer, argvLen, 4637 JitFrameLayout::offsetOfActualArgs()); 4638 masm.loadValue(newTarget, R0); 4639 masm.jump(&done); 4640 } 4641 // else push(undefined) 4642 masm.bind(¬Constructing); 4643 masm.moveValue(UndefinedValue(), R0); 4644 4645 masm.bind(&done); 4646 frame.push(R0); 4647 return true; 4648 } 4649 4650 template <typename Handler> 4651 bool BaselineCodeGen<Handler>::emit_ThrowSetConst() { 4652 prepareVMCall(); 4653 pushArg(Imm32(JSMSG_BAD_CONST_ASSIGN)); 4654 4655 using Fn = bool (*)(JSContext*, unsigned); 4656 return callVM<Fn, jit::ThrowRuntimeLexicalError>(); 4657 } 4658 4659 template <typename Handler> 4660 bool BaselineCodeGen<Handler>::emitUninitializedLexicalCheck( 4661 const ValueOperand& val) { 4662 Label done; 4663 masm.branchTestMagicValue(Assembler::NotEqual, val, JS_UNINITIALIZED_LEXICAL, 4664 &done); 4665 4666 prepareVMCall(); 4667 pushArg(Imm32(JSMSG_UNINITIALIZED_LEXICAL)); 4668 4669 using Fn = bool (*)(JSContext*, unsigned); 4670 if (!callVM<Fn, jit::ThrowRuntimeLexicalError>()) { 4671 return false; 4672 } 4673 4674 masm.bind(&done); 4675 return true; 4676 } 4677 4678 template <typename Handler> 4679 bool BaselineCodeGen<Handler>::emit_CheckLexical() { 4680 frame.syncStack(0); 4681 masm.loadValue(frame.addressOfStackValue(-1), R0); 4682 return emitUninitializedLexicalCheck(R0); 4683 } 4684 4685 template <typename Handler> 4686 bool BaselineCodeGen<Handler>::emit_CheckAliasedLexical() { 4687 return emit_CheckLexical(); 4688 } 4689 4690 template <typename 
Handler>
bool BaselineCodeGen<Handler>::emit_InitLexical() {
  return emit_SetLocal();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitGLexical() {
  frame.popRegsAndSync(1);
  pushGlobalLexicalEnvironmentValue(R1);
  frame.push(R0);
  return emit_SetProp();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitAliasedLexical() {
  return emit_SetAliasedVar();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Uninitialized() {
  frame.push(MagicValue(JS_UNINITIALIZED_LEXICAL));
  return true;
}

// Compiler specialization: argc is a compile-time constant read from the pc.
template <>
bool BaselineCompilerCodeGen::emitCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);

  uint32_t argc = GET_ARGC(handler.pc());
  masm.move32(Imm32(argc), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo.
  bool construct = IsConstructOp(op);
  frame.popn(2 + argc + construct);
  frame.push(R0);
  return true;
}

// Interpreter specialization: argc is decoded from the bytecode operand, both
// before the IC call (the IC expects it in R0) and again afterwards to pop
// the arguments.
template <>
bool BaselineInterpreterCodeGen::emitCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  // The IC expects argc in R0.
  LoadUint16Operand(masm, R0.scratchReg());
  if (!emitNextIC()) {
    return false;
  }

  // Pop the arguments. We have to reload pc/argc because the IC clobbers them.
  // The return value is in R0 so we can't use that.
  Register scratch = R1.scratchReg();
  uint32_t extraValuesToPop = IsConstructOp(op) ? 3 : 2;
  Register spReg = AsRegister(masm.getStackPointer());
  LoadUint16Operand(masm, scratch);
  masm.computeEffectiveAddress(
      BaseValueIndex(spReg, scratch, extraValuesToPop * sizeof(Value)), spReg);
  frame.push(R0);
  return true;
}

// Spread calls always have a single (array) argument on the stack.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitSpreadCall(JSOp op) {
  MOZ_ASSERT(IsInvokeOp(op));

  frame.syncStack(0);
  masm.move32(Imm32(1), R0.scratchReg());

  // Call IC
  if (!emitNextIC()) {
    return false;
  }

  // Update FrameInfo.
  bool construct = op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall;
  frame.popn(3 + construct);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Call() {
  return emitCall(JSOp::Call);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallContent() {
  return emitCall(JSOp::CallContent);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallIgnoresRv() {
  return emitCall(JSOp::CallIgnoresRv);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallIter() {
  return emitCall(JSOp::CallIter);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CallContentIter() {
  return emitCall(JSOp::CallContentIter);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_New() {
  return emitCall(JSOp::New);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_NewContent() {
  return emitCall(JSOp::NewContent);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SuperCall() {
  return emitCall(JSOp::SuperCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Eval() {
  return emitCall(JSOp::Eval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictEval() {
  return emitCall(JSOp::StrictEval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadCall() {
  return emitSpreadCall(JSOp::SpreadCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadNew() {
  return emitSpreadCall(JSOp::SpreadNew);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadSuperCall() {
  return emitSpreadCall(JSOp::SpreadSuperCall);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_SpreadEval() {
  return emitSpreadCall(JSOp::SpreadEval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_StrictSpreadEval() {
  return emitSpreadCall(JSOp::StrictSpreadEval);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_OptimizeSpreadCall() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ImplicitThis() {
  frame.popRegsAndSync(1);

  Register env = R1.scratchReg();
  masm.unboxObject(R0, env);

  // Try the inline path first; fall back to a VM call on failure.
  Label slowPath, skipCall;
  masm.computeImplicitThis(env, R0, &slowPath);
  masm.jump(&skipCall);

  masm.bind(&slowPath);
  {
    prepareVMCall();

    pushArg(env);

    using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue);
    if (!callVM<Fn, ImplicitThisOperation>()) {
      return false;
    }
  }

  masm.bind(&skipCall);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Instanceof() {
  frame.popRegsAndSync(2);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Typeof() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TypeofExpr() {
  return emit_Typeof();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TypeofEq() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowMsg() {
  prepareVMCall();
  pushUint8BytecodeOperandArg(R2.scratchReg());

  using Fn = bool (*)(JSContext*, const unsigned);
  return callVM<Fn, js::ThrowMsgOperation>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Throw() {
  // Keep value to throw in R0.
  frame.popRegsAndSync(1);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  return callVM<Fn, js::ThrowOperation>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ThrowWithStack() {
  // Keep value to throw in R0 and the stack in R1.
  frame.popRegsAndSync(2);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue);
  return callVM<Fn, js::ThrowWithStackOperation>();
}

// JSOp::Try requires no code; exception handling is table-driven.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Try() {
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Finally() {
  // To match the interpreter, emit an interrupt check at the start of the
  // finally block.
  return emitInterruptCheck();
}

// Loads a pointer to the BaselineScript's native resume-entry table into
// |dest|.
template <typename Handler>
void BaselineCodeGen<Handler>::loadBaselineScriptResumeEntries(
    Register dest, Register scratch) {
  MOZ_ASSERT(dest != scratch);

  loadJitScript(dest);
  masm.loadPtr(Address(dest, JitScript::offsetOfBaselineScript()), dest);
  masm.load32(Address(dest, BaselineScript::offsetOfResumeEntriesOffset()),
              scratch);
  masm.addPtr(scratch, dest);
}

// Interpreter resume: translates |resumeIndex| into a bytecode pc, stores it
// in the frame's interpreter-pc slot, and jumps to the op handler.
template <typename Handler>
void BaselineCodeGen<Handler>::emitInterpJumpToResumeEntry(Register script,
                                                           Register resumeIndex,
                                                           Register scratch) {
  // Load JSScript::immutableScriptData() into |script|.
  masm.loadPtr(Address(script, JSScript::offsetOfSharedData()), script);
  masm.loadPtr(Address(script, SharedImmutableScriptData::offsetOfISD()),
               script);

  // Load the resume pcOffset in |resumeIndex|.
  masm.load32(
      Address(script, ImmutableScriptData::offsetOfResumeOffsetsOffset()),
      scratch);
  masm.computeEffectiveAddress(BaseIndex(scratch, resumeIndex, TimesFour),
                               scratch);
  masm.load32(BaseIndex(script, scratch, TimesOne), resumeIndex);

  // Add resume offset to PC, jump to it.
  masm.computeEffectiveAddress(BaseIndex(script, resumeIndex, TimesOne,
                                         ImmutableScriptData::offsetOfCode()),
                               script);
  Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
  masm.storePtr(script, pcAddr);
  emitJumpToInterpretOpLabel();
}

template <>
void BaselineCompilerCodeGen::jumpToResumeEntry(Register resumeIndex,
                                                Register scratch1,
                                                Register scratch2) {
  loadBaselineScriptResumeEntries(scratch1, scratch2);
  masm.loadPtr(
      BaseIndex(scratch1, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))),
      scratch1);
  masm.jump(scratch1);
}

template <>
void BaselineInterpreterCodeGen::jumpToResumeEntry(Register resumeIndex,
                                                   Register scratch1,
                                                   Register scratch2) {
  loadScript(scratch1);
  emitInterpJumpToResumeEntry(scratch1, resumeIndex, scratch2);
}

template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineCompilerCodeGen::emitDebugInstrumentation(
    const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
  // The JIT calls either ifDebuggee or (if present) ifNotDebuggee, because it
  // knows statically whether we're compiling with debug instrumentation.

  if (handler.compileDebugInstrumentation()) {
    return ifDebuggee();
  }

  if (ifNotDebuggee) {
    return (*ifNotDebuggee)();
  }

  return true;
}

template <>
template <typename F1, typename F2>
[[nodiscard]] bool BaselineInterpreterCodeGen::emitDebugInstrumentation(
    const F1& ifDebuggee, const Maybe<F2>& ifNotDebuggee) {
  // The interpreter emits both ifDebuggee and (if present) ifNotDebuggee
  // paths, with a toggled jump followed by a branch on the frame's DEBUGGEE
  // flag.
  Label isNotDebuggee, done;

  // The toggled jump is patched when the Debugger attaches/detaches.
  CodeOffset toggleOffset = masm.toggledJump(&isNotDebuggee);
  if (!handler.addDebugInstrumentationOffset(toggleOffset)) {
    return false;
  }

  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::DEBUGGEE), &isNotDebuggee);

  if (!ifDebuggee()) {
    return false;
  }

  if (ifNotDebuggee) {
    masm.jump(&done);
  }

  masm.bind(&isNotDebuggee);

  if (ifNotDebuggee && !(*ifNotDebuggee)()) {
    return false;
  }

  masm.bind(&done);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushLexicalEnv() {
  // Call a stub to push the block on the block chain.
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<LexicalScope*>);
  return callVM<Fn, jit::PushLexicalEnv>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushClassBodyEnv() {
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<ClassBodyScope*>);
  return callVM<Fn, jit::PushClassBodyEnv>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PopLexicalEnv() {
  frame.syncStack(0);

  Register scratch1 = R0.scratchReg();

  // Debuggee path: notify the debugger via a VM call before popping.
  auto ifDebuggee = [this, scratch1]() {
    masm.loadBaselineFramePtr(FramePointer, scratch1);

    prepareVMCall();
    pushBytecodePCArg();
    pushArg(scratch1);

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebugLeaveThenPopLexicalEnv>();
  };
  // Non-debuggee path: pop inline by storing the enclosing environment.
  auto ifNotDebuggee = [this, scratch1]() {
    Register scratch2 = R1.scratchReg();
    masm.loadPtr(frame.addressOfEnvironmentChain(), scratch1);
    masm.debugAssertObjectHasClass(scratch1, scratch2,
                                   &LexicalEnvironmentObject::class_);
    Address enclosingAddr(scratch1,
                          EnvironmentObject::offsetOfEnclosingEnvironment());
    masm.unboxObject(enclosingAddr, scratch1);
    masm.storePtr(scratch1, frame.addressOfEnvironmentChain());
    return true;
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FreshenLexicalEnv() {
  frame.syncStack(0);

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  auto ifDebuggee = [this]() {
    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebuggeeFreshenLexicalEnv>();
  };
  auto ifNotDebuggee = [this]() {
    prepareVMCall();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    return callVM<Fn, jit::FreshenLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RecreateLexicalEnv() {
  frame.syncStack(0);

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

  auto ifDebuggee = [this]() {
    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebuggeeRecreateLexicalEnv>();
  };
  auto ifNotDebuggee = [this]() {
    prepareVMCall();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*);
    return callVM<Fn, jit::RecreateLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee, mozilla::Some(ifNotDebuggee));
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DebugLeaveLexicalEnv() {
  auto ifDebuggee = [this]() {
    prepareVMCall();
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    return callVM<Fn, jit::DebugLeaveLexicalEnv>();
  };
  return emitDebugInstrumentation(ifDebuggee);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_PushVarEnv() {
  prepareVMCall();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, Handle<Scope*>);
  return callVM<Fn, jit::PushVarEnv>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_EnterWith() {
  // Pop "with" object to R0.
  frame.popRegsAndSync(1);

  // Call a stub to push the object onto the environment chain.
  prepareVMCall();

  pushScriptGCThingArg(ScriptGCThingType::Scope, R1.scratchReg(),
                       R2.scratchReg());
  pushArg(R0);
  masm.loadBaselineFramePtr(FramePointer, R1.scratchReg());
  pushArg(R1.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, HandleValue, Handle<WithScope*>);
  return callVM<Fn, jit::EnterWith>();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_LeaveWith() {
  // Call a stub to pop the with object from the environment chain.
  prepareVMCall();

  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  return callVM<Fn, jit::LeaveWith>();
}

#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_AddDisposable() {
  frame.syncStack(0);
  prepareVMCall();

  pushUint8BytecodeOperandArg(R0.scratchReg());  // hint

  masm.unboxBoolean(frame.addressOfStackValue(-1), R0.scratchReg());
  pushArg(R0.scratchReg());  // needsClosure

  masm.loadValue(frame.addressOfStackValue(-2), R1);
  pushArg(R1);  // method

  masm.loadValue(frame.addressOfStackValue(-3), R2);
  pushArg(R2);  // object

  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, JS::Handle<JSObject*>, JS::Handle<JS::Value>,
                      JS::Handle<JS::Value>, bool, UsingHint);
  if (!callVM<Fn, js::AddDisposableResourceToCapability>()) {
    return false;
  }
  frame.popn(3);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_TakeDisposeCapability() {
  frame.syncStack(0);

  // Load the capability, then clear the slot (with pre-barrier) and push the
  // old value.
  masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg());
  Address capAddr(R0.scratchReg(),
                  DisposableEnvironmentObject::offsetOfDisposeCapability());
  emitGuardedCallPreBarrierAnyZone(capAddr, MIRType::Value, R2.scratchReg());
  masm.loadValue(capAddr, R1);
  masm.storeValue(UndefinedValue(), capAddr);

  frame.push(R1);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CreateSuppressedError() {
  frame.popRegsAndSync(2);
  prepareVMCall();

  pushArg(R1);  // suppressed
  pushArg(R0);  // error

  using Fn = ErrorObject* (*)(JSContext*, JS::Handle<JS::Value>,
JS::Handle<JS::Value>);

  if (!callVM<Fn, js::CreateSuppressedError>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}
#endif

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Exception() {
  prepareVMCall();

  using Fn = bool (*)(JSContext*, MutableHandleValue);
  if (!callVM<Fn, GetAndClearException>()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ExceptionAndStack() {
  // First call into the VM to store the exception stack.
  {
    prepareVMCall();

    using Fn = bool (*)(JSContext*, MutableHandleValue);
    if (!callVM<Fn, GetPendingExceptionStack>()) {
      return false;
    }

    frame.push(R0);
  }

  // Now get the actual exception value and clear the exception state.
  {
    prepareVMCall();

    using Fn = bool (*)(JSContext*, MutableHandleValue);
    if (!callVM<Fn, GetAndClearException>()) {
      return false;
    }

    frame.push(R0);
  }

  // Finally swap the stack and the exception.
  frame.popRegsAndSync(2);
  frame.push(R1);
  frame.push(R0);

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Debugger() {
  prepareVMCall();

  frame.assertSyncedStack();
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  pushArg(R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*);
  if (!callVM<Fn, jit::OnDebuggerStatement>()) {
    return false;
  }

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitDebugEpilogue() {
  auto ifDebuggee = [this]() {
    // Move return value into the frame's rval slot.
    masm.storeValue(JSReturnOperand, frame.addressOfReturnValue());
    masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags());

    // Load BaselineFrame pointer in R0.
    frame.syncStack(0);
    masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());

    prepareVMCall();
    pushBytecodePCArg();
    pushArg(R0.scratchReg());

    const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugEpilogue;

    using Fn = bool (*)(JSContext*, BaselineFrame*, const jsbytecode*);
    if (!callVM<Fn, jit::DebugEpilogueOnBaselineReturn>(kind)) {
      return false;
    }

    // The VM call may have overwritten the rval slot; reload it.
    masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
    return true;
  };
  return emitDebugInstrumentation(ifDebuggee);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emitReturn() {
  if (handler.shouldEmitDebugEpilogueAtReturnOp()) {
    if (!emitDebugEpilogue()) {
      return false;
    }
  }

  // Only emit the jump if this JSOp::RetRval is not the last instruction.
  // Not needed for last instruction, because last instruction flows
  // into return label.
  if (!handler.isDefinitelyLastOp()) {
    masm.jump(&return_);
  }

  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_Return() {
  frame.assertStackDepth(1);

  frame.popValue(JSReturnOperand);
  return emitReturn();
}

// Loads the frame's return value (or |undefined| if HAS_RVAL is not set) into
// |val|.
template <typename Handler>
void BaselineCodeGen<Handler>::emitLoadReturnValue(ValueOperand val) {
  Label done, noRval;
  masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
                    Imm32(BaselineFrame::HAS_RVAL), &noRval);
  masm.loadValue(frame.addressOfReturnValue(), val);
  masm.jump(&done);

  masm.bind(&noRval);
  masm.moveValue(UndefinedValue(), val);

  masm.bind(&done);
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_RetRval() {
  frame.assertStackDepth(0);

  masm.moveValue(UndefinedValue(), JSReturnOperand);

  if (!handler.maybeScript() || !handler.maybeScript()->noScriptRval()) {
    // Return the value in the return value slot, if any.
    Label done;
    Address flags = frame.addressOfFlags();
    masm.branchTest32(Assembler::Zero, flags, Imm32(BaselineFrame::HAS_RVAL),
                      &done);
    masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
    masm.bind(&done);
  }

  return emitReturn();
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToPropertyKey() {
  frame.popRegsAndSync(1);

  if (!emitNextIC()) {
    return false;
  }

  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ToAsyncIter() {
  frame.syncStack(0);
  masm.unboxObject(frame.addressOfStackValue(-2), R0.scratchReg());
  masm.loadValue(frame.addressOfStackValue(-1), R1);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0.scratchReg());

  using Fn = JSObject* (*)(JSContext*, HandleObject, HandleValue);
  if (!callVM<Fn, js::CreateAsyncFromSyncIterator>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.popn(2);
  frame.push(R0);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CanSkipAwait() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue, bool* canSkip);
  if (!callVM<Fn, js::CanSkipAwait>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
  frame.push(R0, JSVAL_TYPE_BOOLEAN);
  return true;
}

template <typename Handler>
bool BaselineCodeGen<Handler>::emit_MaybeExtractAwaitValue() {
  frame.syncStack(0);
  masm.loadValue(frame.addressOfStackValue(-2), R0);

  masm.unboxBoolean(frame.addressOfStackValue(-1), R1.scratchReg());

  Label cantExtract;
  masm.branchIfFalseBool(R1.scratchReg(), &cantExtract);

prepareVMCall(); 5528 pushArg(R0); 5529 5530 using Fn = bool (*)(JSContext*, HandleValue, MutableHandleValue); 5531 if (!callVM<Fn, js::ExtractAwaitValue>()) { 5532 return false; 5533 } 5534 5535 masm.storeValue(R0, frame.addressOfStackValue(-2)); 5536 masm.bind(&cantExtract); 5537 5538 return true; 5539 } 5540 5541 template <typename Handler> 5542 bool BaselineCodeGen<Handler>::emit_AsyncAwait() { 5543 frame.syncStack(0); 5544 masm.loadValue(frame.addressOfStackValue(-2), R1); 5545 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg()); 5546 5547 prepareVMCall(); 5548 pushArg(R1); 5549 pushArg(R0.scratchReg()); 5550 5551 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>, 5552 HandleValue); 5553 if (!callVM<Fn, js::AsyncFunctionAwait>()) { 5554 return false; 5555 } 5556 5557 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); 5558 frame.popn(2); 5559 frame.push(R0); 5560 return true; 5561 } 5562 5563 template <typename Handler> 5564 bool BaselineCodeGen<Handler>::emit_AsyncResolve() { 5565 frame.syncStack(0); 5566 masm.loadValue(frame.addressOfStackValue(-2), R1); 5567 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg()); 5568 5569 prepareVMCall(); 5570 pushArg(R1); 5571 pushArg(R0.scratchReg()); 5572 5573 using Fn = JSObject* (*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>, 5574 HandleValue); 5575 if (!callVM<Fn, js::AsyncFunctionResolve>()) { 5576 return false; 5577 } 5578 5579 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); 5580 frame.popn(2); 5581 frame.push(R0); 5582 return true; 5583 } 5584 5585 template <typename Handler> 5586 bool BaselineCodeGen<Handler>::emit_AsyncReject() { 5587 frame.syncStack(0); 5588 masm.loadValue(frame.addressOfStackValue(-3), R2); 5589 masm.loadValue(frame.addressOfStackValue(-2), R1); 5590 masm.unboxObject(frame.addressOfStackValue(-1), R0.scratchReg()); 5591 5592 prepareVMCall(); 5593 pushArg(R1); 5594 pushArg(R2); 5595 pushArg(R0.scratchReg()); 5596 5597 using Fn = JSObject* 
(*)(JSContext*, Handle<AsyncFunctionGeneratorObject*>, 5598 HandleValue, HandleValue); 5599 if (!callVM<Fn, js::AsyncFunctionReject>()) { 5600 return false; 5601 } 5602 5603 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); 5604 frame.popn(3); 5605 frame.push(R0); 5606 return true; 5607 } 5608 5609 template <typename Handler> 5610 bool BaselineCodeGen<Handler>::emit_CheckObjCoercible() { 5611 frame.syncStack(0); 5612 masm.loadValue(frame.addressOfStackValue(-1), R0); 5613 5614 Label fail, done; 5615 5616 masm.branchTestUndefined(Assembler::Equal, R0, &fail); 5617 masm.branchTestNull(Assembler::NotEqual, R0, &done); 5618 5619 masm.bind(&fail); 5620 prepareVMCall(); 5621 5622 pushArg(R0); 5623 5624 using Fn = bool (*)(JSContext*, HandleValue); 5625 if (!callVM<Fn, ThrowObjectCoercible>()) { 5626 return false; 5627 } 5628 5629 masm.bind(&done); 5630 return true; 5631 } 5632 5633 template <typename Handler> 5634 bool BaselineCodeGen<Handler>::emit_ToString() { 5635 // Keep top stack value in R0. 5636 frame.popRegsAndSync(1); 5637 5638 // Inline path for string. 5639 Label done; 5640 masm.branchTestString(Assembler::Equal, R0, &done); 5641 5642 prepareVMCall(); 5643 5644 pushArg(R0); 5645 5646 // Call ToStringSlow which doesn't handle string inputs. 
5647 using Fn = JSString* (*)(JSContext*, HandleValue); 5648 if (!callVM<Fn, ToStringSlow<CanGC>>()) { 5649 return false; 5650 } 5651 5652 masm.tagValue(JSVAL_TYPE_STRING, ReturnReg, R0); 5653 5654 masm.bind(&done); 5655 frame.push(R0); 5656 return true; 5657 } 5658 5659 static constexpr uint32_t TableSwitchOpLowOffset = 1 * JUMP_OFFSET_LEN; 5660 static constexpr uint32_t TableSwitchOpHighOffset = 2 * JUMP_OFFSET_LEN; 5661 static constexpr uint32_t TableSwitchOpFirstResumeIndexOffset = 5662 3 * JUMP_OFFSET_LEN; 5663 5664 template <> 5665 void BaselineCompilerCodeGen::emitGetTableSwitchIndex(ValueOperand val, 5666 Register dest, 5667 Register scratch1, 5668 Register scratch2) { 5669 jsbytecode* pc = handler.pc(); 5670 jsbytecode* defaultpc = pc + GET_JUMP_OFFSET(pc); 5671 Label* defaultLabel = handler.labelOf(defaultpc); 5672 5673 int32_t low = GET_JUMP_OFFSET(pc + TableSwitchOpLowOffset); 5674 int32_t high = GET_JUMP_OFFSET(pc + TableSwitchOpHighOffset); 5675 int32_t length = high - low + 1; 5676 5677 // Jump to the 'default' pc if not int32 (tableswitch is only used when 5678 // all cases are int32). 5679 masm.branchTestInt32(Assembler::NotEqual, val, defaultLabel); 5680 masm.unboxInt32(val, dest); 5681 5682 // Subtract 'low'. Bounds check. 5683 if (low != 0) { 5684 masm.sub32(Imm32(low), dest); 5685 } 5686 masm.branch32(Assembler::AboveOrEqual, dest, Imm32(length), defaultLabel); 5687 } 5688 5689 template <> 5690 void BaselineInterpreterCodeGen::emitGetTableSwitchIndex(ValueOperand val, 5691 Register dest, 5692 Register scratch1, 5693 Register scratch2) { 5694 // Jump to the 'default' pc if not int32 (tableswitch is only used when 5695 // all cases are int32). 
5696 Label done, jumpToDefault; 5697 masm.branchTestInt32(Assembler::NotEqual, val, &jumpToDefault); 5698 masm.unboxInt32(val, dest); 5699 5700 Register pcReg = LoadBytecodePC(masm, scratch1); 5701 Address lowAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpLowOffset); 5702 Address highAddr(pcReg, sizeof(jsbytecode) + TableSwitchOpHighOffset); 5703 5704 // Jump to default if val > high. 5705 masm.branch32(Assembler::LessThan, highAddr, dest, &jumpToDefault); 5706 5707 // Jump to default if val < low. 5708 masm.load32(lowAddr, scratch2); 5709 masm.branch32(Assembler::GreaterThan, scratch2, dest, &jumpToDefault); 5710 5711 // index := val - low. 5712 masm.sub32(scratch2, dest); 5713 masm.jump(&done); 5714 5715 masm.bind(&jumpToDefault); 5716 emitJump(); 5717 5718 masm.bind(&done); 5719 } 5720 5721 template <> 5722 void BaselineCompilerCodeGen::emitTableSwitchJump(Register key, 5723 Register scratch1, 5724 Register scratch2) { 5725 // Jump to resumeEntries[firstResumeIndex + key]. 5726 5727 // Note: BytecodeEmitter::allocateResumeIndex static_asserts 5728 // |firstResumeIndex * sizeof(uintptr_t)| fits in int32_t. 5729 uint32_t firstResumeIndex = 5730 GET_RESUMEINDEX(handler.pc() + TableSwitchOpFirstResumeIndexOffset); 5731 loadBaselineScriptResumeEntries(scratch1, scratch2); 5732 masm.loadPtr(BaseIndex(scratch1, key, ScaleFromElemWidth(sizeof(uintptr_t)), 5733 firstResumeIndex * sizeof(uintptr_t)), 5734 scratch1); 5735 masm.jump(scratch1); 5736 } 5737 5738 template <> 5739 void BaselineInterpreterCodeGen::emitTableSwitchJump(Register key, 5740 Register scratch1, 5741 Register scratch2) { 5742 // Load the op's firstResumeIndex in scratch1. 
5743 LoadUint24Operand(masm, TableSwitchOpFirstResumeIndexOffset, scratch1); 5744 5745 masm.add32(key, scratch1); 5746 jumpToResumeEntry(scratch1, key, scratch2); 5747 } 5748 5749 template <typename Handler> 5750 bool BaselineCodeGen<Handler>::emit_TableSwitch() { 5751 frame.popRegsAndSync(1); 5752 5753 Register key = R0.scratchReg(); 5754 Register scratch1 = R1.scratchReg(); 5755 Register scratch2 = R2.scratchReg(); 5756 5757 // Call a stub to convert R0 from double to int32 if needed. 5758 // Note: this stub may clobber scratch1. 5759 masm.call(runtime->jitRuntime()->getDoubleToInt32ValueStub()); 5760 5761 // Load the index in the jump table in |key|, or branch to default pc if not 5762 // int32 or out-of-range. 5763 emitGetTableSwitchIndex(R0, key, scratch1, scratch2); 5764 5765 // Jump to the target pc. 5766 emitTableSwitchJump(key, scratch1, scratch2); 5767 return true; 5768 } 5769 5770 template <typename Handler> 5771 bool BaselineCodeGen<Handler>::emit_Iter() { 5772 frame.popRegsAndSync(1); 5773 5774 if (!emitNextIC()) { 5775 return false; 5776 } 5777 5778 frame.push(R0); 5779 return true; 5780 } 5781 5782 template <typename Handler> 5783 bool BaselineCodeGen<Handler>::emit_MoreIter() { 5784 frame.syncStack(0); 5785 5786 masm.unboxObject(frame.addressOfStackValue(-1), R1.scratchReg()); 5787 5788 masm.iteratorMore(R1.scratchReg(), R0, R2.scratchReg()); 5789 frame.push(R0); 5790 return true; 5791 } 5792 5793 template <typename Handler> 5794 bool BaselineCodeGen<Handler>::emitIsMagicValue() { 5795 frame.syncStack(0); 5796 5797 Label isMagic, done; 5798 masm.branchTestMagic(Assembler::Equal, frame.addressOfStackValue(-1), 5799 &isMagic); 5800 masm.moveValue(BooleanValue(false), R0); 5801 masm.jump(&done); 5802 5803 masm.bind(&isMagic); 5804 masm.moveValue(BooleanValue(true), R0); 5805 5806 masm.bind(&done); 5807 frame.push(R0, JSVAL_TYPE_BOOLEAN); 5808 return true; 5809 } 5810 5811 template <typename Handler> 5812 bool BaselineCodeGen<Handler>::emit_IsNoIter() { 
5813 return emitIsMagicValue(); 5814 } 5815 5816 template <typename Handler> 5817 bool BaselineCodeGen<Handler>::emit_EndIter() { 5818 // Pop iterator value. 5819 frame.pop(); 5820 5821 // Pop the iterator object to close in R0. 5822 frame.popRegsAndSync(1); 5823 5824 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All()); 5825 MOZ_ASSERT(!regs.has(FramePointer)); 5826 if (HasInterpreterPCReg()) { 5827 regs.take(InterpreterPCReg); 5828 } 5829 5830 Register obj = R0.scratchReg(); 5831 regs.take(obj); 5832 masm.unboxObject(R0, obj); 5833 5834 Register temp1 = regs.takeAny(); 5835 Register temp2 = regs.takeAny(); 5836 Register temp3 = regs.takeAny(); 5837 masm.iteratorClose(obj, temp1, temp2, temp3); 5838 return true; 5839 } 5840 5841 template <typename Handler> 5842 bool BaselineCodeGen<Handler>::emit_CloseIter() { 5843 frame.popRegsAndSync(1); 5844 5845 Register iter = R0.scratchReg(); 5846 masm.unboxObject(R0, iter); 5847 5848 return emitNextIC(); 5849 } 5850 5851 template <typename Handler> 5852 bool BaselineCodeGen<Handler>::emit_OptimizeGetIterator() { 5853 frame.popRegsAndSync(1); 5854 5855 if (!emitNextIC()) { 5856 return false; 5857 } 5858 5859 frame.push(R0); 5860 return true; 5861 } 5862 5863 template <typename Handler> 5864 bool BaselineCodeGen<Handler>::emit_IsGenClosing() { 5865 return emitIsMagicValue(); 5866 } 5867 5868 template <typename Handler> 5869 bool BaselineCodeGen<Handler>::emit_IsNullOrUndefined() { 5870 frame.syncStack(0); 5871 5872 Label isNullOrUndefined, done; 5873 masm.branchTestNull(Assembler::Equal, frame.addressOfStackValue(-1), 5874 &isNullOrUndefined); 5875 masm.branchTestUndefined(Assembler::Equal, frame.addressOfStackValue(-1), 5876 &isNullOrUndefined); 5877 masm.moveValue(BooleanValue(false), R0); 5878 masm.jump(&done); 5879 5880 masm.bind(&isNullOrUndefined); 5881 masm.moveValue(BooleanValue(true), R0); 5882 5883 masm.bind(&done); 5884 frame.push(R0, JSVAL_TYPE_BOOLEAN); 5885 return true; 5886 } 5887 5888 template 
<typename Handler> 5889 bool BaselineCodeGen<Handler>::emit_GetRval() { 5890 frame.syncStack(0); 5891 5892 emitLoadReturnValue(R0); 5893 5894 frame.push(R0); 5895 return true; 5896 } 5897 5898 template <typename Handler> 5899 bool BaselineCodeGen<Handler>::emit_SetRval() { 5900 // Store to the frame's return value slot. 5901 frame.storeStackValue(-1, frame.addressOfReturnValue(), R2); 5902 masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags()); 5903 frame.pop(); 5904 return true; 5905 } 5906 5907 template <typename Handler> 5908 bool BaselineCodeGen<Handler>::emit_Callee() { 5909 MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->function()); 5910 frame.syncStack(0); 5911 masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), 5912 R0.scratchReg()); 5913 masm.tagValue(JSVAL_TYPE_OBJECT, R0.scratchReg(), R0); 5914 frame.push(R0); 5915 return true; 5916 } 5917 5918 template <> 5919 bool BaselineCompilerCodeGen::emit_EnvCallee() { 5920 frame.syncStack(0); 5921 uint16_t numHops = GET_ENVCOORD_HOPS(handler.pc()); 5922 Register scratch = R0.scratchReg(); 5923 5924 masm.loadPtr(frame.addressOfEnvironmentChain(), scratch); 5925 for (unsigned i = 0; i < numHops; i++) { 5926 Address nextAddr(scratch, 5927 EnvironmentObject::offsetOfEnclosingEnvironment()); 5928 masm.unboxObject(nextAddr, scratch); 5929 } 5930 5931 masm.loadValue(Address(scratch, CallObject::offsetOfCallee()), R0); 5932 frame.push(R0); 5933 return true; 5934 } 5935 5936 template <> 5937 bool BaselineInterpreterCodeGen::emit_EnvCallee() { 5938 Register scratch = R0.scratchReg(); 5939 Register env = R1.scratchReg(); 5940 5941 static_assert(JSOpLength_EnvCallee - sizeof(jsbytecode) == ENVCOORD_HOPS_LEN, 5942 "op must have uint16 operand for LoadAliasedVarEnv"); 5943 5944 // Load the right environment object. 
5945 masm.loadPtr(frame.addressOfEnvironmentChain(), env); 5946 LoadAliasedVarEnv(masm, env, scratch); 5947 5948 masm.pushValue(Address(env, CallObject::offsetOfCallee())); 5949 return true; 5950 } 5951 5952 template <typename Handler> 5953 bool BaselineCodeGen<Handler>::emit_SuperBase() { 5954 frame.popRegsAndSync(1); 5955 5956 Register scratch = R0.scratchReg(); 5957 Register proto = R1.scratchReg(); 5958 5959 // Unbox callee. 5960 masm.unboxObject(R0, scratch); 5961 5962 // Load [[HomeObject]] 5963 Address homeObjAddr(scratch, 5964 FunctionExtended::offsetOfMethodHomeObjectSlot()); 5965 5966 masm.assertFunctionIsExtended(scratch); 5967 #ifdef DEBUG 5968 Label isObject; 5969 masm.branchTestObject(Assembler::Equal, homeObjAddr, &isObject); 5970 masm.assumeUnreachable("[[HomeObject]] must be Object"); 5971 masm.bind(&isObject); 5972 #endif 5973 masm.unboxObject(homeObjAddr, scratch); 5974 5975 // Load prototype from [[HomeObject]] 5976 masm.loadObjProto(scratch, proto); 5977 5978 #ifdef DEBUG 5979 // We won't encounter a lazy proto, because the prototype is guaranteed to 5980 // either be a JSFunction or a PlainObject, and only proxy objects can have a 5981 // lazy proto. 
5982 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1); 5983 5984 Label proxyCheckDone; 5985 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone); 5986 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperBase"); 5987 masm.bind(&proxyCheckDone); 5988 #endif 5989 5990 Label nullProto, done; 5991 masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto); 5992 5993 // Box prototype and return 5994 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1); 5995 masm.jump(&done); 5996 5997 masm.bind(&nullProto); 5998 masm.moveValue(NullValue(), R1); 5999 6000 masm.bind(&done); 6001 frame.push(R1); 6002 return true; 6003 } 6004 6005 template <typename Handler> 6006 bool BaselineCodeGen<Handler>::emit_SuperFun() { 6007 frame.popRegsAndSync(1); 6008 6009 Register callee = R0.scratchReg(); 6010 Register proto = R1.scratchReg(); 6011 #ifdef DEBUG 6012 Register scratch = R2.scratchReg(); 6013 #endif 6014 6015 // Unbox callee. 6016 masm.unboxObject(R0, callee); 6017 6018 #ifdef DEBUG 6019 Label classCheckDone; 6020 masm.branchTestObjIsFunction(Assembler::Equal, callee, scratch, callee, 6021 &classCheckDone); 6022 masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun"); 6023 masm.bind(&classCheckDone); 6024 #endif 6025 6026 // Load prototype of callee 6027 masm.loadObjProto(callee, proto); 6028 6029 #ifdef DEBUG 6030 // We won't encounter a lazy proto, because |callee| is guaranteed to be a 6031 // JSFunction and only proxy objects can have a lazy proto. 
6032 MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1); 6033 6034 Label proxyCheckDone; 6035 masm.branchPtr(Assembler::NotEqual, proto, ImmWord(1), &proxyCheckDone); 6036 masm.assumeUnreachable("Unexpected lazy proto in JSOp::SuperFun"); 6037 masm.bind(&proxyCheckDone); 6038 #endif 6039 6040 Label nullProto, done; 6041 masm.branchPtr(Assembler::Equal, proto, ImmWord(0), &nullProto); 6042 6043 // Box prototype and return 6044 masm.tagValue(JSVAL_TYPE_OBJECT, proto, R1); 6045 masm.jump(&done); 6046 6047 masm.bind(&nullProto); 6048 masm.moveValue(NullValue(), R1); 6049 6050 masm.bind(&done); 6051 frame.push(R1); 6052 return true; 6053 } 6054 6055 template <typename Handler> 6056 bool BaselineCodeGen<Handler>::emit_Arguments() { 6057 frame.syncStack(0); 6058 6059 MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->needsArgsObj()); 6060 6061 prepareVMCall(); 6062 6063 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg()); 6064 pushArg(R0.scratchReg()); 6065 6066 using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue); 6067 if (!callVM<Fn, jit::NewArgumentsObject>()) { 6068 return false; 6069 } 6070 6071 frame.push(R0); 6072 return true; 6073 } 6074 6075 template <typename Handler> 6076 bool BaselineCodeGen<Handler>::emit_Rest() { 6077 frame.syncStack(0); 6078 6079 if (!emitNextIC()) { 6080 return false; 6081 } 6082 6083 // Mark R0 as pushed stack value. 
6084 frame.push(R0); 6085 return true; 6086 } 6087 6088 template <typename Handler> 6089 bool BaselineCodeGen<Handler>::emit_Generator() { 6090 frame.assertStackDepth(0); 6091 6092 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg()); 6093 6094 prepareVMCall(); 6095 pushArg(R0.scratchReg()); 6096 6097 using Fn = JSObject* (*)(JSContext*, BaselineFrame*); 6098 if (!callVM<Fn, jit::CreateGeneratorFromFrame>()) { 6099 return false; 6100 } 6101 6102 masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); 6103 frame.push(R0); 6104 return true; 6105 } 6106 6107 template <typename Handler> 6108 bool BaselineCodeGen<Handler>::emitSuspend(JSOp op) { 6109 MOZ_ASSERT(op == JSOp::InitialYield || op == JSOp::Yield || 6110 op == JSOp::Await); 6111 6112 // Load the generator object in R2, but leave the return value on the 6113 // expression stack. 6114 Register genObj = R2.scratchReg(); 6115 if (op == JSOp::InitialYield) { 6116 // Generator and return value are one and the same. 6117 frame.syncStack(0); 6118 frame.assertStackDepth(1); 6119 masm.unboxObject(frame.addressOfStackValue(-1), genObj); 6120 } else { 6121 frame.popRegsAndSync(1); 6122 masm.unboxObject(R0, genObj); 6123 } 6124 6125 if (frame.hasKnownStackDepth(1) && !handler.canHaveFixedSlots()) { 6126 // If the expression stack is empty, we can inline the Yield. Note that this 6127 // branch is never taken for the interpreter because it doesn't know static 6128 // stack depths. 
6129 MOZ_ASSERT_IF(op == JSOp::InitialYield && handler.maybePC(), 6130 GET_RESUMEINDEX(handler.maybePC()) == 0); 6131 Address resumeIndexSlot(genObj, 6132 AbstractGeneratorObject::offsetOfResumeIndexSlot()); 6133 Register temp = R1.scratchReg(); 6134 if (op == JSOp::InitialYield) { 6135 masm.storeValue(Int32Value(0), resumeIndexSlot); 6136 } else { 6137 jsbytecode* pc = handler.maybePC(); 6138 MOZ_ASSERT(pc, "compiler-only code never has a null pc"); 6139 masm.move32(Imm32(GET_RESUMEINDEX(pc)), temp); 6140 masm.storeValue(JSVAL_TYPE_INT32, temp, resumeIndexSlot); 6141 } 6142 6143 Register envObj = R0.scratchReg(); 6144 Address envChainSlot( 6145 genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()); 6146 masm.loadPtr(frame.addressOfEnvironmentChain(), envObj); 6147 emitGuardedCallPreBarrierAnyZone(envChainSlot, MIRType::Value, temp); 6148 masm.storeValue(JSVAL_TYPE_OBJECT, envObj, envChainSlot); 6149 6150 Label skipBarrier; 6151 masm.branchPtrInNurseryChunk(Assembler::Equal, genObj, temp, &skipBarrier); 6152 masm.branchPtrInNurseryChunk(Assembler::NotEqual, envObj, temp, 6153 &skipBarrier); 6154 MOZ_ASSERT(genObj == R2.scratchReg()); 6155 masm.call(&postBarrierSlot_); 6156 masm.bind(&skipBarrier); 6157 } else { 6158 masm.loadBaselineFramePtr(FramePointer, R1.scratchReg()); 6159 computeFrameSize(R0.scratchReg()); 6160 6161 prepareVMCall(); 6162 pushBytecodePCArg(); 6163 pushArg(R0.scratchReg()); 6164 pushArg(R1.scratchReg()); 6165 pushArg(genObj); 6166 6167 using Fn = bool (*)(JSContext*, HandleObject, BaselineFrame*, uint32_t, 6168 const jsbytecode*); 6169 if (!callVM<Fn, jit::NormalSuspend>()) { 6170 return false; 6171 } 6172 } 6173 6174 masm.loadValue(frame.addressOfStackValue(-1), JSReturnOperand); 6175 if (!emitReturn()) { 6176 return false; 6177 } 6178 6179 // Three values are pushed onto the stack when resuming the generator, 6180 // replacing the one slot that holds the return value. 
6181 frame.incStackDepth(2); 6182 return true; 6183 } 6184 6185 template <typename Handler> 6186 bool BaselineCodeGen<Handler>::emit_InitialYield() { 6187 return emitSuspend(JSOp::InitialYield); 6188 } 6189 6190 template <typename Handler> 6191 bool BaselineCodeGen<Handler>::emit_Yield() { 6192 return emitSuspend(JSOp::Yield); 6193 } 6194 6195 template <typename Handler> 6196 bool BaselineCodeGen<Handler>::emit_Await() { 6197 return emitSuspend(JSOp::Await); 6198 } 6199 6200 template <> 6201 bool BaselineCompilerCodeGen::emitAfterYieldDebugInstrumentation(Register) { 6202 if (handler.compileDebugInstrumentation()) { 6203 return emitDebugAfterYield(); 6204 } 6205 return true; 6206 } 6207 6208 template <> 6209 bool BaselineInterpreterCodeGen::emitAfterYieldDebugInstrumentation( 6210 Register scratch) { 6211 // Note that we can't use emitDebugInstrumentation here because the frame's 6212 // DEBUGGEE flag hasn't been initialized yet. 6213 6214 // If the current Realm is not a debuggee we're done. 
6215 Label done; 6216 CodeOffset toggleOffset = masm.toggledJump(&done); 6217 if (!handler.addDebugInstrumentationOffset(toggleOffset)) { 6218 return false; 6219 } 6220 masm.loadPtr(AbsoluteAddress(runtime->addressOfRealm()), scratch); 6221 masm.branchTest32(Assembler::Zero, 6222 Address(scratch, Realm::offsetOfDebugModeBits()), 6223 Imm32(Realm::debugModeIsDebuggeeBit()), &done); 6224 6225 if (!emitDebugAfterYield()) { 6226 return false; 6227 } 6228 6229 masm.bind(&done); 6230 return true; 6231 } 6232 6233 template <typename Handler> 6234 bool BaselineCodeGen<Handler>::emitDebugAfterYield() { 6235 frame.assertSyncedStack(); 6236 masm.loadBaselineFramePtr(FramePointer, R0.scratchReg()); 6237 prepareVMCall(); 6238 pushArg(R0.scratchReg()); 6239 6240 const RetAddrEntry::Kind kind = RetAddrEntry::Kind::DebugAfterYield; 6241 6242 using Fn = bool (*)(JSContext*, BaselineFrame*); 6243 return callVM<Fn, jit::DebugAfterYield>(kind); 6244 }; 6245 6246 template <typename Handler> 6247 bool BaselineCodeGen<Handler>::emit_FinalYieldRval() { 6248 // Store generator in R0. 
6249 frame.popRegsAndSync(1); 6250 masm.unboxObject(R0, R0.scratchReg()); 6251 6252 prepareVMCall(); 6253 pushBytecodePCArg(); 6254 pushArg(R0.scratchReg()); 6255 6256 using Fn = bool (*)(JSContext*, HandleObject, const jsbytecode*); 6257 if (!callVM<Fn, jit::FinalSuspend>()) { 6258 return false; 6259 } 6260 6261 masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand); 6262 return emitReturn(); 6263 } 6264 6265 template <> 6266 void BaselineCompilerCodeGen::emitJumpToInterpretOpLabel() { 6267 TrampolinePtr code = 6268 runtime->jitRuntime()->baselineInterpreter().interpretOpAddr(); 6269 masm.jump(code); 6270 } 6271 6272 template <> 6273 void BaselineInterpreterCodeGen::emitJumpToInterpretOpLabel() { 6274 masm.jump(handler.interpretOpLabel()); 6275 } 6276 6277 template <typename Handler> 6278 bool BaselineCodeGen<Handler>::emitEnterGeneratorCode(Register script, 6279 Register resumeIndex, 6280 Register scratch) { 6281 // Resume in either the BaselineScript (if present) or Baseline Interpreter. 6282 6283 static_assert(CompilingScript == 0x5, 6284 "Comparison below requires specific sentinel encoding"); 6285 6286 // Initialize the icScript slot in the baseline frame. 
6287 masm.loadJitScript(script, scratch); 6288 masm.computeEffectiveAddress(Address(scratch, JitScript::offsetOfICScript()), 6289 scratch); 6290 Address icScriptAddr(FramePointer, BaselineFrame::reverseOffsetOfICScript()); 6291 masm.storePtr(scratch, icScriptAddr); 6292 6293 Label noBaselineScript; 6294 // Needed if running in interpreter or if generator is realm-independent, 6295 // but it's faster to set it than to check every time 6296 masm.storePtr(script, frame.addressOfInterpreterScript()); 6297 6298 masm.loadJitScript(script, scratch); 6299 masm.loadPtr(Address(scratch, JitScript::offsetOfBaselineScript()), scratch); 6300 masm.branchPtr(Assembler::BelowOrEqual, scratch, 6301 ImmPtr(BaselineCompilingScriptPtr), &noBaselineScript); 6302 6303 masm.load32(Address(scratch, BaselineScript::offsetOfResumeEntriesOffset()), 6304 script); 6305 masm.addPtr(scratch, script); 6306 masm.loadPtr( 6307 BaseIndex(script, resumeIndex, ScaleFromElemWidth(sizeof(uintptr_t))), 6308 scratch); 6309 masm.jump(scratch); 6310 6311 masm.bind(&noBaselineScript); 6312 6313 // Initialize interpreter frame fields. 6314 masm.or32(Imm32(BaselineFrame::RUNNING_IN_INTERPRETER), 6315 frame.addressOfFlags()); 6316 // interpreterScript_ is set above 6317 6318 // Initialize pc and jump to it. 6319 emitInterpJumpToResumeEntry(script, resumeIndex, scratch); 6320 return true; 6321 } 6322 6323 template <typename Handler> 6324 bool BaselineCodeGen<Handler>::emit_Resume() { 6325 frame.syncStack(0); 6326 masm.assertStackAlignment(sizeof(Value), 0); 6327 6328 AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All()); 6329 MOZ_ASSERT(!regs.has(FramePointer)); 6330 if (HasInterpreterPCReg()) { 6331 regs.take(InterpreterPCReg); 6332 } 6333 6334 saveInterpreterPCReg(); 6335 6336 // Load generator object. 6337 Register genObj = regs.takeAny(); 6338 masm.unboxObject(frame.addressOfStackValue(-3), genObj); 6339 6340 // Load callee. 
6341 Register callee = regs.takeAny(); 6342 masm.unboxObject( 6343 Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), callee); 6344 6345 // Save a pointer to the JSOp::Resume operand stack Values. 6346 Register callerStackPtr = regs.takeAny(); 6347 masm.computeEffectiveAddress(frame.addressOfStackValue(-1), callerStackPtr); 6348 6349 // Branch to |interpret| to resume the generator in the C++ interpreter if the 6350 // script does not have a JitScript. 6351 Label interpret; 6352 Register scratch1 = regs.takeAny(); 6353 masm.loadPrivate(Address(callee, JSFunction::offsetOfJitInfoOrScript()), 6354 scratch1); 6355 masm.branchIfScriptHasNoJitScript(scratch1, &interpret); 6356 6357 // Push |undefined| for all formals. 6358 Register scratch2 = regs.takeAny(); 6359 Label loop, loopDone; 6360 masm.loadFunctionArgCount(callee, scratch2); 6361 6362 static_assert(sizeof(Value) == 8); 6363 #ifndef JS_CODEGEN_NONE 6364 static_assert(JitStackAlignment == 16 || JitStackAlignment == 8); 6365 #endif 6366 // If JitStackValueAlignment == 1, then we were already correctly aligned on 6367 // entry, as guaranteed by the assertStackAlignment at the entry to this 6368 // function. 6369 if (JitStackValueAlignment > 1) { 6370 Register alignment = regs.takeAny(); 6371 masm.moveStackPtrTo(alignment); 6372 masm.alignJitStackBasedOnNArgs(scratch2, false); 6373 6374 // Compute alignment adjustment. 6375 masm.subStackPtrFrom(alignment); 6376 6377 // Some code, like BaselineFrame::trace, will inspect the whole range of 6378 // the stack frame. In order to ensure that garbage data left behind from 6379 // previous activations doesn't confuse other machinery, we zero out the 6380 // alignment bytes. 
    Label alignmentZero;
    masm.branchPtr(Assembler::Equal, alignment, ImmWord(0), &alignmentZero);

    // Since we know prior to the stack alignment that the stack was 8 byte
    // aligned, and JitStackAlignment is 8 or 16 bytes, if we are doing an
    // alignment then we -must- have aligned by subtracting 8 bytes from
    // the stack pointer.
    //
    // So we can freely store a valid double here.
    masm.storeValue(DoubleValue(0), Address(masm.getStackPointer(), 0));
    masm.bind(&alignmentZero);
  }

  // Push |undefined| scratch2 times (the count was computed above, before this
  // point; presumably the callee's formal-argument count — confirm in the
  // earlier part of this function).
  masm.branchTest32(Assembler::Zero, scratch2, scratch2, &loopDone);
  masm.bind(&loop);
  {
    masm.pushValue(UndefinedValue());
    masm.branchSub32(Assembler::NonZero, Imm32(1), scratch2, &loop);
  }
  masm.bind(&loopDone);

  // Push |undefined| for |this|.
  masm.pushValue(UndefinedValue());

#ifdef DEBUG
  // Update BaselineFrame debugFrameSize field.
  masm.mov(FramePointer, scratch2);
  masm.subStackPtrFrom(scratch2);
  masm.store32(scratch2, frame.addressOfDebugFrameSize());
#endif

  masm.PushCalleeToken(callee, /* constructing = */ false);
  masm.push(FrameDescriptor(FrameType::BaselineJS, /* argc = */ 0));

  // PushCalleeToken bumped framePushed. Reset it.
  MOZ_ASSERT(masm.framePushed() == sizeof(uintptr_t));
  masm.setFramePushed(0);

  regs.add(callee);

  // Push a fake return address on the stack. We will resume here when the
  // generator returns.
  Label genStart, returnTarget;
#ifdef JS_USE_LINK_REGISTER
  const CodeOffset retAddr = masm.call(&genStart);
#else
  masm.callAndPushReturnAddress(&genStart);
  const CodeOffset retAddr = CodeOffset(masm.currentOffset());
#endif

  // Record the return address so the return offset -> pc mapping works.
  if (!handler.recordCallRetAddr(RetAddrEntry::Kind::IC, retAddr.offset())) {
    return false;
  }

  masm.jump(&returnTarget);
  masm.bind(&genStart);
#ifdef JS_USE_LINK_REGISTER
  masm.pushReturnAddress();
#endif

  // Construct BaselineFrame.
  masm.push(FramePointer);
  masm.moveStackPtrTo(FramePointer);

  // If profiler instrumentation is on, update lastProfilingFrame on
  // current JitActivation.
  {
    Register scratchReg = scratch2;
    Label skip;
    AbsoluteAddress addressOfEnabled(
        runtime->geckoProfiler().addressOfEnabled());
    masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skip);
    masm.loadJSContext(scratchReg);
    masm.loadPtr(Address(scratchReg, JSContext::offsetOfProfilingActivation()),
                 scratchReg);
    masm.storePtr(
        FramePointer,
        Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()));
    masm.bind(&skip);
  }

  masm.subFromStackPtr(Imm32(BaselineFrame::Size()));
  masm.assertStackAlignment(sizeof(Value), 0);

  // Store flags and env chain.
  masm.store32(Imm32(BaselineFrame::HAS_INITIAL_ENV), frame.addressOfFlags());
  masm.unboxObject(
      Address(genObj, AbstractGeneratorObject::offsetOfEnvironmentChainSlot()),
      scratch2);
  masm.storePtr(scratch2, frame.addressOfEnvironmentChain());

  // Store the arguments object if there is one. fallibleUnboxObject jumps to
  // |noArgsObj| when the slot does not hold an object.
  Label noArgsObj;
  Address argsObjSlot(genObj, AbstractGeneratorObject::offsetOfArgsObjSlot());
  masm.fallibleUnboxObject(argsObjSlot, scratch2, &noArgsObj);
  {
    masm.storePtr(scratch2, frame.addressOfArgsObj());
    masm.or32(Imm32(BaselineFrame::HAS_ARGS_OBJ), frame.addressOfFlags());
  }
  masm.bind(&noArgsObj);

  // Push locals and expression slots if needed: copy the Values saved in the
  // generator's stack-storage array onto the stack, resetting the array's
  // initialized length to 0 (with pre-barriers for the overwritten slots).
  Label noStackStorage;
  Address stackStorageSlot(genObj,
                           AbstractGeneratorObject::offsetOfStackStorageSlot());
  masm.fallibleUnboxObject(stackStorageSlot, scratch2, &noStackStorage);
  {
    Register initLength = regs.takeAny();
    masm.loadPtr(Address(scratch2, NativeObject::offsetOfElements()), scratch2);
    masm.load32(Address(scratch2, ObjectElements::offsetOfInitializedLength()),
                initLength);
    masm.store32(
        Imm32(0),
        Address(scratch2, ObjectElements::offsetOfInitializedLength()));

    Label loop, loopDone;
    masm.branchTest32(Assembler::Zero, initLength, initLength, &loopDone);
    masm.bind(&loop);
    {
      masm.pushValue(Address(scratch2, 0));
      emitGuardedCallPreBarrierAnyZone(Address(scratch2, 0), MIRType::Value,
                                       scratch1);
      masm.addPtr(Imm32(sizeof(Value)), scratch2);
      masm.branchSub32(Assembler::NonZero, Imm32(1), initLength, &loop);
    }
    masm.bind(&loopDone);
    regs.add(initLength);
  }

  masm.bind(&noStackStorage);

  // Push arg, generator, resumeKind stack Values, in that order.
  masm.pushValue(Address(callerStackPtr, sizeof(Value)));
  masm.pushValue(JSVAL_TYPE_OBJECT, genObj);
  masm.pushValue(Address(callerStackPtr, 0));

  masm.switchToObjectRealm(genObj, scratch2);

  // Load script in scratch1.
  masm.unboxObject(
      Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1);
  masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
                   scratch1);

  // Load resume index in scratch2 and mark generator as running.
  Address resumeIndexSlot(genObj,
                          AbstractGeneratorObject::offsetOfResumeIndexSlot());
  masm.unboxInt32(resumeIndexSlot, scratch2);
  masm.storeValue(Int32Value(AbstractGeneratorObject::RESUME_INDEX_RUNNING),
                  resumeIndexSlot);

  if (!emitEnterGeneratorCode(scratch1, scratch2, regs.getAny())) {
    return false;
  }

  // Call into the VM to resume the generator in the C++ interpreter if there's
  // no JitScript.
  masm.bind(&interpret);

  prepareVMCall();

  pushArg(callerStackPtr);
  pushArg(genObj);

  using Fn = bool (*)(JSContext*, HandleObject, Value*, MutableHandleValue);
  if (!callVM<Fn, jit::InterpretResume>()) {
    return false;
  }

  masm.bind(&returnTarget);

  // Restore stack pointer.
  masm.computeEffectiveAddress(frame.addressOfStackValue(-1),
                               masm.getStackPointer());

  // After the generator returns, we restore the stack pointer, switch back to
  // the current realm, push the return value, and we're done.
  if (!handler.realmIndependentJitcode()) {
    masm.switchToRealm(handler.maybeScript()->realm(), R2.scratchReg());
  } else {
    masm.switchToBaselineFrameRealm(R2.scratchReg());
  }
  restoreInterpreterPCReg();
  frame.popn(3);
  frame.push(R0);
  return true;
}

// JSOp::CheckResumeKind: if the resume kind is Next, fall through (the
// generator simply continues); otherwise call into the VM to throw or return
// from the generator.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckResumeKind() {
  // Load resumeKind in R1, generator in R0.
  frame.popRegsAndSync(2);

#ifdef DEBUG
  Label ok;
  masm.branchTestInt32(Assembler::Equal, R1, &ok);
  masm.assumeUnreachable("Expected int32 resumeKind");
  masm.bind(&ok);
#endif

  // If resumeKind is 'next' we don't have to do anything.
  Label done;
  masm.unboxInt32(R1, R1.scratchReg());
  masm.branch32(Assembler::Equal, R1.scratchReg(),
                Imm32(int32_t(GeneratorResumeKind::Next)), &done);

  // Not 'next': call into the VM to throw/return from the generator.
  prepareVMCall();

  pushArg(R1.scratchReg());  // resumeKind

  masm.loadValue(frame.addressOfStackValue(-1), R2);
  pushArg(R2);  // arg

  masm.unboxObject(R0, R0.scratchReg());
  pushArg(R0.scratchReg());  // genObj

  masm.loadBaselineFramePtr(FramePointer, R2.scratchReg());
  pushArg(R2.scratchReg());  // frame

  using Fn = bool (*)(JSContext*, BaselineFrame*,
                      Handle<AbstractGeneratorObject*>, HandleValue, int32_t);
  if (!callVM<Fn, jit::GeneratorThrowOrReturn>()) {
    return false;
  }

  masm.bind(&done);
  return true;
}

// JSOp::ResumeKind (compiler): the resume kind is a bytecode operand, so it
// is statically known at compile time and can be pushed as a constant.
template <>
bool BaselineCompilerCodeGen::emit_ResumeKind() {
  GeneratorResumeKind resumeKind = ResumeKindFromPC(handler.pc());
  frame.push(Int32Value(int32_t(resumeKind)));
  return true;
}

// JSOp::ResumeKind (interpreter): read the uint8 operand at runtime and tag
// it as an Int32 Value.
template <>
bool BaselineInterpreterCodeGen::emit_ResumeKind() {
  LoadUint8Operand(masm, R0.scratchReg());
  masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
  frame.push(R0);
  return true;
}

// JSOp::DebugCheckSelfHosted: debug-only VM check of the value on top of the
// stack; compiles to nothing in non-DEBUG builds.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DebugCheckSelfHosted() {
#ifdef DEBUG
  frame.syncStack(0);

  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  if (!callVM<Fn, js::Debug_CheckSelfHosted>()) {
    return false;
  }
#endif
  return true;
}

// JSOp::IsConstructing: push the JS_IS_CONSTRUCTING magic value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_IsConstructing() {
  frame.push(MagicValue(JS_IS_CONSTRUCTING));
  return true;
}

// JSOp::JumpTarget (compiler): only code-coverage accounting; off-thread
// compilations skip it (counters are not updated off-thread here).
template <>
bool BaselineCompilerCodeGen::emit_JumpTarget() {
  if (!handler.compilingOffThread()) {
    MaybeIncrementCodeCoverageCounter(masm, handler.script(), handler.pc(),
                                      handler);
  }
  return true;
}

// JSOp::JumpTarget (interpreter): emit a toggleable code-coverage call, then
// recompute frame->interpreterICEntry from the icIndex operand.
template <>
bool BaselineInterpreterCodeGen::emit_JumpTarget() {
  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();

  // Patchable jump: when coverage is off, this jumps over the call.
  Label skipCoverage;
  CodeOffset toggleOffset = masm.toggledJump(&skipCoverage);
  masm.call(handler.codeCoverageAtPCLabel());
  masm.bind(&skipCoverage);
  if (!handler.codeCoverageOffsets().append(toggleOffset.offset())) {
    return false;
  }

  // Load icIndex in scratch1.
  LoadInt32Operand(masm, scratch1);

  // Compute ICEntry* and store to frame->interpreterICEntry.
  masm.loadPtr(frame.addressOfICScript(), scratch2);
  static_assert(sizeof(ICEntry) == sizeof(uintptr_t));
  masm.computeEffectiveAddress(BaseIndex(scratch2, scratch1, ScalePointer,
                                         ICScript::offsetOfICEntries()),
                               scratch2);
  masm.storePtr(scratch2, frame.addressOfInterpreterICEntry());
  return true;
}

// JSOp::AfterYield (compiler): JumpTarget behavior, plus setting the
// REALM_INDEPENDENT frame flag for realm-independent jitcode, plus debug
// instrumentation.
template <>
bool BaselineCompilerCodeGen::emit_AfterYield() {
  if (!emit_JumpTarget()) {
    return false;
  }

  if (handler.realmIndependentJitcode()) {
    masm.or32(Imm32(BaselineFrame::Flags::REALM_INDEPENDENT),
              frame.addressOfFlags());
  }

  return emitAfterYieldDebugInstrumentation(R0.scratchReg());
}

// JSOp::AfterYield (interpreter): JumpTarget behavior plus debug
// instrumentation.
template <>
bool BaselineInterpreterCodeGen::emit_AfterYield() {
  if (!emit_JumpTarget()) {
    return false;
  }

  return emitAfterYieldDebugInstrumentation(R0.scratchReg());
}

// JSOp::CheckClassHeritage: VM-checked validation of the class heritage value.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_CheckClassHeritage() {
  frame.syncStack(0);

  // Leave the heritage value on the stack.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = bool (*)(JSContext*, HandleValue);
  return callVM<Fn, js::CheckClassHeritageOperation>();
}

// JSOp::InitHomeObject: store the home object into the method's extended
// function slot, with pre- and post-barriers.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_InitHomeObject() {
  // Load HomeObject in R0.
  frame.popRegsAndSync(1);

  // Load function off stack.
  Register func = R2.scratchReg();
  masm.unboxObject(frame.addressOfStackValue(-1), func);

  masm.assertFunctionIsExtended(func);

  // Set HOMEOBJECT_SLOT, with a pre-barrier for the overwritten value.
  Register temp = R1.scratchReg();
  Address addr(func, FunctionExtended::offsetOfMethodHomeObjectSlot());
  emitGuardedCallPreBarrierAnyZone(addr, MIRType::Value, temp);
  masm.storeValue(R0, addr);

  // Post-barrier: only needed when a tenured |func| now references a nursery
  // value.
  Label skipBarrier;
  masm.branchPtrInNurseryChunk(Assembler::Equal, func, temp, &skipBarrier);
  masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
  masm.call(&postBarrierSlot_);
  masm.bind(&skipBarrier);

  return true;
}

// JSOp::BuiltinObject: handled by an IC; the IC leaves its result in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_BuiltinObject() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// JSOp::ObjWithProto: create a plain object with the given prototype via the
// VM, replacing the proto value on the stack with the result.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ObjWithProto() {
  frame.syncStack(0);

  // Leave the proto value on the stack for the decompiler.
  masm.loadValue(frame.addressOfStackValue(-1), R0);

  prepareVMCall();
  pushArg(R0);

  using Fn = PlainObject* (*)(JSContext*, HandleValue);
  if (!callVM<Fn, js::ObjectWithProtoOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.pop();
  frame.push(R0);
  return true;
}

// JSOp::FunWithProto: clone the function GC-thing operand with the given
// prototype and the current env chain, via the VM.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_FunWithProto() {
  frame.popRegsAndSync(1);

  masm.unboxObject(R0, R0.scratchReg());
  masm.loadPtr(frame.addressOfEnvironmentChain(), R1.scratchReg());

  prepareVMCall();
  pushArg(R0.scratchReg());
  pushArg(R1.scratchReg());
  pushScriptGCThingArg(ScriptGCThingType::Function, R0.scratchReg(),
                       R1.scratchReg());

  using Fn =
      JSObject* (*)(JSContext*, HandleFunction, HandleObject, HandleObject);
  if (!callVM<Fn, js::FunWithProtoOperation>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}

// JSOp::ImportMeta: handled by an IC; the IC leaves its result in R0.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_ImportMeta() {
  frame.syncStack(0);

  if (!emitNextIC()) {
    return false;
  }

  // Mark R0 as pushed stack value.
  frame.push(R0);
  return true;
}

// JSOp::DynamicImport: call into the VM to start a dynamic module import;
// pushes the resulting promise object.
template <typename Handler>
bool BaselineCodeGen<Handler>::emit_DynamicImport() {
  // Put specifier into R0 and object value into R1.
  frame.popRegsAndSync(2);

  prepareVMCall();
  pushArg(R1);
  pushArg(R0);
  pushScriptArg();

  using Fn = JSObject* (*)(JSContext*, HandleScript, HandleValue, HandleValue);
  if (!callVM<Fn, js::StartDynamicModuleImport>()) {
    return false;
  }

  masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0);
  frame.push(R0);
  return true;
}

// JSOp::ForceInterpreter (compiler): scripts with this op must never be
// baseline-compiled.
template <>
bool BaselineCompilerCodeGen::emit_ForceInterpreter() {
  // Caller is responsible for checking script->hasForceInterpreterOp().
  MOZ_CRASH("JSOp::ForceInterpreter in baseline");
}

// JSOp::ForceInterpreter (interpreter): must not be reached at runtime.
template <>
bool BaselineInterpreterCodeGen::emit_ForceInterpreter() {
  masm.assumeUnreachable("JSOp::ForceInterpreter");
  return true;
}

// Emit the shared function prologue: frame setup, profiler entry, frame-field
// initialization, debuggee check, env chain init, stack check, locals init,
// debug prologue, code coverage, and warm-up counter.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitPrologue() {
  AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitPrologue");

#ifdef JS_USE_LINK_REGISTER
  // Push link register from generateEnterJIT()'s BLR.
  masm.pushReturnAddress();
#endif

  masm.push(FramePointer);
  masm.moveStackPtrTo(FramePointer);

  masm.checkStackAlignment();

  emitProfilerEnterFrame();

  masm.subFromStackPtr(Imm32(BaselineFrame::Size()));

  // Initialize BaselineFrame. Also handles env chain pre-initialization (in
  // case GC gets run during stack check). For global and eval scripts, the env
  // chain is in R1. For function scripts, the env chain is in the callee.
  emitInitFrameFields(R1.scratchReg());

  // When compiling with Debugger instrumentation, set the debuggeeness of
  // the frame before any operation that can call into the VM.
  if (!emitIsDebuggeeCheck()) {
    return false;
  }

  // Initialize the env chain before any operation that may call into the VM
  // and trigger a GC.
  if (!initEnvironmentChain()) {
    return false;
  }

  // Check for overrecursion before initializing locals.
  if (!emitStackCheck()) {
    return false;
  }

  emitInitializeLocals();

  // Ion prologue bailouts will enter here in the Baseline Interpreter.
  masm.bind(&bailoutPrologue_);

  frame.assertSyncedStack();

  if (!handler.realmIndependentJitcode()) {
    masm.debugAssertContextRealm(handler.maybeScript()->realm(),
                                 R1.scratchReg());
  }

  if (!emitDebugPrologue()) {
    return false;
  }

  if (!emitHandleCodeCoverageAtPrologue()) {
    return false;
  }

  if (!emitWarmUpCounterIncrement()) {
    return false;
  }

  warmUpCheckPrologueOffset_ = CodeOffset(masm.currentOffset());

  return true;
}

// Emit the shared function epilogue: optional debug epilogue, profiler exit,
// frame teardown, and return.
template <typename Handler>
bool BaselineCodeGen<Handler>::emitEpilogue() {
  AutoCreatedBy acb(masm, "BaselineCodeGen<Handler>::emitEpilogue");

  masm.bind(&return_);

  if (!handler.shouldEmitDebugEpilogueAtReturnOp()) {
    if (!emitDebugEpilogue()) {
      return false;
    }
  }

  emitProfilerExitFrame();

  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  masm.ret();
  return true;
}

// Main compiler loop: walk the script's bytecode from start to end, emitting
// native code for every reachable op via the per-op emit_* methods.
bool BaselineCompiler::emitBody() {
  AutoCreatedBy acb(masm, "BaselineCompiler::emitBody");

  JSScript* script = handler.script();
  MOZ_ASSERT(handler.pc() == script->code());

  mozilla::DebugOnly<jsbytecode*> prevpc = handler.pc();

  while (true) {
    JSOp op = JSOp(*handler.pc());
    JitSpew(JitSpew_BaselineOp, "Compiling op @ %d: %s",
            int(script->pcToOffset(handler.pc())), CodeName(op));

    BytecodeInfo* info = handler.analysis().maybeInfo(handler.pc());

    // Skip unreachable ops (no BytecodeInfo from the analysis).
    if (!info) {
      // Test if last instructions and stop emitting in that case.
      handler.moveToNextPC();
      if (handler.pc() >= script->codeEnd()) {
        break;
      }

      prevpc = handler.pc();
      continue;
    }

    if (info->jumpTarget) {
      // Fully sync the stack if there are incoming jumps.
      frame.syncStack(0);
      frame.setStackDepth(info->stackDepth);
      masm.bind(handler.labelOf(handler.pc()));
    } else if (MOZ_UNLIKELY(compileDebugInstrumentation())) {
      // Also fully sync the stack if the debugger is enabled.
      frame.syncStack(0);
    } else {
      // At the beginning of any op, at most the top 2 stack-values are
      // unsynced.
      if (frame.stackDepth() > 2) {
        frame.syncStack(2);
      }
    }

    frame.assertValidState(*info);

    // If the script has a resume offset for this pc we need to keep track of
    // the native code offset.
    if (info->hasResumeOffset) {
      frame.assertSyncedStack();
      uint32_t pcOffset = script->pcToOffset(handler.pc());
      uint32_t nativeOffset = masm.currentOffset();
      if (!resumeOffsetEntries_.emplaceBack(pcOffset, nativeOffset)) {
        return false;
      }
    }

    // Emit traps for breakpoints and step mode.
    if (MOZ_UNLIKELY(compileDebugInstrumentation()) && !emitDebugTrap()) {
      return false;
    }

    perfSpewer_.recordInstruction(masm, handler.pc(), handler.script(), frame);

    // Dispatch to the per-op emitter; any emit_* failure aborts compilation.
#define EMIT_OP(OP, ...)                                \
  case JSOp::OP: {                                      \
    AutoCreatedBy acb(masm, "op=" #OP);                 \
    if (MOZ_UNLIKELY(!this->emit_##OP())) return false; \
  } break;

    switch (op) {
      FOR_EACH_OPCODE(EMIT_OP)
      default:
        MOZ_CRASH("Unexpected op");
    }

#undef EMIT_OP

    MOZ_ASSERT(masm.framePushed() == 0);

    // Test if last instructions and stop emitting in that case.
    handler.moveToNextPC();
    if (handler.pc() >= script->codeEnd()) {
      break;
    }

#ifdef DEBUG
    prevpc = handler.pc();
#endif
  }

  MOZ_ASSERT(JSOp(*prevpc) == JSOp::RetRval || JSOp(*prevpc) == JSOp::Return);
  return true;
}

// Emit a patchable nop that can later be patched into a call to the debug
// trap handler; record its offset for patching.
bool BaselineInterpreterGenerator::emitDebugTrap() {
  CodeOffset offset = masm.nopPatchableToCall();
  if (!debugTrapOffsets_.append(offset.offset())) {
    return false;
  }

  return true;
}

// Register holding the bytecode pc during dispatch. This exists so the debug
// trap handler can reload the pc into this register when it's done.
static constexpr Register InterpreterPCRegAtDispatch =
    HasInterpreterPCReg() ? InterpreterPCReg : R0.scratchReg();

// Emit the threaded-dispatch interpreter loop: per-op code blocks, the
// dispatch table, and the external entry points used by OSR, exception
// handling, and Ion bailouts.
bool BaselineInterpreterGenerator::emitInterpreterLoop() {
  AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::emitInterpreterLoop");

  Register scratch1 = R0.scratchReg();
  Register scratch2 = R1.scratchReg();

  // Entry point for interpreting a bytecode op. No registers are live except
  // for InterpreterPCReg.
  masm.bind(handler.interpretOpWithPCRegLabel());

  // Emit a patchable call for debugger breakpoints/stepping.
  if (!emitDebugTrap()) {
    return false;
  }
  Label interpretOpAfterDebugTrap;
  masm.bind(&interpretOpAfterDebugTrap);

  // Load pc, bytecode op.
  Register pcReg = LoadBytecodePC(masm, scratch1);
  masm.load8ZeroExtend(Address(pcReg, 0), scratch1);

  // Jump to table[op].
  {
    CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
    if (!tableLabels_.append(label)) {
      return false;
    }
    BaseIndex pointer(scratch2, scratch1, ScalePointer);
    masm.branchToComputedAddress(pointer);
  }

  // At the end of each op, emit code to bump the pc and jump to the
  // next op (this is also known as a threaded interpreter).
  // Per-op epilogue: bump the IC entry and the pc, re-emit the debug trap,
  // then dispatch to the next op through the table.
  auto opEpilogue = [&](JSOp op, size_t opLength) -> bool {
    MOZ_ASSERT(masm.framePushed() == 0);

    if (!BytecodeFallsThrough(op)) {
      // Nothing to do.
      masm.assumeUnreachable("unexpected fall through");
      return true;
    }

    // Bump frame->interpreterICEntry if needed.
    if (BytecodeOpHasIC(op)) {
      frame.bumpInterpreterICEntry();
    }

    // Bump bytecode PC.
    if (HasInterpreterPCReg()) {
      MOZ_ASSERT(InterpreterPCRegAtDispatch == InterpreterPCReg);
      masm.addPtr(Imm32(opLength), InterpreterPCReg);
    } else {
      MOZ_ASSERT(InterpreterPCRegAtDispatch == scratch1);
      masm.loadPtr(frame.addressOfInterpreterPC(), InterpreterPCRegAtDispatch);
      masm.addPtr(Imm32(opLength), InterpreterPCRegAtDispatch);
      masm.storePtr(InterpreterPCRegAtDispatch, frame.addressOfInterpreterPC());
    }

    if (!emitDebugTrap()) {
      return false;
    }

    // Load the opcode, jump to table[op].
    masm.load8ZeroExtend(Address(InterpreterPCRegAtDispatch, 0), scratch1);
    CodeOffset label = masm.moveNearAddressWithPatch(scratch2);
    if (!tableLabels_.append(label)) {
      return false;
    }
    BaseIndex pointer(scratch2, scratch1, ScalePointer);
    masm.branchToComputedAddress(pointer);
    return true;
  };

  // Emit code for each bytecode op.
  Label opLabels[JSOP_LIMIT];
#define EMIT_OP(OP, ...)                          \
  {                                               \
    AutoCreatedBy acb(masm, "op=" #OP);           \
    perfSpewer_.recordOffset(masm, JSOp::OP);     \
    masm.bind(&opLabels[uint8_t(JSOp::OP)]);      \
    handler.setCurrentOp(JSOp::OP);               \
    if (!this->emit_##OP()) {                     \
      return false;                               \
    }                                             \
    if (!opEpilogue(JSOp::OP, JSOpLength_##OP)) { \
      return false;                               \
    }                                             \
    handler.resetCurrentOp();                     \
  }
  FOR_EACH_OPCODE(EMIT_OP)
#undef EMIT_OP

  // External entry point to start interpreting bytecode ops. This is used for
  // things like exception handling and OSR. DebugModeOSR patches JIT frames to
  // return here from the DebugTrapHandler.
  masm.bind(handler.interpretOpLabel());
  interpretOpOffset_ = masm.currentOffset();
  restoreInterpreterPCReg();
  masm.jump(handler.interpretOpWithPCRegLabel());

  // Second external entry point: this skips the debug trap for the first op
  // and is used by OSR.
  interpretOpNoDebugTrapOffset_ = masm.currentOffset();
  restoreInterpreterPCReg();
  masm.jump(&interpretOpAfterDebugTrap);

  // External entry point for Ion prologue bailouts.
  bailoutPrologueOffset_ = CodeOffset(masm.currentOffset());
  restoreInterpreterPCReg();
  masm.jump(&bailoutPrologue_);

  // Emit debug trap handler code (target of patchable call instructions). This
  // is just a tail call to the debug trap handler trampoline code.
  {
    JitCode* handlerCode = runtime->jitRuntime()->debugTrapHandler(
        DebugTrapHandlerKind::Interpreter);
    debugTrapHandlerOffset_ = masm.currentOffset();
    masm.jump(handlerCode);
  }

  // Emit the dispatch table: one code pointer per opcode.
  masm.haltingAlign(sizeof(void*));

#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64)
  size_t numInstructions = JSOP_LIMIT * (sizeof(uintptr_t) / sizeof(uint32_t));
  AutoForbidPoolsAndNops afp(&masm, numInstructions);
#endif

  tableOffset_ = masm.currentOffset();

  for (size_t i = 0; i < JSOP_LIMIT; i++) {
    const Label& opLabel = opLabels[i];
    MOZ_ASSERT(opLabel.bound());
    CodeLabel cl;
    masm.writeCodePointer(&cl);
    cl.target()->bind(opLabel.offset());
    masm.addCodeLabel(cl);
  }

  return true;
}

// Emit the out-of-line stubs called by the toggleable code-coverage
// instrumentation: one for the prologue, one for arbitrary pcs.
void BaselineInterpreterGenerator::emitOutOfLineCodeCoverageInstrumentation() {
  AutoCreatedBy acb(masm,
                    "BaselineInterpreterGenerator::"
                    "emitOutOfLineCodeCoverageInstrumentation");

  masm.bind(handler.codeCoverageAtPrologueLabel());
#ifdef JS_USE_LINK_REGISTER
  masm.pushReturnAddress();
#endif

  saveInterpreterPCReg();

  using Fn1 = void (*)(BaselineFrame* frame);
  masm.setupUnalignedABICall(R0.scratchReg());
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  masm.passABIArg(R0.scratchReg());
  masm.callWithABI<Fn1, HandleCodeCoverageAtPrologue>();

  restoreInterpreterPCReg();
  masm.ret();

  masm.bind(handler.codeCoverageAtPCLabel());
#ifdef JS_USE_LINK_REGISTER
  masm.pushReturnAddress();
#endif

  saveInterpreterPCReg();

  using Fn2 = void (*)(BaselineFrame* frame, jsbytecode* pc);
  masm.setupUnalignedABICall(R0.scratchReg());
  masm.loadBaselineFramePtr(FramePointer, R0.scratchReg());
  masm.passABIArg(R0.scratchReg());
  Register pcReg = LoadBytecodePC(masm, R2.scratchReg());
  masm.passABIArg(pcReg);
  masm.callWithABI<Fn2, HandleCodeCoverageAtPC>();

  restoreInterpreterPCReg();
  masm.ret();
}

// Generate the complete Baseline Interpreter code, link it, register it with
// the profiler, patch the dispatch-table loads, and initialize |interpreter|.
bool BaselineInterpreterGenerator::generate(JSContext* cx,
                                            BaselineInterpreter& interpreter) {
  AutoCreatedBy acb(masm, "BaselineInterpreterGenerator::generate");

  if (!cx->runtime()->jitRuntime()->ensureDebugTrapHandler(
          cx, DebugTrapHandlerKind::Interpreter)) {
    return false;
  }

  perfSpewer_.startRecording();
  perfSpewer_.recordOffset(masm, "Prologue");
  if (!emitPrologue()) {
    ReportOutOfMemory(cx);
    return false;
  }

  perfSpewer_.recordOffset(masm, "InterpreterLoop");
  if (!emitInterpreterLoop()) {
    ReportOutOfMemory(cx);
    return false;
  }

  perfSpewer_.recordOffset(masm, "Epilogue");
  if (!emitEpilogue()) {
    ReportOutOfMemory(cx);
    return false;
  }

  perfSpewer_.recordOffset(masm, "OOLPostBarrierSlot");
  emitOutOfLinePostBarrierSlot();

  perfSpewer_.recordOffset(masm, "OOLCodeCoverageInstrumentation");
  emitOutOfLineCodeCoverageInstrumentation();

  {
    AutoCreatedBy acb(masm, "everything_else");
    Linker linker(masm);
    if (masm.oom()) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitCode* code = linker.newCode(cx, CodeKind::Other);
    if (!code) {
      return false;
    }

    // Register BaselineInterpreter code with the profiler's JitCode table.
    {
      auto entry = MakeJitcodeGlobalEntry<BaselineInterpreterEntry>(
          cx, code, code->raw(), code->rawEnd());
      if (!entry) {
        return false;
      }

      JitcodeGlobalTable* globalTable =
          cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
      if (!globalTable->addEntry(std::move(entry))) {
        ReportOutOfMemory(cx);
        return false;
      }

      code->setHasBytecodeMap();
    }

    // Patch loads now that we know the tableswitch base address.
    CodeLocationLabel tableLoc(code, CodeOffset(tableOffset_));
    for (CodeOffset off : tableLabels_) {
      MacroAssembler::patchNearAddressMove(CodeLocationLabel(code, off),
                                           tableLoc);
    }

    perfSpewer_.endRecording();
    perfSpewer_.saveProfile(code);

#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "BaselineInterpreter");
#endif

    interpreter.init(
        code, interpretOpOffset_, interpretOpNoDebugTrapOffset_,
        bailoutPrologueOffset_.offset(),
        profilerEnterFrameToggleOffset_.offset(),
        profilerExitFrameToggleOffset_.offset(), debugTrapHandlerOffset_,
        std::move(handler.debugInstrumentationOffsets()),
        std::move(debugTrapOffsets_), std::move(handler.codeCoverageOffsets()),
        std::move(handler.icReturnOffsets()), handler.callVMOffsets());
  }

  if (cx->runtime()->geckoProfiler().enabled()) {
    interpreter.toggleProfilerInstrumentation(true);
  }

  if (coverage::IsLCovEnabled()) {
    interpreter.toggleCodeCoverageInstrumentationUnchecked(true);
  }

  return true;
}

// Generate the debug trap handler trampoline called from the patchable
// nop/call sites emitted by emitDebugTrap(); it calls HandleDebugTrap in C++.
JitCode* JitRuntime::generateDebugTrapHandler(JSContext* cx,
                                              DebugTrapHandlerKind kind) {
  TempAllocator temp(&cx->tempLifoAlloc());
  StackMacroAssembler masm(cx, temp);
  AutoCreatedBy acb(masm, "JitRuntime::generateDebugTrapHandler");

  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
  regs.takeUnchecked(ICStubReg);
  if (HasInterpreterPCReg()) {
    regs.takeUnchecked(InterpreterPCReg);
  }
#ifdef JS_CODEGEN_ARM
  regs.takeUnchecked(BaselineSecondScratchReg);
  AutoNonDefaultSecondScratchRegister andssr(masm, BaselineSecondScratchReg);
#endif
  Register scratch1 = regs.takeAny();
  Register scratch2 = regs.takeAny();
  Register scratch3 = regs.takeAny();

  if (kind == DebugTrapHandlerKind::Interpreter) {
    // The interpreter calls this for every script when
    // debugging, so check if the script has any breakpoints or is in step
    // mode before calling into C++.
    Label hasDebugScript;
    Address scriptAddr(FramePointer,
                       BaselineFrame::reverseOffsetOfInterpreterScript());
    masm.loadPtr(scriptAddr, scratch1);
    masm.branchTest32(Assembler::NonZero,
                      Address(scratch1, JSScript::offsetOfMutableFlags()),
                      Imm32(int32_t(JSScript::MutableFlags::HasDebugScript)),
                      &hasDebugScript);
    masm.abiret();
    masm.bind(&hasDebugScript);

    if (HasInterpreterPCReg()) {
      // Update frame's bytecode pc because the debugger depends on it.
      Address pcAddr(FramePointer,
                     BaselineFrame::reverseOffsetOfInterpreterPC());
      masm.storePtr(InterpreterPCReg, pcAddr);
    }
  }

  // Load the return address in scratch1.
  masm.loadAbiReturnAddress(scratch1);

  // Load BaselineFrame pointer in scratch2.
  masm.loadBaselineFramePtr(FramePointer, scratch2);

  // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
  // the stub frame has a nullptr ICStub pointer, since this pointer is marked
  // during GC.
  masm.movePtr(ImmPtr(nullptr), ICStubReg);
  EmitBaselineEnterStubFrame(masm, scratch3);

  using Fn = bool (*)(JSContext*, BaselineFrame*, const uint8_t*);
  VMFunctionId id = VMFunctionToId<Fn, jit::HandleDebugTrap>::id;
  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);

  masm.push(scratch1);
  masm.push(scratch2);
  EmitBaselineCallVM(code, masm);

  EmitBaselineLeaveStubFrame(masm);

  if (kind == DebugTrapHandlerKind::Interpreter) {
    // We have to reload the bytecode pc register.
    Address pcAddr(FramePointer, BaselineFrame::reverseOffsetOfInterpreterPC());
    masm.loadPtr(pcAddr, InterpreterPCRegAtDispatch);
  }
  masm.abiret();

  Linker linker(masm);
  JitCode* handlerCode = linker.newCode(cx, CodeKind::Other);
  if (!handlerCode) {
    return nullptr;
  }

  CollectPerfSpewerJitCodeProfile(handlerCode, "DebugTrapHandler");

#ifdef MOZ_VTUNE
  vtune::MarkStub(handlerCode, "DebugTrapHandler");
#endif

  return handlerCode;
}

}  // namespace jit
}  // namespace js