BaselineBailouts.cpp (74149B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #include "mozilla/Assertions.h" 8 #include "mozilla/CheckedArithmetic.h" 9 #include "mozilla/Likely.h" 10 #include "mozilla/ScopeExit.h" 11 12 #include "builtin/ModuleObject.h" 13 #include "debugger/DebugAPI.h" 14 #include "gc/GC.h" 15 #include "jit/Bailouts.h" 16 #include "jit/BaselineFrame.h" 17 #include "jit/BaselineIC.h" 18 #include "jit/BaselineJIT.h" 19 #include "jit/CalleeToken.h" 20 #include "jit/Invalidation.h" 21 #include "jit/Ion.h" 22 #include "jit/IonScript.h" 23 #include "jit/JitFrames.h" 24 #include "jit/JitRuntime.h" 25 #include "jit/JitSpewer.h" 26 #include "jit/JitZone.h" 27 #include "jit/RematerializedFrame.h" 28 #include "jit/SharedICRegisters.h" 29 #include "jit/Simulator.h" 30 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit, js::ReportOverRecursed 31 #include "js/Utility.h" 32 #include "proxy/ScriptedProxyHandler.h" 33 #include "util/Memory.h" 34 #include "vm/ArgumentsObject.h" 35 #include "vm/BytecodeUtil.h" 36 #include "vm/JitActivation.h" 37 38 #include "jit/JitFrames-inl.h" 39 #include "vm/JSAtomUtils-inl.h" 40 #include "vm/JSContext-inl.h" 41 #include "vm/JSScript-inl.h" 42 43 using namespace js; 44 using namespace js::jit; 45 46 using mozilla::DebugOnly; 47 using mozilla::Maybe; 48 49 // BaselineStackBuilder may reallocate its buffer if the current one is too 50 // small. To avoid dangling pointers, BufferPointer represents a pointer into 51 // this buffer as a pointer to the header and a fixed offset. 
52 template <typename T> 53 class BufferPointer { 54 const UniquePtr<BaselineBailoutInfo>& header_; 55 size_t offset_; 56 bool heap_; 57 58 public: 59 BufferPointer(const UniquePtr<BaselineBailoutInfo>& header, size_t offset, 60 bool heap) 61 : header_(header), offset_(offset), heap_(heap) {} 62 63 T* get() const { 64 BaselineBailoutInfo* header = header_.get(); 65 if (!heap_) { 66 return (T*)(header->incomingStack + offset_); 67 } 68 69 uint8_t* p = header->copyStackTop - offset_; 70 MOZ_ASSERT(p >= header->copyStackBottom && p < header->copyStackTop); 71 return (T*)p; 72 } 73 74 void set(const T& value) { *get() = value; } 75 76 // Note: we return a copy instead of a reference, to avoid potential memory 77 // safety hazards when the underlying buffer gets resized. 78 const T operator*() const { return *get(); } 79 T* operator->() const { return get(); } 80 }; 81 82 /** 83 * BaselineStackBuilder helps abstract the process of rebuilding the C stack on 84 * the heap. It takes a bailout iterator and keeps track of the point on the C 85 * stack from which the reconstructed frames will be written. 86 * 87 * It exposes methods to write data into the heap memory storing the 88 * reconstructed stack. It also exposes method to easily calculate addresses. 89 * This includes both the virtual address that a particular value will be at 90 * when it's eventually copied onto the stack, as well as the current actual 91 * address of that value (whether on the heap allocated portion being 92 * constructed or the existing stack). 93 * 94 * The abstraction handles transparent re-allocation of the heap memory when it 95 * needs to be enlarged to accommodate new data. Similarly to the C stack, the 96 * data that's written to the reconstructed stack grows from high to low in 97 * memory. 98 * 99 * The lowest region of the allocated memory contains a BaselineBailoutInfo 100 * structure that points to the start and end of the written data. 
 */
class MOZ_STACK_CLASS BaselineStackBuilder {
  JSContext* cx_;
  JitFrameLayout* frame_ = nullptr;
  SnapshotIterator& iter_;
  RootedValueVector outermostFrameFormals_;

  // Total allocation size, bytes still unwritten, bytes already written, and
  // bytes pushed so far for the frame currently being built.
  size_t bufferTotal_ = 1024;
  size_t bufferAvail_ = 0;
  size_t bufferUsed_ = 0;
  size_t framePushed_ = 0;

  UniquePtr<BaselineBailoutInfo> header_;

  // Script / callee / IC state for the frame currently being reconstructed.
  JSScript* script_;
  JSFunction* fun_;
  const ExceptionBailoutInfo* excInfo_;
  ICScript* icScript_;

  jsbytecode* pc_ = nullptr;
  JSOp op_ = JSOp::Nop;
  mozilla::Maybe<ResumeMode> resumeMode_;
  uint32_t exprStackSlots_ = 0;
  void* prevFramePtr_ = nullptr;
  Maybe<BufferPointer<BaselineFrame>> blFrame_;

  size_t frameNo_ = 0;
  JSFunction* nextCallee_ = nullptr;

  BailoutKind bailoutKind_;

  bool canUseTrialInlinedICScripts_ = true;

  // The baseline frames we will reconstruct on the heap are not
  // rooted, so GC must be suppressed.
  gc::AutoSuppressGC suppress_;

 public:
  BaselineStackBuilder(JSContext* cx, const JSJitFrameIter& frameIter,
                       SnapshotIterator& iter,
                       const ExceptionBailoutInfo* excInfo,
                       BailoutReason reason);

  // Allocate the initial buffer and construct the BaselineBailoutInfo header
  // in-place at its low end. Returns false on OOM.
  [[nodiscard]] bool init() {
    MOZ_ASSERT(!header_);
    MOZ_ASSERT(bufferUsed_ == 0);

    uint8_t* bufferRaw = cx_->pod_calloc<uint8_t>(bufferTotal_);
    if (!bufferRaw) {
      return false;
    }
    bufferAvail_ = bufferTotal_ - sizeof(BaselineBailoutInfo);

    header_.reset(new (bufferRaw) BaselineBailoutInfo());
    header_->incomingStack = reinterpret_cast<uint8_t*>(frame_);
    // The payload grows downward from the top of the buffer; it is empty
    // initially, so bottom == top.
    header_->copyStackTop = bufferRaw + bufferTotal_;
    header_->copyStackBottom = header_->copyStackTop;
    return true;
  }

  [[nodiscard]] bool buildOneFrame();
  bool done();
  void nextFrame();

  JSScript* script() const { return script_; }
  size_t frameNo() const { return frameNo_; }
  bool isOutermostFrame() const { return frameNo_ == 0; }
  MutableHandleValueVector outermostFrameFormals() {
    return &outermostFrameFormals_;
  }
  BailoutKind bailoutKind() const { return bailoutKind_; }

  inline JitFrameLayout* startFrame() { return frame_; }

  BaselineBailoutInfo* info() {
    MOZ_ASSERT(header_);
    return header_.get();
  }

  // Transfer ownership of the buffer (header plus payload) to the caller.
  BaselineBailoutInfo* takeBuffer() {
    MOZ_ASSERT(header_);
    return header_.release();
  }

 private:
  [[nodiscard]] bool initFrame();
  [[nodiscard]] bool buildBaselineFrame();
  [[nodiscard]] bool buildArguments();
  [[nodiscard]] bool buildFixedSlots();
  [[nodiscard]] bool fixUpCallerArgs(MutableHandleValueVector savedCallerArgs,
                                     bool* fixedUp);
  [[nodiscard]] bool buildFinallyException();
  [[nodiscard]] bool buildExpressionStack();
  [[nodiscard]] bool finishLastFrame();

  [[nodiscard]] bool prepareForNextFrame(HandleValueVector savedCallerArgs);
  [[nodiscard]] bool finishOuterFrame();

  template <typename GetSlot>
  [[nodiscard]] bool buildStubFrameArgs(uint32_t actualArgs, bool constructing,
                                        GetSlot getSlot);
  [[nodiscard]] bool buildStubFrame(uint32_t frameSize,
                                    HandleValueVector savedCallerArgs);

#ifdef DEBUG
  [[nodiscard]] bool validateFrame();
#endif

#ifdef DEBUG
  bool envChainSlotCanBeOptimized();
#endif

  bool isPrologueBailout();
  jsbytecode* getResumePC();
  void* getStubReturnAddress();

  uint32_t exprStackSlots() const { return exprStackSlots_; }

  // Returns true if we're bailing out to a catch or finally block in this frame
  bool catchingException() const {
    return excInfo_ && excInfo_->catchingException() &&
           excInfo_->frameNo() == frameNo_;
  }

  // Returns true if we're bailing out to a finally block in this frame.
  bool resumingInFinallyBlock() const {
    return catchingException() && excInfo_->isFinally();
  }

  bool forcedReturn() const { return excInfo_ && excInfo_->forcedReturn(); }

  // Returns true if we're bailing out in place for debug mode
  bool propagatingIonExceptionForDebugMode() const {
    return excInfo_ && excInfo_->propagatingIonExceptionForDebugMode();
  }

  void* prevFramePtr() const {
    MOZ_ASSERT(prevFramePtr_);
    return prevFramePtr_;
  }
  BufferPointer<BaselineFrame>& blFrame() { return blFrame_.ref(); }

  void setNextCallee(JSFunction* nextCallee,
                     TrialInliningState trialInliningState);
  JSFunction* nextCallee() const { return nextCallee_; }

  jsbytecode* pc() const { return pc_; }
  bool resumeAfter() const {
    return !catchingException() && iter_.resumeAfter();
  }

  ResumeMode resumeMode() const { return *resumeMode_; }

  bool needToSaveCallerArgs() const {
    return resumeMode() == ResumeMode::InlinedAccessor;
  }

  // Double the buffer and move the header plus the already-written payload
  // into the new allocation. Returns false on OOM or size overflow.
  [[nodiscard]] bool enlarge() {
    MOZ_ASSERT(header_ != nullptr);
    size_t newSize;

    if (MOZ_UNLIKELY(!mozilla::SafeMul(bufferTotal_, size_t(2), &newSize))) {
      ReportOutOfMemory(cx_);
      return false;
    }

    uint8_t* newBufferRaw = cx_->pod_calloc<uint8_t>(newSize);
    if (!newBufferRaw) {
      return false;
    }

    // Initialize the new buffer.
    //
    // Before:
    //
    // [ Header | .. | Payload ]
    //
    // After:
    //
    // [ Header | ............... | Payload ]
    //
    // Size of Payload is |bufferUsed_|.
    //
    // We need to copy from the old buffer and header to the new buffer before
    // we set header_ (this deletes the old buffer).
    //
    // We also need to update |copyStackBottom| and |copyStackTop| because these
    // fields point to the Payload's start and end, respectively.
    using BailoutInfoPtr = UniquePtr<BaselineBailoutInfo>;
    BailoutInfoPtr newHeader(new (newBufferRaw) BaselineBailoutInfo(*header_));
    newHeader->copyStackTop = newBufferRaw + newSize;
    newHeader->copyStackBottom = newHeader->copyStackTop - bufferUsed_;
    memcpy(newHeader->copyStackBottom, header_->copyStackBottom, bufferUsed_);
    bufferTotal_ = newSize;
    bufferAvail_ = newSize - (sizeof(BaselineBailoutInfo) + bufferUsed_);
    header_ = std::move(newHeader);
    return true;
  }

  void resetFramePushed() { framePushed_ = 0; }

  size_t framePushed() const { return framePushed_; }

  // Reserve |size| bytes at the low end of the payload, growing the buffer
  // as needed. All write* helpers go through here.
  [[nodiscard]] bool subtract(size_t size, const char* info = nullptr) {
    // enlarge the buffer if need be.
    while (size > bufferAvail_) {
      if (!enlarge()) {
        return false;
      }
    }

    // write out element.
    header_->copyStackBottom -= size;
    bufferAvail_ -= size;
    bufferUsed_ += size;
    framePushed_ += size;
    if (info) {
      JitSpew(JitSpew_BaselineBailouts, " SUB_%03d %p/%p %-15s",
              (int)size, header_->copyStackBottom,
              virtualPointerAtStackOffset(0), info);
    }
    return true;
  }

  template <typename T>
  [[nodiscard]] bool write(const T& t) {
    // |t| must not live inside the buffer itself: subtract() can call
    // enlarge(), which frees the old allocation before the memcpy below.
    MOZ_ASSERT(!(uintptr_t(&t) >= uintptr_t(header_->copyStackBottom) &&
                 uintptr_t(&t) < uintptr_t(header_->copyStackTop)),
               "Should not reference memory that can be freed");
    if (!subtract(sizeof(T))) {
      return false;
    }
    memcpy(header_->copyStackBottom, &t, sizeof(T));
    return true;
  }

  template <typename T>
  [[nodiscard]] bool writePtr(T* t, const char* info) {
    if (!write<T*>(t)) {
      return false;
    }
    if (info) {
      JitSpew(JitSpew_BaselineBailouts, " WRITE_PTR %p/%p %-15s %p",
              header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
              t);
    }
    return true;
  }

  [[nodiscard]] bool writeWord(size_t w, const char* info) {
    if (!write<size_t>(w)) {
      return false;
    }
    if (info) {
      if (sizeof(size_t) == 4) {
        JitSpew(JitSpew_BaselineBailouts, " WRITE_WRD %p/%p %-15s %08zx",
                header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
                w);
      } else {
        JitSpew(JitSpew_BaselineBailouts, " WRITE_WRD %p/%p %-15s %016zx",
                header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
                w);
      }
    }
    return true;
  }

  [[nodiscard]] bool writeValue(const Value& val, const char* info) {
    if (!write<Value>(val)) {
      return false;
    }
    if (info) {
      JitSpew(JitSpew_BaselineBailouts,
              " WRITE_VAL %p/%p %-15s %016" PRIx64,
              header_->copyStackBottom, virtualPointerAtStackOffset(0), info,
              *((uint64_t*)&val));
    }
    return true;
  }

  // Copy the most recently written Value into |result|. Fails if less than a
  // Value's worth of data has been written.
  [[nodiscard]] bool peekLastValue(Value* result) {
    if (bufferUsed_ < sizeof(Value)) {
      return false;
    }

    memcpy(result, header_->copyStackBottom, sizeof(Value));
    return true;
  }

  // Write poison Values until framePushed_ (plus |after| bytes that will
  // follow) satisfies |alignment|.
  [[nodiscard]] bool maybeWritePadding(size_t alignment, size_t after,
                                       const char* info) {
    MOZ_ASSERT(framePushed_ % sizeof(Value) == 0);
    MOZ_ASSERT(after % sizeof(Value) == 0);
    size_t offset = ComputeByteAlignment(after, alignment);
    while (framePushed_ % alignment != offset) {
      if (!writeValue(MagicValue(JS_ARG_POISON), info)) {
        return false;
      }
    }

    return true;
  }

  void setResumeFramePtr(void* resumeFramePtr) {
    header_->resumeFramePtr = resumeFramePtr;
  }

  void setResumeAddr(void* resumeAddr) { header_->resumeAddr = resumeAddr; }

  // Pointer to the data at |offset| from the bottom of the reconstructed
  // stack; resolves into the heap copy or the incoming stack as appropriate.
  template <typename T>
  BufferPointer<T> pointerAtStackOffset(size_t offset) {
    if (offset < bufferUsed_) {
      // Calculate offset from copyStackTop.
      offset = header_->copyStackTop - (header_->copyStackBottom + offset);
      return BufferPointer<T>(header_, offset, /* heap = */ true);
    }

    return BufferPointer<T>(header_, offset - bufferUsed_, /* heap = */ false);
  }

  BufferPointer<Value> valuePointerAtStackOffset(size_t offset) {
    return pointerAtStackOffset<Value>(offset);
  }

  // The address this offset will have once the reconstructed frames are
  // eventually copied onto the real stack.
  inline uint8_t* virtualPointerAtStackOffset(size_t offset) {
    if (offset < bufferUsed_) {
      return reinterpret_cast<uint8_t*>(frame_) - (bufferUsed_ - offset);
    }
    return reinterpret_cast<uint8_t*>(frame_) + (offset - bufferUsed_);
  }
};

void BaselineBailoutInfo::trace(JSTracer* trc) {
  TraceRoot(trc, &tempId, "BaselineBailoutInfo::tempId");
}

BaselineStackBuilder::BaselineStackBuilder(JSContext* cx,
                                           const JSJitFrameIter& frameIter,
                                           SnapshotIterator& iter,
                                           const ExceptionBailoutInfo* excInfo,
                                           BailoutReason reason)
    : cx_(cx),
      frame_(static_cast<JitFrameLayout*>(frameIter.current())),
      iter_(iter),
      outermostFrameFormals_(cx),
      script_(frameIter.script()),
      fun_(frameIter.maybeCallee()),
      excInfo_(excInfo),
      icScript_(script_->jitScript()->icScript()),
      bailoutKind_(iter.bailoutKind()),
      suppress_(cx) {
  MOZ_ASSERT(bufferTotal_ >= sizeof(BaselineBailoutInfo));
  // An invalidation bailout overrides whatever kind the snapshot recorded.
  if (reason == BailoutReason::Invalidate) {
    bailoutKind_ = BailoutKind::OnStackInvalidation;
  }
}

bool BaselineStackBuilder::initFrame() {
  // Get the pc and ResumeMode. If we are handling an exception, resume at the
  // pc of the catch or finally block.
  if (catchingException()) {
    pc_ = excInfo_->resumePC();
    resumeMode_ = mozilla::Some(ResumeMode::ResumeAt);
  } else {
    pc_ = script_->offsetToPC(iter_.pcOffset());
    resumeMode_ = mozilla::Some(iter_.resumeMode());
  }
  op_ = JSOp(*pc_);

  // If we are catching an exception, we are bailing out to a catch or
  // finally block and this is the frame where we will resume. Usually the
  // expression stack should be empty in this case but there can be
  // iterators on the stack.
  if (catchingException()) {
    exprStackSlots_ = excInfo_->numExprSlots();
  } else {
    uint32_t totalFrameSlots = iter_.numAllocations();
    uint32_t fixedSlots = script_->nfixed();
    uint32_t argSlots = CountArgSlots(script_, fun_);
    uint32_t intermediates = NumIntermediateValues(resumeMode());
    // Whatever is left after fixed slots, argument slots and intermediates
    // must be the expression stack.
    exprStackSlots_ = totalFrameSlots - fixedSlots - argSlots - intermediates;

    // Verify that there was no underflow.
    MOZ_ASSERT(exprStackSlots_ <= totalFrameSlots);
  }

  JitSpew(JitSpew_BaselineBailouts, " Unpacking %s:%u:%u",
          script_->filename(), script_->lineno(),
          script_->column().oneOriginValue());
  JitSpew(JitSpew_BaselineBailouts, " [BASELINE-JS FRAME]");

  // Write the previous frame pointer value. For the outermost frame we reuse
  // the value in the JitFrameLayout already on the stack. Record the virtual
  // stack offset at this location. Later on, if we end up writing out a
  // BaselineStub frame for the next callee, we'll need to save the address.
  if (!isOutermostFrame()) {
    if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
      return false;
    }
  }
  prevFramePtr_ = virtualPointerAtStackOffset(0);

  resetFramePushed();

  return true;
}

// Record the callee of an inlined frame and choose the ICScript the
// reconstructed callee frame should run with.
void BaselineStackBuilder::setNextCallee(
    JSFunction* nextCallee, TrialInliningState trialInliningState) {
  nextCallee_ = nextCallee;

  if (trialInliningState == TrialInliningState::Inlined &&
      !iter_.ionScript()->purgedICScripts() && canUseTrialInlinedICScripts_) {
    // Update icScript_ to point to the icScript of nextCallee
    const uint32_t pcOff = script_->pcToOffset(pc_);
    icScript_ = icScript_->findInlinedChild(pcOff);
  } else {
    // If we don't know for certain that it's TrialInliningState::Inlined,
    // just use the callee's own ICScript. We could still have the trial
    // inlined ICScript available, but we also could not if we transitioned
    // to TrialInliningState::Failure after being monomorphic inlined.
    //
    // Also use the callee's own ICScript if we purged callee ICScripts.
    icScript_ = nextCallee->nonLazyScript()->jitScript()->icScript();

    if (trialInliningState != TrialInliningState::MonomorphicInlined) {
      // Don't use specialized ICScripts for any of the callees if we had an
      // inlining failure. We're now using the generic ICScript but compilation
      // might have used the trial-inlined ICScript and these can have very
      // different inlining graphs.
      canUseTrialInlinedICScripts_ = false;
    }
  }

  // Assert the ICScript matches nextCallee.
  JSScript* calleeScript = nextCallee->nonLazyScript();
  MOZ_RELEASE_ASSERT(icScript_->numICEntries() == calleeScript->numICEntries());
  MOZ_RELEASE_ASSERT(icScript_->bytecodeSize() == calleeScript->length());
}

// True when there are no more snapshot frames to unpack, or when we stop
// early because this frame is catching the exception.
bool BaselineStackBuilder::done() {
  if (!iter_.moreFrames()) {
    MOZ_ASSERT(!nextCallee_);
    return true;
  }
  return catchingException();
}

// Advance to the next (inlined) frame recorded in the snapshot.
void BaselineStackBuilder::nextFrame() {
  MOZ_ASSERT(nextCallee_);
  fun_ = nextCallee_;
  script_ = fun_->nonLazyScript();
  nextCallee_ = nullptr;

  // Scripts with an IonScript must also have a BaselineScript.
  MOZ_ASSERT(script_->hasBaselineScript());

  frameNo_++;
  iter_.nextInstruction();
}

// Build the BaselineFrame struct
bool BaselineStackBuilder::buildBaselineFrame() {
  if (!subtract(BaselineFrame::Size(), "BaselineFrame")) {
    return false;
  }
  blFrame_.reset();
  blFrame_.emplace(pointerAtStackOffset<BaselineFrame>(0));

  uint32_t flags = BaselineFrame::RUNNING_IN_INTERPRETER;

  // If we are bailing to a script whose execution is observed, mark the
  // baseline frame as a debuggee frame. This is to cover the case where we
  // don't rematerialize the Ion frame via the Debugger.
  if (script_->isDebuggee()) {
    flags |= BaselineFrame::DEBUGGEE;
  }

  // Get |envChain|.
  JSObject* envChain = nullptr;
  Value envChainSlot = iter_.read();
  if (envChainSlot.isObject()) {
    // The env slot has been updated from UndefinedValue. It must be the
    // complete initial environment.
    envChain = &envChainSlot.toObject();

    // Set the HAS_INITIAL_ENV flag if needed. See IsFrameInitialEnvironment.
    MOZ_ASSERT(!script_->isForEval());
    if (fun_ && fun_->needsFunctionEnvironmentObjects()) {
      MOZ_ASSERT(fun_->nonLazyScript()->initialEnvironmentShape());
      flags |= BaselineFrame::HAS_INITIAL_ENV;
    }
  } else {
    MOZ_ASSERT(envChainSlot.isUndefined() ||
               envChainSlot.isMagic(JS_OPTIMIZED_OUT));
    MOZ_ASSERT(envChainSlotCanBeOptimized());

    // The env slot has been optimized out.
    // Get it from the function or script.
    if (fun_) {
      envChain = fun_->environment();
    } else if (script_->isModule()) {
      envChain = script_->module()->environment();
    } else {
      // For global scripts without a non-syntactic env the env
      // chain is the script's global lexical environment. (We do
      // not compile scripts with a non-syntactic global scope).
      // Also note that it's invalid to resume into the prologue in
      // this case because the prologue expects the env chain in R1
      // for eval and global scripts.
      MOZ_ASSERT(!script_->isForEval());
      MOZ_ASSERT(!script_->hasNonSyntacticScope());
      envChain = &(script_->global().lexicalEnvironment());
    }
  }

  // Write |envChain|.
  MOZ_ASSERT(envChain);
  JitSpew(JitSpew_BaselineBailouts, " EnvChain=%p", envChain);
  blFrame()->setEnvironmentChain(envChain);

  // Get |returnValue| if present.
  Value returnValue = UndefinedValue();
  if (script_->noScriptRval()) {
    // Don't use the return value (likely a JS_OPTIMIZED_OUT MagicValue) to
    // not confuse Baseline.
    iter_.skip();
  } else {
    returnValue = iter_.read();
    flags |= BaselineFrame::HAS_RVAL;
  }

  // Write |returnValue|.
  JitSpew(JitSpew_BaselineBailouts, " ReturnValue=%016" PRIx64,
          *((uint64_t*)&returnValue));
  blFrame()->setReturnValue(returnValue);

  // Get |argsObj| if present.
  ArgumentsObject* argsObj = nullptr;
  if (script_->needsArgsObj()) {
    Value maybeArgsObj = iter_.read();
    MOZ_ASSERT(maybeArgsObj.isObject() || maybeArgsObj.isUndefined() ||
               maybeArgsObj.isMagic(JS_OPTIMIZED_OUT));
    if (maybeArgsObj.isObject()) {
      argsObj = &maybeArgsObj.toObject().as<ArgumentsObject>();
    }
  }

  // Note: we do not need to initialize the scratchValue field in BaselineFrame.

  // Write |flags|.
  blFrame()->setFlags(flags);

  // Write |icScript|.
  JitSpew(JitSpew_BaselineBailouts, " ICScript=%p", icScript_);
  blFrame()->setICScript(icScript_);

  // initArgsObjUnchecked modifies the frame's flags, so call it after setFlags.
  if (argsObj) {
    blFrame()->initArgsObjUnchecked(*argsObj);
  }
  return true;
}

// Overwrite the pushed args present in the calling frame with
// the unpacked |thisv| and argument values.
bool BaselineStackBuilder::buildArguments() {
  Value thisv = iter_.read();
  JitSpew(JitSpew_BaselineBailouts, " Is function!");
  JitSpew(JitSpew_BaselineBailouts, " thisv=%016" PRIx64,
          *((uint64_t*)&thisv));

  size_t thisvOffset = framePushed() + JitFrameLayout::offsetOfThis();
  valuePointerAtStackOffset(thisvOffset).set(thisv);

  MOZ_ASSERT(iter_.numAllocations() >= CountArgSlots(script_, fun_));
  JitSpew(JitSpew_BaselineBailouts,
          " frame slots %u, nargs %zu, nfixed %zu", iter_.numAllocations(),
          fun_->nargs(), script_->nfixed());

  bool shouldStoreOutermostFormals =
      isOutermostFrame() && !script_->argsObjAliasesFormals();
  if (shouldStoreOutermostFormals) {
    // This is the first (outermost) frame and we don't have an
    // arguments object aliasing the formals. Due to UCE and phi
    // elimination, we could store an UndefinedValue() here for
    // formals we think are unused, but locals may still reference the
    // original argument slot (MParameter/LArgument) and expect the
    // original Value.
    // To avoid this problem, store the formals in a
    // Vector until we are done.
    MOZ_ASSERT(outermostFrameFormals().empty());
    if (!outermostFrameFormals().resize(fun_->nargs())) {
      return false;
    }
  }

  for (uint32_t i = 0; i < fun_->nargs(); i++) {
    Value arg = iter_.read();
    JitSpew(JitSpew_BaselineBailouts, " arg %d = %016" PRIx64, (int)i,
            *((uint64_t*)&arg));
    if (!isOutermostFrame()) {
      size_t argOffset = framePushed() + JitFrameLayout::offsetOfActualArg(i);
      valuePointerAtStackOffset(argOffset).set(arg);
    } else if (shouldStoreOutermostFormals) {
      outermostFrameFormals()[i].set(arg);
    } else {
      // When the arguments object aliases the formal arguments, then
      // JSOp::SetArg mutates the argument object. In such cases, the
      // list of arguments reported by the snapshot are only aliases
      // of argument object slots which are optimized to only store
      // differences compared to arguments which are on the stack.
    }
  }
  return true;
}

// Unpack the script's fixed (local) slots and push them onto the
// reconstructed frame.
bool BaselineStackBuilder::buildFixedSlots() {
  for (uint32_t i = 0; i < script_->nfixed(); i++) {
    Value slot = iter_.read();
    if (!writeValue(slot, "FixedValue")) {
      return false;
    }
  }
  return true;
}

// The caller side of inlined js::fun_call and accessors must look
// like the function wasn't inlined.
bool BaselineStackBuilder::fixUpCallerArgs(
    MutableHandleValueVector savedCallerArgs, bool* fixedUp) {
  MOZ_ASSERT(!*fixedUp);

  // Inlining of SpreadCall-like frames not currently supported.
  MOZ_ASSERT(!IsSpreadOp(op_));

  // Nothing to do unless the inlined callee was a fun_call or an accessor.
  if (resumeMode() != ResumeMode::InlinedFunCall && !needToSaveCallerArgs()) {
    return true;
  }

  // Calculate how many arguments are consumed by the inlined call.
  // All calls pass |callee| and |this|.
  uint32_t inlinedArgs = 2;
  if (resumeMode() == ResumeMode::InlinedFunCall) {
    // The first argument to an inlined FunCall becomes |this|,
    // if it exists. The rest are passed normally.
    MOZ_ASSERT(IsInvokeOp(op_));
    inlinedArgs += GET_ARGC(pc_) > 0 ? GET_ARGC(pc_) - 1 : 0;
  } else {
    MOZ_ASSERT(resumeMode() == ResumeMode::InlinedAccessor);
    MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op_));
    // Setters are passed one argument. Getters are passed none.
    if (IsSetPropOp(op_)) {
      inlinedArgs++;
    }
  }

  // Calculate how many values are live on the stack across the call,
  // and push them.
  MOZ_ASSERT(inlinedArgs <= exprStackSlots());
  uint32_t liveStackSlots = exprStackSlots() - inlinedArgs;

  JitSpew(JitSpew_BaselineBailouts,
          " pushing %u expression stack slots before fixup",
          liveStackSlots);
  for (uint32_t i = 0; i < liveStackSlots; i++) {
    Value v = iter_.read();
    if (!writeValue(v, "StackValue")) {
      return false;
    }
  }

  // When we inline js::fun_call, we bypass the native and inline the
  // target directly. When rebuilding the stack, we need to fill in
  // the right number of slots to make it look like the js_native was
  // actually called.
  if (resumeMode() == ResumeMode::InlinedFunCall) {
    // We must transform the stack from |target, this, args| to
    // |js_fun_call, target, this, args|. The value of |js_fun_call|
    // will never be observed, so we push |undefined| for it, followed
    // by the remaining arguments.
    JitSpew(JitSpew_BaselineBailouts,
            " pushing undefined to fixup funcall");
    if (!writeValue(UndefinedValue(), "StackValue")) {
      return false;
    }
    if (GET_ARGC(pc_) > 0) {
      JitSpew(JitSpew_BaselineBailouts,
              " pushing %u expression stack slots", inlinedArgs);
      for (uint32_t i = 0; i < inlinedArgs; i++) {
        Value arg = iter_.read();
        if (!writeValue(arg, "StackValue")) {
          return false;
        }
      }
    } else {
      // When we inline FunCall with no arguments, we push an extra
      // |undefined| value for |this|. That value should not appear
      // in the rebuilt baseline frame.
      JitSpew(JitSpew_BaselineBailouts, " pushing target of funcall");
      Value target = iter_.read();
      if (!writeValue(target, "StackValue")) {
        return false;
      }
      // Skip |this|.
      iter_.skip();
    }
  }

  if (needToSaveCallerArgs()) {
    // Save the actual arguments. They are needed to rebuild the callee frame.
    if (!savedCallerArgs.resize(inlinedArgs)) {
      return false;
    }
    for (uint32_t i = 0; i < inlinedArgs; i++) {
      savedCallerArgs[i].set(iter_.read());
    }

    if (IsSetPropOp(op_)) {
      // The RHS argument to SetProp remains on the stack after the
      // operation and is observable, so we have to fill it in.
      Value initialArg = savedCallerArgs[inlinedArgs - 1];
      JitSpew(JitSpew_BaselineBailouts,
              " pushing setter's initial argument");
      if (!writeValue(initialArg, "StackValue")) {
        return false;
      }
    }
  }

  *fixedUp = true;
  return true;
}

// Unpack and push this frame's expression stack slots, then validate
// intermediate values for the resume modes that require a check.
bool BaselineStackBuilder::buildExpressionStack() {
  JitSpew(JitSpew_BaselineBailouts, " pushing %u expression stack slots",
          exprStackSlots());

  for (uint32_t i = 0; i < exprStackSlots(); i++) {
    Value v;
    // If we're at the newest frame and in the middle of throwing an exception,
    // some expression stack slots might not be available.
    //
    // For example, if we call a function that throws, and then catch the
    // exception, the return value won't be available. This isn't usually a
    // problem, because the expression stack is generally empty when we enter a
    // catch block. However, if a catch is inside a for-of, there are
    // iterator-related values on the stack. If one of those values is defined
    // by the function that threw, then its value will be unavailable. This is
    // not possible for a user-written catch, but can happen for synthetic
    // catches generated via desugaring. See bug 1934425 for a case involving
    // `for (await using ...)`.
    //
    // Similar issues of trying to recover the result of a throwing function can
    // also occur when bailing out while propagating an exception due to debug
    // mode.
    //
    // We therefore use a fallible read here.
    if (!iter_.tryRead(&v)) {
      MOZ_ASSERT(
          !iter_.moreFrames() &&
          (catchingException() || propagatingIonExceptionForDebugMode()));
      v = MagicValue(JS_OPTIMIZED_OUT);
    }
    if (!writeValue(v, "StackValue")) {
      return false;
    }
  }

  if (resumeMode() == ResumeMode::ResumeAfterCheckProxyGetResult) {
    JitSpew(JitSpew_BaselineBailouts,
            " Checking that the proxy's get trap result matches "
            "expectations.");
    Value returnVal;
    if (peekLastValue(&returnVal) && !returnVal.isMagic(JS_OPTIMIZED_OUT)) {
      Value idVal = iter_.read();
      Value targetVal = iter_.read();

      MOZ_RELEASE_ASSERT(!idVal.isMagic());
      MOZ_RELEASE_ASSERT(targetVal.isObject());
      RootedObject target(cx_, &targetVal.toObject());
      RootedValue rootedIdVal(cx_, idVal);
      RootedId id(cx_);
      if (!PrimitiveValueToId<CanGC>(cx_, rootedIdVal, &id)) {
        return false;
      }
      RootedValue value(cx_, returnVal);

      auto validation =
          ScriptedProxyHandler::checkGetTrapResult(cx_, target, id, value);
      if (validation != ScriptedProxyHandler::GetTrapValidationResult::OK) {
        // Stash the id so it stays rooted (traced via
        // BaselineBailoutInfo::trace) for the exception path.
        header_->tempId = id.get();

        JitSpew(
            JitSpew_BaselineBailouts,
            " Proxy get trap result mismatch! Overwriting bailout kind");
        if (validation == ScriptedProxyHandler::GetTrapValidationResult::
                              MustReportSameValue) {
          bailoutKind_ = BailoutKind::ThrowProxyTrapMustReportSameValue;
        } else if (validation == ScriptedProxyHandler::GetTrapValidationResult::
                                     MustReportUndefined) {
          bailoutKind_ = BailoutKind::ThrowProxyTrapMustReportUndefined;
        } else {
          return false;
        }
      }
    }

    return true;
  }

  if (resumeMode() == ResumeMode::ResumeAfterCheckIsObject) {
    JitSpew(JitSpew_BaselineBailouts,
            " Checking that intermediate value is an object");
    Value returnVal;
    if (iter_.tryRead(&returnVal) && !returnVal.isObject()) {
      MOZ_ASSERT(!returnVal.isMagic());
      JitSpew(JitSpew_BaselineBailouts,
              " Not an object! Overwriting bailout kind");
      bailoutKind_ = BailoutKind::ThrowCheckIsObject;
    }
  }

  return true;
}

// Push the exception state needed to resume execution inside a finally block.
bool BaselineStackBuilder::buildFinallyException() {
  MOZ_ASSERT(resumingInFinallyBlock());

  if (!writeValue(excInfo_->finallyException(), "Exception")) {
    return false;
  }
  if (!writeValue(excInfo_->finallyExceptionStack(), "ExceptionStack")) {
    return false;
  }
  if (!writeValue(BooleanValue(true), "throwing")) {
    return false;
  }

  return true;
}

// Finish the current baseline frame and start the stub frame for the next
// (inlined) callee.
bool BaselineStackBuilder::prepareForNextFrame(
    HandleValueVector savedCallerArgs) {
  const uint32_t frameSize = framePushed();

  // Write out descriptor and return address for the baseline frame.
  // The icEntry in question MUST have an inlinable fallback stub.
  if (!finishOuterFrame()) {
    return false;
  }

  return buildStubFrame(frameSize, savedCallerArgs);
}

bool BaselineStackBuilder::finishOuterFrame() {
  // .             .
  // |  Descr(BLJS)  |
  // +---------------+
  // |  ReturnAddr   |
  // +===============+

  const BaselineInterpreter& baselineInterp =
      cx_->runtime()->jitRuntime()->baselineInterpreter();

  blFrame()->setInterpreterFields(script_, pc_);

  // Write out descriptor of BaselineJS frame.
  size_t baselineFrameDescr = MakeFrameDescriptor(FrameType::BaselineJS);
  if (!writeWord(baselineFrameDescr, "Descriptor")) {
    return false;
  }

  // The frame "returns" into the Baseline Interpreter at the return address
  // recorded for this op's IC, as if the IC call had just completed.
  uint8_t* retAddr = baselineInterp.retAddrForIC(op_);
  return writePtr(retAddr, "ReturnAddr");
}

// Write the argument portion of a baseline stub frame: optional padding and
// NewTarget, |undefined| padding for formal-argument underflow, the actual
// arguments in reverse order, |this|, and finally the callee token.
// |getSlot(i)| yields the i-th caller value, where slot 0 is the callee and
// slot 1 is |this| (see CalleeOffset/ThisOffset/ArgsOffset below).
template <typename GetSlot>
bool BaselineStackBuilder::buildStubFrameArgs(uint32_t actualArgc,
                                              bool constructing,
                                              GetSlot getSlot) {
  const uint32_t CalleeOffset = 0;
  const uint32_t ThisOffset = 1;
  const uint32_t ArgsOffset = 2;  // callee + this

  Value callee = getSlot(CalleeOffset);
  JSFunction* calleeFun = &callee.toObject().as<JSFunction>();

  // If fewer actual arguments were passed than the callee declares, we push
  // |undefined| up to nargs() below.
  bool hasUnderflow = actualArgc < calleeFun->nargs();
  uint32_t argsPushed = hasUnderflow ? calleeFun->nargs() : actualArgc;
  // Account for |this| (+1), the pushed args, optional NewTarget, and the
  // frame header when computing the alignment padding.
  uint32_t afterFrameSize =
      (1 + argsPushed + constructing) * sizeof(Value) + JitFrameLayout::Size();
  if (!maybeWritePadding(JitStackAlignment, afterFrameSize, "Padding")) {
    return false;
  }

  if (constructing) {
    Value newTarget = getSlot(ArgsOffset + actualArgc);
    if (!writeValue(newTarget, "NewTarget")) {
      return false;
    }
  }

  if (hasUnderflow) {
    uint32_t numUndef = argsPushed - actualArgc;
    for (uint32_t i = 0; i < numUndef; i++) {
      if (!writeValue(UndefinedValue(), "UndefArgVal")) {
        return false;
      }
    }
  }

  // Arguments are written highest-index first: the stack grows down, so this
  // produces the standard JIT calling-convention layout.
  for (int32_t arg = actualArgc - 1; arg >= 0; arg--) {
    Value v = getSlot(ArgsOffset + arg);  // callee + this
    if (!writeValue(v, "ArgVal")) {
      return false;
    }
  }

  Value v = getSlot(ThisOffset);  // callee + this
  if (!writeValue(v, "ThisVal")) {
    return false;
  }

  // Push callee token (must be a JS Function)
  JitSpew(JitSpew_BaselineBailouts, " Callee = %016" PRIx64,
          callee.asRawBits());

  if (!writePtr(CalleeToToken(calleeFun, constructing), "CalleeToken")) {
    return false;
  }

  return true;
}

bool BaselineStackBuilder::buildStubFrame(uint32_t frameSize,
                                          HandleValueVector savedCallerArgs) {
  // Build baseline stub frame:
  // +===============+
  // |   FramePtr    |
  // +---------------+
  // |    StubPtr    |
  // +---------------+
  // |   Padding?    |
  // +---------------+
  // |     ArgA      |
  // +---------------+
  // |     ...       |
  // +---------------+
  // |     Arg0      |
  // +---------------+
  // |     ThisV     |
  // +---------------+
  // |  CalleeToken  |
  // +---------------+
  // | Descr(BLStub) |
  // +---------------+
  // |  ReturnAddr   |
  // +===============+

  JitSpew(JitSpew_BaselineBailouts, " [BASELINE-STUB FRAME]");

  // Write previous frame pointer (saved earlier).
  if (!writePtr(prevFramePtr(), "PrevFramePtr")) {
    return false;
  }
  prevFramePtr_ = virtualPointerAtStackOffset(0);

  // Write stub pointer.
  uint32_t pcOff = script_->pcToOffset(pc_);
  JitScript* jitScript = script_->jitScript();
  const ICEntry& icEntry = jitScript->icEntryFromPCOffset(pcOff);
  ICFallbackStub* fallback = jitScript->fallbackStubForICEntry(&icEntry);
  if (!writePtr(fallback, "StubPtr")) {
    return false;
  }

  // Write out the arguments, copied from the baseline frame. The order
  // of the arguments is reversed relative to the baseline frame's stack
  // values.
  MOZ_ASSERT(IsIonInlinableOp(op_));
  bool constructing = IsConstructPC(pc_);
  unsigned actualArgc;
  Value callee;
  if (needToSaveCallerArgs()) {
    MOZ_ASSERT(!constructing);
    callee = savedCallerArgs[0];
    actualArgc = IsSetPropOp(op_) ? 1 : 0;

    // For accessors, the arguments are not on the stack anymore,
    // but they are copied in the savedCallerArgs vector.
    if (!buildStubFrameArgs(actualArgc, constructing, [&](uint32_t idx) {
          return savedCallerArgs[idx];
        })) {
      return false;
    }
  } else if (resumeMode() == ResumeMode::InlinedFunCall && GET_ARGC(pc_) == 0) {
    // When calling FunCall with 0 arguments, we push |undefined|
    // for this. See BaselineCacheIRCompiler::pushFunCallArguments.
    MOZ_ASSERT(!constructing);
    actualArgc = 0;

    // Only the callee (fun.call itself) is on the baseline stack.
    size_t calleeSlot = blFrame()->numValueSlots(frameSize) - 1;
    callee = *blFrame()->valueSlot(calleeSlot);
    if (!buildStubFrameArgs(actualArgc, constructing, [&](uint32_t idx) {
          switch (idx) {
            case 0:
              return callee;
            case 1:
              return UndefinedValue();  // this
            default:
              MOZ_CRASH("unreachable");
          }
        })) {
      return false;
    }
  } else {
    MOZ_ASSERT(resumeMode() == ResumeMode::InlinedStandardCall ||
               resumeMode() == ResumeMode::InlinedFunCall);
    actualArgc = GET_ARGC(pc_);
    if (resumeMode() == ResumeMode::InlinedFunCall) {
      // See BaselineCacheIRCompiler::pushFunCallArguments.
      MOZ_ASSERT(actualArgc > 0);
      actualArgc--;
    }

    // The callee sits |actualArgc| args + |this| (+ NewTarget when
    // constructing) below the top value slot of the baseline frame.
    size_t valueSlot = blFrame()->numValueSlots(frameSize) - 1;
    size_t calleeSlot = valueSlot - actualArgc - 1 - constructing;
    if (!buildStubFrameArgs(actualArgc, constructing, [&](uint32_t idx) {
          return *blFrame()->valueSlot(calleeSlot + idx);
        })) {
      return false;
    }
    callee = *blFrame()->valueSlot(calleeSlot);
  }

  // Record the callee and its trial-inlining state so the next (inlined)
  // frame is built against the right ICScript.
  JSFunction* calleeFun = &callee.toObject().as<JSFunction>();
  const ICEntry& icScriptEntry = icScript_->icEntryFromPCOffset(pcOff);
  ICFallbackStub* icScriptFallback =
      icScript_->fallbackStubForICEntry(&icScriptEntry);
  setNextCallee(calleeFun, icScriptFallback->trialInliningState());

  // Push BaselineStub frame descriptor
  size_t baselineStubFrameDescr =
      MakeFrameDescriptorForJitCall(FrameType::BaselineStub, actualArgc);
  if (!writeWord(baselineStubFrameDescr, "Descriptor")) {
    return false;
  }

  // Push return address into ICCall_Scripted stub, immediately after the call.
  void* baselineCallReturnAddr = getStubReturnAddress();
  MOZ_ASSERT(baselineCallReturnAddr);
  if (!writePtr(baselineCallReturnAddr, "ReturnAddr")) {
    return false;
  }

  // The stack must be aligned after the callee pushes the frame pointer.
  MOZ_ASSERT((framePushed() + sizeof(void*)) % JitStackAlignment == 0);

  return true;
}

// Finish the frame we will actually resume into: record the resume frame
// pointer and the native resume address inside the Baseline Interpreter,
// initialize the frame's interpreter fields, and optionally report the
// bailout to the Gecko profiler.
bool BaselineStackBuilder::finishLastFrame() {
  const BaselineInterpreter& baselineInterp =
      cx_->runtime()->jitRuntime()->baselineInterpreter();

  setResumeFramePtr(prevFramePtr());

  // Compute the native address (within the Baseline Interpreter) that we will
  // resume at and initialize the frame's interpreter fields.
  uint8_t* resumeAddr;
  if (isPrologueBailout()) {
    JitSpew(JitSpew_BaselineBailouts, " Resuming into prologue.");
    MOZ_ASSERT(pc_ == script_->code());
    blFrame()->setInterpreterFieldsForPrologue(script_);
    resumeAddr = baselineInterp.bailoutPrologueEntryAddr();
  } else if (propagatingIonExceptionForDebugMode()) {
    // When propagating an exception for debug mode, set the
    // resume pc to the throwing pc, so that Debugger hooks report
    // the correct pc offset of the throwing op instead of its
    // successor.
    jsbytecode* throwPC = script_->offsetToPC(iter_.pcOffset());
    blFrame()->setInterpreterFields(script_, throwPC);
    resumeAddr = baselineInterp.interpretOpAddr().value;
  } else {
    jsbytecode* resumePC = getResumePC();
    blFrame()->setInterpreterFields(script_, resumePC);
    resumeAddr = baselineInterp.interpretOpAddr().value;
  }
  setResumeAddr(resumeAddr);
  JitSpew(JitSpew_BaselineBailouts, " Set resumeAddr=%p", resumeAddr);

  if (cx_->runtime()->geckoProfiler().enabled()) {
    // Register bailout with profiler.
    const char* filename = script_->filename();
    if (filename == nullptr) {
      filename = "<unknown>";
    }
    // 200 extra bytes leaves room for the bailout kind, op name, and line
    // numbers formatted below.
    unsigned len = strlen(filename) + 200;
    UniqueChars buf(js_pod_malloc<char>(len));
    if (buf == nullptr) {
      ReportOutOfMemory(cx_);
      return false;
    }
    snprintf(buf.get(), len, "%s %s %s on line %u of %s:%u",
             BailoutKindString(bailoutKind()), resumeAfter() ? "after" : "at",
             CodeName(op_), PCToLineNumber(script_, pc_), filename,
             script_->lineno());
    cx_->runtime()->geckoProfiler().markEvent("Bailout", buf.get());
  }

  return true;
}

#ifdef DEBUG
// The |envChain| slot must not be optimized out if the currently
// active scope requires any EnvironmentObjects beyond what is
// available at body scope. This checks that scope chain does not
// require any such EnvironmentObjects.
// See also: |CompileInfo::isObservableFrameSlot|
bool BaselineStackBuilder::envChainSlotCanBeOptimized() {
  jsbytecode* pc = script_->offsetToPC(iter_.pcOffset());
  Scope* scopeIter = script_->innermostScope(pc);
  // Walk outwards from the innermost scope at pc; any scope before the body
  // scope that has its own environment makes the slot observable.
  while (scopeIter != script_->bodyScope()) {
    if (!scopeIter || scopeIter->hasEnvironment()) {
      return false;
    }
    scopeIter = scopeIter->enclosing();
  }
  return true;
}

// Debug-only: check that the number of expression-stack slots recorded for a
// bailout matches the stack depth the bytecode interpreter would have at the
// resume pc, modulo the InlinedFunCall/InlinedAccessor adjustments below.
bool jit::AssertBailoutStackDepth(JSContext* cx, JSScript* script,
                                  jsbytecode* pc, ResumeMode mode,
                                  uint32_t exprStackSlots) {
  if (IsResumeAfter(mode)) {
    pc = GetNextPc(pc);
  }

  uint32_t expectedDepth;
  bool reachablePC;
  if (!ReconstructStackDepth(cx, script, pc, &expectedDepth, &reachablePC)) {
    return false;
  }
  if (!reachablePC) {
    // Nothing to check against for an unreachable pc.
    return true;
  }

  JSOp op = JSOp(*pc);

  if (mode == ResumeMode::InlinedFunCall) {
    // For inlined fun.call(this, ...); the reconstructed stack depth will
    // include the |this|, but the exprStackSlots won't.
    // Exception: if there are no arguments, the depths do match.
    MOZ_ASSERT(IsInvokeOp(op));
    if (GET_ARGC(pc) > 0) {
      MOZ_ASSERT(expectedDepth == exprStackSlots + 1);
    } else {
      MOZ_ASSERT(expectedDepth == exprStackSlots);
    }
    return true;
  }

  if (mode == ResumeMode::InlinedAccessor) {
    // Accessors coming out of ion are inlined via a complete lie perpetrated by
    // the compiler internally. Ion just rearranges the stack, and pretends that
    // it looked like a call all along.
    // This means that the depth is actually one *more* than expected by the
    // interpreter, as there is now a JSFunction, |this| and [arg], rather than
    // the expected |this| and [arg].
    // If the inlined accessor is a GetElem operation, the numbers do match, but
    // that's just because GetElem expects one more item on the stack. Note that
    // none of that was pushed, but it's still reflected in exprStackSlots.
    MOZ_ASSERT(IsIonInlinableGetterOrSetterOp(op));
    if (IsGetElemOp(op)) {
      MOZ_ASSERT(exprStackSlots == expectedDepth);
    } else {
      MOZ_ASSERT(exprStackSlots == expectedDepth + 1);
    }
    return true;
  }

  // In all other cases, the depth must match.
  MOZ_ASSERT(exprStackSlots == expectedDepth);
  return true;
}

// Debug-only sanity checks on the frame just built: slot counts within the
// script's limits and a stack depth consistent with the resume pc.
bool BaselineStackBuilder::validateFrame() {
  const uint32_t frameSize = framePushed();
  blFrame()->setDebugFrameSize(frameSize);
  JitSpew(JitSpew_BaselineBailouts, " FrameSize=%u", frameSize);

  // debugNumValueSlots() is based on the frame size, do some sanity checks.
  MOZ_ASSERT(blFrame()->debugNumValueSlots() >= script_->nfixed());
  MOZ_ASSERT(blFrame()->debugNumValueSlots() <= script_->nslots());

  uint32_t expectedSlots = exprStackSlots();
  if (resumingInFinallyBlock()) {
    // If we are resuming in a finally block, we push three extra values on the
    // stack (the exception, the exception stack, and |throwing|), so the depth
    // at the resume PC should be the depth at the fault PC plus three.
    expectedSlots += 3;
  }
  return AssertBailoutStackDepth(cx_, script_, pc_, resumeMode(),
                                 expectedSlots);
}
#endif

// Map the current op to the address in the baseline IC fallback code that the
// stub frame should return to after the (re-executed) call completes.
void* BaselineStackBuilder::getStubReturnAddress() {
  const BaselineICFallbackCode& code =
      cx_->runtime()->jitRuntime()->baselineICFallbackCode();

  if (IsGetPropOp(op_)) {
    return code.bailoutReturnAddr(BailoutReturnKind::GetProp);
  }
  if (IsSetPropOp(op_)) {
    return code.bailoutReturnAddr(BailoutReturnKind::SetProp);
  }
  if (IsGetElemOp(op_)) {
    return code.bailoutReturnAddr(BailoutReturnKind::GetElem);
  }

  // This should be a call op of some kind, now.
  MOZ_ASSERT(IsInvokeOp(op_) && !IsSpreadOp(op_));
  if (IsConstructOp(op_)) {
    return code.bailoutReturnAddr(BailoutReturnKind::New);
  }
  return code.bailoutReturnAddr(BailoutReturnKind::Call);
}

// Step over a single Goto/LoopHead/Nop; returns the pc unchanged for any
// other op. Used by getResumePC() below, which performs cycle detection
// because chasing Gotos may loop forever on empty loops.
static inline jsbytecode* GetNextNonLoopHeadPc(jsbytecode* pc) {
  JSOp op = JSOp(*pc);
  switch (op) {
    case JSOp::Goto:
      return pc + GET_JUMP_OFFSET(pc);

    case JSOp::LoopHead:
    case JSOp::Nop:
      return GetNextPc(pc);

    default:
      return pc;
  }
}

// Returns the pc to resume execution at in Baseline after a bailout.
jsbytecode* BaselineStackBuilder::getResumePC() {
  if (resumeAfter()) {
    return GetNextPc(pc_);
  }

  // If we are resuming at a LoopHead op, resume at the next op to avoid
  // a bailout -> enter Ion -> bailout loop with --ion-eager.
  //
  // Cycles can cause the loop below to not terminate. Empty loops are one
  // such example:
  //
  //   L: loophead
  //      goto L
  //
  // We do cycle detection below with the "tortoise and the hare" algorithm.
  jsbytecode* slowerPc = pc_;
  jsbytecode* fasterPc = pc_;
  while (true) {
    // Advance fasterPc twice as fast as slowerPc.
    slowerPc = GetNextNonLoopHeadPc(slowerPc);
    fasterPc = GetNextNonLoopHeadPc(fasterPc);
    fasterPc = GetNextNonLoopHeadPc(fasterPc);

    // Break on cycles or at the end of goto sequences.
    if (fasterPc == slowerPc) {
      break;
    }
  }

  return slowerPc;
}

// True if we are bailing into the very start of the script (pc offset 0,
// not a resume-after), i.e. the baseline prologue must still run.
bool BaselineStackBuilder::isPrologueBailout() {
  // If we are propagating an exception for debug mode, we will not resume
  // into baseline code, but instead into HandleExceptionBaseline (i.e.,
  // never before the prologue).
  return iter_.pcOffset() == 0 && !iter_.resumeAfter() &&
         !propagatingIonExceptionForDebugMode();
}

// Build a baseline stack frame.
bool BaselineStackBuilder::buildOneFrame() {
  // Build a baseline frame:
  // +===============+
  // | PrevFramePtr  |  <-- initFrame()
  // +---------------+
  // |   Baseline    |  <-- buildBaselineFrame()
  // |     Frame     |
  // +---------------+
  // |    Fixed0     |  <-- buildFixedSlots()
  // +---------------+
  // |     ...       |
  // +---------------+
  // |    FixedF     |
  // +---------------+
  // |    Stack0     |  <-- buildExpressionStack() -or- fixupCallerArgs()
  // +---------------+
  // |     ...       |      If we are building the frame in which we will
  // +---------------+      resume, we stop here.
  // |    StackS     |  <-- finishLastFrame() sets up the interpreter fields.
  // +---------------+
  // .               .
  // .               .
  // .               .  <-- If there are additional frames inlined into this
  // | Descr(BLJS)   |      one, we finish this frame. We generate a stub
  // +---------------+      frame between this frame and the inlined frame.
  // |  ReturnAddr   |      See: prepareForNextFrame()
  // +===============+

  if (!initFrame()) {
    return false;
  }

  if (!buildBaselineFrame()) {
    return false;
  }

  if (fun_ && !buildArguments()) {
    return false;
  }

  if (!buildFixedSlots()) {
    return false;
  }

  // For an outer frame of an inlined accessor, the caller's arguments may
  // need to be saved aside and the stack fixed up instead of rebuilt.
  bool fixedUp = false;
  RootedValueVector savedCallerArgs(cx_);
  if (iter_.moreFrames() && !fixUpCallerArgs(&savedCallerArgs, &fixedUp)) {
    return false;
  }

  if (!fixedUp) {
    if (!buildExpressionStack()) {
      return false;
    }
    if (resumingInFinallyBlock() && !buildFinallyException()) {
      return false;
    }
  }

#ifdef DEBUG
  if (!validateFrame()) {
    return false;
  }
#endif

#ifdef JS_JITSPEW
  const uint32_t pcOff = script_->pcToOffset(pc());
  JitSpew(JitSpew_BaselineBailouts,
          " Resuming %s pc offset %d (op %s) (line %u) of %s:%u:%u",
          resumeAfter() ? "after" : "at", (int)pcOff, CodeName(op_),
          PCToLineNumber(script_, pc()), script_->filename(), script_->lineno(),
          script_->column().oneOriginValue());
  JitSpew(JitSpew_BaselineBailouts, " Bailout kind: %s",
          BailoutKindString(bailoutKind()));
#endif

  // If this was the last inline frame, or we are bailing out to a catch or
  // finally block in this frame, then unpacking is almost done.
  if (done()) {
    return finishLastFrame();
  }

  // Otherwise, this is an outer frame for an inlined call or
  // accessor. We will be building an inner frame. Before that,
  // we must create a stub frame.
  return prepareForNextFrame(savedCallerArgs);
}

// Unpack an Ion frame (and any frames inlined into it) into heap-allocated
// Baseline frames. On success, *bailoutInfo describes the reconstructed
// stack, which is later copied onto the real stack and finished by
// FinishBailoutToBaseline.
bool jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation,
                               const JSJitFrameIter& iter,
                               BaselineBailoutInfo** bailoutInfo,
                               const ExceptionBailoutInfo* excInfo,
                               BailoutReason reason) {
  MOZ_ASSERT(bailoutInfo != nullptr);
  MOZ_ASSERT(*bailoutInfo == nullptr);
  MOZ_ASSERT(iter.isBailoutJS());

  // Caller should have saved the exception while we perform the bailout.
  MOZ_ASSERT(!cx->isExceptionPending());

  // Ion bailout can fail due to overrecursion and OOM. In such cases we
  // cannot honor any further Debugger hooks on the frame, and need to
  // ensure that its Debugger.Frame entry is cleaned up.
  auto guardRemoveRematerializedFramesFromDebugger =
      mozilla::MakeScopeExit([&] {
        activation->removeRematerializedFramesFromDebugger(cx, iter.fp());
      });

  // Always remove the RInstructionResults from the JitActivation, even in
  // case of failures as the stack frame is going away after the bailout.
  auto removeIonFrameRecovery = mozilla::MakeScopeExit(
      [&] { activation->removeIonFrameRecovery(iter.jsFrame()); });

  // The caller of the top frame must be one of the following:
  //   IonJS - Ion calling into Ion.
  //   BaselineStub - Baseline calling into Ion.
  //   Entry / WasmToJSJit - Interpreter or other (wasm) calling into Ion.
  //   BaselineJS - Resume'd Baseline, then likely OSR'd into Ion.
  MOZ_ASSERT(iter.isBailoutJS());
#if defined(DEBUG) || defined(JS_JITSPEW)
  FrameType prevFrameType = iter.prevType();
  MOZ_ASSERT(JSJitFrameIter::isEntry(prevFrameType) ||
             prevFrameType == FrameType::IonJS ||
             prevFrameType == FrameType::BaselineStub ||
             prevFrameType == FrameType::TrampolineNative ||
             prevFrameType == FrameType::IonICCall ||
             prevFrameType == FrameType::BaselineJS ||
             prevFrameType == FrameType::BaselineInterpreterEntry);
#endif

  // All incoming frames are going to look like this:
  //
  //      +---------------+
  //      |     ...       |
  //      +---------------+
  //      |     Args      |
  //      |     ...       |
  //      +---------------+
  //      |     ThisV     |
  //      +---------------+
  //      |  ActualArgC   |
  //      +---------------+
  //      |  CalleeToken  |
  //      +---------------+
  //      |  Descriptor   |
  //      +---------------+
  //      |  ReturnAddr   |
  //      +---------------+
  //      |    |||||      | <---- Overwrite starting here.
  //      |    |||||      |
  //      |    |||||      |
  //      +---------------+

  JitSpew(JitSpew_BaselineBailouts,
          "Bailing to baseline %s:%u:%u (IonScript=%p) (FrameType=%d)",
          iter.script()->filename(), iter.script()->lineno(),
          iter.script()->column().oneOriginValue(), (void*)iter.ionScript(),
          (int)prevFrameType);

  if (excInfo) {
    if (excInfo->catchingException()) {
      JitSpew(JitSpew_BaselineBailouts, "Resuming in catch or finally block");
    }
    if (excInfo->propagatingIonExceptionForDebugMode()) {
      JitSpew(JitSpew_BaselineBailouts, "Resuming in-place for debug mode");
    }
  }

  JitSpew(JitSpew_BaselineBailouts,
          " Reading from snapshot offset %u size %zu", iter.snapshotOffset(),
          iter.ionScript()->snapshotsListSize());

  iter.script()->updateJitCodeRaw(cx->runtime());

  // Under a bailout, there is no need to invalidate the frame after
  // evaluating the recover instruction, as the invalidation is only needed in
  // cases where the frame is introspected ahead of the bailout.
  MaybeReadFallback recoverBailout(cx, activation, &iter,
                                   MaybeReadFallback::Fallback_DoNothing);

  // Ensure that all value locations are readable from the SnapshotIterator.
  // Get the RInstructionResults from the JitActivation if the frame got
  // recovered ahead of the bailout.
  SnapshotIterator snapIter(iter, activation->bailoutData()->machineState());
  if (!snapIter.initInstructionResults(recoverBailout)) {
    return false;
  }

#ifdef TRACK_SNAPSHOTS
  snapIter.spewBailingFrom();
#endif

  BaselineStackBuilder builder(cx, iter, snapIter, excInfo, reason);
  if (!builder.init()) {
    return false;
  }

  JitSpew(JitSpew_BaselineBailouts, " Incoming frame ptr = %p",
          builder.startFrame());
  if (iter.maybeCallee()) {
    JitSpew(JitSpew_BaselineBailouts, " Callee function (%s:%u:%u)",
            iter.script()->filename(), iter.script()->lineno(),
            iter.script()->column().oneOriginValue());
  } else {
    JitSpew(JitSpew_BaselineBailouts, " No callee!");
  }

  if (iter.isConstructing()) {
    JitSpew(JitSpew_BaselineBailouts, " Constructing!");
  } else {
    JitSpew(JitSpew_BaselineBailouts, " Not constructing!");
  }

  JitSpew(JitSpew_BaselineBailouts, " Restoring frames:");

  // Rebuild one Baseline frame per (possibly inlined) Ion frame, innermost
  // bailed frame last (builder.done() stops at the frame we resume into).
  while (true) {
    // Skip recover instructions as they are already recovered by
    // |initInstructionResults|.
    snapIter.settleOnFrame();

    JitSpew(JitSpew_BaselineBailouts, " FrameNo %zu", builder.frameNo());

    if (!builder.buildOneFrame()) {
      MOZ_ASSERT(cx->isExceptionPending());
      return false;
    }

    if (builder.done()) {
      break;
    }

    builder.nextFrame();
  }
  JitSpew(JitSpew_BaselineBailouts, " Done restoring frames");

  // Note: the builder may have refined the bailout kind while building
  // frames (e.g. for failed proxy-trap or CheckIsObject validations).
  BailoutKind bailoutKind = builder.bailoutKind();

  if (!builder.outermostFrameFormals().empty()) {
    // Set the first frame's formals, see the comment in InitFromBailout.
    Value* argv = builder.startFrame()->actualArgs();
    mozilla::PodCopy(argv, builder.outermostFrameFormals().begin(),
                     builder.outermostFrameFormals().length());
  }

  // Do stack check.
  bool overRecursed = false;
  BaselineBailoutInfo* info = builder.info();
  size_t numBytesToPush = info->copyStackTop - info->copyStackBottom;
  MOZ_ASSERT((numBytesToPush % sizeof(uintptr_t)) == 0);
  uint8_t* newsp = info->incomingStack - numBytesToPush;
#ifdef JS_SIMULATOR
  if (Simulator::Current()->overRecursed(uintptr_t(newsp))) {
    overRecursed = true;
  }
#else
  AutoCheckRecursionLimit recursion(cx);
  if (!recursion.checkWithStackPointerDontReport(cx, newsp)) {
    overRecursed = true;
  }
#endif
  if (overRecursed) {
    JitSpew(JitSpew_BaselineBailouts, " Overrecursion check failed!");
    ReportOverRecursed(cx);
    return false;
  }

  // Take the reconstructed baseline stack so it doesn't get freed when builder
  // destructs.
  info = builder.takeBuffer();
  info->numFrames = builder.frameNo() + 1;
  info->bailoutKind.emplace(bailoutKind);
  *bailoutInfo = info;
  guardRemoveRematerializedFramesFromDebugger.release();
  return true;
}

// Invalidate outerScript's Ion code (if any is still attached) after a
// bailout indicating the compiled code should not be re-entered.
static void InvalidateAfterBailout(JSContext* cx, HandleScript outerScript,
                                   const char* reason) {
  // In some cases, the computation of recover instruction can invalidate the
  // Ion script before we reach the end of the bailout. Thus, if the outer
  // script no longer have any Ion script attached, then we just skip the
  // invalidation.
  //
  // For example, such case can happen if the template object for an unboxed
  // objects no longer match the content of its properties (see Bug 1174547)
  if (!outerScript->hasIonScript()) {
    JitSpew(JitSpew_BaselineBailouts, "Ion script is already invalidated");
    return;
  }

  // Record a invalidation for this script in the jit hints map
  if (cx->runtime()->jitRuntime()->hasJitHintsMap()) {
    JitHintsMap* jitHints = cx->runtime()->jitRuntime()->getJitHintsMap();
    jitHints->recordInvalidation(outerScript);
  }

  MOZ_ASSERT(!outerScript->ionScript()->invalidated());

  JitSpew(JitSpew_BaselineBailouts, "Invalidating due to %s", reason);
  Invalidate(cx, outerScript);
}

// Flag the inner script as having failed a lexical (TDZ) check and
// invalidate both the outer compilation and the inner script's own Ion code.
static void HandleLexicalCheckFailure(JSContext* cx, HandleScript outerScript,
                                      HandleScript innerScript) {
  JitSpew(JitSpew_IonBailouts,
          "Lexical check failure %s:%u:%u, inlined into %s:%u:%u",
          innerScript->filename(), innerScript->lineno(),
          innerScript->column().oneOriginValue(), outerScript->filename(),
          outerScript->lineno(), outerScript->column().oneOriginValue());

  if (!innerScript->failedLexicalCheck()) {
    innerScript->setFailedLexicalCheck();
  }

  InvalidateAfterBailout(cx, outerScript, "lexical check failure");
  if (innerScript->hasIonScript()) {
    Invalidate(cx, innerScript);
  }
}

// Copy a Debugger-rematerialized Ion frame's (possibly mutated) state into
// the freshly built BaselineFrame at inline depth |inlineDepth|. A no-op
// when no rematerialized frame exists for (fp, inlineDepth).
static bool CopyFromRematerializedFrame(JSContext* cx, JitActivation* act,
                                        uint8_t* fp, size_t inlineDepth,
                                        BaselineFrame* frame) {
  RematerializedFrame* rematFrame =
      act->lookupRematerializedFrame(fp, inlineDepth);

  // We might not have rematerialized a frame if the user never requested a
  // Debugger.Frame for it.
  if (!rematFrame) {
    return true;
  }

  MOZ_ASSERT(rematFrame->script() == frame->script());
  MOZ_ASSERT(rematFrame->numActualArgs() == frame->numActualArgs());

  frame->setEnvironmentChain(rematFrame->environmentChain());

  if (frame->isFunctionFrame()) {
    frame->thisArgument() = rematFrame->thisArgument();
  }

  // Copy over actual arguments and fixed (local) slots.
  for (unsigned i = 0; i < frame->numActualArgs(); i++) {
    frame->argv()[i] = rematFrame->argv()[i];
  }

  for (size_t i = 0; i < frame->script()->nfixed(); i++) {
    *frame->valueSlot(i) = rematFrame->locals()[i];
  }

  if (frame->script()->noScriptRval()) {
    frame->setReturnValue(UndefinedValue());
  } else {
    frame->setReturnValue(rematFrame->returnValue());
  }

  // Don't copy over the hasCachedSavedFrame bit. The new BaselineFrame we're
  // building has a different AbstractFramePtr, so it won't be found in the
  // LiveSavedFrameCache if we look there.

  JitSpew(JitSpew_BaselineBailouts,
          " Copied from rematerialized frame at (%p,%zu)", fp, inlineDepth);

  // Propagate the debuggee frame flag. For the case where the Debugger did
  // not rematerialize an Ion frame, the baseline frame has its debuggee
  // flag set iff its script is considered a debuggee. See the debuggee case
  // in InitFromBailout.
  if (rematFrame->isDebuggee()) {
    frame->setIsDebuggee();
    return DebugAPI::handleIonBailout(cx, rematFrame, frame);
  }

  return true;
}

// What to do with the invalidated-or-not Ion code after a bailout of a
// particular kind (see the switch in FinishBailoutToBaseline).
enum class BailoutAction {
  InvalidateImmediately,
  InvalidateIfFrequent,
  DisableIfFrequent,
  NoAction
};

// Complete a bailout after the reconstructed frames have been copied onto the
// stack: fix up environment/arguments objects, merge in rematerialized
// Debugger frames, unwind environments if we bailed while unwinding for an
// exception, and finally decide how to treat the Ion code based on the
// bailout kind.
bool jit::FinishBailoutToBaseline(BaselineBailoutInfo* bailoutInfoArg) {
  JitSpew(JitSpew_BaselineBailouts, " Done restoring frames");

  JSContext* cx = TlsContext.get();
  // Use UniquePtr to free the bailoutInfo before we return, and root it for
  // the tempId field.
  Rooted<UniquePtr<BaselineBailoutInfo>> bailoutInfo(cx, bailoutInfoArg);
  bailoutInfoArg = nullptr;

  MOZ_DIAGNOSTIC_ASSERT(*bailoutInfo->bailoutKind != BailoutKind::Unreachable);

  // jit::Bailout(), jit::InvalidationBailout(), and jit::HandleException()
  // should have reset the counter to zero.
  MOZ_ASSERT(!cx->isInUnsafeRegion());

  BaselineFrame* topFrame = GetTopBaselineFrame(cx);

  // We have to get rid of the rematerialized frame, whether it is
  // restored or unwound.
  uint8_t* incomingStack = bailoutInfo->incomingStack;
  auto guardRemoveRematerializedFramesFromDebugger =
      mozilla::MakeScopeExit([&] {
        JitActivation* act = cx->activation()->asJit();
        act->removeRematerializedFramesFromDebugger(cx, incomingStack);
      });

  // Ensure the frame has a call object if it needs one.
  if (!EnsureHasEnvironmentObjects(cx, topFrame)) {
    return false;
  }

  // Create arguments objects for bailed out frames, to maintain the invariant
  // that script->needsArgsObj() implies frame->hasArgsObj().
  RootedScript innerScript(cx, nullptr);
  RootedScript outerScript(cx, nullptr);

  MOZ_ASSERT(cx->currentlyRunningInJit());
  JSJitFrameIter iter(cx->activation()->asJit());
  uint8_t* outerFp = nullptr;

  // Iter currently points at the exit frame. Get the previous frame
  // (which must be a baseline frame), and set it as the last profiling
  // frame.
  if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
          cx->runtime())) {
    MOZ_ASSERT(iter.prevType() == FrameType::BaselineJS);
    JitFrameLayout* fp = reinterpret_cast<JitFrameLayout*>(iter.prevFp());
    cx->jitActivation->setLastProfilingFrame(fp);
  }

  uint32_t numFrames = bailoutInfo->numFrames;
  MOZ_ASSERT(numFrames > 0);

  // Walk the newly restored baseline frames (frame 0 is the innermost),
  // creating arguments objects where needed and recording the inner/outer
  // scripts for the bailout-kind handling below.
  uint32_t frameno = 0;
  while (frameno < numFrames) {
    MOZ_ASSERT(!iter.isIonJS());

    if (iter.isBaselineJS()) {
      BaselineFrame* frame = iter.baselineFrame();
      MOZ_ASSERT(frame->script()->hasBaselineScript());

      // If the frame doesn't even have a env chain set yet, then it's resuming
      // into the the prologue before the env chain is initialized. Any
      // necessary args object will also be initialized there.
      if (frame->environmentChain() && frame->script()->needsArgsObj()) {
        ArgumentsObject* argsObj;
        if (frame->hasArgsObj()) {
          argsObj = &frame->argsObj();
        } else {
          argsObj = ArgumentsObject::createExpected(cx, frame);
          if (!argsObj) {
            return false;
          }
        }

        // The arguments is a local binding and needsArgsObj does not
        // check if it is clobbered. Ensure that the local binding
        // restored during bailout before storing the arguments object
        // to the slot.
        SetFrameArgumentsObject(cx, frame, argsObj);
      }

      if (frameno == 0) {
        innerScript = frame->script();
      }

      if (frameno == numFrames - 1) {
        outerScript = frame->script();
        outerFp = iter.fp();
        MOZ_ASSERT(outerFp == incomingStack);
      }

      frameno++;
    }

    ++iter;
  }

  MOZ_ASSERT(innerScript);
  MOZ_ASSERT(outerScript);
  MOZ_ASSERT(outerFp);

  // If we rematerialized Ion frames due to debug mode toggling, copy their
  // values into the baseline frame. We need to do this even when debug mode
  // is off, as we should respect the mutations made while debug mode was
  // on.
  JitActivation* act = cx->activation()->asJit();
  if (act->hasRematerializedFrame(outerFp)) {
    JSJitFrameIter iter(act);
    size_t inlineDepth = numFrames;
    bool ok = true;
    while (inlineDepth > 0) {
      if (iter.isBaselineJS()) {
        // We must attempt to copy all rematerialized frames over,
        // even if earlier ones failed, to invoke the proper frame
        // cleanup in the Debugger.
        if (!CopyFromRematerializedFrame(cx, act, outerFp, --inlineDepth,
                                         iter.baselineFrame())) {
          ok = false;
        }
      }
      ++iter;
    }

    if (!ok) {
      return false;
    }

    // After copying from all the rematerialized frames, remove them from
    // the table to keep the table up to date.
    guardRemoveRematerializedFramesFromDebugger.release();
    act->removeRematerializedFrame(outerFp);
  }

  // If we are unwinding for an exception, we need to unwind scopes.
  // See |SettleOnTryNote|
  if (bailoutInfo->faultPC) {
    EnvironmentIter ei(cx, topFrame, bailoutInfo->faultPC);
    UnwindEnvironment(cx, ei, bailoutInfo->tryPC);
  }

  BailoutKind bailoutKind = *bailoutInfo->bailoutKind;
  JitSpew(JitSpew_BaselineBailouts,
          " Restored outerScript=(%s:%u:%u,%u) innerScript=(%s:%u:%u,%u) "
          "(bailoutKind=%u)",
          outerScript->filename(), outerScript->lineno(),
          outerScript->column().oneOriginValue(), outerScript->getWarmUpCount(),
          innerScript->filename(), innerScript->lineno(),
          innerScript->column().oneOriginValue(), innerScript->getWarmUpCount(),
          (unsigned)bailoutKind);

  // Decide, per bailout kind, whether/when to invalidate the Ion script.
  BailoutAction action = BailoutAction::InvalidateImmediately;
  DebugOnly<bool> saveFailedICHash = false;
  switch (bailoutKind) {
    case BailoutKind::TranspiledCacheIR:
      // A transpiled guard failed. If this happens often enough, we will
      // invalidate and recompile.
      action = BailoutAction::InvalidateIfFrequent;
      saveFailedICHash = true;
      break;

    case BailoutKind::StubFoldingGuardMultipleShapes:
      action = BailoutAction::InvalidateIfFrequent;
      saveFailedICHash = true;
      // A GuardMultipleShapes LIR instruction bailed out.
      //
      // Call noteStubFoldingBailout so that AttachBaselineCacheIRStub can
      // distinguish this from a bailout from GuardShapeList. This lets us avoid
      // an invalidation if we add a new case to the folded stub. We also need
      // to store the connection between the inner script and the outer script
      // so that we can find the outer script after monomorphic inlining.
      cx->zone()->jitZone()->noteStubFoldingBailout(innerScript, outerScript);
      break;

    case BailoutKind::SpeculativePhi:
      // A value of an unexpected type flowed into a phi.
      MOZ_ASSERT(!outerScript->hadSpeculativePhiBailout());
      if (!outerScript->hasIonScript() ||
          outerScript->ionScript()->numFixableBailouts() == 0) {
        outerScript->setHadSpeculativePhiBailout();
      }
      InvalidateAfterBailout(cx, outerScript, "phi specialization failure");
      break;

    case BailoutKind::TypePolicy:
      // A conversion inserted by a type policy failed.
      // We will invalidate and disable recompilation if this happens too often.
      action = BailoutAction::DisableIfFrequent;
      break;

    case BailoutKind::LICM:
      // LICM may cause spurious bailouts by hoisting unreachable
      // guards past branches. To prevent bailout loops, when an
      // instruction hoisted by LICM bails out, we update the
      // IonScript and resume in baseline. If the guard would have
      // been executed anyway, then we will hit the baseline fallback,
      // and call noteBaselineFallback. If that does not happen,
      // then the next time we reach this point, we will disable LICM
      // for this script.
      MOZ_ASSERT(!outerScript->hadLICMInvalidation());
      if (outerScript->hasIonScript()) {
        switch (outerScript->ionScript()->licmState()) {
          case IonScript::LICMState::NeverBailed:
            outerScript->ionScript()->setHadLICMBailout();
            action = BailoutAction::NoAction;
            break;
          case IonScript::LICMState::Bailed:
            outerScript->setHadLICMInvalidation();
            InvalidateAfterBailout(cx, outerScript, "LICM failure");
            break;
          case IonScript::LICMState::BailedAndHitFallback:
            // This bailout is not due to LICM. Treat it like a
            // regular TranspiledCacheIR bailout.
            action = BailoutAction::InvalidateIfFrequent;
            break;
        }
      }
      break;

    case BailoutKind::InstructionReordering:
      // An instruction moved up by instruction reordering bailed out.
1989 outerScript->setHadReorderingBailout(); 1990 action = BailoutAction::InvalidateIfFrequent; 1991 break; 1992 1993 case BailoutKind::HoistBoundsCheck: 1994 // An instruction hoisted or generated by tryHoistBoundsCheck bailed out. 1995 MOZ_ASSERT(!outerScript->failedBoundsCheck()); 1996 outerScript->setFailedBoundsCheck(); 1997 InvalidateAfterBailout(cx, outerScript, "bounds check failure"); 1998 break; 1999 2000 case BailoutKind::EagerTruncation: 2001 // An eager truncation generated by range analysis bailed out. 2002 // To avoid bailout loops, we set a flag to avoid generating 2003 // eager truncations next time we recompile. 2004 MOZ_ASSERT(!outerScript->hadEagerTruncationBailout()); 2005 outerScript->setHadEagerTruncationBailout(); 2006 InvalidateAfterBailout(cx, outerScript, "eager range analysis failure"); 2007 break; 2008 2009 case BailoutKind::UnboxFolding: 2010 // An unbox that was hoisted to fold with a load bailed out. 2011 // To avoid bailout loops, we set a flag to avoid folding 2012 // loads with unboxes next time we recompile. 2013 MOZ_ASSERT(!outerScript->hadUnboxFoldingBailout()); 2014 outerScript->setHadUnboxFoldingBailout(); 2015 InvalidateAfterBailout(cx, outerScript, "unbox folding failure"); 2016 break; 2017 2018 case BailoutKind::TooManyArguments: 2019 // A funapply or spread call had more than JIT_ARGS_LENGTH_MAX arguments. 2020 // We will invalidate and disable recompilation if this happens too often. 2021 action = BailoutAction::DisableIfFrequent; 2022 break; 2023 2024 case BailoutKind::DuringVMCall: 2025 if (cx->isExceptionPending()) { 2026 // We are bailing out to catch an exception. We will invalidate 2027 // and disable recompilation if this happens too often. 2028 action = BailoutAction::DisableIfFrequent; 2029 } 2030 break; 2031 2032 case BailoutKind::Finally: 2033 // We are bailing out for a finally block. We will invalidate 2034 // and disable recompilation if this happens too often. 
2035 action = BailoutAction::DisableIfFrequent; 2036 break; 2037 2038 case BailoutKind::Inevitable: 2039 case BailoutKind::Debugger: 2040 // Do nothing. 2041 action = BailoutAction::NoAction; 2042 break; 2043 2044 case BailoutKind::FirstExecution: 2045 // We reached an instruction that had not been executed yet at 2046 // the time we compiled. If this happens often enough, we will 2047 // invalidate and recompile. 2048 action = BailoutAction::InvalidateIfFrequent; 2049 saveFailedICHash = true; 2050 break; 2051 2052 case BailoutKind::UninitializedLexical: 2053 HandleLexicalCheckFailure(cx, outerScript, innerScript); 2054 break; 2055 2056 case BailoutKind::ThrowCheckIsObject: 2057 MOZ_ASSERT(!cx->isExceptionPending()); 2058 return ThrowCheckIsObject(cx, CheckIsObjectKind::IteratorReturn); 2059 2060 case BailoutKind::ThrowProxyTrapMustReportSameValue: 2061 case BailoutKind::ThrowProxyTrapMustReportUndefined: { 2062 MOZ_ASSERT(!cx->isExceptionPending()); 2063 RootedId rootedId(cx, bailoutInfo->tempId); 2064 ScriptedProxyHandler::reportGetTrapValidationError( 2065 cx, rootedId, 2066 bailoutKind == BailoutKind::ThrowProxyTrapMustReportSameValue 2067 ? ScriptedProxyHandler::GetTrapValidationResult:: 2068 MustReportSameValue 2069 : ScriptedProxyHandler::GetTrapValidationResult:: 2070 MustReportUndefined); 2071 return false; 2072 } 2073 2074 case BailoutKind::IonExceptionDebugMode: 2075 // Return false to resume in HandleException with reconstructed 2076 // baseline frame. 2077 return false; 2078 2079 case BailoutKind::OnStackInvalidation: 2080 // The script has already been invalidated. There is nothing left to do. 
2081 action = BailoutAction::NoAction; 2082 break; 2083 2084 default: 2085 MOZ_CRASH("Unknown bailout kind!"); 2086 } 2087 2088 #ifdef DEBUG 2089 if (MOZ_UNLIKELY(cx->runtime()->jitRuntime()->ionBailAfterEnabled())) { 2090 action = BailoutAction::NoAction; 2091 } 2092 #endif 2093 2094 if (outerScript->hasIonScript()) { 2095 IonScript* ionScript = outerScript->ionScript(); 2096 switch (action) { 2097 case BailoutAction::InvalidateImmediately: 2098 // The IonScript should already have been invalidated. 2099 MOZ_ASSERT(false); 2100 break; 2101 case BailoutAction::InvalidateIfFrequent: 2102 ionScript->incNumFixableBailouts(); 2103 if (ionScript->shouldInvalidate()) { 2104 #ifdef DEBUG 2105 // To detect bailout loops, we save a hash of the CacheIR used to 2106 // compile this script, and assert that we don't recompile with the 2107 // exact same inputs. Some of our bailout detection strategies, like 2108 // LICM and stub folding, rely on bailing out, updating some state 2109 // when we hit the baseline fallback, and using that information when 2110 // we invalidate. If the frequentBailoutThreshold is set too low, we 2111 // will instead invalidate the first time we bail out, so we don't 2112 // have the chance to make those decisions. That doesn't happen in 2113 // regular code, so we just skip bailout loop detection in that case. 2114 if (saveFailedICHash && !JitOptions.disableBailoutLoopCheck && 2115 JitOptions.frequentBailoutThreshold > 1) { 2116 outerScript->jitScript()->setFailedICHash(ionScript->icHash()); 2117 } 2118 #endif 2119 InvalidateAfterBailout(cx, outerScript, "fixable bailouts"); 2120 } 2121 break; 2122 case BailoutAction::DisableIfFrequent: 2123 ionScript->incNumUnfixableBailouts(); 2124 if (ionScript->shouldInvalidateAndDisable()) { 2125 InvalidateAfterBailout(cx, outerScript, "unfixable bailouts"); 2126 outerScript->disableIon(); 2127 } 2128 break; 2129 case BailoutAction::NoAction: 2130 break; 2131 } 2132 } 2133 2134 return true; 2135 }