tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

JitFrames.cpp (93735B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/JitFrames-inl.h"
      8 
      9 #include "mozilla/ScopeExit.h"
     10 
     11 #include <algorithm>
     12 
     13 #include "builtin/ModuleObject.h"
     14 #include "builtin/Sorting.h"
     15 #include "gc/GC.h"
     16 #include "jit/BaselineFrame.h"
     17 #include "jit/BaselineIC.h"
     18 #include "jit/BaselineJIT.h"
     19 #include "jit/Ion.h"
     20 #include "jit/IonScript.h"
     21 #include "jit/JitRuntime.h"
     22 #include "jit/JitSpewer.h"
     23 #include "jit/LIR.h"
     24 #include "jit/Recover.h"
     25 #include "jit/Safepoints.h"
     26 #include "jit/ScriptFromCalleeToken.h"
     27 #include "jit/Snapshots.h"
     28 #include "jit/VMFunctions.h"
     29 #include "js/Exception.h"
     30 #include "js/friend/DumpFunctions.h"  // js::DumpObject, js::DumpValue
     31 #include "vm/Interpreter.h"
     32 #include "vm/JSContext.h"
     33 #include "vm/JSFunction.h"
     34 #include "vm/JSObject.h"
     35 #include "vm/JSScript.h"
     36 #include "wasm/WasmBuiltins.h"
     37 #include "wasm/WasmInstance.h"
     38 
     39 #include "builtin/Sorting-inl.h"
     40 #include "debugger/DebugAPI-inl.h"
     41 #include "jit/JSJitFrameIter-inl.h"
     42 #include "vm/GeckoProfiler-inl.h"
     43 #include "vm/JSScript-inl.h"
     44 #include "vm/Probes-inl.h"
     45 
     46 namespace js {
     47 namespace jit {
     48 
     49 // Given a slot index, returns the offset, in bytes, of that slot from a
     50 // JitFrameLayout. Slot distances are uniform across architectures; however,
     51 // the distance does depend on the size of the frame header.
     52 static inline int32_t OffsetOfFrameSlot(int32_t slot) { return -slot; }
     53 
     54 static inline uint8_t* AddressOfFrameSlot(JitFrameLayout* fp, int32_t slot) {
     55  return (uint8_t*)fp + OffsetOfFrameSlot(slot);
     56 }
     57 
     58 static inline uintptr_t ReadFrameSlot(JitFrameLayout* fp, int32_t slot) {
     59  return *(uintptr_t*)AddressOfFrameSlot(fp, slot);
     60 }
     61 
     62 static inline void WriteFrameSlot(JitFrameLayout* fp, int32_t slot,
     63                                  uintptr_t value) {
     64  *(uintptr_t*)AddressOfFrameSlot(fp, slot) = value;
     65 }
     66 
     67 static inline double ReadFrameDoubleSlot(JitFrameLayout* fp, int32_t slot) {
     68  return *(double*)AddressOfFrameSlot(fp, slot);
     69 }
     70 
     71 static inline float ReadFrameFloat32Slot(JitFrameLayout* fp, int32_t slot) {
     72  return *(float*)AddressOfFrameSlot(fp, slot);
     73 }
     74 
     75 static inline int32_t ReadFrameInt32Slot(JitFrameLayout* fp, int32_t slot) {
     76  return *(int32_t*)AddressOfFrameSlot(fp, slot);
     77 }
     78 
     79 static inline bool ReadFrameBooleanSlot(JitFrameLayout* fp, int32_t slot) {
     80  return *(bool*)AddressOfFrameSlot(fp, slot);
     81 }
     82 
// Number of snapshot slots occupied by the frame's argument slots plus the
// script's fixed locals. Expression-stack slots in a snapshot come after
// this base.
     83 static uint32_t NumArgAndLocalSlots(const InlineFrameIterator& frame) {
     84  JSScript* script = frame.script();
     85  return CountArgSlots(script, frame.maybeCalleeTemplate()) + script->nfixed();
     86 }
     87 
// Identify which TrampolineNative the given frame is running by mapping the
// callee function's native JIT entry back through the JitRuntime's table.
     88 static TrampolineNative TrampolineNativeForFrame(
     89    JSRuntime* rt, TrampolineNativeFrameLayout* layout) {
     90  JSFunction* nativeFun = CalleeTokenToFunction(layout->calleeToken());
     91  MOZ_ASSERT(nativeFun->isBuiltinNative());
     92  void** jitEntry = nativeFun->nativeJitEntry();
     93  return rt->jitRuntime()->trampolineNativeForJitEntry(jitEntry);
     94 }
     95 
// Release per-frame malloc'd state owned by a trampoline-native frame that is
// being popped by exception unwinding. For the sort trampolines this frees the
// ArraySortData stashed in the frame's frame-data area.
     96 static void UnwindTrampolineNativeFrame(JSRuntime* rt,
     97                                        const JSJitFrameIter& frame) {
     98  auto* layout = (TrampolineNativeFrameLayout*)frame.fp();
     99  TrampolineNative native = TrampolineNativeForFrame(rt, layout);
    100  switch (native) {
    101    case TrampolineNative::ArraySort:
    102    case TrampolineNative::TypedArraySort:
    103      layout->getFrameData<ArraySortData>()->freeMallocData();
    104      break;
    105    case TrampolineNative::Count:
           // Sentinel enumerator; never a real frame.
    106      MOZ_CRASH("Invalid value");
    107  }
    108 }
    109 
// Close (or unwind) the live iterator guarded by a ForIn/Destructuring try
// note in an Ion (inline) frame during exception unwinding. The iterator
// object is recovered from the frame's snapshot; for destructuring, the
// adjacent "done" value decides whether IteratorClose must run at all.
    110 static void CloseLiveIteratorIon(JSContext* cx,
    111                                 const InlineFrameIterator& frame,
    112                                 const TryNote* tn) {
    113  MOZ_ASSERT(tn->kind() == TryNoteKind::ForIn ||
    114             tn->kind() == TryNoteKind::Destructuring);
    115 
    116  bool isDestructuring = tn->kind() == TryNoteKind::Destructuring;
    117  MOZ_ASSERT_IF(!isDestructuring, tn->stackDepth > 0);
    118  MOZ_ASSERT_IF(isDestructuring, tn->stackDepth > 1);
    119 
    120  // Save any pending exception, because some recover operations call into
    121  // AutoUnsafeCallWithABI functions, which don't allow pending exceptions.
    122  JS::AutoSaveExceptionState savedExc(cx);
    123 
    124  SnapshotIterator si = frame.snapshotIterator();
    125 
    126  // Skip stack slots until we reach the iterator object on the stack. For
    127  // the destructuring case, we also need to get the "done" value.
    128  uint32_t stackSlot = tn->stackDepth;
    129  uint32_t adjust = isDestructuring ? 2 : 1;
    130  uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - adjust;
    131 
    132  for (unsigned i = 0; i < skipSlots; i++) {
    133    si.skip();
    134  }
    135 
         // Fallback_DoNothing: if the slot can't be recovered, maybeRead yields
         // a non-object and the release assert below catches it.
    136  MaybeReadFallback recover(cx, cx->activation()->asJit(), &frame.frame(),
    137                            MaybeReadFallback::Fallback_DoNothing);
    138  Value v = si.maybeRead(recover);
    139  MOZ_RELEASE_ASSERT(v.isObject());
    140  RootedObject iterObject(cx, &v.toObject());
    141 
    142  if (isDestructuring) {
    143    RootedValue doneValue(cx, si.read());
    144    MOZ_RELEASE_ASSERT(!doneValue.isMagic());
    145    bool done = ToBoolean(doneValue);
    146    // Do not call IteratorClose if the destructuring iterator is already
    147    // done.
    148    if (done) {
    149      return;
    150    }
    151  }
    152 
    153  // Restore any pending exception before closing the iterator.
    154  savedExc.restore();
    155 
    156  if (cx->isExceptionPending()) {
    157    if (tn->kind() == TryNoteKind::ForIn) {
    158      CloseIterator(iterObject);
    159    } else {
    160      IteratorCloseForException(cx, iterObject);
    161    }
    162  } else {
         // No pending exception means an uncatchable exception is propagating.
    163    UnwindIteratorForUncatchableException(iterObject);
    164  }
    165 }
    166 
// Try-note filter for Ion frames: a note is live only if its stack depth is
// within the frame's current expression-stack depth, computed as the snapshot
// allocation count minus the args+locals base.
    167 class IonTryNoteFilter {
    168  uint32_t depth_;
    169 
    170 public:
    171  explicit IonTryNoteFilter(const InlineFrameIterator& frame) {
    172    uint32_t base = NumArgAndLocalSlots(frame);
    173    SnapshotIterator si = frame.snapshotIterator();
    174    MOZ_ASSERT(si.numAllocations() >= base);
    175    depth_ = si.numAllocations() - base;
    176  }
    177 
    178  bool operator()(const TryNote* note) { return note->stackDepth <= depth_; }
    179 };
    180 
// Iterates the try notes covering the current pc of an Ion inline frame,
// restricted by IonTryNoteFilter to notes within the live stack depth.
    181 class TryNoteIterIon : public TryNoteIter<IonTryNoteFilter> {
    182 public:
    183  TryNoteIterIon(JSContext* cx, const InlineFrameIterator& frame)
    184      : TryNoteIter(cx, frame.script(), frame.pc(), IonTryNoteFilter(frame)) {}
    185 };
    186 
// Decide whether exception handling must bail this Ion frame out to baseline
// so the Debugger can observe it (forced return from an inlined frame, a live
// onExceptionUnwind hook, or an already-rematerialized debuggee frame). Never
// bails twice: hitBailoutException short-circuits after a failed bailout.
    187 static bool ShouldBailoutForDebugger(JSContext* cx,
    188                                     const InlineFrameIterator& frame,
    189                                     bool hitBailoutException) {
    190  if (hitBailoutException) {
    191    MOZ_ASSERT(!cx->isPropagatingForcedReturn());
    192    return false;
    193  }
    194 
    195  // Bail out if we're propagating a forced return from an inlined frame,
    196  // even if the realm is no longer a debuggee.
    197  if (cx->isPropagatingForcedReturn() && frame.more()) {
    198    return true;
    199  }
    200 
    201  if (!cx->realm()->isDebuggee()) {
    202    return false;
    203  }
    204 
    205  // Bail out if there's a catchable exception and we are the debuggee of a
    206  // Debugger with a live onExceptionUnwind hook.
    207  if (cx->isExceptionPending() &&
    208      DebugAPI::hasExceptionUnwindHook(cx->global())) {
    209    return true;
    210  }
    211 
    212  // Bail out if a Debugger has observed this frame (e.g., for onPop).
    213  JitActivation* act = cx->activation()->asJit();
    214  RematerializedFrame* rematFrame =
    215      act->lookupRematerializedFrame(frame.frame().fp(), frame.frameNo());
    216  return rematFrame && rematFrame->isDebuggee();
    217 }
    218 
// Called after try-note processing found no handler in this Ion frame. If we
// are leaving because of a forced return or a closing generator, rematerialize
// the frame, fetch its return value, and arrange |rfe| to resume via the
// ForcedReturnIon path. Otherwise the exception keeps propagating and this is
// a no-op.
    219 static void OnLeaveIonFrame(JSContext* cx, const InlineFrameIterator& frame,
    220                            ResumeFromException* rfe) {
    221  bool returnFromThisFrame =
    222      cx->isPropagatingForcedReturn() || cx->isClosingGenerator();
    223  if (!returnFromThisFrame) {
    224    return;
    225  }
    226 
    227  JitActivation* act = cx->activation()->asJit();
    228  RematerializedFrame* rematFrame = nullptr;
    229  {
         // Rematerialization must not observe the pending exception state.
    230    JS::AutoSaveExceptionState savedExc(cx);
    231    rematFrame = act->getRematerializedFrame(cx, frame.frame(), frame.frameNo(),
    232                                             IsLeavingFrame::Yes);
    233    if (!rematFrame) {
    234      return;
    235    }
    236  }
    237 
    238  MOZ_ASSERT(!frame.more());
    239 
    240  if (cx->isClosingGenerator()) {
    241    HandleClosingGeneratorReturn(cx, rematFrame, /*frameOk=*/true);
    242  } else {
    243    cx->clearPropagatingForcedReturn();
    244  }
    245 
    246  Value& rval = rematFrame->returnValue();
    247  MOZ_RELEASE_ASSERT(!rval.isMagic());
    248 
    249  // Set both framePointer and stackPointer to the address of the
    250  // JitFrameLayout.
    251  rfe->kind = ExceptionResumeKind::ForcedReturnIon;
    252  rfe->framePointer = frame.frame().fp();
    253  rfe->stackPointer = frame.frame().fp();
         // The forced-return value travels in the exception slot.
    254  rfe->exception = rval;
    255  rfe->exceptionStack = NullValue();
    256 
    257  act->removeIonFrameRecovery(frame.frame().jsFrame());
    258  act->removeRematerializedFrame(frame.frame().fp());
    259 }
    260 
// Handle an exception in one Ion inline frame: possibly bail out for the
// debugger, then walk the frame's live try notes closing iterators and
// bailing out into catch/finally handlers. On return, |rfe| is either set to
// a Bailout/ForcedReturnIon resume point or left as EntryFrame so the caller
// continues unwinding. |hitBailoutException| records a failed bailout so we
// never attempt a second one for this frame.
    261 static void HandleExceptionIon(JSContext* cx, const InlineFrameIterator& frame,
    262                               ResumeFromException* rfe,
    263                               bool* hitBailoutException) {
    264  if (ShouldBailoutForDebugger(cx, frame, *hitBailoutException)) {
    265    // We do the following:
    266    //
    267    //   1. Bailout to baseline to reconstruct a baseline frame.
    268    //   2. Resume immediately into the exception tail afterwards, and
    269    //      handle the exception again with the top frame now a baseline
    270    //      frame.
    271    //
    272    // An empty exception info denotes that we're propagating an Ion
    273    // exception due to debug mode, which BailoutIonToBaseline needs to
    274    // know. This is because we might not be able to fully reconstruct up
    275    // to the stack depth at the snapshot, as we could've thrown in the
    276    // middle of a call.
    277    ExceptionBailoutInfo propagateInfo(cx);
    278    if (ExceptionHandlerBailout(cx, frame, rfe, propagateInfo)) {
    279      return;
    280    }
    281    *hitBailoutException = true;
    282  }
    283 
    284  RootedScript script(cx, frame.script());
    285 
    286  for (TryNoteIterIon tni(cx, frame); !tni.done(); ++tni) {
    287    const TryNote* tn = *tni;
    288    switch (tn->kind()) {
    289      case TryNoteKind::ForIn:
    290      case TryNoteKind::Destructuring:
    291        CloseLiveIteratorIon(cx, frame, tn);
    292        break;
    293 
    294      case TryNoteKind::Catch:
    295        // If we're closing a generator, we have to skip catch blocks.
    296        if (cx->isClosingGenerator()) {
    297          break;
    298        }
    299 
    300        if (cx->isExceptionPending()) {
    301          // Ion can compile try-catch, but bailing out to catch
    302          // exceptions is slow. Reset the warm-up counter so that if we
    303          // catch many exceptions we won't Ion-compile the script.
    304          script->resetWarmUpCounterToDelayIonCompilation();
    305 
    306          if (*hitBailoutException) {
    307            break;
    308          }
    309 
    310          // Bailout at the start of the catch block.
    311          jsbytecode* catchPC = script->offsetToPC(tn->start + tn->length);
    312          ExceptionBailoutInfo excInfo(cx, frame.frameNo(), catchPC,
    313                                       tn->stackDepth);
    314          if (ExceptionHandlerBailout(cx, frame, rfe, excInfo)) {
    315            // Record exception locations to allow scope unwinding in
    316            // |FinishBailoutToBaseline|
    317            MOZ_ASSERT(cx->isExceptionPending());
    318            rfe->bailoutInfo->tryPC =
    319                UnwindEnvironmentToTryPc(frame.script(), tn);
    320            rfe->bailoutInfo->faultPC = frame.pc();
    321            return;
    322          }
    323 
    324          *hitBailoutException = true;
    325          MOZ_ASSERT(cx->isExceptionPending() || cx->hadUncatchableException());
    326        }
    327        break;
    328 
    329      case TryNoteKind::Finally: {
    330        if (!cx->isExceptionPending()) {
    331          // We don't catch uncatchable exceptions.
    332          break;
    333        }
    334 
    335        script->resetWarmUpCounterToDelayIonCompilation();
    336 
    337        if (*hitBailoutException) {
    338          break;
    339        }
    340 
    341        // Bailout at the start of the finally block.
    342        jsbytecode* finallyPC = script->offsetToPC(tn->start + tn->length);
    343        ExceptionBailoutInfo excInfo(cx, frame.frameNo(), finallyPC,
    344                                     tn->stackDepth);
    345 
             // Capture the pending exception (and its stack) so the finally
             // block can rethrow it; fall back to undefined/null on failure.
    346        RootedValue exception(cx);
    347        RootedValue exceptionStack(cx);
    348        if (!cx->getPendingException(&exception) ||
    349            !cx->getPendingExceptionStack(&exceptionStack)) {
    350          exception = UndefinedValue();
    351          exceptionStack = NullValue();
    352        }
    353        excInfo.setFinallyException(exception.get(), exceptionStack.get());
    354        cx->clearPendingException();
    355 
    356        if (ExceptionHandlerBailout(cx, frame, rfe, excInfo)) {
    357          // Record exception locations to allow scope unwinding in
    358          // |FinishBailoutToBaseline|
    359          rfe->bailoutInfo->tryPC =
    360              UnwindEnvironmentToTryPc(frame.script(), tn);
    361          rfe->bailoutInfo->faultPC = frame.pc();
    362          return;
    363        }
    364 
    365        *hitBailoutException = true;
    366        MOZ_ASSERT(cx->isExceptionPending());
    367        break;
    368      }
    369 
    370      case TryNoteKind::ForOf:
    371      case TryNoteKind::Loop:
    372        break;
    373 
    374      // TryNoteKind::ForOfIterClose is handled internally by the try note
    375      // iterator.
    376      default:
    377        MOZ_CRASH("Unexpected try note");
    378    }
    379  }
    380 
    381  OnLeaveIonFrame(cx, frame, rfe);
    382 }
    383 
// Run the debug epilogue when unwinding out of a baseline frame. If the
// debugger turns the unwind into a forced return, set |rfe| to resume via
// ForcedReturnBaseline with fp/sp pointing at this frame.
    384 static void OnLeaveBaselineFrame(JSContext* cx, const JSJitFrameIter& frame,
    385                                 jsbytecode* pc, ResumeFromException* rfe,
    386                                 bool frameOk) {
    387  BaselineFrame* baselineFrame = frame.baselineFrame();
    388  bool returnFromThisFrame = jit::DebugEpilogue(cx, baselineFrame, pc, frameOk);
    389  if (returnFromThisFrame) {
    390    rfe->kind = ExceptionResumeKind::ForcedReturnBaseline;
    391    rfe->framePointer = frame.fp();
    392    rfe->stackPointer = reinterpret_cast<uint8_t*>(baselineFrame);
    393  }
    394 }
    395 
// Compute the frame and stack pointers for resuming a baseline frame at try
// note |tn|: sp sits below the BaselineFrame header, the fixed locals, and
// the |tn->stackDepth| expression-stack values covered by the note.
    396 static inline void BaselineFrameAndStackPointersFromTryNote(
    397    const TryNote* tn, const JSJitFrameIter& frame, uint8_t** framePointer,
    398    uint8_t** stackPointer) {
    399  JSScript* script = frame.baselineFrame()->script();
    400  *framePointer = frame.fp();
    401  *stackPointer = *framePointer - BaselineFrame::Size() -
    402                  (script->nfixed() + tn->stackDepth) * sizeof(Value);
    403 }
    404 
// Prepare to resume at try note |tn|: unwind the environment chain to the
// note's try pc, compute the resume fp/sp into |rfe|, and set |*pc| to the
// note's handler (the bytecode just past the protected range).
    405 static void SettleOnTryNote(JSContext* cx, const TryNote* tn,
    406                            const JSJitFrameIter& frame, EnvironmentIter& ei,
    407                            ResumeFromException* rfe, jsbytecode** pc) {
    408  RootedScript script(cx, frame.baselineFrame()->script());
    409 
    410  // Unwind environment chain (pop block objects).
    411  if (cx->isExceptionPending()) {
    412    UnwindEnvironment(cx, ei, UnwindEnvironmentToTryPc(script, tn));
    413  }
    414 
    415  // Compute base pointer and stack pointer.
    416  BaselineFrameAndStackPointersFromTryNote(tn, frame, &rfe->framePointer,
    417                                           &rfe->stackPointer);
    418 
    419  // Compute the pc.
    420  *pc = script->offsetToPC(tn->start + tn->length);
    421 }
    422 
// Try-note filter for baseline frames: a note is live only if its stack
// depth is within the frame's current expression-stack depth (value slots
// minus fixed locals).
    423 class BaselineTryNoteFilter {
    424  const JSJitFrameIter& frame_;
    425 
    426 public:
    427  explicit BaselineTryNoteFilter(const JSJitFrameIter& frame) : frame_(frame) {}
    428  bool operator()(const TryNote* note) {
    429    BaselineFrame* frame = frame_.baselineFrame();
    430 
    431    uint32_t numValueSlots = frame_.baselineFrameNumValueSlots();
    432    MOZ_RELEASE_ASSERT(numValueSlots >= frame->script()->nfixed());
    433 
    434    uint32_t currDepth = numValueSlots - frame->script()->nfixed();
    435    return note->stackDepth <= currDepth;
    436  }
    437 };
    438 
// Iterates the try notes covering |pc| in a baseline frame, restricted by
// BaselineTryNoteFilter to notes within the live stack depth.
    439 class TryNoteIterBaseline : public TryNoteIter<BaselineTryNoteFilter> {
    440 public:
    441  TryNoteIterBaseline(JSContext* cx, const JSJitFrameIter& frame,
    442                      jsbytecode* pc)
    443      : TryNoteIter(cx, frame.script(), pc, BaselineTryNoteFilter(frame)) {}
    444 };
    445 
    446 // Close all live iterators on a BaselineFrame due to exception unwinding. The
    447 // pc parameter is updated to where the envs have been unwound to.
         // Only ForIn notes matter here: with an uncatchable exception we only
         // unwind iterators, never run catch/finally or IteratorClose.
    448 static void CloseLiveIteratorsBaselineForUncatchableException(
    449    JSContext* cx, const JSJitFrameIter& frame, jsbytecode* pc) {
    450  for (TryNoteIterBaseline tni(cx, frame, pc); !tni.done(); ++tni) {
    451    const TryNote* tn = *tni;
    452    switch (tn->kind()) {
    453      case TryNoteKind::ForIn: {
    454        uint8_t* framePointer;
    455        uint8_t* stackPointer;
    456        BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
    457                                                 &stackPointer);
             // The iterator object sits on top of the note's stack slice.
    458        Value iterValue(*(Value*)stackPointer);
    459        RootedObject iterObject(cx, &iterValue.toObject());
    460        UnwindIteratorForUncatchableException(iterObject);
    461        break;
    462      }
    463 
    464      default:
    465        break;
    466    }
    467  }
    468 }
    469 
// Walk the live try notes of a baseline interpreter frame looking for a
// handler for the pending exception. Returns true when done: either a
// catch/finally was settled on (rfe->kind set, |*pc| updated) or no handler
// exists in this frame. Returns false only when closing a destructuring
// iterator threw a new exception — the caller restarts exception handling.
    470 static bool ProcessTryNotesBaseline(JSContext* cx, const JSJitFrameIter& frame,
    471                                    EnvironmentIter& ei,
    472                                    ResumeFromException* rfe, jsbytecode** pc) {
    473  MOZ_ASSERT(frame.baselineFrame()->runningInInterpreter(),
    474             "Caller must ensure frame is an interpreter frame");
    475 
    476  RootedScript script(cx, frame.baselineFrame()->script());
    477 
    478  for (TryNoteIterBaseline tni(cx, frame, *pc); !tni.done(); ++tni) {
    479    const TryNote* tn = *tni;
    480 
    481    MOZ_ASSERT(cx->isExceptionPending());
    482    switch (tn->kind()) {
    483      case TryNoteKind::Catch: {
    484        // If we're closing a generator, we have to skip catch blocks.
    485        if (cx->isClosingGenerator()) {
    486          break;
    487        }
    488 
    489        SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
    490 
    491        // Ion can compile try-catch, but bailing out to catch
    492        // exceptions is slow. Reset the warm-up counter so that if we
    493        // catch many exceptions we won't Ion-compile the script.
    494        script->resetWarmUpCounterToDelayIonCompilation();
    495 
    496        // Resume at the start of the catch block.
    497        frame.baselineFrame()->setInterpreterFields(*pc);
    498        rfe->kind = ExceptionResumeKind::Catch;
    499        if (IsBaselineInterpreterEnabled()) {
    500          const BaselineInterpreter& interp =
    501              cx->runtime()->jitRuntime()->baselineInterpreter();
    502          rfe->target = interp.interpretOpAddr().value;
    503        }
    504        return true;
    505      }
    506 
    507      case TryNoteKind::Finally: {
    508        SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
    509 
    510        frame.baselineFrame()->setInterpreterFields(*pc);
    511        rfe->kind = ExceptionResumeKind::Finally;
    512        if (IsBaselineInterpreterEnabled()) {
    513          const BaselineInterpreter& interp =
    514              cx->runtime()->jitRuntime()->baselineInterpreter();
    515          rfe->target = interp.interpretOpAddr().value;
    516        }
    517 
    518        // Drop the exception instead of leaking cross compartment data.
             // The pending exception is transferred into |rfe| so the finally
             // block can rethrow it after running.
    519        RootedValue exception(cx);
    520        RootedValue exceptionStack(cx);
    521        if (!cx->getPendingException(&exception) ||
    522            !cx->getPendingExceptionStack(&exceptionStack)) {
    523          rfe->exception = UndefinedValue();
    524          rfe->exceptionStack = NullValue();
    525        } else {
    526          rfe->exception = exception;
    527          rfe->exceptionStack = exceptionStack;
    528        }
    529        cx->clearPendingException();
    530        return true;
    531      }
    532 
    533      case TryNoteKind::ForIn: {
    534        uint8_t* framePointer;
    535        uint8_t* stackPointer;
    536        BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
    537                                                 &stackPointer);
    538        Value iterValue(*reinterpret_cast<Value*>(stackPointer));
    539        JSObject* iterObject = &iterValue.toObject();
    540        CloseIterator(iterObject);
    541        break;
    542      }
    543 
    544      case TryNoteKind::Destructuring: {
    545        uint8_t* framePointer;
    546        uint8_t* stackPointer;
    547        BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
    548                                                 &stackPointer);
    549        // Note: if this ever changes, also update the
    550        // TryNoteKind::Destructuring code in WarpBuilder.cpp!
             // Stack layout: [..., iterator, done] — "done" is on top.
    551        RootedValue doneValue(cx, *(reinterpret_cast<Value*>(stackPointer)));
    552        MOZ_RELEASE_ASSERT(!doneValue.isMagic());
    553        bool done = ToBoolean(doneValue);
    554        if (!done) {
    555          Value iterValue(*(reinterpret_cast<Value*>(stackPointer) + 1));
    556          RootedObject iterObject(cx, &iterValue.toObject());
    557          if (!IteratorCloseForException(cx, iterObject)) {
               // IteratorClose threw: settle here and report failure so the
               // caller restarts exception handling with the new exception.
    558            SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
    559            return false;
    560          }
    561        }
    562        break;
    563      }
    564 
    565      case TryNoteKind::ForOf:
    566      case TryNoteKind::Loop:
    567        break;
    568 
    569      // TryNoteKind::ForOfIterClose is handled internally by the try note
    570      // iterator.
    571      default:
    572        MOZ_CRASH("Invalid try note");
    573    }
    574  }
    575  return true;
    576 }
    577 
// Handle an exception in a baseline frame: force the frame into the Baseline
// Interpreter, fire the debugger's onExceptionUnwind hook, then process try
// notes. If a handler is found, |rfe| describes where to resume; otherwise
// iterators are unwound and the debug epilogue runs via OnLeaveBaselineFrame.
    578 static void HandleExceptionBaseline(JSContext* cx, JSJitFrameIter& frame,
    579                                    CommonFrameLayout* prevFrame,
    580                                    ResumeFromException* rfe) {
    581  MOZ_ASSERT(frame.isBaselineJS());
    582  MOZ_ASSERT(prevFrame);
    583 
    584  jsbytecode* pc;
    585  frame.baselineScriptAndPc(nullptr, &pc);
    586 
    587  // Ensure the BaselineFrame is an interpreter frame. This is easy to do and
    588  // simplifies the code below and interaction with DebugModeOSR.
    589  //
    590  // Note that we never return to this frame via the previous frame's return
    591  // address. We could set the return address to nullptr to ensure it's never
    592  // used, but the profiler expects a non-null return value for its JitCode map
    593  // lookup so we have to use an address in the interpreter code instead.
    594  if (!frame.baselineFrame()->runningInInterpreter()) {
    595    const BaselineInterpreter& interp =
    596        cx->runtime()->jitRuntime()->baselineInterpreter();
    597    uint8_t* retAddr = interp.codeRaw();
    598    BaselineFrame* baselineFrame = frame.baselineFrame();
    599 
    600    // Suppress profiler sampling while we fix up the frame to ensure the
    601    // sampler thread doesn't see an inconsistent state.
    602    AutoSuppressProfilerSampling suppressProfilerSampling(cx);
    603    baselineFrame->switchFromJitToInterpreterForExceptionHandler(cx, pc);
    604    prevFrame->setReturnAddress(retAddr);
    605 
    606    // Ensure the current iterator's resumePCInCurrentFrame_ isn't used
    607    // anywhere.
    608    frame.setResumePCInCurrentFrame(nullptr);
    609  }
    610 
    611  bool frameOk = false;
    612  RootedScript script(cx, frame.baselineFrame()->script());
    613 
    614  if (script->hasScriptCounts()) {
    615    PCCounts* counts = script->getThrowCounts(pc);
    616    // If we failed to allocate, then skip the increment and continue to
    617    // handle the exception.
    618    if (counts) {
    619      counts->numExec()++;
    620    }
    621  }
    622 
    623  bool hasTryNotes = !script->trynotes().empty();
    624 
        // Retried whenever a hook or IteratorClose replaces the pending
        // exception with a new one.
    625 again:
    626  if (cx->isExceptionPending()) {
    627    if (!cx->isClosingGenerator()) {
    628      if (!DebugAPI::onExceptionUnwind(cx, frame.baselineFrame())) {
    629        if (!cx->isExceptionPending()) {
    630          goto again;
    631        }
    632      }
    633      // Ensure that the debugger hasn't returned 'true' while clearing the
    634      // exception state.
    635      MOZ_ASSERT(cx->isExceptionPending());
    636    }
    637 
    638    if (hasTryNotes) {
    639      EnvironmentIter ei(cx, frame.baselineFrame(), pc);
    640      if (!ProcessTryNotesBaseline(cx, frame, ei, rfe, &pc)) {
    641        goto again;
    642      }
    643      if (rfe->kind != ExceptionResumeKind::EntryFrame) {
    644        // No need to increment the PCCounts number of execution here,
    645        // as the interpreter increments any PCCounts if present.
    646        MOZ_ASSERT_IF(script->hasScriptCounts(), script->maybeGetPCCounts(pc));
    647        return;
    648      }
    649    }
    650 
    651    frameOk = HandleClosingGeneratorReturn(cx, frame.baselineFrame(), frameOk);
    652  } else {
    653    if (hasTryNotes) {
    654      CloseLiveIteratorsBaselineForUncatchableException(cx, frame, pc);
    655    }
    656 
    657    // We may be propagating a forced return from a debugger hook function.
    658    if (MOZ_UNLIKELY(cx->isPropagatingForcedReturn())) {
    659      cx->clearPropagatingForcedReturn();
    660      frameOk = true;
    661    }
    662  }
    663 
    664  OnLeaveBaselineFrame(cx, frame, pc, rfe, frameOk);
    665 }
    666 
// Compute the JitFrameLayout the profiler's lastProfilingFrame should point
// at once exception handling resumes execution, based on where |rfe| says we
// will resume. Returns nullptr when no JS JIT frame remains on this path.
    667 static JitFrameLayout* GetLastProfilingFrame(ResumeFromException* rfe) {
    668  switch (rfe->kind) {
    669    case ExceptionResumeKind::EntryFrame:
    670    case ExceptionResumeKind::WasmInterpEntry:
    671    case ExceptionResumeKind::WasmCatch:
    672      return nullptr;
    673 
    674    // The following all return into Baseline or Ion frames.
    675    case ExceptionResumeKind::Catch:
    676    case ExceptionResumeKind::Finally:
    677    case ExceptionResumeKind::ForcedReturnBaseline:
    678    case ExceptionResumeKind::ForcedReturnIon:
    679      return reinterpret_cast<JitFrameLayout*>(rfe->framePointer);
    680 
    681    // When resuming into a bailed-out ion frame, use the bailout info to
    682    // find the frame we are resuming into.
    683    case ExceptionResumeKind::Bailout:
    684      return reinterpret_cast<JitFrameLayout*>(rfe->bailoutInfo->incomingStack);
    685  }
    686 
        // Unreachable if the switch above is exhaustive; keeps compilers happy.
    687  MOZ_CRASH("Invalid ResumeFromException type!");
    688  return nullptr;
    689 }
    690 
    691 void HandleException(ResumeFromException* rfe) {
    692  JSContext* cx = TlsContext.get();
    693 
    694  if (!CheckForOOMStackTraceInterrupt(cx)) {
    695    return;
    696  }
    697 
    698  cx->realm()->localAllocSite = nullptr;
    699 #ifdef DEBUG
    700  if (!IsPortableBaselineInterpreterEnabled()) {
    701    cx->runtime()->jitRuntime()->clearDisallowArbitraryCode();
    702  }
    703 
    704  // Reset the counter when we bailed after MDebugEnterGCUnsafeRegion, but
    705  // before the matching MDebugLeaveGCUnsafeRegion.
    706  //
    707  // NOTE: EnterJit ensures the counter is zero when we enter JIT code.
    708  cx->resetInUnsafeRegion();
    709 #endif
    710 
    711  auto resetProfilerFrame = mozilla::MakeScopeExit([=] {
    712    if (!IsPortableBaselineInterpreterEnabled()) {
    713      if (!cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
    714              cx->runtime())) {
    715        return;
    716      }
    717    }
    718 
    719    MOZ_ASSERT(cx->jitActivation == cx->profilingActivation());
    720 
    721    auto* lastProfilingFrame = GetLastProfilingFrame(rfe);
    722    cx->jitActivation->setLastProfilingFrame(lastProfilingFrame);
    723  });
    724 
    725  rfe->kind = ExceptionResumeKind::EntryFrame;
    726 
    727  JitSpew(JitSpew_IonInvalidate, "handling exception");
    728 
    729  JitActivation* activation = cx->activation()->asJit();
    730 
    731 #ifdef CHECK_OSIPOINT_REGISTERS
    732  if (JitOptions.checkOsiPointRegisters) {
    733    activation->setCheckRegs(false);
    734  }
    735 #endif
    736 
    737  JitFrameIter iter(cx->activation()->asJit(),
    738                    /* mustUnwindActivation = */ true);
    739 
    740  // Live wasm code on the stack is kept alive (in TraceJitActivation) by
    741  // marking the instance of every wasm::Frame found by WasmFrameIter.
    742  // However, we're going to pop frames while iterating which means that a GC
    743  // during this loop could collect the code of frames whose code is still on
    744  // the stack.
    745  //
    746  // This is actually mostly fine: after we return to the Wasm throw stub, we'll
    747  // jump to the JIT's exception handling trampoline. However, we must keep the
    748  // throw stub alive itself which is owned by the innermost instance.
    749  Rooted<WasmInstanceObject*> keepAlive(cx);
    750  if (iter.isWasm()) {
    751    keepAlive = iter.asWasm().instance()->object();
    752  }
    753 
    754  CommonFrameLayout* prevJitFrame = nullptr;
    755  while (!iter.done()) {
    756    if (iter.isWasm()) {
    757      prevJitFrame = nullptr;
    758      wasm::HandleExceptionWasm(cx, iter, rfe);
    759      if (rfe->kind == ExceptionResumeKind::WasmCatch) {
    760        // Jump to a Wasm try-catch handler.
    761        return;
    762      }
    763      // We either reached a JS JIT frame or we stopped at the activation's Wasm
    764      // interpreter entry frame.
    765      MOZ_ASSERT(iter.isJSJit() || (iter.isWasm() && iter.done()));
    766      continue;
    767    }
    768 
    769    JSJitFrameIter& frame = iter.asJSJit();
    770 
    771    // JIT code can enter same-compartment realms, so reset cx->realm to
    772    // this frame's realm.
    773    if (frame.isScripted() || frame.isTrampolineNative()) {
    774      cx->setRealmForJitExceptionHandler(iter.realm());
    775    }
    776 
    777    if (frame.isIonJS()) {
    778      // Search each inlined frame for live iterator objects, and close
    779      // them.
    780      InlineFrameIterator frames(cx, &frame);
    781 
    782      // Invalidation state will be the same for all inlined scripts in the
    783      // frame.
    784      IonScript* ionScript = nullptr;
    785      bool invalidated = frame.checkInvalidation(&ionScript);
    786 
    787      // If we hit OOM or overrecursion while bailing out, we don't
    788      // attempt to bail out a second time for this Ion frame. Just unwind
    789      // and continue at the next frame.
    790      bool hitBailoutException = false;
    791      for (;;) {
    792        HandleExceptionIon(cx, frames, rfe, &hitBailoutException);
    793 
    794        if (rfe->kind == ExceptionResumeKind::Bailout ||
    795            rfe->kind == ExceptionResumeKind::ForcedReturnIon) {
    796          if (invalidated) {
    797            ionScript->decrementInvalidationCount(cx->gcContext());
    798          }
    799          return;
    800        }
    801 
    802        MOZ_ASSERT(rfe->kind == ExceptionResumeKind::EntryFrame);
    803 
    804        // When profiling, each frame popped needs a notification that
    805        // the function has exited, so invoke the probe that a function
    806        // is exiting.
    807 
    808        JSScript* script = frames.script();
    809        probes::ExitScript(cx, script, script->function(),
    810                           /* popProfilerFrame = */ false);
    811        if (!frames.more()) {
    812          break;
    813        }
    814        ++frames;
    815      }
    816 
    817      // Remove left-over state which might have been needed for bailout.
    818      activation->removeIonFrameRecovery(frame.jsFrame());
    819      activation->removeRematerializedFrame(frame.fp());
    820 
    821      // If invalidated, decrement the number of frames remaining on the
    822      // stack for the given IonScript.
    823      if (invalidated) {
    824        ionScript->decrementInvalidationCount(cx->gcContext());
    825      }
    826 
    827    } else if (frame.isBaselineJS()) {
    828      HandleExceptionBaseline(cx, frame, prevJitFrame, rfe);
    829 
    830      if (rfe->kind != ExceptionResumeKind::EntryFrame &&
    831          rfe->kind != ExceptionResumeKind::ForcedReturnBaseline) {
    832        return;
    833      }
    834 
    835      // Unwind profiler pseudo-stack
    836      JSScript* script = frame.script();
    837      probes::ExitScript(cx, script, script->function(),
    838                         /* popProfilerFrame = */ false);
    839 
    840      if (rfe->kind == ExceptionResumeKind::ForcedReturnBaseline) {
    841        return;
    842      }
    843    } else if (frame.isTrampolineNative()) {
    844      UnwindTrampolineNativeFrame(cx->runtime(), frame);
    845    }
    846 
    847    prevJitFrame = frame.current();
    848    ++iter;
    849  }
    850 
    851  // Return to C++ code by returning to the activation's JS or Wasm entry frame.
    852  if (iter.isJSJit()) {
    853    MOZ_ASSERT(rfe->kind == ExceptionResumeKind::EntryFrame);
    854    rfe->framePointer = iter.asJSJit().current()->callerFramePtr();
    855    rfe->stackPointer =
    856        iter.asJSJit().fp() + CommonFrameLayout::offsetOfReturnAddress();
    857  } else {
    858    MOZ_ASSERT(iter.isWasm());
    859    // In case of no handler, exit wasm via ret(). The exception handling
    860    // trampoline will return InterpFailInstanceReg in InstanceReg to signal
    861    // to the interpreter entry stub to do a failure return.
    862    rfe->kind = ExceptionResumeKind::WasmInterpEntry;
    863    rfe->framePointer = (uint8_t*)iter.asWasm().unwoundCallerFP();
    864    rfe->stackPointer = (uint8_t*)iter.asWasm().unwoundAddressOfReturnAddress();
    865    rfe->instance = nullptr;
    866    rfe->target = nullptr;
    867  }
    868 }
    869 
// Turns a JitFrameLayout into an UnwoundJit ExitFrameLayout.
//
// |frame| must be the innermost scripted JS frame of |act| (checked in
// DEBUG builds below). After this call, act->jsExitFP() points at |frame|
// and the frame's footer is flagged as an unwound JIT exit, so frame
// iterators treat everything above it as already popped.
void EnsureUnwoundJitExitFrame(JitActivation* act, JitFrameLayout* frame) {
  // Reinterpret the JS frame in place as an exit frame; only the footer
  // flag and the activation's exit FP change.
  ExitFrameLayout* exitFrame = reinterpret_cast<ExitFrameLayout*>(frame);

  if (act->jsExitFP() == (uint8_t*)frame) {
    // If we already called this function for the current frame, do
    // nothing.
    MOZ_ASSERT(exitFrame->isUnwoundJitExit());
    return;
  }

#ifdef DEBUG
  // Walk past non-scripted frames (stubs, exits) to verify |frame| really
  // is the top scripted frame of this activation.
  JSJitFrameIter iter(act);
  while (!iter.isScripted()) {
    ++iter;
  }
  MOZ_ASSERT(iter.current() == frame, "|frame| must be the top JS frame");

  MOZ_ASSERT(!!act->jsExitFP());
  MOZ_ASSERT((uint8_t*)exitFrame->footer() >= act->jsExitFP(),
             "Must have space for ExitFooterFrame before jsExitFP");
#endif

  // Publish the new exit FP, then mark the footer as an unwound JIT exit.
  act->setJSExitFP((uint8_t*)frame);
  exitFrame->footer()->setUnwoundJitExitFrame();
  MOZ_ASSERT(exitFrame->isUnwoundJitExit());
}
    897 
    898 JSScript* MaybeForwardedScriptFromCalleeToken(CalleeToken token) {
    899  switch (GetCalleeTokenTag(token)) {
    900    case CalleeToken_Script:
    901      return MaybeForwarded(CalleeTokenToScript(token));
    902    case CalleeToken_Function:
    903    case CalleeToken_FunctionConstructing: {
    904      JSFunction* fun = MaybeForwarded(CalleeTokenToFunction(token));
    905      return MaybeForwarded(fun)->nonLazyScript();
    906    }
    907  }
    908  MOZ_CRASH("invalid callee token tag");
    909 }
    910 
    911 CalleeToken TraceCalleeToken(JSTracer* trc, CalleeToken token) {
    912  switch (CalleeTokenTag tag = GetCalleeTokenTag(token)) {
    913    case CalleeToken_Function:
    914    case CalleeToken_FunctionConstructing: {
    915      JSFunction* fun = CalleeTokenToFunction(token);
    916      TraceRoot(trc, &fun, "jit-callee");
    917      return CalleeToToken(fun, tag == CalleeToken_FunctionConstructing);
    918    }
    919    case CalleeToken_Script: {
    920      JSScript* script = CalleeTokenToScript(token);
    921      TraceRoot(trc, &script, "jit-script");
    922      return CalleeToToken(script);
    923    }
    924    default:
    925      MOZ_CRASH("unknown callee token type");
    926  }
    927 }
    928 
    929 uintptr_t* JitFrameLayout::slotRef(SafepointSlotEntry where) {
    930  if (where.stack) {
    931    return (uintptr_t*)((uint8_t*)this - where.slot);
    932  }
    933  return (uintptr_t*)((uint8_t*)thisAndActualArgs() + where.slot);
    934 }
    935 
#ifdef DEBUG
// Debug-only sanity check: the footer's data word must encode a VMFunction
// id, i.e. lie in [ExitFrameType::VMFunction,
// ExitFrameType::VMFunction + NumVMFunctions()).
void ExitFooterFrame::assertValidVMFunctionId() const {
  MOZ_ASSERT(data_ >= uintptr_t(ExitFrameType::VMFunction));
  MOZ_ASSERT(data_ - uintptr_t(ExitFrameType::VMFunction) < NumVMFunctions());
}
#endif
    942 
    943 #ifdef JS_NUNBOX32
    944 static inline uintptr_t ReadAllocation(const JSJitFrameIter& frame,
    945                                       const LAllocation* a) {
    946  if (a->isGeneralReg()) {
    947    Register reg = a->toGeneralReg()->reg();
    948    return frame.machineState().read(reg);
    949  }
    950  return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));
    951 }
    952 #endif
    953 
// Trace |this|, the argument vector, and (for constructing calls)
// new.target of a JIT frame whose callee token is a function. Frames with a
// script-only callee token have no argument vector and are skipped.
static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
                                  JitFrameLayout* layout) {
  // Trace |this| and the actual and formal arguments of a JIT frame.
  //
  // Tracing of formal arguments of an Ion frame is taken care of by the frame's
  // safepoint/snapshot. We skip tracing formal arguments if the script doesn't
  // use |arguments| or rest, because the register allocator can spill values to
  // argument slots in this case.
  //
  // For other frames such as LazyLink frames or InterpreterStub frames, we
  // always trace all actual and formal arguments.

  if (!CalleeTokenIsFunction(layout->calleeToken())) {
    return;
  }

  JSFunction* fun = CalleeTokenToFunction(layout->calleeToken());

  // The frame stores max(numActualArgs, numFormals) argument slots.
  size_t numFormals = fun->nargs();
  size_t numArgs = std::max(layout->numActualArgs(), numFormals);
  size_t firstArg = 0;

  // See the comment above: formals of an Ion frame are covered by the
  // safepoint/snapshot unless the script can read its frame args directly.
  if (frame.isIonScripted() &&
      !fun->nonLazyScript()->mayReadFrameArgsDirectly()) {
    firstArg = numFormals;
  }

  Value* argv = layout->thisAndActualArgs();

  // Trace |this|.
  TraceRoot(trc, argv, "jit-thisv");

  // Trace arguments. Note + 1 for thisv.
  for (size_t i = firstArg; i < numArgs; i++) {
    TraceRoot(trc, &argv[i + 1], "jit-argv");
  }

  // Always trace the new.target from the frame. It's not in the snapshots.
  // +1 to pass |this|
  if (CalleeTokenIsConstructing(layout->calleeToken())) {
    TraceRoot(trc, &argv[1 + numArgs], "jit-newTarget");
  }
}
    997 
    998 #ifdef JS_NUNBOX32
    999 static inline void WriteAllocation(const JSJitFrameIter& frame,
   1000                                   const LAllocation* a, uintptr_t value) {
   1001  if (a->isGeneralReg()) {
   1002    Register reg = a->toGeneralReg()->reg();
   1003    frame.machineState().write(reg, value);
   1004  } else {
   1005    *frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
   1006  }
   1007 }
   1008 #endif
   1009 
// Trace everything reachable from an Ion JS frame: the callee token,
// |this|/arguments, and every GC-typed stack slot and spilled register
// described by the safepoint for the frame's resume PC.
//
// NOTE: the safepoint entry streams (gc slots, spills, value/nunbox slots,
// slots-or-elements, wasm anyrefs) must be consumed in exactly this order;
// the SafepointReader advances through one encoded stream.
static void TraceIonJSFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));

  IonScript* ionScript = nullptr;
  if (frame.checkInvalidation(&ionScript)) {
    // This frame has been invalidated, meaning that its IonScript is no
    // longer reachable through the callee token (JSFunction/JSScript->ion
    // is now nullptr or recompiled). Manually trace it here.
    ionScript->trace(trc);
  } else {
    ionScript = frame.ionScriptFromCalleeToken();
  }

  TraceThisAndArguments(trc, frame, frame.jsFrame());

  // Look up the safepoint describing live GC things at the resume PC.
  const SafepointIndex* si =
      ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());

  SafepointReader safepoint(ionScript, si);

  // Scan through slots which contain pointers (or on punboxing systems,
  // actual values).
  SafepointSlotEntry entry;

  while (safepoint.getGcSlot(&entry)) {
    uintptr_t* ref = layout->slotRef(entry);
    TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(ref),
                            "ion-gc-slot");
  }

  // Spilled registers sit below the spill base; walk them in the same
  // (backward) order they were pushed and trace each according to the
  // register set it belongs to.
  uintptr_t* spill = frame.spillBase();
  LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
  LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
  LiveGeneralRegisterSet wasmAnyRefRegs = safepoint.wasmAnyRefSpills();
  for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
       iter.more(); ++iter) {
    --spill;
    if (gcRegs.has(*iter)) {
      TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill),
                              "ion-gc-spill");
    } else if (valueRegs.has(*iter)) {
      TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
    } else if (wasmAnyRefRegs.has(*iter)) {
      TraceRoot(trc, reinterpret_cast<wasm::AnyRef*>(spill),
                "ion-anyref-spill");
    }
  }

#ifdef JS_PUNBOX64
  while (safepoint.getValueSlot(&entry)) {
    Value* v = (Value*)layout->slotRef(entry);
    TraceRoot(trc, v, "ion-gc-slot");
  }
#else
  // On nunbox systems a Value's tag and payload may live in separate
  // allocations ("torn" values): reassemble, trace, and write the payload
  // back if the GC moved the referent.
  LAllocation type, payload;
  while (safepoint.getNunboxSlot(&type, &payload)) {
    JSValueTag tag = JSValueTag(ReadAllocation(frame, &type));
    uintptr_t rawPayload = ReadAllocation(frame, &payload);

    Value v = Value::fromTagAndPayload(tag, rawPayload);
    TraceRoot(trc, &v, "ion-torn-value");

    if (v != Value::fromTagAndPayload(tag, rawPayload)) {
      // GC moved the value, replace the stored payload.
      rawPayload = v.toNunboxPayload();
      WriteAllocation(frame, &payload, rawPayload);
    }
  }
#endif

  // Skip over slots/elements to get to wasm anyrefs. (These entries are
  // consumed by the minor-GC update path instead; see
  // UpdateIonJSFrameForMinorGC.)
  while (safepoint.getSlotsOrElementsSlot(&entry)) {
  }

  while (safepoint.getWasmAnyRefSlot(&entry)) {
    wasm::AnyRef* v = (wasm::AnyRef*)layout->slotRef(entry);
    TraceRoot(trc, v, "ion-wasm-anyref-slot");
  }
}
   1091 
// Trace a frame that is in the middle of bailing out from Ion to Baseline.
// Such a frame has no usable safepoint, so we walk its snapshot allocations
// instead.
static void TraceBailoutFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));

  // We have to trace the list of actual arguments, as only formal arguments
  // are represented in the Snapshot.
  TraceThisAndArguments(trc, frame, frame.jsFrame());

  // Under a bailout, do not have a Safepoint to only iterate over GC-things.
  // Thus we use a SnapshotIterator to trace all the locations which would be
  // used to reconstruct the Baseline frame.
  //
  // Note that at the time where this function is called, we have not yet
  // started to reconstruct baseline frames.

  // The vector of recover instructions is already traced as part of the
  // JitActivation.
  SnapshotIterator snapIter(frame,
                            frame.activation()->bailoutData()->machineState());

  // For each instruction, we read the allocations without evaluating the
  // recover instruction, nor reconstructing the frame. We are only looking at
  // tracing readable allocations.
  while (true) {
    while (snapIter.moreAllocations()) {
      snapIter.traceAllocation(trc);
    }

    if (!snapIter.moreInstructions()) {
      break;
    }
    snapIter.nextInstruction();
  }
}
   1127 
// After a minor GC, update any slots/elements pointers stored in an Ion JS
// frame (spilled registers and stack slots) that pointed into the nursery.
//
// NOTE: the SafepointReader streams must be consumed in the same fixed
// order as in TraceIonJSFrame; entries we don't care about here are read
// and discarded just to advance the reader.
static void UpdateIonJSFrameForMinorGC(JSRuntime* rt,
                                       const JSJitFrameIter& frame) {
  // Minor GCs may move slots/elements allocated in the nursery. Update
  // any slots/elements pointers stored in this frame.

  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  IonScript* ionScript = nullptr;
  if (frame.checkInvalidation(&ionScript)) {
    // This frame has been invalidated, meaning that its IonScript is no
    // longer reachable through the callee token (JSFunction/JSScript->ion
    // is now nullptr or recompiled).
  } else {
    ionScript = frame.ionScriptFromCalleeToken();
  }

  Nursery& nursery = rt->gc.nursery();

  const SafepointIndex* si =
      ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
  SafepointReader safepoint(ionScript, si);

  // Forward slots/elements pointers held in spilled registers.
  LiveGeneralRegisterSet slotsRegs = safepoint.slotsOrElementsSpills();
  uintptr_t* spill = frame.spillBase();
  for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
       iter.more(); ++iter) {
    --spill;
    if (slotsRegs.has(*iter)) {
      nursery.forwardBufferPointer(spill);
    }
  }

  // Skip to the right place in the safepoint
  SafepointSlotEntry entry;
  while (safepoint.getGcSlot(&entry)) {
  }

#ifdef JS_PUNBOX64
  while (safepoint.getValueSlot(&entry)) {
  }
#else
  LAllocation type, payload;
  while (safepoint.getNunboxSlot(&type, &payload)) {
  }
#endif

  // Forward slots/elements pointers held in stack slots.
  while (safepoint.getSlotsOrElementsSlot(&entry)) {
    nursery.forwardBufferPointer(layout->slotRef(entry));
  }
}
   1178 
// Trace the IC stub referenced by a baseline stub frame, plus any values
// the stub stored in its local tracing slots.
static void TraceBaselineStubFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  // Trace the ICStub pointer stored in the stub frame. This is necessary
  // so that we don't destroy the stub code after unlinking the stub.

  MOZ_ASSERT(frame.type() == FrameType::BaselineStub);
  BaselineStubFrameLayout* layout = (BaselineStubFrameLayout*)frame.fp();

  if (ICStub* stub = layout->maybeStubPtr()) {
    if (stub->isFallback()) {
      // Fallback stubs use runtime-wide trampoline code we don't need to trace.
      MOZ_ASSERT(stub->usesTrampolineCode());
    } else {
      MOZ_ASSERT(stub->toCacheIRStub()->makesGCCalls());
      stub->toCacheIRStub()->trace(trc);

#ifndef ENABLE_PORTABLE_BASELINE_INTERP
      // Local tracing slots only exist for stubs with real jitcode; the
      // portable baseline interpreter build has none.
      for (int i = 0; i < stub->jitCode()->localTracingSlots(); ++i) {
        TraceRoot(trc, layout->locallyTracedValuePtr(i),
                  "baseline-local-tracing-slot");
      }
#endif
    }
  }
}
   1203 
   1204 static void TraceWeakBaselineStubFrame(JSTracer* trc,
   1205                                       const JSJitFrameIter& frame) {
   1206  MOZ_ASSERT(frame.type() == FrameType::BaselineStub);
   1207  BaselineStubFrameLayout* layout = (BaselineStubFrameLayout*)frame.fp();
   1208 
   1209  if (ICStub* stub = layout->maybeStubPtr()) {
   1210    if (!stub->isFallback()) {
   1211      MOZ_ASSERT(stub->toCacheIRStub()->makesGCCalls());
   1212      stub->toCacheIRStub()->traceWeak(trc);
   1213    }
   1214  }
   1215 }
   1216 
   1217 static void TraceIonICCallFrame(JSTracer* trc, const JSJitFrameIter& frame) {
   1218  MOZ_ASSERT(frame.type() == FrameType::IonICCall);
   1219  IonICCallFrameLayout* layout = (IonICCallFrameLayout*)frame.fp();
   1220  TraceRoot(trc, layout->stubCode(), "ion-ic-call-code");
   1221 
   1222  for (int i = 0; i < (*layout->stubCode())->localTracingSlots(); ++i) {
   1223    TraceRoot(trc, layout->locallyTracedValuePtr(i),
   1224              "ion-ic-local-tracing-slot");
   1225  }
   1226 }
   1227 
   1228 #if defined(JS_CODEGEN_ARM64)
   1229 uint8_t* alignDoubleSpill(uint8_t* pointer) {
   1230  uintptr_t address = reinterpret_cast<uintptr_t>(pointer);
   1231  address &= ~(uintptr_t(ABIStackAlignment) - 1);
   1232  return reinterpret_cast<uint8_t*>(address);
   1233 }
   1234 #endif
   1235 
// Trace GC things referenced by an exit frame. The work depends entirely on
// which exit frame layout was pushed: native call frames carry an argument
// vector, OOL IC frames carry stub code and values, DOM frames carry
// |this|/vp, and VM wrapper frames are traced argument-by-argument using
// the VMFunctionData root descriptors.
static void TraceJitExitFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  ExitFooterFrame* footer = frame.exitFrame()->footer();

  // This corresponds to the case where we have build a fake exit frame which
  // handles the case of a native function call. We need to trace the argument
  // vector of the function call, and also new.target if it was a constructing
  // call.
  if (frame.isExitFrameLayout<NativeExitFrameLayout>()) {
    NativeExitFrameLayout* native =
        frame.exitFrame()->as<NativeExitFrameLayout>();
    // +2 for the callee and |this| preceding the arguments in vp.
    size_t len = native->argc() + 2;
    Value* vp = native->vp();
    TraceRootRange(trc, len, vp, "ion-native-args");
    if (frame.isExitFrameLayout<ConstructNativeExitFrameLayout>()) {
      TraceRoot(trc, vp + len, "ion-native-new-target");
    }
    return;
  }

  if (frame.isExitFrameLayout<IonOOLNativeExitFrameLayout>()) {
    IonOOLNativeExitFrameLayout* oolnative =
        frame.exitFrame()->as<IonOOLNativeExitFrameLayout>();
    TraceRoot(trc, oolnative->stubCode(), "ion-ool-native-code");
    TraceRoot(trc, oolnative->vp(), "iol-ool-native-vp");
    size_t len = oolnative->argc() + 1;
    TraceRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
    return;
  }

  if (frame.isExitFrameLayout<IonOOLProxyExitFrameLayout>()) {
    IonOOLProxyExitFrameLayout* oolproxy =
        frame.exitFrame()->as<IonOOLProxyExitFrameLayout>();
    TraceRoot(trc, oolproxy->stubCode(), "ion-ool-proxy-code");
    TraceRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
    TraceRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
    TraceRoot(trc, oolproxy->proxy(), "ion-ool-proxy-proxy");
    return;
  }

  if (frame.isExitFrameLayout<IonDOMExitFrameLayout>()) {
    IonDOMExitFrameLayout* dom = frame.exitFrame()->as<IonDOMExitFrameLayout>();
    TraceRoot(trc, dom->thisObjAddress(), "ion-dom-args");
    if (dom->isMethodFrame()) {
      IonDOMMethodExitFrameLayout* method =
          reinterpret_cast<IonDOMMethodExitFrameLayout*>(dom);
      size_t len = method->argc() + 2;
      Value* vp = method->vp();
      TraceRootRange(trc, len, vp, "ion-dom-args");
    } else {
      TraceRoot(trc, dom->vp(), "ion-dom-args");
    }
    return;
  }

  if (frame.isExitFrameLayout<CalledFromJitExitFrameLayout>()) {
    auto* layout = frame.exitFrame()->as<CalledFromJitExitFrameLayout>();
    JitFrameLayout* jsLayout = layout->jsFrame();
    jsLayout->replaceCalleeToken(
        TraceCalleeToken(trc, jsLayout->calleeToken()));
    TraceThisAndArguments(trc, frame, jsLayout);
    return;
  }

  if (frame.isExitFrameLayout<DirectWasmJitCallFrameLayout>()) {
    // Nothing needs to be traced here at the moment -- the arguments to the
    // callee are traced by the callee, and the inlined caller does not push
    // anything else.
    return;
  }

  if (frame.isBareExit() || frame.isUnwoundJitExit()) {
    // Nothing to trace. Fake exit frame pushed for VM functions with
    // nothing to trace on the stack or unwound JitFrameLayout.
    return;
  }

  MOZ_ASSERT(frame.exitFrame()->isWrapperExit());

  const VMFunctionData& f = GetVMFunction(footer->functionId());

  // Trace arguments of the VM wrapper. Each explicit argument is traced
  // according to its declared root type, then argBase is advanced by the
  // argument's on-stack width.
  uint8_t* argBase = frame.exitFrame()->argBase();
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argRootType(explicitArg)) {
      case VMFunctionData::RootNone:
        break;
      case VMFunctionData::RootObject: {
        // Sometimes we can bake in HandleObjects to nullptr.
        JSObject** pobj = reinterpret_cast<JSObject**>(argBase);
        if (*pobj) {
          TraceRoot(trc, pobj, "ion-vm-args");
        }
        break;
      }
      case VMFunctionData::RootString:
        TraceRoot(trc, reinterpret_cast<JSString**>(argBase), "ion-vm-args");
        break;
      case VMFunctionData::RootValue:
        TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
        break;
      case VMFunctionData::RootId:
        TraceRoot(trc, reinterpret_cast<jsid*>(argBase), "ion-vm-args");
        break;
      case VMFunctionData::RootCell:
        TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase),
                                "ion-vm-args");
        break;
      case VMFunctionData::RootBigInt:
        TraceRoot(trc, reinterpret_cast<JS::BigInt**>(argBase), "ion-vm-args");
        break;
    }

    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
      case VMFunctionData::WordByRef:
        argBase += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
      case VMFunctionData::DoubleByRef:
        argBase += 2 * sizeof(void*);
        break;
    }
  }

  // A Handle out-param also holds a GC thing; trace it per its root type.
  if (f.outParam == Type_Handle) {
    switch (f.outParamRootType) {
      case VMFunctionData::RootNone:
        MOZ_CRASH("Handle outparam must have root type");
      case VMFunctionData::RootObject:
        TraceRoot(trc, footer->outParam<JSObject*>(), "ion-vm-out");
        break;
      case VMFunctionData::RootString:
        TraceRoot(trc, footer->outParam<JSString*>(), "ion-vm-out");
        break;
      case VMFunctionData::RootValue:
        TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
        break;
      case VMFunctionData::RootId:
        TraceRoot(trc, footer->outParam<jsid>(), "ion-vm-outvp");
        break;
      case VMFunctionData::RootCell:
        TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(),
                                "ion-vm-out");
        break;
      case VMFunctionData::RootBigInt:
        TraceRoot(trc, footer->outParam<JS::BigInt*>(), "ion-vm-out");
        break;
    }
  }
}
   1386 
   1387 static void TraceBaselineInterpreterEntryFrame(JSTracer* trc,
   1388                                               const JSJitFrameIter& frame) {
   1389  // Baseline Interpreter entry code generated under --emit-interpreter-entry.
   1390  BaselineInterpreterEntryFrameLayout* layout =
   1391      (BaselineInterpreterEntryFrameLayout*)frame.fp();
   1392  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
   1393  TraceThisAndArguments(trc, frame, layout);
   1394 }
   1395 
   1396 static void TraceTrampolineNativeFrame(JSTracer* trc,
   1397                                       const JSJitFrameIter& frame) {
   1398  auto* layout = (TrampolineNativeFrameLayout*)frame.fp();
   1399  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
   1400  TraceThisAndArguments(trc, frame, layout);
   1401 
   1402  TrampolineNative native = TrampolineNativeForFrame(trc->runtime(), layout);
   1403  switch (native) {
   1404    case TrampolineNative::ArraySort:
   1405    case TrampolineNative::TypedArraySort:
   1406      layout->getFrameData<ArraySortData>()->trace(trc);
   1407      break;
   1408    case TrampolineNative::Count:
   1409      MOZ_CRASH("Invalid value");
   1410  }
   1411 }
   1412 
// Trace a single JIT activation: the activation's own data, then every
// frame on it, dispatching on the frame type. Wasm frames are traced by
// their instance; all JS JIT frame types have dedicated tracers above.
static void TraceJitActivation(JSTracer* trc, JitActivation* activation) {
#ifdef CHECK_OSIPOINT_REGISTERS
  if (JitOptions.checkOsiPointRegisters) {
    // GC can modify spilled registers, breaking our register checks.
    // To handle this, we disable these checks for the current VM call
    // when a GC happens.
    activation->setCheckRegs(false);
  }
#endif

  activation->trace(trc);

  // This is used for sanity checking continuity of the sequence of wasm stack
  // maps as we unwind.  It has no functional purpose.
  uintptr_t highestByteVisitedInPrevWasmFrame = 0;

  for (JitFrameIter frames(activation); !frames.done(); ++frames) {
    if (frames.isJSJit()) {
      const JSJitFrameIter& jitFrame = frames.asJSJit();
      switch (jitFrame.type()) {
        case FrameType::Exit:
          TraceJitExitFrame(trc, jitFrame);
          break;
        case FrameType::BaselineJS:
          jitFrame.baselineFrame()->trace(trc, jitFrame);
          break;
        case FrameType::IonJS:
          TraceIonJSFrame(trc, jitFrame);
          break;
        case FrameType::BaselineStub:
          TraceBaselineStubFrame(trc, jitFrame);
          break;
        case FrameType::Bailout:
          TraceBailoutFrame(trc, jitFrame);
          break;
        case FrameType::BaselineInterpreterEntry:
          TraceBaselineInterpreterEntryFrame(trc, jitFrame);
          break;
        case FrameType::TrampolineNative:
          TraceTrampolineNativeFrame(trc, jitFrame);
          break;
        case FrameType::IonICCall:
          TraceIonICCallFrame(trc, jitFrame);
          break;
        case FrameType::WasmToJSJit:
          // Ignore: this is a special marker used to let the
          // JitFrameIter know the frame above is a wasm frame, handled
          // in the next iteration.
          break;
        default:
          MOZ_CRASH("unexpected frame type");
      }
      // A JS frame breaks the run of wasm frames, so the continuity check
      // restarts from "unknown" at the next wasm frame.
      highestByteVisitedInPrevWasmFrame = 0; /* "unknown" */
    } else {
      gc::AssertRootMarkingPhase(trc);
      MOZ_ASSERT(frames.isWasm());
      uint8_t* nextPC = frames.resumePCinCurrentFrame();
      MOZ_ASSERT(nextPC != 0);
      wasm::WasmFrameIter& wasmFrameIter = frames.asWasm();
#ifdef ENABLE_WASM_JSPI
      // Stack switching (JSPI) also breaks frame-address continuity.
      if (wasmFrameIter.currentFrameStackSwitched()) {
        highestByteVisitedInPrevWasmFrame = 0;
      }
#endif
      wasm::Instance* instance = wasmFrameIter.instance();
      wasm::TraceInstanceEdge(trc, instance, "WasmFrameIter instance");
      highestByteVisitedInPrevWasmFrame = instance->traceFrame(
          trc, wasmFrameIter, nextPC, highestByteVisitedInPrevWasmFrame);
    }
  }
}
   1484 
   1485 void TraceJitActivations(JSContext* cx, JSTracer* trc) {
   1486  for (JitActivationIterator activations(cx); !activations.done();
   1487       ++activations) {
   1488    TraceJitActivation(trc, activations->asJit());
   1489  }
   1490 #ifdef ENABLE_WASM_JSPI
   1491  cx->wasm().traceRoots(trc);
   1492 #endif
   1493 }
   1494 
   1495 void TraceWeakJitActivationsInSweepingZones(JSContext* cx, JSTracer* trc) {
   1496  for (JitActivationIterator activation(cx); !activation.done(); ++activation) {
   1497    if (activation->compartment()->zone()->isGCSweeping()) {
   1498      for (JitFrameIter frame(activation->asJit()); !frame.done(); ++frame) {
   1499        if (frame.isJSJit()) {
   1500          const JSJitFrameIter& jitFrame = frame.asJSJit();
   1501          if (jitFrame.type() == FrameType::BaselineStub) {
   1502            TraceWeakBaselineStubFrame(trc, jitFrame);
   1503          }
   1504        }
   1505      }
   1506    }
   1507  }
   1508 }
   1509 
   1510 void UpdateJitActivationsForMinorGC(JSRuntime* rt) {
   1511  MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
   1512  Nursery& nursery = rt->gc.nursery();
   1513  JSContext* cx = rt->mainContextFromOwnThread();
   1514  for (JitActivationIterator activations(cx); !activations.done();
   1515       ++activations) {
   1516    for (JitFrameIter iter(activations->asJit()); !iter.done(); ++iter) {
   1517      if (iter.isJSJit()) {
   1518        const JSJitFrameIter& jitFrame = iter.asJSJit();
   1519        if (jitFrame.type() == FrameType::IonJS) {
   1520          UpdateIonJSFrameForMinorGC(rt, jitFrame);
   1521        }
   1522      } else if (iter.isWasm()) {
   1523        const wasm::WasmFrameIter& frame = iter.asWasm();
   1524        frame.instance()->updateFrameForMovingGC(
   1525            frame, frame.resumePCinCurrentFrame(), nursery);
   1526      }
   1527    }
   1528  }
   1529 }
   1530 
   1531 void UpdateJitActivationsForCompactingGC(JSRuntime* rt) {
   1532  MOZ_ASSERT(JS::RuntimeHeapIsMajorCollecting());
   1533  Nursery& nursery = rt->gc.nursery();
   1534  JSContext* cx = rt->mainContextFromOwnThread();
   1535  for (JitActivationIterator activations(cx); !activations.done();
   1536       ++activations) {
   1537    for (JitFrameIter iter(activations->asJit()); !iter.done(); ++iter) {
   1538      if (iter.isWasm()) {
   1539        const wasm::WasmFrameIter& frame = iter.asWasm();
   1540        frame.instance()->updateFrameForMovingGC(
   1541            frame, frame.resumePCinCurrentFrame(), nursery);
   1542      }
   1543    }
   1544  }
   1545 }
   1546 
   1547 JSScript* GetTopJitJSScript(JSContext* cx) {
   1548  JSJitFrameIter frame(cx->activation()->asJit());
   1549  MOZ_ASSERT(frame.type() == FrameType::Exit);
   1550  ++frame;
   1551 
   1552  if (frame.isBaselineStub()) {
   1553    ++frame;
   1554    MOZ_ASSERT(frame.isBaselineJS());
   1555  }
   1556 
   1557  MOZ_ASSERT(frame.isScripted());
   1558  return frame.script();
   1559 }
   1560 
// Construct an empty result set tied to the given Ion frame. The result
// vector is not allocated until init() is called.
RInstructionResults::RInstructionResults(JitFrameLayout* fp)
    : results_(nullptr), fp_(fp), initialized_(false) {}
   1563 
// Move constructor: steals the result vector and frame pointer, then marks
// the source as uninitialized so it can be destroyed safely.
RInstructionResults::RInstructionResults(RInstructionResults&& src)
    : results_(std::move(src.results_)),
      fp_(src.fp_),
      initialized_(src.initialized_) {
  src.initialized_ = false;
}
   1570 
   1571 RInstructionResults& RInstructionResults::operator=(RInstructionResults&& rhs) {
   1572  MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
   1573  this->~RInstructionResults();
   1574  new (this) RInstructionResults(std::move(rhs));
   1575  return *this;
   1576 }
   1577 
// Destructor: nothing explicit to do.
RInstructionResults::~RInstructionResults() {
  // results_ is freed by the UniquePtr.
}
   1581 
   1582 bool RInstructionResults::init(JSContext* cx, uint32_t numResults) {
   1583  if (numResults) {
   1584    results_ = cx->make_unique<Values>();
   1585    if (!results_) {
   1586      return false;
   1587    }
   1588    if (!results_->growBy(numResults)) {
   1589      ReportOutOfMemory(cx);
   1590      return false;
   1591    }
   1592 
   1593    Value guard = MagicValue(JS_ION_BAILOUT);
   1594    for (size_t i = 0; i < numResults; i++) {
   1595      (*results_)[i].init(guard);
   1596    }
   1597  }
   1598 
   1599  initialized_ = true;
   1600  return true;
   1601 }
   1602 
// True once init() has succeeded (or the object was move-constructed from an
// initialized instance).
bool RInstructionResults::isInitialized() const { return initialized_; }
   1604 
// Number of recovered-instruction result slots. Requires results_ to be
// allocated (i.e. init() was called with numResults > 0).
size_t RInstructionResults::length() const { return results_->length(); }
   1606 
// The Ion frame these results belong to; used by the activation to match a
// recovery record to its frame.
JitFrameLayout* RInstructionResults::frame() const {
  MOZ_ASSERT(fp_);
  return fp_;
}
   1611 
// Mutable access to the result slot for recover instruction |index|.
HeapPtr<Value>& RInstructionResults::operator[](size_t index) {
  return (*results_)[index];
}
   1615 
// Trace the recovered results as GC roots while they live on the activation.
void RInstructionResults::trace(JSTracer* trc) {
  // Note: The vector necessarily exists, otherwise this object would not have
  // been stored on the activation from where the trace function is called.
  TraceRange(trc, results_->length(), results_->begin(), "ion-recover-results");
}
   1621 
// Build a snapshot iterator positioned at |iter|'s snapshot within its
// IonScript. |machineState| supplies register values for allocations that
// live in registers; it must outlive this iterator.
SnapshotIterator::SnapshotIterator(const JSJitFrameIter& iter,
                                   const MachineState* machineState)
    : snapshot_(iter.ionScript()->snapshots(), iter.snapshotOffset(),
                iter.ionScript()->snapshotsRVATableSize(),
                iter.ionScript()->snapshotsListSize()),
      recover_(snapshot_, iter.ionScript()->recovers(),
               iter.ionScript()->recoversSize()),
      fp_(iter.jsFrame()),
      machine_(machineState),
      ionScript_(iter.ionScript()),
      instructionResults_(nullptr) {}
   1633 
// Default-construct an empty iterator; usable only as a placeholder to be
// assigned from a real iterator later.
SnapshotIterator::SnapshotIterator()
    : snapshot_(nullptr, 0, 0, 0),
      recover_(snapshot_, nullptr, 0),
      fp_(nullptr),
      machine_(nullptr),
      ionScript_(nullptr),
      instructionResults_(nullptr) {}
   1641 
// Read a word-sized slot at |offset| from the frame this iterator describes.
uintptr_t SnapshotIterator::fromStack(int32_t offset) const {
  return ReadFrameSlot(fp_, offset);
}
   1645 
// Box a raw object pointer as an ObjectValue. A null payload would indicate
// a corrupt allocation, hence the assert.
static Value FromObjectPayload(uintptr_t payload) {
  MOZ_ASSERT(payload != 0);
  return ObjectValue(*reinterpret_cast<JSObject*>(payload));
}
   1650 
// Box a raw string pointer as a StringValue.
static Value FromStringPayload(uintptr_t payload) {
  return StringValue(reinterpret_cast<JSString*>(payload));
}
   1654 
// Box a raw symbol pointer as a SymbolValue.
static Value FromSymbolPayload(uintptr_t payload) {
  return SymbolValue(reinterpret_cast<JS::Symbol*>(payload));
}
   1658 
// Box a raw BigInt pointer as a BigIntValue.
static Value FromBigIntPayload(uintptr_t payload) {
  return BigIntValue(reinterpret_cast<JS::BigInt*>(payload));
}
   1662 
// Box a raw payload word according to its statically-known JSValueType.
// Only types whose value fits in a single payload word are accepted.
static Value FromTypedPayload(JSValueType type, uintptr_t payload) {
  switch (type) {
    case JSVAL_TYPE_INT32:
      return Int32Value(payload);
    case JSVAL_TYPE_BOOLEAN:
      return BooleanValue(!!payload);
    case JSVAL_TYPE_STRING:
      return FromStringPayload(payload);
    case JSVAL_TYPE_SYMBOL:
      return FromSymbolPayload(payload);
    case JSVAL_TYPE_BIGINT:
      return FromBigIntPayload(payload);
    case JSVAL_TYPE_OBJECT:
      return FromObjectPayload(payload);
    default:
      MOZ_CRASH("unexpected type - needs payload");
  }
}
   1681 
// Return whether |alloc| can be materialized right now: every register it
// references must be present in the MachineState and every stack slot must
// lie within the readable frame area. Allocations that depend on recover
// instruction results additionally require those results to be computed
// (unless |rm| asks for the default value only).
bool SnapshotIterator::allocationReadable(const RValueAllocation& alloc,
                                          ReadMethod rm) {
  // If we have to recover stores, and if we are not interested in the
  // default value of the instruction, then we have to check if the recover
  // instruction results are available.
  if (alloc.needSideEffect() && rm != ReadMethod::AlwaysDefault) {
    if (!hasInstructionResults()) {
      return false;
    }
  }

  switch (alloc.mode()) {
    case RValueAllocation::DOUBLE_REG:
      return hasRegister(alloc.fpuReg());

    case RValueAllocation::TYPED_REG:
      return hasRegister(alloc.reg2());

    // On NUNBOX32, a boxed Value occupies two words (tag + payload), each of
    // which may live in a register or on the stack independently.
#if defined(JS_NUNBOX32)
    case RValueAllocation::UNTYPED_REG_REG:
      return hasRegister(alloc.reg()) && hasRegister(alloc.reg2());
    case RValueAllocation::UNTYPED_REG_STACK:
      return hasRegister(alloc.reg()) && hasStack(alloc.stackOffset2());
    case RValueAllocation::UNTYPED_STACK_REG:
      return hasStack(alloc.stackOffset()) && hasRegister(alloc.reg2());
    case RValueAllocation::UNTYPED_STACK_STACK:
      return hasStack(alloc.stackOffset()) && hasStack(alloc.stackOffset2());
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG:
      return hasRegister(alloc.reg());
    case RValueAllocation::UNTYPED_STACK:
      return hasStack(alloc.stackOffset());
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      return hasInstructionResult(alloc.index());
    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Readable either because we only want the default constant, or
      // because the recover instruction result is available.
      return rm == ReadMethod::AlwaysDefault ||
             hasInstructionResult(alloc.index());

    case RValueAllocation::INTPTR_REG:
      return hasRegister(alloc.reg());
    case RValueAllocation::INTPTR_STACK:
    case RValueAllocation::INTPTR_INT32_STACK:
      return hasStack(alloc.stackOffset());

    // Int64 values span two words on 32-bit platforms.
#if defined(JS_NUNBOX32)
    case RValueAllocation::INT64_REG_REG:
      return hasRegister(alloc.reg()) && hasRegister(alloc.reg2());
    case RValueAllocation::INT64_REG_STACK:
      return hasRegister(alloc.reg()) && hasStack(alloc.stackOffset2());
    case RValueAllocation::INT64_STACK_REG:
      return hasStack(alloc.stackOffset()) && hasRegister(alloc.reg2());
    case RValueAllocation::INT64_STACK_STACK:
      return hasStack(alloc.stackOffset()) && hasStack(alloc.stackOffset2());
#elif defined(JS_PUNBOX64)
    case RValueAllocation::INT64_REG:
      return hasRegister(alloc.reg());
    case RValueAllocation::INT64_STACK:
      return hasStack(alloc.stackOffset());
#endif

    default:
      // Constants and other modes need no live machine state.
      return true;
  }
}
   1748 
// Materialize |alloc| as a JS Value, reading from constants, registers, the
// frame's stack slots, or previously-computed recover-instruction results as
// the allocation mode dictates. The allocation must be readable (see
// allocationReadable). IntPtr and Int64 allocations cannot be represented as
// a Value and crash here; use allocationIntPtr/allocationInt64 instead.
Value SnapshotIterator::allocationValue(const RValueAllocation& alloc,
                                        ReadMethod rm) {
  switch (alloc.mode()) {
    case RValueAllocation::CONSTANT:
      return ionScript_->getConstant(alloc.index());

    case RValueAllocation::CST_UNDEFINED:
      return UndefinedValue();

    case RValueAllocation::CST_NULL:
      return NullValue();

    case RValueAllocation::DOUBLE_REG:
      return DoubleValue(fromRegister<double>(alloc.fpuReg()));

    case RValueAllocation::FLOAT32_REG:
      return Float32Value(fromRegister<float>(alloc.fpuReg()));

    case RValueAllocation::FLOAT32_STACK:
      return Float32Value(ReadFrameFloat32Slot(fp_, alloc.stackOffset()));

    case RValueAllocation::TYPED_REG:
      return FromTypedPayload(alloc.knownType(), fromRegister(alloc.reg2()));

    case RValueAllocation::TYPED_STACK: {
      // The payload lives on the stack; box it according to the statically
      // known type recorded in the allocation.
      switch (alloc.knownType()) {
        case JSVAL_TYPE_DOUBLE:
          return DoubleValue(ReadFrameDoubleSlot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_INT32:
          return Int32Value(ReadFrameInt32Slot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_BOOLEAN:
          return BooleanValue(ReadFrameBooleanSlot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_STRING:
          return FromStringPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_SYMBOL:
          return FromSymbolPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_BIGINT:
          return FromBigIntPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_OBJECT:
          return FromObjectPayload(fromStack(alloc.stackOffset2()));
        default:
          MOZ_CRASH("Unexpected type");
      }
    }

    // On NUNBOX32 the tag and payload words are reassembled from their two
    // independent locations.
#if defined(JS_NUNBOX32)
    case RValueAllocation::UNTYPED_REG_REG: {
      return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
                                      fromRegister(alloc.reg2()));
    }

    case RValueAllocation::UNTYPED_REG_STACK: {
      return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
                                      fromStack(alloc.stackOffset2()));
    }

    case RValueAllocation::UNTYPED_STACK_REG: {
      return Value::fromTagAndPayload(
          JSValueTag(fromStack(alloc.stackOffset())),
          fromRegister(alloc.reg2()));
    }

    case RValueAllocation::UNTYPED_STACK_STACK: {
      return Value::fromTagAndPayload(
          JSValueTag(fromStack(alloc.stackOffset())),
          fromStack(alloc.stackOffset2()));
    }
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG: {
      return Value::fromRawBits(fromRegister(alloc.reg()));
    }

    case RValueAllocation::UNTYPED_STACK: {
      return Value::fromRawBits(fromStack(alloc.stackOffset()));
    }
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      return fromInstructionResult(alloc.index());

    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Prefer the recovered result when available; otherwise fall back to
      // the default constant stored alongside the allocation.
      if (rm == ReadMethod::Normal && hasInstructionResult(alloc.index())) {
        return fromInstructionResult(alloc.index());
      }
      MOZ_ASSERT(rm == ReadMethod::AlwaysDefault);
      return ionScript_->getConstant(alloc.index2());

    case RValueAllocation::INTPTR_CST:
    case RValueAllocation::INTPTR_REG:
    case RValueAllocation::INTPTR_STACK:
    case RValueAllocation::INTPTR_INT32_STACK:
      MOZ_CRASH("Can't read IntPtr as Value");

    case RValueAllocation::INT64_CST:
#if defined(JS_NUNBOX32)
    case RValueAllocation::INT64_REG_REG:
    case RValueAllocation::INT64_REG_STACK:
    case RValueAllocation::INT64_STACK_REG:
    case RValueAllocation::INT64_STACK_STACK:
#elif defined(JS_PUNBOX64)
    case RValueAllocation::INT64_REG:
    case RValueAllocation::INT64_STACK:
#endif
      MOZ_CRASH("Can't read Int64 as Value");

    default:
      MOZ_CRASH("huh?");
  }
}
   1858 
// Read |a| if possible. If the allocation depends on recover-instruction
// results that have not been computed yet, and |fallback| allows recovery,
// compute them and retry. Returns UndefinedValue when the allocation cannot
// be read and recovery is not permitted.
Value SnapshotIterator::maybeRead(const RValueAllocation& a,
                                  MaybeReadFallback& fallback) {
  if (allocationReadable(a)) {
    return allocationValue(a);
  }

  if (fallback.canRecoverResults()) {
    // Code paths which are calling maybeRead are not always capable of
    // returning an error code, as these code paths used to be infallible.
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!initInstructionResults(fallback)) {
      oomUnsafe.crash("js::jit::SnapshotIterator::maybeRead");
    }

    // With results computed, the allocation must now be readable.
    if (allocationReadable(a)) {
      return allocationValue(a);
    }

    MOZ_ASSERT_UNREACHABLE("All allocations should be readable.");
  }

  return UndefinedValue();
}
   1882 
   1883 bool SnapshotIterator::tryRead(Value* result) {
   1884  RValueAllocation a = readAllocation();
   1885  if (allocationReadable(a)) {
   1886    *result = allocationValue(a);
   1887    return true;
   1888  }
   1889  return false;
   1890 }
   1891 
// Read the next allocation as a Value, re-boxing unpacked Int64/IntPtr
// allocations into freshly-allocated BigInts. Returns false only on OOM
// while allocating a BigInt.
bool SnapshotIterator::readMaybeUnpackedBigInt(JSContext* cx,
                                               MutableHandle<Value> result) {
  RValueAllocation alloc = readAllocation();
  MOZ_ASSERT(allocationReadable(alloc));

  switch (alloc.mode()) {
    case RValueAllocation::INT64_CST:
#if defined(JS_NUNBOX32)
    case RValueAllocation::INT64_REG_REG:
    case RValueAllocation::INT64_REG_STACK:
    case RValueAllocation::INT64_STACK_REG:
    case RValueAllocation::INT64_STACK_STACK:
#elif defined(JS_PUNBOX64)
    case RValueAllocation::INT64_REG:
    case RValueAllocation::INT64_STACK:
#endif
    {
      // A raw 64-bit integer: box it as a BigInt.
      auto* bigInt = JS::BigInt::createFromInt64(cx, allocationInt64(alloc));
      if (!bigInt) {
        return false;
      }
      result.setBigInt(bigInt);
      return true;
    }
    case RValueAllocation::INTPTR_CST:
    case RValueAllocation::INTPTR_REG:
    case RValueAllocation::INTPTR_STACK:
    case RValueAllocation::INTPTR_INT32_STACK: {
      // A raw pointer-sized integer: box it as a BigInt.
      auto* bigInt = JS::BigInt::createFromIntPtr(cx, allocationIntPtr(alloc));
      if (!bigInt) {
        return false;
      }
      result.setBigInt(bigInt);
      return true;
    }
    default:
      // Already a boxed Value; read it directly.
      result.set(allocationValue(alloc));
      return true;
  }
}
   1932 
// Materialize an Int64 allocation as a raw int64_t. On 32-bit (NUNBOX32)
// platforms the value is reassembled from separate low and high words; on
// 64-bit platforms it occupies a single word.
int64_t SnapshotIterator::allocationInt64(const RValueAllocation& alloc) {
  MOZ_ASSERT(allocationReadable(alloc));

  // Combine a (high, low) 32-bit word pair into one signed 64-bit value.
  auto fromParts = [](uint32_t hi, uint32_t lo) {
    return static_cast<int64_t>((static_cast<uint64_t>(hi) << 32) | lo);
  };

  switch (alloc.mode()) {
    case RValueAllocation::INT64_CST: {
      // Constants store the low and high halves as two Int32 constants.
      uint32_t lo = ionScript_->getConstant(alloc.index()).toInt32();
      uint32_t hi = ionScript_->getConstant(alloc.index2()).toInt32();
      return fromParts(hi, lo);
    }
#if defined(JS_NUNBOX32)
    case RValueAllocation::INT64_REG_REG: {
      uintptr_t lo = fromRegister(alloc.reg());
      uintptr_t hi = fromRegister(alloc.reg2());
      return fromParts(hi, lo);
    }
    case RValueAllocation::INT64_REG_STACK: {
      uintptr_t lo = fromRegister(alloc.reg());
      uintptr_t hi = fromStack(alloc.stackOffset2());
      return fromParts(hi, lo);
    }
    case RValueAllocation::INT64_STACK_REG: {
      uintptr_t lo = fromStack(alloc.stackOffset());
      uintptr_t hi = fromRegister(alloc.reg2());
      return fromParts(hi, lo);
    }
    case RValueAllocation::INT64_STACK_STACK: {
      uintptr_t lo = fromStack(alloc.stackOffset());
      uintptr_t hi = fromStack(alloc.stackOffset2());
      return fromParts(hi, lo);
    }
#elif defined(JS_PUNBOX64)
    case RValueAllocation::INT64_REG: {
      return static_cast<int64_t>(fromRegister(alloc.reg()));
    }
    case RValueAllocation::INT64_STACK: {
      return static_cast<int64_t>(fromStack(alloc.stackOffset()));
    }
#endif
    default:
      break;
  }
  MOZ_CRASH("invalid int64 allocation");
}
   1980 
// Materialize an IntPtr allocation as a raw intptr_t. Constants are stored
// as one Int32 on 32-bit platforms and as a (high, low) Int32 pair on 64-bit
// platforms; INTPTR_INT32_STACK holds a sign-extended 32-bit stack slot.
intptr_t SnapshotIterator::allocationIntPtr(const RValueAllocation& alloc) {
  MOZ_ASSERT(allocationReadable(alloc));
  switch (alloc.mode()) {
    case RValueAllocation::INTPTR_CST: {
#if !defined(JS_64BIT)
      int32_t cst = ionScript_->getConstant(alloc.index()).toInt32();
      return static_cast<intptr_t>(cst);
#else
      uint32_t lo = ionScript_->getConstant(alloc.index()).toInt32();
      uint32_t hi = ionScript_->getConstant(alloc.index2()).toInt32();
      return static_cast<intptr_t>((static_cast<uint64_t>(hi) << 32) | lo);
#endif
    }
    case RValueAllocation::INTPTR_REG:
      return static_cast<intptr_t>(fromRegister(alloc.reg()));
    case RValueAllocation::INTPTR_STACK:
      return static_cast<intptr_t>(fromStack(alloc.stackOffset()));
    case RValueAllocation::INTPTR_INT32_STACK:
      return static_cast<intptr_t>(
          ReadFrameInt32Slot(fp_, alloc.stackOffset()));
    default:
      break;
  }
  MOZ_CRASH("invalid intptr allocation");
}
   2006 
// Read the next allocation as a BigInt, allocating one when the allocation
// holds an unpacked IntPtr; returns nullptr on OOM. Non-IntPtr allocations
// must already contain a BigInt value.
JS::BigInt* SnapshotIterator::readBigInt(JSContext* cx) {
  RValueAllocation alloc = readAllocation();
  switch (alloc.mode()) {
    case RValueAllocation::INTPTR_CST:
    case RValueAllocation::INTPTR_REG:
    case RValueAllocation::INTPTR_STACK:
    case RValueAllocation::INTPTR_INT32_STACK:
      return JS::BigInt::createFromIntPtr(cx, allocationIntPtr(alloc));
    default:
      return allocationValue(alloc).toBigInt();
  }
}
   2019 
// Write back the GC-thing payload of |v| to wherever |alloc| lives, after a
// tracer may have moved the thing. Only the payload word is updated; the tag
// is unchanged (the value's type must not have changed, see traceAllocation).
void SnapshotIterator::writeAllocationValuePayload(
    const RValueAllocation& alloc, const Value& v) {
  MOZ_ASSERT(v.isGCThing());

  switch (alloc.mode()) {
    case RValueAllocation::CONSTANT:
      ionScript_->getConstant(alloc.index()) = v;
      break;

    case RValueAllocation::CST_UNDEFINED:
    case RValueAllocation::CST_NULL:
    case RValueAllocation::DOUBLE_REG:
    case RValueAllocation::FLOAT32_REG:
    case RValueAllocation::FLOAT32_STACK:
    case RValueAllocation::INTPTR_CST:
    case RValueAllocation::INTPTR_REG:
    case RValueAllocation::INTPTR_STACK:
    case RValueAllocation::INTPTR_INT32_STACK:
    case RValueAllocation::INT64_CST:
#if defined(JS_NUNBOX32)
    case RValueAllocation::INT64_REG_REG:
    case RValueAllocation::INT64_REG_STACK:
    case RValueAllocation::INT64_STACK_REG:
    case RValueAllocation::INT64_STACK_STACK:
#elif defined(JS_PUNBOX64)
    case RValueAllocation::INT64_REG:
    case RValueAllocation::INT64_STACK:
#endif
      // These modes can never hold a GC thing.
      MOZ_CRASH("Not a GC thing: Unexpected write");
      break;

    case RValueAllocation::TYPED_REG:
      machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
      break;

    case RValueAllocation::TYPED_STACK:
      switch (alloc.knownType()) {
        default:
          MOZ_CRASH("Not a GC thing: Unexpected write");
          break;
        case JSVAL_TYPE_STRING:
        case JSVAL_TYPE_SYMBOL:
        case JSVAL_TYPE_BIGINT:
        case JSVAL_TYPE_OBJECT:
          WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
          break;
      }
      break;

    // On NUNBOX32 only the payload half (reg2/stackOffset2) is rewritten;
    // the tag half is left alone.
#if defined(JS_NUNBOX32)
    case RValueAllocation::UNTYPED_REG_REG:
    case RValueAllocation::UNTYPED_STACK_REG:
      machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
      break;

    case RValueAllocation::UNTYPED_REG_STACK:
    case RValueAllocation::UNTYPED_STACK_STACK:
      WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
      break;
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG:
      machine_->write(alloc.reg(), v.asRawBits());
      break;

    case RValueAllocation::UNTYPED_STACK:
      WriteFrameSlot(fp_, alloc.stackOffset(), v.asRawBits());
      break;
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      MOZ_CRASH("Recover instructions are handled by the JitActivation.");
      break;

    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Assume that we are always going to be writing on the default value
      // while tracing.
      ionScript_->getConstant(alloc.index2()) = v;
      break;

    default:
      MOZ_CRASH("huh?");
  }
}
   2103 
// Trace the next allocation's GC thing, writing the (possibly relocated)
// pointer back into the allocation's storage if the tracer moved it. Only
// the default value is considered; recovered results are traced by the
// JitActivation.
void SnapshotIterator::traceAllocation(JSTracer* trc) {
  RValueAllocation alloc = readAllocation();
  if (!allocationReadable(alloc, ReadMethod::AlwaysDefault)) {
    return;
  }

  Value v = allocationValue(alloc, ReadMethod::AlwaysDefault);
  if (!v.isGCThing()) {
    return;
  }

  // Keep a copy so we can detect whether the tracer relocated the thing.
  Value copy = v;
  TraceRoot(trc, &v, "ion-typed-reg");
  if (v != copy) {
    // Moving GC may change the pointer but never the value's type.
    MOZ_ASSERT(SameType(v, copy));
    writeAllocationValuePayload(alloc, v);
  }
}
   2122 
// The current instruction viewed as a resume point; the iterator must be
// settled on a resume point (see settleOnFrame).
const RResumePoint* SnapshotIterator::resumePoint() const {
  return instruction()->toResumePoint();
}
   2126 
// Number of operand allocations the current instruction reads.
uint32_t SnapshotIterator::numAllocations() const {
  return instruction()->numOperands();
}
   2130 
// Bytecode offset at which the current frame resumes.
uint32_t SnapshotIterator::pcOffset() const {
  return resumePoint()->pcOffset();
}
   2134 
// How execution resumes at the current frame's resume point.
ResumeMode SnapshotIterator::resumeMode() const {
  return resumePoint()->mode();
}
   2138 
   2139 void SnapshotIterator::skipInstruction() {
   2140  MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
   2141  size_t numOperands = instruction()->numOperands();
   2142  for (size_t i = 0; i < numOperands; i++) {
   2143    skip();
   2144  }
   2145  nextInstruction();
   2146 }
   2147 
// Make recover-instruction results available to this iterator, computing and
// caching them on the JitActivation if this is the first request for this
// frame. Returns false on OOM.
bool SnapshotIterator::initInstructionResults(MaybeReadFallback& fallback) {
  MOZ_ASSERT(fallback.canRecoverResults());
  JSContext* cx = fallback.maybeCx;

  // If there is only one resume point in the list of instructions, then there
  // is no instruction to recover, and thus no need to register any results.
  if (recover_.numInstructions() == 1) {
    return true;
  }

  JitFrameLayout* fp = fallback.frame->jsFrame();
  RInstructionResults* results = fallback.activation->maybeIonFrameRecovery(fp);
  if (!results) {
    AutoRealm ar(cx, fallback.frame->script());

    // We are going to run recover instructions. To avoid problems where recover
    // instructions are not idempotent (for example, if we allocate an object,
    // object identity may be observable), we should not execute code in the
    // Ion stack frame afterwards. To avoid doing so, we invalidate the script.
    // This is not necessary for bailouts or other cases where we are leaving
    // the frame anyway. We only need it for niche cases like debugger
    // introspection or Function.arguments.
    if (fallback.consequence == MaybeReadFallback::Fallback_Invalidate) {
      ionScript_->invalidate(cx, fallback.frame->script(),
                             /* resetUses = */ false,
                             "Observe recovered instruction.");
    }

    // Register the list of result on the activation.  We need to do that
    // before we initialize the list such as if any recover instruction
    // cause a GC, we can ensure that the results are properly traced by the
    // activation.
    RInstructionResults tmp(fallback.frame->jsFrame());
    if (!fallback.activation->registerIonFrameRecovery(std::move(tmp))) {
      return false;
    }

    results = fallback.activation->maybeIonFrameRecovery(fp);

    // Start a new snapshot at the beginning of the JSJitFrameIter.  This
    // SnapshotIterator is used for evaluating the content of all recover
    // instructions.  The result is then saved on the JitActivation.
    MachineState machine = fallback.frame->machineState();
    SnapshotIterator s(*fallback.frame, &machine);
    if (!s.computeInstructionResults(cx, results)) {
      // If the evaluation failed because of OOMs, then we discard the
      // current set of result that we collected so far.
      fallback.activation->removeIonFrameRecovery(fp);
      return false;
    }
  }

  MOZ_ASSERT(results->isInitialized());
  // One result per recover instruction; the trailing resume point has none.
  MOZ_RELEASE_ASSERT(results->length() == recover_.numInstructions() - 1);
  instructionResults_ = results;
  return true;
}
   2205 
// Evaluate every recover instruction in this snapshot's recover buffer and
// store the produced values into |results|. Returns false on failure (e.g.
// OOM inside a recover instruction).
bool SnapshotIterator::computeInstructionResults(
    JSContext* cx, RInstructionResults* results) const {
  MOZ_ASSERT(!results->isInitialized());
  MOZ_ASSERT(recover_.numInstructionsRead() == 1);

  // The last instruction will always be a resume point.
  size_t numResults = recover_.numInstructions() - 1;
  // NOTE(review): this check is redundant with the assert above in debug
  // builds; in release builds it skips the work for an already-initialized
  // |results|.
  if (!results->isInitialized()) {
    if (!results->init(cx, numResults)) {
      return false;
    }

    // No need to iterate over the only resume point.
    if (!numResults) {
      MOZ_ASSERT(results->isInitialized());
      return true;
    }

    // Avoid invoking the object metadata callback, which could try to walk the
    // stack while bailing out.
    gc::AutoSuppressGC suppressGC(cx);
    js::AutoSuppressAllocationMetadataBuilder suppressMetadata(cx);

    // Fill with the results of recover instructions.
    SnapshotIterator s(*this);
    s.instructionResults_ = results;
    while (s.moreInstructions()) {
      // Skip resume point and only interpret recover instructions.
      if (s.instruction()->isResumePoint()) {
        s.skipInstruction();
        continue;
      }

      if (!s.instruction()->recover(cx, s)) {
        return false;
      }
      s.nextInstruction();
    }
  }

  MOZ_ASSERT(results->isInitialized());
  return true;
}
   2249 
// Record the value produced by the recover instruction currently being
// evaluated. The slot must still hold the JS_ION_BAILOUT sentinel, i.e. each
// instruction's result is written exactly once.
void SnapshotIterator::storeInstructionResult(const Value& v) {
  uint32_t currIns = recover_.numInstructionsRead() - 1;
  MOZ_ASSERT((*instructionResults_)[currIns].isMagic(JS_ION_BAILOUT));
  (*instructionResults_)[currIns] = v;
}
   2255 
// Fetch the previously-computed result of recover instruction |index|; the
// slot must no longer hold the JS_ION_BAILOUT sentinel.
Value SnapshotIterator::fromInstructionResult(uint32_t index) const {
  MOZ_ASSERT(!(*instructionResults_)[index].isMagic(JS_ION_BAILOUT));
  return (*instructionResults_)[index];
}
   2260 
// Advance past non-resume-point recover instructions until the iterator sits
// on the next resume point (i.e. the next frame).
void SnapshotIterator::settleOnFrame() {
  // Check that the current instruction can still be used.
  MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
  while (!instruction()->isResumePoint()) {
    skipInstruction();
  }
}
   2268 
// Move to the next (inlined) frame's resume point.
void SnapshotIterator::nextFrame() {
  nextInstruction();
  settleOnFrame();
}
   2273 
   2274 Value SnapshotIterator::maybeReadAllocByIndex(size_t index) {
   2275  while (index--) {
   2276    MOZ_ASSERT(moreAllocations());
   2277    skip();
   2278  }
   2279 
   2280  Value s;
   2281  {
   2282    // This MaybeReadFallback method cannot GC.
   2283    JS::AutoSuppressGCAnalysis nogc;
   2284    MaybeReadFallback fallback;
   2285    s = maybeRead(fallback);
   2286  }
   2287 
   2288  while (moreAllocations()) {
   2289    skip();
   2290  }
   2291 
   2292  return s;
   2293 }
   2294 
// Construct an inline-frame iterator over the Ion frame |iter| points at,
// settled on the outermost inlined frame.
InlineFrameIterator::InlineFrameIterator(JSContext* cx,
                                         const JSJitFrameIter* iter)
    : calleeTemplate_(cx), script_(cx), pc_(nullptr), numActualArgs_(0) {
  resetOn(iter);
}
   2300 
// Copy-like constructor: re-create an iterator settled on the same inlined
// frame as |iter| (which may be null, yielding an empty iterator).
InlineFrameIterator::InlineFrameIterator(JSContext* cx,
                                         const InlineFrameIterator* iter)
    : frame_(iter ? iter->frame_ : nullptr),
      framesRead_(0),
      frameCount_(iter ? iter->frameCount_ : UINT32_MAX),
      calleeTemplate_(cx),
      script_(cx),
      pc_(nullptr),
      numActualArgs_(0) {
  if (frame_) {
    machine_ = iter->machine_;
    start_ = SnapshotIterator(*frame_, &machine_);

    // findNextFrame will iterate to the next frame and init. everything.
    // Therefore to settle on the same frame, we report one frame less read.
    framesRead_ = iter->framesRead_ - 1;
    findNextFrame();
  }
}
   2320 
// Re-point this iterator at a new Ion frame (or clear it when |iter| is
// null) and settle on the outermost inlined frame.
void InlineFrameIterator::resetOn(const JSJitFrameIter* iter) {
  frame_ = iter;
  framesRead_ = 0;
  // UINT32_MAX means "frame count not yet known"; findNextFrame computes it.
  frameCount_ = UINT32_MAX;

  if (iter) {
    machine_ = iter->machineState();
    start_ = SnapshotIterator(*iter, &machine_);
    findNextFrame();
  }
}
   2332 
// Decode the snapshot from the outermost frame inward until the frame
// identified by framesRead_ is reached, initializing calleeTemplate_,
// script_, pc_ and numActualArgs_ along the way.
void InlineFrameIterator::findNextFrame() {
  MOZ_ASSERT(more());

  si_ = start_;

  // Read the initial frame out of the C stack.
  calleeTemplate_ = frame_->maybeCallee();
  calleeRVA_ = RValueAllocation();
  script_ = frame_->script();
  MOZ_ASSERT(script_->hasBaselineScript());

  // Settle on the outermost frame without evaluating any instructions before
  // looking for a pc.
  si_.settleOnFrame();

  pc_ = script_->offsetToPC(si_.pcOffset());
  // Poison value; overwritten below for every inlined frame we descend into.
  numActualArgs_ = 0xbadbad;

  // This unfortunately is O(n*m), because we must skip over outer frames
  // before reading inner ones.

  // The first time (frameCount_ == UINT32_MAX) we do not know the number of
  // frames that we are going to inspect.  So we are iterating until there
  // are no more frames, to settle on the innermost frame and to count the
  // number of frames.
  size_t remaining = (frameCount_ != UINT32_MAX) ? frameNo() - 1 : SIZE_MAX;

  size_t i = 1;
  for (; i <= remaining && si_.moreFrames(); i++) {
    ResumeMode mode = si_.resumeMode();
    MOZ_ASSERT(IsIonInlinableOp(JSOp(*pc_)));

    // Recover the number of actual arguments from the script.
    if (IsInvokeOp(JSOp(*pc_))) {
      MOZ_ASSERT(mode == ResumeMode::InlinedStandardCall ||
                 mode == ResumeMode::InlinedFunCall);
      numActualArgs_ = GET_ARGC(pc_);
      // For an inlined fun.call, the first argument becomes the |this|
      // value of the inlined frame, so it is not an actual argument.
      if (mode == ResumeMode::InlinedFunCall && numActualArgs_ > 0) {
        numActualArgs_--;
      }
    } else if (IsGetPropPC(pc_) || IsGetElemPC(pc_)) {
      MOZ_ASSERT(mode == ResumeMode::InlinedAccessor);
      numActualArgs_ = 0;
    } else {
      MOZ_RELEASE_ASSERT(IsSetPropPC(pc_));
      MOZ_ASSERT(mode == ResumeMode::InlinedAccessor);
      numActualArgs_ = 1;
    }

    // Skip over non-argument slots, as well as |this|.
    bool skipNewTarget = IsConstructPC(pc_);
    unsigned skipCount =
        (si_.numAllocations() - 1) - numActualArgs_ - 1 - skipNewTarget;
    for (unsigned j = 0; j < skipCount; j++) {
      si_.skip();
    }

    // This value should correspond to the function which is being inlined.
    // The value must be readable to iterate over the inline frame. Most of
    // the time, these functions are stored as JSFunction constants,
    // register which are holding the JSFunction pointer, or recover
    // instruction with Default value.
    Value funval = si_.readWithDefault(&calleeRVA_);

    // Skip extra value allocations.
    while (si_.moreAllocations()) {
      si_.skip();
    }

    si_.nextFrame();

    calleeTemplate_ = &funval.toObject().as<JSFunction>();
    script_ = calleeTemplate_->nonLazyScript();
    MOZ_ASSERT(script_->hasBaselineScript());

    pc_ = script_->offsetToPC(si_.pcOffset());
  }

  // The first time we do not know the number of frames, we only settle on
  // the last frame, and update the number of frames based on the number of
  // iterations that we have done.
  if (frameCount_ == UINT32_MAX) {
    MOZ_ASSERT(!si_.moreFrames());
    frameCount_ = i;
  }

  framesRead_++;
}
   2421 
   2422 JSFunction* InlineFrameIterator::callee(MaybeReadFallback& fallback) const {
   2423  MOZ_ASSERT(isFunctionFrame());
   2424  if (calleeRVA_.mode() == RValueAllocation::INVALID ||
   2425      !fallback.canRecoverResults()) {
   2426    return calleeTemplate_;
   2427  }
   2428 
   2429  SnapshotIterator s(si_);
   2430  // :TODO: Handle allocation failures from recover instruction.
   2431  Value funval = s.maybeRead(calleeRVA_, fallback);
   2432  return &funval.toObject().as<JSFunction>();
   2433 }
   2434 
// Compute the environment chain object of this frame from the (possibly
// still-uninitialized) |envChainValue| slot. When |hasInitialEnv| is
// non-null, it is set to whether the function's initial environment
// objects are required (and hence expected to be pushed).
JSObject* InlineFrameIterator::computeEnvironmentChain(
    const Value& envChainValue, MaybeReadFallback& fallback,
    bool* hasInitialEnv) const {
  if (envChainValue.isObject()) {
    if (hasInitialEnv) {
      if (fallback.canRecoverResults()) {
        // callee(fallback) may recover results (and so GC); keep the
        // environment object rooted across that call.
        RootedObject obj(fallback.maybeCx, &envChainValue.toObject());
        *hasInitialEnv = isFunctionFrame() &&
                         callee(fallback)->needsFunctionEnvironmentObjects();
        return obj;
      }
      JS::AutoSuppressGCAnalysis
          nogc;  // If we cannot recover then we cannot GC.
      *hasInitialEnv = isFunctionFrame() &&
                       callee(fallback)->needsFunctionEnvironmentObjects();
    }

    return &envChainValue.toObject();
  }

  // Note we can hit this case even for functions with a CallObject, in case
  // we are walking the frame during the function prologue, before the env
  // chain has been initialized.
  if (isFunctionFrame()) {
    return callee(fallback)->environment();
  }

  if (isModuleFrame()) {
    return script()->module()->environment();
  }

  // Ion does not handle non-function scripts that have anything other than
  // the global on their env chain.
  MOZ_ASSERT(!script()->isForEval());
  MOZ_ASSERT(!script()->hasNonSyntacticScope());
  return &script()->global().lexicalEnvironment();
}
   2472 
   2473 bool InlineFrameIterator::isFunctionFrame() const { return !!calleeTemplate_; }
   2474 
   2475 bool InlineFrameIterator::isModuleFrame() const { return script()->isModule(); }
   2476 
// Return the address of |reg|'s spilled value in the safepoint's register
// dump. The slot index is derived directly from the push order encoded by
// the register set; slots grow downward from spillBase.
uintptr_t* MachineState::SafepointState::addressOfRegister(Register reg) const {
  size_t offset = regs.offsetOfPushedRegister(reg);

  MOZ_ASSERT((offset % sizeof(uintptr_t)) == 0);
  uint32_t index = offset / sizeof(uintptr_t);

#ifdef DEBUG
  // Assert correctness with a slower algorithm in debug builds.
  uint32_t expectedIndex = 0;
  bool found = false;
  for (GeneralRegisterBackwardIterator iter(regs); iter.more(); ++iter) {
    expectedIndex++;
    if (*iter == reg) {
      found = true;
      break;
    }
  }
  MOZ_ASSERT(found);
  MOZ_ASSERT(expectedIndex == index);
#endif

  return spillBase - index;
}
   2500 
// Return the address of the float register |reg|'s spilled value. Walks the
// pushed float registers from floatSpillBase downward, accounting for
// aliasing (e.g. d0 overlapping s0/s1). Crashes if |reg| was not spilled.
char* MachineState::SafepointState::addressOfRegister(FloatRegister reg) const {
  // Note: this could be optimized similar to the GPR case above by implementing
  // offsetOfPushedRegister for FloatRegisterSet. Float register sets are
  // complicated though and this case is very uncommon: it's only reachable for
  // exception bailouts with live float registers.
  MOZ_ASSERT(!reg.isSimd128());
  char* ptr = floatSpillBase;
  for (FloatRegisterBackwardIterator iter(floatRegs); iter.more(); ++iter) {
    ptr -= (*iter).size();
    for (uint32_t a = 0; a < (*iter).numAlignedAliased(); a++) {
      // Only say that registers that actually start here start here.
      // e.g. d0 should not start at s1, only at s0.
      FloatRegister ftmp = (*iter).alignedAliased(a);
      if (ftmp == reg) {
        return ptr;
      }
    }
  }
  MOZ_CRASH("Invalid register");
}
   2521 
   2522 uintptr_t MachineState::read(Register reg) const {
   2523  if (state_.is<BailoutState>()) {
   2524    return state_.as<BailoutState>().regs[reg.code()].r;
   2525  }
   2526  if (state_.is<SafepointState>()) {
   2527    uintptr_t* addr = state_.as<SafepointState>().addressOfRegister(reg);
   2528    return *addr;
   2529  }
   2530  MOZ_CRASH("Invalid state");
   2531 }
   2532 
// Read the saved value of the float register |reg| as type T, whose size
// must match the register's size. Unreachable on targets without codegen
// (JS_CODEGEN_NONE / JS_CODEGEN_WASM32).
template <typename T>
T MachineState::read(FloatRegister reg) const {
  MOZ_ASSERT(reg.size() == sizeof(T));

#if !defined(JS_CODEGEN_NONE) && !defined(JS_CODEGEN_WASM32)
  if (state_.is<BailoutState>()) {
    // Bailout dumps store all FPU registers in a fixed-layout array.
    uint32_t offset = reg.getRegisterDumpOffsetInBytes();
    MOZ_ASSERT((offset % sizeof(T)) == 0);
    MOZ_ASSERT((offset + sizeof(T)) <= sizeof(RegisterDump::FPUArray));

    const BailoutState& state = state_.as<BailoutState>();
    char* addr = reinterpret_cast<char*>(state.floatRegs.begin()) + offset;
    return *reinterpret_cast<T*>(addr);
  }
  if (state_.is<SafepointState>()) {
    char* addr = state_.as<SafepointState>().addressOfRegister(reg);
    return *reinterpret_cast<T*>(addr);
  }
#endif
  MOZ_CRASH("Invalid state");
}
   2554 
   2555 void MachineState::write(Register reg, uintptr_t value) const {
   2556  if (state_.is<SafepointState>()) {
   2557    uintptr_t* addr = state_.as<SafepointState>().addressOfRegister(reg);
   2558    *addr = value;
   2559    return;
   2560  }
   2561  MOZ_CRASH("Invalid state");
   2562 }
   2563 
   2564 bool InlineFrameIterator::isConstructing() const {
   2565  // Skip the current frame and look at the caller's.
   2566  if (more()) {
   2567    InlineFrameIterator parent(TlsContext.get(), this);
   2568    ++parent;
   2569 
   2570    // In the case of a JS frame, look up the pc from the snapshot.
   2571    JSOp parentOp = JSOp(*parent.pc());
   2572 
   2573    // Inlined Getters and Setters are never constructing.
   2574    if (IsIonInlinableGetterOrSetterOp(parentOp)) {
   2575      return false;
   2576    }
   2577 
   2578    MOZ_ASSERT(IsInvokeOp(parentOp) && !IsSpreadOp(parentOp));
   2579 
   2580    return IsConstructOp(parentOp);
   2581  }
   2582 
   2583  return frame_->isConstructing();
   2584 }
   2585 
   2586 void SnapshotIterator::warnUnreadableAllocation() {
   2587  fprintf(stderr,
   2588          "Warning! Tried to access unreadable value allocation (possible "
   2589          "f.arguments).\n");
   2590 }
   2591 
// Functor passed to unaliasedForEachActual by InlineFrameIterator::dump to
// print the actual arguments beyond the formal argument count.
struct DumpOverflownOp {
  const unsigned numFormals_;  // Number of formal parameters of the callee.
  unsigned i_ = 0;             // Index of the next actual argument seen.

  explicit DumpOverflownOp(unsigned numFormals) : numFormals_(numFormals) {}

  void operator()(const Value& v) {
    // Only arguments past the formals are overflown actuals.
    if (i_ >= numFormals_) {
      fprintf(stderr, "  actual (arg %u): ", i_);
#if defined(DEBUG) || defined(JS_JITSPEW)
      DumpValue(v);
#else
      fprintf(stderr, "?\n");
#endif
    }
    i_++;
  }
};
   2610 
// Print a human-readable description of this frame (callee, script
// location, current op and all snapshot slots) to stderr, for debugging.
void InlineFrameIterator::dump() const {
  MaybeReadFallback fallback;

  if (more()) {
    fprintf(stderr, " JS frame (inlined)\n");
  } else {
    fprintf(stderr, " JS frame\n");
  }

  bool isFunction = false;
  if (isFunctionFrame()) {
    isFunction = true;
    fprintf(stderr, "  callee fun: ");
#if defined(DEBUG) || defined(JS_JITSPEW)
    DumpObject(callee(fallback));
#else
    fprintf(stderr, "?\n");
#endif
  } else {
    fprintf(stderr, "  global frame, no callee\n");
  }

  fprintf(stderr, "  file %s line %u\n", script()->filename(),
          script()->lineno());

  fprintf(stderr, "  script = %p, pc = %p\n", (void*)script(), pc());
  fprintf(stderr, "  current op: %s\n", CodeName(JSOp(*pc())));

  if (!more()) {
    numActualArgs();
  }

  SnapshotIterator si = snapshotIterator();
  fprintf(stderr, "  slots: %u\n", si.numAllocations() - 1);
  // Walk every allocation of the frame; for function frames the first
  // slots are the env chain, |this|, then the formal arguments.
  for (unsigned i = 0; i < si.numAllocations() - 1; i++) {
    if (isFunction) {
      if (i == 0) {
        fprintf(stderr, "  env chain: ");
      } else if (i == 1) {
        fprintf(stderr, "  this: ");
      } else if (i - 2 < calleeTemplate()->nargs()) {
        fprintf(stderr, "  formal (arg %u): ", i - 2);
      } else {
        // Just past the formals: print any overflown actual arguments once.
        if (i - 2 == calleeTemplate()->nargs() &&
            numActualArgs() > calleeTemplate()->nargs()) {
          DumpOverflownOp d(calleeTemplate()->nargs());
          unaliasedForEachActual(TlsContext.get(), d, fallback);
        }

        fprintf(stderr, "  slot %d: ", int(i - 2 - calleeTemplate()->nargs()));
      }
    } else
      fprintf(stderr, "  slot %u: ", i);
#if defined(DEBUG) || defined(JS_JITSPEW)
    DumpValue(si.maybeRead(fallback));
#else
    fprintf(stderr, "?\n");
#endif
  }

  fputc('\n', stderr);
}
   2673 
   2674 JitFrameLayout* InvalidationBailoutStack::fp() const {
   2675  return (JitFrameLayout*)(sp() + ionScript_->frameSize());
   2676 }
   2677 
// Debug-only sanity checks: the frame's callee token must be non-null and
// the OSI-point return address must lie within the invalidated IonScript's
// generated code. No-op in release builds.
void InvalidationBailoutStack::checkInvariants() const {
#ifdef DEBUG
  JitFrameLayout* frame = fp();
  CalleeToken token = frame->calleeToken();
  MOZ_ASSERT(token);

  uint8_t* rawBase = ionScript()->method()->raw();
  uint8_t* rawLimit = rawBase + ionScript()->method()->instructionsSize();
  uint8_t* osiPoint = osiPointReturnAddress();
  MOZ_ASSERT(rawBase <= osiPoint && osiPoint <= rawLimit);
#endif
}
   2690 
// Walk every JIT activation of |cx| and release-assert structural stack
// invariants: frame ordering, frame sizes, and JitStackAlignment at the
// points where the JITs guarantee it.
void AssertJitStackInvariants(JSContext* cx) {
  for (JitActivationIterator activations(cx); !activations.done();
       ++activations) {
    JitFrameIter iter(activations->asJit());
    if (iter.isJSJit()) {
      JSJitFrameIter& frames = iter.asJSJit();
      size_t prevFrameSize = 0;
      size_t frameSize = 0;
      bool isScriptedCallee = false;
      for (; !frames.done(); ++frames) {
        // Frames grow downward, so a caller's fp is above the callee's.
        size_t calleeFp = reinterpret_cast<size_t>(frames.fp());
        size_t callerFp = reinterpret_cast<size_t>(frames.prevFp());
        MOZ_ASSERT(callerFp >= calleeFp);
        prevFrameSize = frameSize;
        frameSize = callerFp - calleeFp;

        if (frames.isScripted() &&
            frames.prevType() == FrameType::BaselineInterpreterEntry) {
          MOZ_RELEASE_ASSERT(
              frameSize % JitStackAlignment == 0,
              "The blinterp entry frame should keep the alignment");

          size_t expectedFrameSize =
              sizeof(Value) *
                  (frames.callee()->nargs() + 1 /* |this| argument */ +
                   frames.isConstructing() /* new.target */) +
              sizeof(JitFrameLayout);
          MOZ_RELEASE_ASSERT(frameSize >= expectedFrameSize,
                             "The frame is large enough to hold all arguments");
          MOZ_RELEASE_ASSERT(expectedFrameSize + JitStackAlignment > frameSize,
                             "The frame size is optimal");
        }

        if (frames.isExitFrame()) {
          // For the moment, we do not keep the JitStackAlignment
          // alignment for exit frames.
          frameSize -= ExitFrameLayout::Size();
        }

        if (frames.isIonJS()) {
          // Ideally, we should not have such requirement, but keep the
          // alignment-delta as part of the Safepoint such that we can pad
          // accordingly when making out-of-line calls.  In the mean time,
          // let us have check-points where we can guarantee that
          // everything can properly be aligned before adding complexity.
          MOZ_RELEASE_ASSERT(
              frames.ionScript()->frameSize() % JitStackAlignment == 0,
              "Ensure that if the Ion frame is aligned, then the spill base is "
              "also aligned");

          if (isScriptedCallee) {
            MOZ_RELEASE_ASSERT(prevFrameSize % JitStackAlignment == 0,
                               "The ion frame should keep the alignment");
          }
        }

        // The stack is dynamically aligned by baseline stubs before calling
        // any jitted code.
        if (frames.prevType() == FrameType::BaselineStub && isScriptedCallee) {
          MOZ_RELEASE_ASSERT(calleeFp % JitStackAlignment == 0,
                             "The baseline stub restores the stack alignment");
        }

        isScriptedCallee = frames.isScripted();
      }

      MOZ_RELEASE_ASSERT(
          JSJitFrameIter::isEntry(frames.type()),
          "The first frame of a Jit activation should be an entry frame");
      MOZ_RELEASE_ASSERT(
          reinterpret_cast<size_t>(frames.fp()) % JitStackAlignment == 0,
          "The entry frame should be properly aligned");
    } else {
      MOZ_ASSERT(iter.isWasm());
      // Wasm frames have their own invariants; just walk them to completion.
      wasm::WasmFrameIter& frames = iter.asWasm();
      while (!frames.done()) {
        ++frames;
      }
    }
  }
}
   2772 
   2773 }  // namespace jit
   2774 }  // namespace js