tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WasmFrameIter.cpp (88089B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 *
      4 * Copyright 2014 Mozilla Foundation
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *     http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 #include "wasm/WasmFrameIter.h"
     20 
     21 #include "jit/JitFrames.h"
     22 #include "jit/JitRuntime.h"
     23 #include "jit/shared/IonAssemblerBuffer.h"  // jit::BufferOffset
     24 #include "js/ColumnNumber.h"  // JS::WasmFunctionIndex, LimitedColumnNumberOneOrigin, JS::TaggedColumnNumberOneOrigin, JS::TaggedColumnNumberOneOrigin
     25 #include "vm/JitActivation.h"  // js::jit::JitActivation
     26 #include "vm/JSAtomState.h"
     27 #include "vm/JSContext.h"
     28 #include "wasm/WasmBuiltinModuleGenerated.h"
     29 #include "wasm/WasmDebugFrame.h"
     30 #include "wasm/WasmInstance.h"
     31 #include "wasm/WasmInstanceData.h"
     32 #include "wasm/WasmPI.h"
     33 #include "wasm/WasmStubs.h"
     34 
     35 #include "jit/MacroAssembler-inl.h"
     36 #include "wasm/WasmInstance-inl.h"
     37 
     38 #ifdef XP_WIN
     39 // We only need the `windows.h` header, but this file can get unified built
      40 // with WasmSignalHandlers.cpp, which requires `winternl.h` to be included
     41 // before the `windows.h` header, and so we must include it here for that case.
     42 #  include <winternl.h>  // must include before util/WindowsWrapper.h's `#undef`s
     43 
     44 #  include "util/WindowsWrapper.h"
     45 #endif
     46 
     47 using namespace js;
     48 using namespace js::jit;
     49 using namespace js::wasm;
     50 
     51 using mozilla::DebugOnly;
     52 using mozilla::Maybe;
     53 
     54 static Instance* ExtractCallerInstanceFromFrameWithInstances(Frame* fp) {
     55  return *reinterpret_cast<Instance**>(
     56      reinterpret_cast<uint8_t*>(fp) +
     57      FrameWithInstances::callerInstanceOffset());
     58 }
     59 
     60 static const Instance* ExtractCalleeInstanceFromFrameWithInstances(
     61    const Frame* fp) {
     62  return *reinterpret_cast<Instance* const*>(
     63      reinterpret_cast<const uint8_t*>(fp) +
     64      FrameWithInstances::calleeInstanceOffset());
     65 }
     66 
     67 static uint32_t FuncIndexForLineOrBytecode(const Code& code,
     68                                           uint32_t lineOrBytecode,
     69                                           const CodeRange& codeRange) {
     70  // If this is asm.js, then this is a line number and we also will not be
     71  // doing any inlining. Report the physical func index.
     72  //
     73  // Or else if there is no bytecode offset in the call site, then this must be
     74  // something internal we've generated and no inlining should be involved.
     75  if (code.codeMeta().isAsmJS() ||
     76      lineOrBytecode == CallSite::NO_LINE_OR_BYTECODE) {
     77    // Fall back to the physical function index of the code range.
     78    return codeRange.funcIndex();
     79  }
     80  return code.codeTailMeta().findFuncIndex(lineOrBytecode);
     81 }
     82 
     83 /*****************************************************************************/
     84 // WasmFrameIter implementation
     85 
// Constructs an iterator over the wasm frames of `activation`, starting at
// `fp` (or at the activation's exit FP when `fp` is null). If the activation
// is currently trapping at its exit FP, the pc/bytecode state captured by the
// signal handler is used instead of unwinding through an exit stub.
WasmFrameIter::WasmFrameIter(JitActivation* activation, wasm::Frame* fp)
    : activation_(activation), fp_(fp ? fp : activation->wasmExitFP()) {
  MOZ_ASSERT(fp_);
  instance_ = GetNearestEffectiveInstance(fp_);

  // When the stack is captured during a trap (viz., to create the .stack
  // for an Error object), use the pc/bytecode information captured by the
  // signal handler in the runtime. Take care not to use this trap unwind
  // state for wasm frames in the middle of a JitActivation, i.e., wasm frames
  // that called into JIT frames before the trap.

  if (activation->isWasmTrapping() && fp_ == activation->wasmExitFP()) {
    const TrapData& trapData = activation->wasmTrapData();
    void* unwoundPC = trapData.unwoundPC;

    code_ = &instance_->code();
    MOZ_ASSERT(code_ == LookupCode(unwoundPC));

    // Resolve the (possibly inlined) function containing the trapping pc
    // from the trap site's bytecode offset.
    const CodeRange* codeRange = code_->lookupFuncRange(unwoundPC);
    lineOrBytecode_ = trapData.trapSite.bytecodeOffset.offset();
    funcIndex_ =
        FuncIndexForLineOrBytecode(*code_, lineOrBytecode_, *codeRange);
    inlinedCallerOffsets_ = trapData.trapSite.inlinedCallerOffsetsSpan();
    failedUnwindSignatureMismatch_ = trapData.failedUnwindSignatureMismatch;

    // The debugEnabled() relies on valid value of resumePCinCurrentFrame_
    // to identify DebugFrame. Normally this field is updated at popFrame().
    // The only case when this can happen is during IndirectCallBadSig
    // trapping and stack unwinding. The top frame will never be at ReturnStub
    // callsite, except during IndirectCallBadSig unwinding.
    CallSite site;
    if (code_->lookupCallSite(unwoundPC, &site) &&
        site.kind() == CallSiteKind::ReturnStub) {
      MOZ_ASSERT(trapData.trap == Trap::IndirectCallBadSig);
      resumePCinCurrentFrame_ = (uint8_t*)unwoundPC;
    } else {
      resumePCinCurrentFrame_ = (uint8_t*)trapData.resumePC;
    }

    MOZ_ASSERT(!done());
    return;
  }

  // Otherwise, execution exits wasm code via an exit stub which sets exitFP
  // to the exit stub's frame. Thus, in this case, we want to start iteration
  // at the caller of the exit frame, whose Code, CodeRange and CallSite are
  // indicated by the returnAddress of the exit stub's frame. If the caller
  // was Ion, we can just skip the wasm frames.

  popFrame(/*isLeavingFrame=*/false);
  MOZ_ASSERT(!done() || unwoundCallerFP_);
}
    138 
// Fast-path constructor for a known call site: `fp` is a FrameWithInstances
// and `returnAddress` lies inside a wasm function body, so the frame's state
// can be read directly without a popFrame() unwind step.
WasmFrameIter::WasmFrameIter(FrameWithInstances* fp, void* returnAddress)
    : lineOrBytecode_(0),
      fp_(fp),
      instance_(fp->calleeInstance()),
      resumePCinCurrentFrame_((uint8_t*)returnAddress) {
  // Specialized implementation to avoid popFrame() iteration.
  // It is expected that the iterator starts at a callsite that is in
  // the function body and has instance reference.
  const CodeRange* codeRange;
  code_ = LookupCode(returnAddress, &codeRange);
  MOZ_ASSERT(code_ && codeRange->kind() == CodeRange::Function);

  CallSite site;
  MOZ_ALWAYS_TRUE(code_->lookupCallSite(returnAddress, &site));
  MOZ_ASSERT(site.mightBeCrossInstance());

#ifdef ENABLE_WASM_JSPI
  // Record whether this call site performed a stack switch (JSPI).
  currentFrameStackSwitched_ = site.isStackSwitch();
#endif

  MOZ_ASSERT(code_ == &instance_->code());
  lineOrBytecode_ = site.lineOrBytecode();
  funcIndex_ =
      FuncIndexForLineOrBytecode(*code_, site.lineOrBytecode(), *codeRange);
  inlinedCallerOffsets_ = site.inlinedCallerOffsetsSpan();

  MOZ_ASSERT(!done());
}
    167 
    168 bool WasmFrameIter::done() const {
    169  MOZ_ASSERT(!!fp_ == !!code_);
    170  return !fp_;
    171 }
    172 
// Advances to the caller frame. When isLeavingFrames_ is set, the
// activation's exit state is torn down as frames are popped.
void WasmFrameIter::operator++() {
  MOZ_ASSERT(!done());
  popFrame(/*isLeavingFrame=*/isLeavingFrames_);
}
    177 
// DEBUG-only check that `fp` addresses a JIT exit frame whose footer carries
// the `expected` ExitFrameType. No-op (parameters unused) in release builds.
static inline void AssertJitExitFrame(const void* fp,
                                      jit::ExitFrameType expected) {
  // Called via a JIT to wasm call: in this case, FP is pointing in the middle
  // of the exit frame, right before the exit footer; ensure the exit frame type
  // is the expected one.
#ifdef DEBUG
  auto* jitCaller = (ExitFrameLayout*)fp;
  MOZ_ASSERT(jitCaller->footer()->type() == expected);
#endif
}
    188 
// DEBUG-only check that `fp` is the fake exit frame pushed for a direct
// JIT->wasm call.
static inline void AssertDirectJitCall(const void* fp) {
  AssertJitExitFrame(fp, jit::ExitFrameType::DirectWasmJitCall);
}
    192 
// Unwinds one frame: first any pending inlined (virtual) frames, then the
// physical frame. Handles the three ways a wasm frame chain can end (direct
// JIT call, interpreter entry, JIT entry) by recording the unwound caller FP
// and clearing fp_/code_ so done() becomes true. When `isLeavingFrame` is
// true, the activation's exit/trap/suspender state is updated as frames are
// physically popped.
void WasmFrameIter::popFrame(bool isLeavingFrame) {
  // If we're visiting inlined frames, see if this frame was inlined.
  if (enableInlinedFrames_ && inlinedCallerOffsets_.size() > 0) {
    // We do not support inlining and debugging. If we did we'd need to support
    // `isLeavingFrame` here somehow to remove inlined frames from the
    // JitActivation.
    MOZ_ASSERT(!code_->debugEnabled());

    // The inlined callee offsets are ordered so that our immediate caller is
    // the last offset.
    //
    // Set our current offset and func index to the last entry, then shift the
    // span over by one.
    const BytecodeOffset* first = inlinedCallerOffsets_.data();
    const BytecodeOffset* last =
        inlinedCallerOffsets_.data() + inlinedCallerOffsets_.size() - 1;
    lineOrBytecode_ = last->offset();
    inlinedCallerOffsets_ = BytecodeOffsetSpan(first, last);
    MOZ_ASSERT(lineOrBytecode_ != CallSite::NO_LINE_OR_BYTECODE);
    funcIndex_ = code_->codeTailMeta().findFuncIndex(lineOrBytecode_);
    // An inlined frame will never do a stack switch, nor fail a signature
    // mismatch.
    // NOTE(review): unlike the writes below, this assignment is not guarded
    // by ENABLE_WASM_JSPI -- presumably the member is declared
    // unconditionally; confirm against WasmFrameIter.h.
    currentFrameStackSwitched_ = false;
    failedUnwindSignatureMismatch_ = false;
    // Invalidate the resumePC, it should not be accessed anyways
    resumePCinCurrentFrame_ = nullptr;
    // Preserve fp_ for unwinding to the next frame when we're done with inline
    // frames.
    return;
  }

  uint8_t* returnAddress = fp_->returnAddress();
  const CodeRange* codeRange;
  code_ = LookupCode(returnAddress, &codeRange);
#ifdef ENABLE_WASM_JSPI
  currentFrameStackSwitched_ = false;
#endif

  // Track the current suspender if we are leaving frames.
#ifdef ENABLE_WASM_JSPI
  wasm::SuspenderObject* currentSuspender = nullptr;
#endif
  if (isLeavingFrame) {
    MOZ_ASSERT(activation_->hasWasmExitFP());
#ifdef ENABLE_WASM_JSPI
    currentSuspender = activation_->wasmExitSuspender();
#endif

    // If we are trapping and leaving frames, then remove the trapping state.
    if (activation_->isWasmTrapping()) {
      activation_->finishWasmTrap(/*isResuming=*/false);
    }
  }

  if (!code_) {
    // This is a direct call from the jit into the wasm function's body. The
    // call stack resembles this at this point:
    //
    // |---------------------|
    // |      JIT FRAME      |
    // | JIT FAKE EXIT FRAME | <-- fp_->callerFP_
    // |      WASM FRAME     | <-- fp_
    // |---------------------|
    //
    // fp_->callerFP_ points to the fake exit frame set up by the jit caller,
    // and the return-address-to-fp is in JIT code, thus doesn't belong to any
    // wasm instance's code (in particular, there's no associated CodeRange).
    // Mark the frame as such.
    AssertDirectJitCall(fp_->jitEntryCaller());

    unwoundCallerFP_ = fp_->jitEntryCaller();
    unwoundCallerFPIsJSJit_ = true;
    unwoundAddressOfReturnAddress_ = fp_->addressOfReturnAddress();

    if (isLeavingFrame) {
      activation_->setJSExitFP(unwoundCallerFP_);
    }

    // Terminate iteration: clear fp_/code_ together (see done()).
    fp_ = nullptr;
    code_ = nullptr;
    funcIndex_ = UINT32_MAX;
    lineOrBytecode_ = UINT32_MAX;
    inlinedCallerOffsets_ = BytecodeOffsetSpan();
    resumePCinCurrentFrame_ = nullptr;

    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange);

  Frame* prevFP = fp_;
  fp_ = fp_->wasmCaller();
  resumePCinCurrentFrame_ = returnAddress;

  if (codeRange->isInterpEntry()) {
    // Interpreter entry has a simple frame, record FP from it.
    unwoundCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
    MOZ_ASSERT(!unwoundCallerFPIsJSJit_);
    unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();

    // Terminate iteration.
    fp_ = nullptr;
    code_ = nullptr;
    funcIndex_ = UINT32_MAX;
    lineOrBytecode_ = UINT32_MAX;
    inlinedCallerOffsets_ = BytecodeOffsetSpan();

    if (isLeavingFrame) {
      // We're exiting via the interpreter entry; we can safely reset
      // exitFP.
      activation_->setWasmExitFP(nullptr, nullptr);
    }

    MOZ_ASSERT(done());
    return;
  }

  if (codeRange->isJitEntry()) {
    // This wasm function has been called through the generic JIT entry by
    // a JIT caller, so the call stack resembles this:
    //
    // |---------------------|
    // |      JIT FRAME      |
    // |  JSJIT TO WASM EXIT | <-- fp_
    // |    WASM JIT ENTRY   | <-- prevFP (already unwound)
    // |      WASM FRAME     | (already unwound)
    // |---------------------|
    //
    // The next value of FP is a jit exit frame with type WasmGenericJitEntry.
    // This lets us transition to a JSJit frame iterator.
    unwoundCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
    unwoundCallerFPIsJSJit_ = true;
    AssertJitExitFrame(unwoundCallerFP_,
                       jit::ExitFrameType::WasmGenericJitEntry);
    unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();

    // Terminate iteration.
    fp_ = nullptr;
    code_ = nullptr;
    funcIndex_ = UINT32_MAX;
    lineOrBytecode_ = UINT32_MAX;
    inlinedCallerOffsets_ = BytecodeOffsetSpan();

    if (isLeavingFrame) {
      activation_->setJSExitFP(unwoundCallerFP());
    }

    MOZ_ASSERT(done());
    return;
  }

  // Ordinary wasm caller: refresh all per-frame state from its call site.
  MOZ_ASSERT(codeRange->kind() == CodeRange::Function);

  CallSite site;
  MOZ_ALWAYS_TRUE(code_->lookupCallSite(returnAddress, &site));

  if (site.mightBeCrossInstance()) {
    instance_ = ExtractCallerInstanceFromFrameWithInstances(prevFP);
  }

#ifdef ENABLE_WASM_JSPI
  currentFrameStackSwitched_ = site.isStackSwitch();
#endif

  MOZ_ASSERT(code_ == &instance_->code());

  lineOrBytecode_ = site.lineOrBytecode();
  funcIndex_ =
      FuncIndexForLineOrBytecode(*code_, site.lineOrBytecode(), *codeRange);
  inlinedCallerOffsets_ = site.inlinedCallerOffsetsSpan();
  failedUnwindSignatureMismatch_ = false;

  if (isLeavingFrame) {
#ifdef ENABLE_WASM_JSPI
    wasm::SuspenderObject* newSuspender = currentSuspender;
    // If we switched stacks, look up the new suspender using the new FP.
    if (currentFrameStackSwitched_) {
      newSuspender =
          activation_->cx()->wasm().findSuspenderForStackAddress(fp_);
    }

    // If we are unwinding past a suspender, unwind it to release its
    // resources.
    if (newSuspender != currentSuspender) {
      currentSuspender->unwind(activation_->cx());
    }
#else
    wasm::SuspenderObject* newSuspender = nullptr;
#endif
    // Any future frame iteration will start by popping the exitFP, so setting
    // it to `prevFP` ensures that frame iteration starts at our new `fp_`.
    activation_->setWasmExitFP(prevFP, newSuspender);
  }

  MOZ_ASSERT(!done());
}
    388 
    389 bool WasmFrameIter::hasSourceInfo() const {
    390  // Source information is not available unless you're visiting inline frames,
    391  // or you're debugging and therefore no inlining is happening.
    392  return enableInlinedFrames_ || code_->debugEnabled();
    393 }
    394 
    395 const char* WasmFrameIter::filename() const {
    396  MOZ_ASSERT(!done());
    397  MOZ_ASSERT(hasSourceInfo());
    398  return code_->codeMeta().scriptedCaller().filename.get();
    399 }
    400 
    401 const char16_t* WasmFrameIter::displayURL() const {
    402  MOZ_ASSERT(!done());
    403  MOZ_ASSERT(hasSourceInfo());
    404  return code_->codeMetaForAsmJS()
    405             ? code_->codeMetaForAsmJS()->displayURL()  // asm.js
    406             : nullptr;                                 // wasm
    407 }
    408 
    409 bool WasmFrameIter::mutedErrors() const {
    410  MOZ_ASSERT(!done());
    411  MOZ_ASSERT(hasSourceInfo());
    412  return code_->codeMetaForAsmJS()
    413             ? code_->codeMetaForAsmJS()->mutedErrors()  // asm.js
    414             : false;                                    // wasm
    415 }
    416 
    417 JSAtom* WasmFrameIter::functionDisplayAtom() const {
    418  MOZ_ASSERT(!done());
    419  MOZ_ASSERT(hasSourceInfo());
    420 
    421  JSContext* cx = activation_->cx();
    422  JSAtom* atom = instance_->getFuncDisplayAtom(cx, funcIndex_);
    423  if (!atom) {
    424    cx->clearPendingException();
    425    return cx->names().empty_;
    426  }
    427 
    428  return atom;
    429 }
    430 
// Returns the source line (asm.js) or bytecode offset (wasm) of the current
// frame. Only valid when hasSourceInfo() is true.
unsigned WasmFrameIter::lineOrBytecode() const {
  MOZ_ASSERT(!done());
  MOZ_ASSERT(hasSourceInfo());
  return lineOrBytecode_;
}
    436 
// Returns the (possibly inlined) function index of the current frame. Only
// valid when hasSourceInfo() is true.
uint32_t WasmFrameIter::funcIndex() const {
  MOZ_ASSERT(!done());
  MOZ_ASSERT(hasSourceInfo());
  return funcIndex_;
}
    442 
    443 unsigned WasmFrameIter::computeLine(
    444    JS::TaggedColumnNumberOneOrigin* column) const {
    445  MOZ_ASSERT(!done());
    446  MOZ_ASSERT(hasSourceInfo());
    447  if (instance_->isAsmJS()) {
    448    if (column) {
    449      *column =
    450          JS::TaggedColumnNumberOneOrigin(JS::LimitedColumnNumberOneOrigin(
    451              JS::WasmFunctionIndex::DefaultBinarySourceColumnNumberOneOrigin));
    452    }
    453    return lineOrBytecode_;
    454  }
    455 
    456  MOZ_ASSERT(!(funcIndex_ & JS::TaggedColumnNumberOneOrigin::WasmFunctionTag));
    457  if (column) {
    458    *column =
    459        JS::TaggedColumnNumberOneOrigin(JS::WasmFunctionIndex(funcIndex_));
    460  }
    461  return lineOrBytecode_;
    462 }
    463 
// Whether the current frame has a DebugFrame that may be safely accessed via
// debugFrame(). All guards below must hold; their order matters only for
// readability, each independently rules out a DebugFrame.
bool WasmFrameIter::debugEnabled() const {
  MOZ_ASSERT(!done());

  // Metadata::debugEnabled is only set if debugging is actually enabled (both
  // requested, and available via baseline compilation), and Tier::Debug code
  // will be available.
  if (!code_->debugEnabled()) {
    return false;
  }

  // Debug information is not available in prologue when the iterator is
  // failing to unwind invalid signature trap.
  if (failedUnwindSignatureMismatch_) {
    return false;
  }

  // Only non-imported functions can have debug frames.
  if (funcIndex_ < code_->funcImports().length()) {
    return false;
  }

  // Debug frame is not present at the return stub.
  CallSite site;
  return !(code_->lookupCallSite((void*)resumePCinCurrentFrame_, &site) &&
           site.kind() == CallSiteKind::ReturnStub);
}
    490 
// Returns the DebugFrame overlaying fp_. Callers should first check
// debugEnabled() to ensure the frame actually has one.
DebugFrame* WasmFrameIter::debugFrame() const {
  MOZ_ASSERT(!done());
  return DebugFrame::from(fp_);
}
    495 
    496 /*****************************************************************************/
    497 // Prologue/epilogue code generation
    498 
// These constants reflect statically-determined offsets in the
// prologue/epilogue. The offsets are dynamically asserted during code
// generation (see GenerateCallablePrologue/Epilogue below):
//
//   PushedRetAddr    - bytes from entry until the return address is stored
//   PushedFP         - bytes from entry until the caller's FP is stored
//   SetFP            - bytes from entry until FP := SP
//   PoppedFP         - bytes from the FP restore to the return point
//   PoppedFPJitEntry - as PoppedFP, for the JIT entry epilogue
#if defined(JS_CODEGEN_X64)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 4;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_X86)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 3;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM)
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4;
static const unsigned PushedFP = 8;
static const unsigned SetFP = 12;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM64)
// On ARM64 we do not use push or pop; the prologues and epilogues are
// structured differently due to restrictions on SP alignment.  Even so,
// PushedRetAddr and PushedFP are used in some restricted contexts
// and must be superficially meaningful.
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 12;
static const unsigned SetFP = 16;
static const unsigned PoppedFP = 8;
static const unsigned PoppedFPJitEntry = 8;
static_assert(BeforePushRetAddr == 0, "Required by StartUnwinding");
static_assert(PushedFP > PushedRetAddr, "Required by StartUnwinding");
#elif defined(JS_CODEGEN_MIPS64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 8;
#elif defined(JS_CODEGEN_LOONG64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 8;
#elif defined(JS_CODEGEN_RISCV64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 8;
#elif defined(JS_CODEGEN_NONE) || defined(JS_CODEGEN_WASM32)
// Synthetic values to satisfy asserts and avoid compiler warnings.
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 2;
static const unsigned PoppedFP = 3;
static const unsigned PoppedFPJitEntry = 4;
#else
#  error "Unknown architecture!"
#endif
    562 
// Emits code loading the JitActivation into `dest`:
// dest = instance->cx()->activation. `instance` must hold an Instance*.
void wasm::LoadActivation(MacroAssembler& masm, Register instance,
                          Register dest) {
  // WasmCall pushes a JitActivation.
  masm.loadPtr(Address(instance, wasm::Instance::offsetOfCx()), dest);
  masm.loadPtr(Address(dest, JSContext::offsetOfActivation()), dest);
}
    569 
// Emits code recording a wasm exit on the activation: stores the encoded
// exit `reason` and the ExitFPTag-tagged frame pointer into the activation's
// packed exitFP slot. `scratch` must differ from `activation`.
void wasm::SetExitFP(MacroAssembler& masm, ExitReason reason,
                     Register activation, Register scratch) {
  MOZ_ASSERT(!reason.isNone());
  MOZ_ASSERT(activation != scratch);

  // Write the encoded exit reason to the activation
  masm.store32(
      Imm32(reason.encode()),
      Address(activation, JitActivation::offsetOfEncodedWasmExitReason()));

  // Tag the frame pointer in a different register so that we don't break
  // async profiler unwinding.
  masm.orPtr(Imm32(ExitFPTag), FramePointer, scratch);

  // Write the tagged exitFP to the activation
  masm.storePtr(scratch,
                Address(activation, JitActivation::offsetOfPackedExitFP()));
}
    588 
// Emits code clearing the activation's wasm exit state: zeroes the packed
// exitFP first, then the encoded exit reason. (The store order is kept as-is;
// SetExitFP documents that exit state is observed by async profiler
// unwinding.)
void wasm::ClearExitFP(MacroAssembler& masm, Register activation) {
  masm.storePtr(ImmWord(0x0),
                Address(activation, JitActivation::offsetOfPackedExitFP()));
  masm.store32(
      Imm32(0x0),
      Address(activation, JitActivation::offsetOfEncodedWasmExitReason()));
}
    596 
// Emits the standard callable prologue (`push ret-addr if needed; push FP;
// FP := SP`) and records its entry offset in `*entry`. Each emitted
// instruction's offset is asserted against the per-architecture constants
// above, which ProfilingFrameIterator relies on.
static void GenerateCallablePrologue(MacroAssembler& masm, uint32_t* entry) {
  AutoCreatedBy acb(masm, "GenerateCallablePrologue");
  masm.setFramePushed(0);

  // ProfilingFrameIterator needs to know the offsets of several key
  // instructions from entry. To save space, we make these offsets static
  // constants and assert that they match the actual codegen below. On ARM,
  // this requires AutoForbidPoolsAndNops to prevent a constant pool from being
  // randomly inserted between two instructions.

#if defined(JS_CODEGEN_MIPS64)
  {
    *entry = masm.currentOffset();

    masm.ma_push(ra);
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.ma_push(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#elif defined(JS_CODEGEN_LOONG64)
  {
    *entry = masm.currentOffset();

    masm.ma_push(ra);
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.ma_push(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#elif defined(JS_CODEGEN_RISCV64)
  {
    *entry = masm.currentOffset();
    // Keep trampoline pool emission out of this instruction sequence so the
    // asserted offsets hold.
    BlockTrampolinePoolScope block_trampoline_pool(&masm, 5);
    masm.ma_push(ra);
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.ma_push(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#elif defined(JS_CODEGEN_ARM64)
  {
    // We do not use the PseudoStackPointer.  However, we may be called in a
    // context -- compilation using Ion -- in which the PseudoStackPointer is
    // in use.  Rather than risk confusion in the uses of `masm` here, let's
    // just switch in the real SP, do what we need to do, and restore the
    // existing setting afterwards.
    const vixl::Register stashedSPreg = masm.GetStackPointer64();
    masm.SetStackPointer64(vixl::sp);

    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 4);

    *entry = masm.currentOffset();

    // No push on ARM64 (SP alignment restrictions): reserve the Frame in one
    // Sub, then store the return address and caller FP into their slots.
    masm.Sub(sp, sp, sizeof(Frame));
    masm.Str(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.Str(ARMRegister(FramePointer, 64),
             MemOperand(sp, Frame::callerFPOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.Mov(ARMRegister(FramePointer, 64), sp);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);

    // And restore the SP-reg setting, per comment above.
    masm.SetStackPointer64(stashedSPreg);
  }
#else
  {
#  if defined(JS_CODEGEN_ARM)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 3);

    *entry = masm.currentOffset();

    static_assert(BeforePushRetAddr == 0);
    masm.push(lr);
#  else
    *entry = masm.currentOffset();
    // The x86/x64 call instruction pushes the return address.
#  endif

    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.push(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#endif
}
    690 
// Emits the matching callable epilogue: frees `framePushed` bytes, restores
// FP and the return address, and returns. `*ret` receives the offset of the
// return instruction; the distance from the FP restore to `*ret` is asserted
// to equal the per-architecture PoppedFP constant.
static void GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed,
                                     uint32_t* ret) {
  AutoCreatedBy acb(masm, "GenerateCallableEpilogue");

  if (framePushed) {
    masm.freeStack(framePushed);
  }

  // Offset just after the caller-FP restore, checked against PoppedFP below.
  DebugOnly<uint32_t> poppedFP{};

#if defined(JS_CODEGEN_MIPS64)

  masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
  poppedFP = masm.currentOffset();
  masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);

  *ret = masm.currentOffset();
  // Note: the SP adjustment is emitted after the jump (MIPS branch delay
  // slot).
  masm.as_jr(ra);
  masm.addToStackPtr(Imm32(sizeof(Frame)));

#elif defined(JS_CODEGEN_LOONG64)

  masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);
  masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
  poppedFP = masm.currentOffset();

  masm.addToStackPtr(Imm32(sizeof(Frame)));
  *ret = masm.currentOffset();
  masm.as_jirl(zero, ra, BOffImm16(0));

#elif defined(JS_CODEGEN_RISCV64)
  {
    // Keep trampoline pool emission out of this instruction sequence so the
    // asserted offsets hold.
    BlockTrampolinePoolScope block_trampoline_pool(&masm, 20);
    masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
    poppedFP = masm.currentOffset();
    masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);

    *ret = masm.currentOffset();
    masm.addToStackPtr(Imm32(sizeof(Frame)));
    masm.jalr(zero, ra, 0);
    masm.nop();
  }
#elif defined(JS_CODEGEN_ARM64)

  // See comment at equivalent place in |GenerateCallablePrologue| above.
  const vixl::Register stashedSPreg = masm.GetStackPointer64();
  masm.SetStackPointer64(vixl::sp);

  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 5);

  masm.Ldr(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
  masm.Ldr(ARMRegister(FramePointer, 64),
           MemOperand(sp, Frame::callerFPOffset()));
  poppedFP = masm.currentOffset();

  masm.Add(sp, sp, sizeof(Frame));

  // Reinitialise PSP from SP. This is less than elegant because the prologue
  // operates on the raw stack pointer SP and does not keep the PSP in sync.
  // We can't use initPseudoStackPtr here because we just set up masm to not
  // use it.  Hence we have to do it "by hand".
  masm.Mov(PseudoStackPointer64, vixl::sp);

  *ret = masm.currentOffset();
  masm.Ret(ARMRegister(lr, 64));

  // See comment at equivalent place in |GenerateCallablePrologue| above.
  masm.SetStackPointer64(stashedSPreg);

#else
  // Forbid pools for the same reason as described in GenerateCallablePrologue.
#  if defined(JS_CODEGEN_ARM)
  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 6);
#  endif

  // There is an important ordering constraint here: fp must be repointed to
  // the caller's frame before any field of the frame currently pointed to by
  // fp is popped: asynchronous signal handlers (which use stack space
  // starting at sp) could otherwise clobber these fields while they are still
  // accessible via fp (fp fields are read during frame iteration which is
  // *also* done asynchronously).

  masm.pop(FramePointer);
  poppedFP = masm.currentOffset();

  *ret = masm.currentOffset();
  masm.ret();

#endif

  MOZ_ASSERT_IF(!masm.oom(), PoppedFP == *ret - poppedFP);
}
    783 
// Generate the most minimal possible prologue: `push FP; FP := SP`.
// `entry` receives the code offset of the first prologue instruction.
void wasm::GenerateMinimalPrologue(MacroAssembler& masm, uint32_t* entry) {
  // No stack space may have been reserved yet; the minimal prologue only
  // establishes the Frame (return address + caller FP).
  MOZ_ASSERT(masm.framePushed() == 0);
  GenerateCallablePrologue(masm, entry);
}
    789 
// Generate the most minimal possible epilogue: `pop FP; return`.
// `ret` receives the code offset of the return instruction.
void wasm::GenerateMinimalEpilogue(MacroAssembler& masm, uint32_t* ret) {
  // Mirror of GenerateMinimalPrologue: no locals/outgoing-args area to free.
  MOZ_ASSERT(masm.framePushed() == 0);
  GenerateCallableEpilogue(masm, /*framePushed=*/0, ret);
}
    795 
// Emit the full prologue for a wasm function: an optional "checked call"
// entry that validates the caller-supplied signature (used by call_indirect),
// followed by the "unchecked call" entry used for direct calls, and an
// optional tier-up jump through the instance's jump table when a tier-1
// function index is supplied. Records all entry offsets in `offsets`.
void wasm::GenerateFunctionPrologue(MacroAssembler& masm,
                                    const CallIndirectId& callIndirectId,
                                    const Maybe<uint32_t>& tier1FuncIndex,
                                    FuncOffsets* offsets) {
  AutoCreatedBy acb(masm, "wasm::GenerateFunctionPrologue");

  // We are going to generate this code layout:
  // ---------------------------------------------
  // checked call entry:    callable prologue
  //                        check signature
  //                        jump functionBody ──┐
  // unchecked call entry:  callable prologue   │
  //                        functionBody  <─────┘
  // -----------------------------------------------
  // checked call entry - used for call_indirect when we have to check the
  // signature.
  //
  // unchecked call entry - used for regular direct same-instance calls.

  // The checked call entry is a call target, so must have CodeAlignment.
  // Its offset is normally zero.
  static_assert(WasmCheckedCallEntryOffset % CodeAlignment == 0,
                "code aligned");

  // Flush pending pools so they do not get dumped between the 'begin' and
  // 'uncheckedCallEntry' offsets since the difference must be less than
  // UINT8_MAX to be stored in CodeRange::funcbeginToUncheckedCallEntry_.
  // (Pending pools can be large.)
  masm.flushBuffer();
  masm.haltingAlign(CodeAlignment);

  Label functionBody;

  offsets->begin = masm.currentOffset();

  // Only first-class functions (those that can be referenced in a table) need
  // the checked call prologue w/ signature check. It is impossible to perform
  // a checked call otherwise.
  //
  // asm.js function tables are homogeneous and don't need a signature check.
  // However, they can be put in tables which expect a checked call entry point,
  // so we generate a no-op entry point for consistency. If asm.js performance
  // was important we could refine this in the future.
  if (callIndirectId.kind() != CallIndirectIdKind::None) {
    // Generate checked call entry. The BytecodeOffset of the trap is fixed up
    // to be the bytecode offset of the callsite by
    // JitActivation::startWasmTrap.
    MOZ_ASSERT_IF(!masm.oom(), masm.currentOffset() - offsets->begin ==
                                   WasmCheckedCallEntryOffset);
    uint32_t dummy;
    GenerateCallablePrologue(masm, &dummy);

    switch (callIndirectId.kind()) {
      case CallIndirectIdKind::Global: {
        Label fail;
        Register scratch1 = WasmTableCallScratchReg0;
        Register scratch2 = WasmTableCallScratchReg1;

        // Load the STV (super type vector) of this callee's function type.
        masm.loadPtr(
            Address(InstanceReg,
                    Instance::offsetInData(
                        callIndirectId.instanceDataOffset() +
                        offsetof(wasm::TypeDefInstanceData, superTypeVector))),
            scratch1);

        // Emit a longer check when the callee function type has a super type,
        // as the caller may be using one of the super type's of this callee.
        if (callIndirectId.hasSuperType()) {
          // Check if this function's type is exactly the expected function type
          masm.branchPtr(Assembler::Condition::Equal, WasmTableCallSigReg,
                         scratch1, &functionBody);

          // Otherwise, we need to see if this function's type is a sub type of
          // the expected function type. This requires us to check if the
          // expected's type is in the super type vector of this function's
          // type.

          // Check if the expected function type was an immediate, not a
          // type definition. Because we only allow the immediate form for
          // final types without super types, this implies that we have a
          // signature mismatch.
          masm.branchTestPtr(Assembler::Condition::NonZero, WasmTableCallSigReg,
                             Imm32(FuncType::ImmediateBit), &fail);

          // Load the subtyping depth of the expected function type. Re-use the
          // index register, as it's no longer needed.
          Register subTypingDepth = WasmTableCallIndexReg;
          masm.load32(
              Address(WasmTableCallSigReg,
                      int32_t(SuperTypeVector::offsetOfSubTypingDepth())),
              subTypingDepth);

          // Perform the check
          masm.branchWasmSTVIsSubtypeDynamicDepth(scratch1, WasmTableCallSigReg,
                                                  subTypingDepth, scratch2,
                                                  &fail, false);
        } else {
          // This callee function type has no super types, there is only one
          // possible type we should be called with. Check for it.
          masm.branchPtr(Assembler::Condition::NotEqual, WasmTableCallSigReg,
                         scratch1, &fail);
        }
        masm.jump(&functionBody);

        // Put the trap behind a jump so that we play nice with static code
        // prediction. We can't move this out of the prologue or it will mess
        // up wasm::StartUnwinding, which uses the PC to determine if the frame
        // has been constructed or not.
        masm.bind(&fail);
        masm.wasmTrap(Trap::IndirectCallBadSig, TrapSiteDesc());
        break;
      }
      case CallIndirectIdKind::Immediate: {
        // Immediate-form signatures compare a single 32-bit value.
        Label fail;
        masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg,
                      Imm32(callIndirectId.immediate()), &fail);
        masm.jump(&functionBody);

        // Put the trap behind a jump so that we play nice with static code
        // prediction. We can't move this out of the prologue or it will mess
        // up wasm::StartUnwinding, which uses the PC to determine if the frame
        // has been constructed or not.
        masm.bind(&fail);
        masm.wasmTrap(Trap::IndirectCallBadSig, TrapSiteDesc());
        break;
      }
      case CallIndirectIdKind::AsmJS:
        // asm.js tables are homogeneous: no signature check, just proceed.
        masm.jump(&functionBody);
        break;
      case CallIndirectIdKind::None:
        break;
    }

    // The preceding code may have generated a small constant pool to support
    // the comparison in the signature check.  But if we flush the pool here we
    // will also force the creation of an unused branch veneer in the pool for
    // the jump to functionBody from the signature check on some platforms, thus
    // needlessly inflating the size of the prologue.
    //
    // On no supported platform that uses a pool (arm, arm64) is there any risk
    // at present of that branch or other elements in the pool going out of
    // range while we're generating the following padding and prologue,
    // therefore no pool elements will be emitted in the prologue, therefore it
    // is safe not to flush here.
    //
    // We assert that this holds at runtime by comparing the expected entry
    // offset to the recorded ditto; if they are not the same then
    // GenerateCallablePrologue flushed a pool before the prologue code,
    // contrary to assumption.

    masm.nopAlign(CodeAlignment);
  }

  // Generate unchecked call entry:
  DebugOnly<uint32_t> expectedEntry = masm.currentOffset();
  GenerateCallablePrologue(masm, &offsets->uncheckedCallEntry);
  MOZ_ASSERT(expectedEntry == offsets->uncheckedCallEntry);
  masm.bind(&functionBody);
#ifdef JS_CODEGEN_ARM64
  // GenerateCallablePrologue creates a prologue which operates on the raw
  // stack pointer and does not keep the PSP in sync.  So we have to resync it
  // here.  But we can't use initPseudoStackPtr here because masm may not be
  // set up to use it, depending on which compiler is in use.  Hence do it
  // "manually".
  masm.Mov(PseudoStackPointer64, vixl::sp);
#endif

  // See comment block in WasmCompile.cpp for an explanation of tiering.
  if (tier1FuncIndex) {
    Register scratch = ABINonArgReg0;
    masm.loadPtr(Address(InstanceReg, Instance::offsetOfJumpTable()), scratch);
    masm.jump(Address(scratch, *tier1FuncIndex * sizeof(uintptr_t)));
  }

  offsets->tierEntry = masm.currentOffset();

  MOZ_ASSERT(masm.framePushed() == 0);
}
    975 
// Emit the epilogue matching GenerateFunctionPrologue: free `framePushed`
// bytes of frame and pop the Frame. Records the return-instruction offset in
// `offsets->ret`.
void wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed,
                                    FuncOffsets* offsets) {
  // Inverse of GenerateFunctionPrologue:
  MOZ_ASSERT(masm.framePushed() == framePushed);
  GenerateCallableEpilogue(masm, framePushed, &offsets->ret);
  MOZ_ASSERT(masm.framePushed() == 0);
}
    983 
    984 #ifdef ENABLE_WASM_JSPI
// JSPI support: if the code is currently executing on a suspendable stack
// (indicated by a non-null wasm::Context::activeSuspender_), switch the
// active stack pointer over to the main stack before exiting to C++.
// Clobbers scratch1..scratch3.  FP intentionally keeps pointing at the frame
// on the suspendable stack (see NOTE below), which is how the matching
// epilogue finds its way back.
void wasm::GenerateExitPrologueMainStackSwitch(MacroAssembler& masm,
                                               Register instance,
                                               Register scratch1,
                                               Register scratch2,
                                               Register scratch3) {
  // Load the JSContext from the Instance into scratch1.
  masm.loadPtr(Address(instance, wasm::Instance::offsetOfCx()), scratch1);

  // Load wasm::Context::activeSuspender_ into scratch2.
  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
                                     wasm::Context::offsetOfActiveSuspender()),
               scratch2);

  // If the activeSuspender_ is non-null, then we're on a suspendable stack
  // and need to switch to the main stack.
  Label alreadyOnSystemStack;
  masm.branchTestPtr(Assembler::Zero, scratch2, scratch2,
                     &alreadyOnSystemStack);

  // Reset the stack limit on wasm::Context to the main stack limit. We
  // clobber scratch3 here.
  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
                                     wasm::Context::offsetOfMainStackLimit()),
               scratch3);
  masm.storePtr(scratch3,
                Address(scratch1, JSContext::offsetOfWasm() +
                                      wasm::Context::offsetOfStackLimit()));

  // Clear wasm::Context::activeSuspender_.
  masm.storePtr(
      ImmWord(0),
      Address(scratch1, JSContext::offsetOfWasm() +
                            wasm::Context::offsetOfActiveSuspender()));

  // Load the JitActivation from JSContext, and store the activeSuspender
  // into wasmExitSuspender_. We clobber scratch3 here.  The epilogue uses
  // this field to detect that a switch happened and must be undone.
  masm.loadPtr(Address(scratch1, JSContext::offsetOfActivation()), scratch3);
  masm.storePtr(scratch2,
                Address(scratch3, JitActivation::offsetOfWasmExitSuspender()));

  // Switch the suspender's state to CalledOnMain.
  masm.storeValue(JS::Int32Value(wasm::SuspenderState::CalledOnMain),
                  Address(scratch2, SuspenderObject::offsetOfState()));

  // Switch the active SP to the Suspender's MainSP.
  //
  // NOTE: the FP is still pointing at our frame on the suspendable stack.
  // This lets us address our incoming stack arguments using FP, and also
  // switch back to the suspendable stack on return.
  masm.loadStackPtrFromPrivateValue(
      Address(scratch2, wasm::SuspenderObject::offsetOfMainSP()));

  // Clear the disallow arbitrary code flag that is set when we enter a
  // suspendable stack.  (DEBUG-only bookkeeping.)
#  ifdef DEBUG
  masm.loadPtr(Address(scratch1, JSContext::offsetOfRuntime()), scratch3);
  masm.loadPtr(Address(scratch3, JSRuntime::offsetOfJitRuntime()), scratch3);
  masm.store32(Imm32(0),
               Address(scratch3, JitRuntime::offsetOfDisallowArbitraryCode()));
#  endif

  // Update the Win32 TIB StackBase and StackLimit fields last. We clobber
  // scratch2 and scratch3 here.
#  ifdef _WIN32
  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
                                     wasm::Context::offsetOfTib()),
               scratch2);
  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
                                     wasm::Context::offsetOfTibStackBase()),
               scratch3);
  masm.storePtr(scratch3, Address(scratch2, offsetof(_NT_TIB, StackBase)));
  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
                                     wasm::Context::offsetOfTibStackLimit()),
               scratch3);
  masm.storePtr(scratch3, Address(scratch2, offsetof(_NT_TIB, StackLimit)));
#  endif

  masm.bind(&alreadyOnSystemStack);
}
   1064 
   1065 void wasm::GenerateExitEpilogueMainStackReturn(MacroAssembler& masm,
   1066                                               Register instance,
   1067                                               Register activationAndScratch1,
   1068                                               Register scratch2) {
   1069  // scratch1 starts out with the JitActivation already loaded.
   1070  Register scratch1 = activationAndScratch1;
   1071 
   1072  // Load JitActivation::wasmExitSuspender_ into scratch2.
   1073  masm.loadPtr(Address(scratch1, JitActivation::offsetOfWasmExitSuspender()),
   1074               scratch2);
   1075 
   1076  // If wasmExitSuspender_ is null, then we were originally on the main stack
   1077  // and have no work to do here.
   1078  Label originallyOnSystemStack;
   1079  masm.branchTestPtr(Assembler::Zero, scratch2, scratch2,
   1080                     &originallyOnSystemStack);
   1081 
   1082  // Clear JitActivation::wasmExitSuspender.
   1083  masm.storePtr(ImmWord(0),
   1084                Address(scratch1, JitActivation::offsetOfWasmExitSuspender()));
   1085 
   1086  // Restore the Suspender state back to Active.
   1087  masm.storeValue(JS::Int32Value(wasm::SuspenderState::Active),
   1088                  Address(scratch2, SuspenderObject::offsetOfState()));
   1089 
   1090  // We no longer need the JitActivation, reload the JSContext from
   1091  // instance into scratch1.
   1092  masm.loadPtr(Address(instance, wasm::Instance::offsetOfCx()), scratch1);
   1093 
   1094  // Restore wasm::Context::activeSuspender_ using the wasmExitSuspender_.
   1095  masm.storePtr(
   1096      scratch2,
   1097      Address(scratch1, JSContext::offsetOfWasm() +
   1098                            wasm::Context::offsetOfActiveSuspender()));
   1099 
   1100  // Reset the stack limit to the suspender stack limit. This clobbers the
   1101  // suspender/scratch2, but it can now be reloaded from
   1102  // wasm::Context::activeSuspender_.
   1103  masm.loadPrivate(Address(scratch2, SuspenderObject::offsetOfStackMemory()),
   1104                   scratch2);
   1105  masm.addPtr(Imm32(SuspendableRedZoneSize), scratch2);
   1106  masm.storePtr(scratch2,
   1107                Address(scratch1, JSContext::offsetOfWasm() +
   1108                                      wasm::Context::offsetOfStackLimit()));
   1109 
   1110  // Update the Win32 TIB StackBase and StackLimit fields. This code is
   1111  // really register constrained and would benefit if we could use the Win32
   1112  // TIB directly through its segment register in masm.
   1113 #  ifdef _WIN32
   1114  // Load the TIB into scratch2.
   1115  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
   1116                                     wasm::Context::offsetOfTib()),
   1117               scratch2);
   1118 
   1119  // Load the sytem stack limit for this suspender and store to
   1120  // TIB->StackLimit. This clobbers scratch1.
   1121  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
   1122                                     wasm::Context::offsetOfActiveSuspender()),
   1123               scratch1);
   1124  masm.loadPtr(Address(scratch1, wasm::SuspenderObject::offsetOfStackMemory()),
   1125               scratch1);
   1126  masm.storePtr(scratch1, Address(scratch2, offsetof(_NT_TIB, StackBase)));
   1127 
   1128  // Reload JSContext into scratch1.
   1129  masm.loadPtr(Address(instance, wasm::Instance::offsetOfCx()), scratch1);
   1130 
   1131  // Compute the stack base for this suspender and store to TIB->StackBase.
   1132  // This clobbers scratch1.
   1133  masm.loadPtr(Address(scratch1, JSContext::offsetOfWasm() +
   1134                                     wasm::Context::offsetOfActiveSuspender()),
   1135               scratch1);
   1136  masm.loadPtr(Address(scratch1, wasm::SuspenderObject::offsetOfStackMemory()),
   1137               scratch1);
   1138  masm.addPtr(Imm32(SuspendableStackPlusRedZoneSize), scratch1);
   1139  masm.storePtr(scratch1, Address(scratch2, offsetof(_NT_TIB, StackBase)));
   1140 
   1141  // Reload JSContext into scratch1.
   1142  masm.loadPtr(Address(instance, wasm::Instance::offsetOfCx()), scratch1);
   1143 #  endif
   1144 
   1145  // Set the disallow arbitrary code flag now that we're going back to a
   1146  // suspendable stack.
   1147 #  ifdef DEBUG
   1148  masm.loadPtr(Address(scratch1, JSContext::offsetOfRuntime()), scratch1);
   1149  masm.loadPtr(Address(scratch1, JSRuntime::offsetOfJitRuntime()), scratch1);
   1150  masm.store32(Imm32(1),
   1151               Address(scratch1, JitRuntime::offsetOfDisallowArbitraryCode()));
   1152 #  endif
   1153 
   1154  masm.bind(&originallyOnSystemStack);
   1155 }
   1156 #endif  // ENABLE_WASM_JSPI
   1157 
// Emit the prologue for an exit stub (wasm calling out to C++): establish the
// Frame, record the exit FP and `reason` in the JitActivation so frame
// iterators can unwind through the exit, optionally switch from a suspendable
// (JSPI) stack to the main stack, and reserve the stub's frame.
// `framePushedPreSwitch` bytes are reserved on the original stack before any
// switch; `framePushedPostSwitch` bytes afterwards (on the main stack when a
// switch occurred).
void wasm::GenerateExitPrologue(MacroAssembler& masm, ExitReason reason,
                                bool switchToMainStack,
                                unsigned framePushedPreSwitch,
                                unsigned framePushedPostSwitch,
                                CallableOffsets* offsets) {
  MOZ_ASSERT(masm.framePushed() == 0);

  masm.haltingAlign(CodeAlignment);
  GenerateCallablePrologue(masm, &offsets->begin);

  Register scratch1 = ABINonArgReg0;
  Register scratch2 = ABINonArgReg1;
#ifdef ENABLE_WASM_JSPI
  Register scratch3 = ABINonArgReg2;
#endif

  // This frame will be exiting compiled code to C++ so record the fp and
  // reason in the JitActivation so the frame iterators can unwind.
  LoadActivation(masm, InstanceReg, scratch1);
  SetExitFP(masm, reason, scratch1, scratch2);

#ifdef ENABLE_WASM_JSPI
  if (switchToMainStack) {
    masm.reserveStack(framePushedPreSwitch);

    GenerateExitPrologueMainStackSwitch(masm, InstanceReg, scratch1, scratch2,
                                        scratch3);

    // We may be on another stack now, reset the framePushed.
    masm.setFramePushed(0);
    masm.reserveStack(framePushedPostSwitch);
  } else {
    masm.reserveStack(framePushedPreSwitch + framePushedPostSwitch);
  }
#else
  masm.reserveStack(framePushedPreSwitch + framePushedPostSwitch);
#endif  // ENABLE_WASM_JSPI
}
   1196 
   1197 void wasm::GenerateExitEpilogue(MacroAssembler& masm, ExitReason reason,
   1198                                bool switchToMainStack,
   1199                                CallableOffsets* offsets) {
   1200  Register scratch1 = ABINonArgReturnReg0;
   1201 #if ENABLE_WASM_JSPI
   1202  Register scratch2 = ABINonArgReturnReg1;
   1203 #endif
   1204 
   1205  LoadActivation(masm, InstanceReg, scratch1);
   1206  ClearExitFP(masm, scratch1);
   1207 
   1208 #ifdef ENABLE_WASM_JSPI
   1209  // The exit prologue may have switched from a suspender's stack to the main
   1210  // stack, and we need to detect this and revert back to the suspender's
   1211  // stack. See GenerateExitPrologue for more information.
   1212  if (switchToMainStack) {
   1213    GenerateExitEpilogueMainStackReturn(masm, InstanceReg, scratch1, scratch2);
   1214  }
   1215 #endif  // ENABLE_WASM_JSPI
   1216 
   1217  // Reset our stack pointer back to the frame pointer. This may switch the
   1218  // stack pointer back to our original stack.
   1219  masm.moveToStackPtr(FramePointer);
   1220  masm.setFramePushed(0);
   1221 
   1222  GenerateCallableEpilogue(masm, /*framePushed*/ 0, &offsets->ret);
   1223  MOZ_ASSERT(masm.framePushed() == 0);
   1224 }
   1225 
// DEBUG-only check, emitted into JIT exit stubs: traps (breakpoint) if the
// activation's packedExitFP is tagged as a wasm exit.  No-op in release
// builds.
static void AssertNoWasmExitFPInJitExit(MacroAssembler& masm) {
  // As a general stack invariant, if Activation::packedExitFP is tagged as
  // wasm, it must point to a valid wasm::Frame. The JIT exit stub calls into
  // JIT code and thus does not really exit, thus, when entering/leaving the
  // JIT exit stub from/to normal wasm code, packedExitFP is not tagged wasm.
#ifdef DEBUG
  Register scratch = ABINonArgReturnReg0;
  LoadActivation(masm, InstanceReg, scratch);

  Label ok;
  masm.branchTestPtr(Assembler::Zero,
                     Address(scratch, JitActivation::offsetOfPackedExitFP()),
                     Imm32(ExitFPTag), &ok);
  masm.breakpoint();
  masm.bind(&ok);
#endif
}
   1243 
// Emit the prologue for a wasm-to-JIT import exit stub.  With JSPI enabled,
// first test whether a suspender is active (i.e. we are on a suspendable
// stack) and, if so, branch to the out-of-line code at `fallbackOffset`
// instead of entering JIT code directly; the fallback's behavior is defined
// by the caller that supplied the offset.  Records both the stub's begin
// offset and the offset after the fallback check in `offsets`.
void wasm::GenerateJitExitPrologue(MacroAssembler& masm,
                                   uint32_t fallbackOffset,
                                   ImportOffsets* offsets) {
  masm.haltingAlign(CodeAlignment);

#ifdef ENABLE_WASM_JSPI
  {
#  if defined(JS_CODEGEN_ARM64)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 3);
#  endif
    offsets->begin = masm.currentOffset();
    Label fallback;
    masm.bind(&fallback, BufferOffset(fallbackOffset));

    // Branch to the fallback if wasm::Context::activeSuspender_ is non-null.
    const Register scratch = ABINonArgReg0;
    masm.loadPtr(Address(InstanceReg, Instance::offsetOfCx()), scratch);
    masm.loadPtr(Address(scratch, JSContext::offsetOfWasm() +
                                      wasm::Context::offsetOfActiveSuspender()),
                 scratch);
    masm.branchTestPtr(Assembler::NonZero, scratch, scratch, &fallback);
  }

  uint32_t entryOffset;
  GenerateCallablePrologue(masm, &entryOffset);
  offsets->afterFallbackCheck = entryOffset;
#else
  // Without JSPI there is no fallback check; both offsets coincide.
  GenerateCallablePrologue(masm, &offsets->begin);
  offsets->afterFallbackCheck = offsets->begin;
#endif  // ENABLE_WASM_JSPI

  AssertNoWasmExitFPInJitExit(masm);

  MOZ_ASSERT(masm.framePushed() == 0);
}
   1279 
// Emit the epilogue matching GenerateJitExitPrologue: re-assert the exit-FP
// invariant and pop the Frame.
void wasm::GenerateJitExitEpilogue(MacroAssembler& masm,
                                   CallableOffsets* offsets) {
  // Inverse of GenerateJitExitPrologue:
  MOZ_ASSERT(masm.framePushed() == 0);
  AssertNoWasmExitFPInJitExit(masm);
  GenerateCallableEpilogue(masm, /*framePushed*/ 0, &offsets->ret);
  MOZ_ASSERT(masm.framePushed() == 0);
}
   1288 
// Emit the prologue for a JIT-to-wasm entry stub: push the return address
// (where the architecture's call instruction does not) and the JIT caller's
// frame pointer, then point FP at the new frame.  The instruction layout is
// pinned by the PushedRetAddr/PushedFP/SetFP offset assertions, which the
// profiling unwinder relies on.
void wasm::GenerateJitEntryPrologue(MacroAssembler& masm,
                                    CallableOffsets* offsets) {
  masm.haltingAlign(CodeAlignment);

  {
    // Push the return address.
#if defined(JS_CODEGEN_ARM)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 3);
    offsets->begin = masm.currentOffset();
    static_assert(BeforePushRetAddr == 0);
    masm.push(lr);
#elif defined(JS_CODEGEN_MIPS64)
    offsets->begin = masm.currentOffset();
    masm.push(ra);
#elif defined(JS_CODEGEN_LOONG64)
    offsets->begin = masm.currentOffset();
    masm.push(ra);
#elif defined(JS_CODEGEN_RISCV64)
    BlockTrampolinePoolScope block_trampoline_pool(&masm, 10);
    offsets->begin = masm.currentOffset();
    masm.push(ra);
#elif defined(JS_CODEGEN_ARM64)
    {
      AutoForbidPoolsAndNops afp(&masm,
                                 /* number of instructions in scope = */ 4);
      offsets->begin = masm.currentOffset();
      static_assert(BeforePushRetAddr == 0);
      // Subtract from SP first as SP must be aligned before offsetting.
      masm.Sub(sp, sp, 16);
      static_assert(JitFrameLayout::offsetOfReturnAddress() == 8);
      masm.Str(ARMRegister(lr, 64), MemOperand(sp, 8));
    }
#else
    // The x86/x64 call instruction pushes the return address.
    offsets->begin = masm.currentOffset();
#endif
    MOZ_ASSERT_IF(!masm.oom(),
                  PushedRetAddr == masm.currentOffset() - offsets->begin);
    // Save jit frame pointer, so unwinding from wasm to jit frames is trivial.
#if defined(JS_CODEGEN_ARM64)
    static_assert(JitFrameLayout::offsetOfCallerFramePtr() == 0);
    masm.Str(ARMRegister(FramePointer, 64), MemOperand(sp, 0));
#else
    masm.Push(FramePointer);
#endif
    MOZ_ASSERT_IF(!masm.oom(),
                  PushedFP == masm.currentOffset() - offsets->begin);

    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - offsets->begin);
  }

  masm.setFramePushed(0);
}
   1344 
// Emit the epilogue matching GenerateJitEntryPrologue: restore the JIT
// caller's FP and the return address, free the two-slot frame, and return.
// The distance between popping FP and the return instruction is pinned by
// the PoppedFPJitEntry assertion, which the unwinder relies on.
void wasm::GenerateJitEntryEpilogue(MacroAssembler& masm,
                                    CallableOffsets* offsets) {
  DebugOnly<uint32_t> poppedFP{};
#ifdef JS_CODEGEN_ARM64
  {
    RegisterOrSP sp = masm.getStackPointer();
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 5);
    masm.loadPtr(Address(sp, 8), lr);
    masm.loadPtr(Address(sp, 0), FramePointer);
    poppedFP = masm.currentOffset();

    masm.addToStackPtr(Imm32(2 * sizeof(void*)));
    // Copy SP into PSP to enforce return-point invariants (SP == PSP).
    // `addToStackPtr` won't sync them because SP is the active pointer here.
    // For the same reason, we can't use initPseudoStackPtr to do the sync, so
    // we have to do it "by hand".  Omitting this causes many tests to segfault.
    masm.moveStackPtrTo(PseudoStackPointer);

    offsets->ret = masm.currentOffset();
    masm.Ret(ARMRegister(lr, 64));
    masm.setFramePushed(0);
  }
#else
  // Forbid pools for the same reason as described in GenerateCallablePrologue.
#  if defined(JS_CODEGEN_ARM)
  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 2);
#  endif

  masm.pop(FramePointer);
  poppedFP = masm.currentOffset();

  offsets->ret = masm.ret().getOffset();
#endif
  MOZ_ASSERT_IF(!masm.oom(), PoppedFPJitEntry == offsets->ret - poppedFP);
}
   1381 
   1382 /*****************************************************************************/
   1383 // ProfilingFrameIterator
   1384 
// Default constructor: an iterator with every field nulled out, which is
// immediately done().
ProfilingFrameIterator::ProfilingFrameIterator()
    : code_(nullptr),
      codeRange_(nullptr),
      category_(Category::Other),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundJitCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::None) {
  MOZ_ASSERT(done());
}
   1396 
// Construct from a JitActivation whose wasm exit FP has been recorded; the
// exit reason is taken from the activation and iteration starts at the
// activation's wasm exit frame.
ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation)
    : code_(nullptr),
      codeRange_(nullptr),
      category_(Category::Other),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundJitCallerFP_(nullptr),
      exitReason_(activation.wasmExitReason()) {
  initFromExitFP(activation.wasmExitFP());
}
   1408 
// Construct directly from a wasm Frame, with the exit reason fixed to
// ImportJit (used when unwinding through a JIT import exit).
ProfilingFrameIterator::ProfilingFrameIterator(const Frame* fp)
    : code_(nullptr),
      codeRange_(nullptr),
      category_(Category::Other),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundJitCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::ImportJit) {
  MOZ_ASSERT(fp);
  initFromExitFP(fp);
}
   1421 
   1422 static inline void AssertMatchesCallSite(void* callerPC, uint8_t* callerFP) {
   1423 #ifdef DEBUG
   1424  const CodeRange* callerCodeRange;
   1425  const Code* code = LookupCode(callerPC, &callerCodeRange);
   1426 
   1427  if (!code) {
   1428    AssertDirectJitCall(callerFP);
   1429    return;
   1430  }
   1431 
   1432  MOZ_ASSERT(callerCodeRange);
   1433 
   1434  if (callerCodeRange->isInterpEntry()) {
   1435    // callerFP is the value of the frame pointer register when we were called
   1436    // from C++.
   1437    return;
   1438  }
   1439 
   1440  if (callerCodeRange->isJitEntry()) {
   1441    MOZ_ASSERT(callerFP != nullptr);
   1442    return;
   1443  }
   1444 
   1445  CallSite site;
   1446  MOZ_ALWAYS_TRUE(code->lookupCallSite(callerPC, &site));
   1447 #endif
   1448 }
   1449 
// Initialize the iterator state from an exit frame pointer. Since the pc for
// fp itself is unknown, unwinding begins at fp's caller (see comment below on
// why skipping the innermost frame is acceptable).
void ProfilingFrameIterator::initFromExitFP(const Frame* fp) {
  MOZ_ASSERT(fp);
  stackAddress_ = (void*)fp;
  endStackAddress_ = stackAddress_;
  const CodeBlock* codeBlock =
      LookupCodeBlock(fp->returnAddress(), &codeRange_);

  if (!codeBlock) {
    category_ = Category::Other;
    code_ = nullptr;
  } else {
    code_ = codeBlock->code;
    category_ = categoryFromCodeBlock(codeBlock->kind);
  }

  if (!code_) {
    // This is a direct call from the JIT, the caller FP is pointing to the JIT
    // caller's frame.
    AssertDirectJitCall(fp->jitEntryCaller());

    unwoundJitCallerFP_ = fp->jitEntryCaller();
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange_);

  // Since we don't have the pc for fp, start unwinding at the caller of fp.
  // This means that the innermost frame is skipped. This is fine because:
  //  - for import exit calls, the innermost frame is a thunk, so the first
  //    frame that shows up is the function calling the import;
  //  - for Math and other builtin calls, we note the absence of an exit
  //    reason and inject a fake "builtin" frame.
  switch (codeRange_->kind()) {
    case CodeRange::InterpEntry:
      // Entered from C++: there is no wasm caller to report.
      callerPC_ = nullptr;
      callerFP_ = nullptr;
      break;
    case CodeRange::JitEntry:
      // Entered from JIT code: the caller frame is a JIT frame (no wasm pc).
      callerPC_ = nullptr;
      callerFP_ = fp->rawCaller();
      break;
    case CodeRange::Function:
      // Ordinary wasm caller: step up one frame and record its caller state.
      fp = fp->wasmCaller();
      callerPC_ = fp->returnAddress();
      callerFP_ = fp->rawCaller();
      AssertMatchesCallSite(callerPC_, callerFP_);
      break;
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::TrapExit:
    case CodeRange::DebugStub:
    case CodeRange::RequestTierUpStub:
    case CodeRange::UpdateCallRefMetricsStub:
    case CodeRange::Throw:
    case CodeRange::FarJumpIsland:
      // An exit FP's return address should never land in these ranges.
      MOZ_CRASH("Unexpected CodeRange kind");
  }

  MOZ_ASSERT(!done());
}
   1512 
   1513 static bool IsSignatureCheckFail(uint32_t offsetInCode,
   1514                                 const CodeRange* codeRange) {
   1515  if (!codeRange->isFunction()) {
   1516    return false;
   1517  }
   1518  // checked call entry:    1. push Frame
   1519  //                        2. set FP
   1520  //                        3. signature check <--- check if we are here.
   1521  //                        4. jump 7
   1522  // unchecked call entry:  5. push Frame
   1523  //                        6. set FP
   1524  //                        7. function's code
   1525  return offsetInCode < codeRange->funcUncheckedCallEntry() &&
   1526         (offsetInCode - codeRange->funcCheckedCallEntry()) > SetFP;
   1527 }
   1528 
   1529 static bool CanUnwindSignatureCheck(uint8_t* fp) {
   1530  const auto* frame = Frame::fromUntaggedWasmExitFP(fp);
   1531  uint8_t* const pc = frame->returnAddress();
   1532 
   1533  const CodeRange* codeRange;
   1534  const Code* code = LookupCode(pc, &codeRange);
   1535  // If a JIT call or JIT/interpreter entry was found,
   1536  // unwinding is not possible.
   1537  return code && !codeRange->isEntry();
   1538 }
   1539 
   1540 static bool GetUnwindInfo(const CodeBlock* codeBlock,
   1541                          const CodeRange* codeRange, uint8_t* pc,
   1542                          const CodeRangeUnwindInfo** info) {
   1543  if (!codeRange->isFunction() || !codeRange->funcHasUnwindInfo()) {
   1544    return false;
   1545  }
   1546 
   1547  *info = codeBlock->code->lookupUnwindInfo(pc);
   1548  return *info;
   1549 }
   1550 
   1551 const Instance* js::wasm::GetNearestEffectiveInstance(const Frame* fp) {
   1552  while (true) {
   1553    uint8_t* returnAddress = fp->returnAddress();
   1554    const CodeRange* codeRange = nullptr;
   1555    const Code* code = LookupCode(returnAddress, &codeRange);
   1556 
   1557    if (!code) {
   1558      // It is a direct call from JIT.
   1559      AssertDirectJitCall(fp->jitEntryCaller());
   1560      return ExtractCalleeInstanceFromFrameWithInstances(fp);
   1561    }
   1562 
   1563    MOZ_ASSERT(codeRange);
   1564 
   1565    if (codeRange->isEntry()) {
   1566      return ExtractCalleeInstanceFromFrameWithInstances(fp);
   1567    }
   1568 
   1569    MOZ_ASSERT(codeRange->kind() == CodeRange::Function);
   1570    MOZ_ASSERT(code);
   1571    CallSite site;
   1572    MOZ_ALWAYS_TRUE(code->lookupCallSite(returnAddress, &site));
   1573    if (site.mightBeCrossInstance()) {
   1574      return ExtractCalleeInstanceFromFrameWithInstances(fp);
   1575    }
   1576 
   1577    fp = fp->wasmCaller();
   1578  }
   1579 }
   1580 
   1581 Instance* js::wasm::GetNearestEffectiveInstance(Frame* fp) {
   1582  return const_cast<Instance*>(
   1583      GetNearestEffectiveInstance(const_cast<const Frame*>(fp)));
   1584 }
   1585 
// Given a register snapshot (pc/sp/fp/lr) taken while executing wasm code or
// a builtin thunk, compute the (pc, fp) pair at which stack unwinding can
// safely begin, compensating for partially-constructed frames when pc lies
// inside a prologue or epilogue. On success, fills *unwindState and sets
// *unwoundCaller to true when the returned state already refers to the
// caller (i.e. the innermost frame was skipped). Returns false when the
// state cannot be unwound (pc outside wasm code, or a stub whose frame is
// incomplete).
bool js::wasm::StartUnwinding(const RegisterState& registers,
                              UnwindState* unwindState, bool* unwoundCaller) {
  // Shorthands.
  uint8_t* const pc = (uint8_t*)registers.pc;
  void** const sp = (void**)registers.sp;

  // The frame pointer might be:
  // - in the process of tagging/untagging when calling into C++ code (this
  //   happens in wasm::SetExitFP); make sure it's untagged.
  // - unreliable if it's not been set yet, in prologues.
  uint8_t* fp = Frame::isExitFP(registers.fp)
                    ? Frame::untagExitFP(registers.fp)
                    : reinterpret_cast<uint8_t*>(registers.fp);

  // Get the CodeRange describing pc and the base address to which the
  // CodeRange is relative. If the pc is not in a wasm module or a builtin
  // thunk, then execution must be entering from or leaving to the C++ caller
  // that pushed the JitActivation.
  const CodeRange* codeRange;
  const uint8_t* codeBase;
  const Code* code = nullptr;

  const CodeBlock* codeBlock = LookupCodeBlock(pc, &codeRange);
  if (codeBlock) {
    code = codeBlock->code;
    codeBase = codeBlock->base();
    MOZ_ASSERT(codeRange);
  } else if (!LookupBuiltinThunk(pc, &codeRange, &codeBase)) {
    return false;
  }

  // When the pc is inside the prologue/epilogue, the innermost call's Frame
  // is not complete and thus fp points to the second-to-innermost call's
  // Frame. Since fp can only tell you about its caller, naively unwinding
  // while pc is in the prologue/epilogue would skip the second-to-innermost
  // call. To avoid this problem, we use the static structure of the code in
  // the prologue and epilogue to do the Right Thing.
  uint32_t offsetInCode = pc - codeBase;
  MOZ_ASSERT(offsetInCode >= codeRange->begin());
  MOZ_ASSERT(offsetInCode < codeRange->end());

  // Compute the offset of the pc from the (unchecked call) entry of the code
  // range. The checked call entry and the unchecked call entry have common
  // prefix, so pc before signature check in the checked call entry is
  // equivalent to the pc of the unchecked-call-entry. Thus, we can simplify the
  // below case analysis by redirecting all pc-in-checked-call-entry before
  // signature check cases to the pc-at-unchecked-call-entry case.
  uint32_t offsetFromEntry;
  if (codeRange->isFunction()) {
    if (offsetInCode < codeRange->funcUncheckedCallEntry()) {
      offsetFromEntry = offsetInCode - codeRange->funcCheckedCallEntry();
    } else {
      offsetFromEntry = offsetInCode - codeRange->funcUncheckedCallEntry();
    }
  } else if (codeRange->isImportJitExit()) {
    if (offsetInCode < codeRange->importJitExitEntry()) {
      // Anything above entry shall not change stack/frame pointer --
      // collapse this code into single point.
      offsetFromEntry = 0;
    } else {
      offsetFromEntry = offsetInCode - codeRange->importJitExitEntry();
    }
  } else {
    offsetFromEntry = offsetInCode - codeRange->begin();
  }

  // Most cases end up unwinding to the caller state; not unwinding is the
  // exception here.
  *unwoundCaller = true;

  uint8_t* fixedFP = nullptr;
  void* fixedPC = nullptr;
  switch (codeRange->kind()) {
    case CodeRange::Function:
    case CodeRange::FarJumpIsland:
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::DebugStub:
    case CodeRange::RequestTierUpStub:
    case CodeRange::UpdateCallRefMetricsStub:
#if defined(JS_CODEGEN_MIPS64)
      if (codeRange->isThunk()) {
        // The FarJumpIsland sequence temporarily scrambles ra.
        // Don't unwind to caller.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      } else if (offsetFromEntry < PushedFP) {
        // On MIPS we rely on register state instead of state saved on
        // stack until the wasm::Frame is completely built.
        // On entry the return address is in ra (registers.lr) and
        // fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_LOONG64)
      if (codeRange->isThunk()) {
        // The FarJumpIsland sequence temporarily scrambles ra.
        // Don't unwind to caller.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      } else if (offsetFromEntry < PushedFP) {
        // On LoongArch we rely on register state instead of state saved on
        // stack until the wasm::Frame is completely built.
        // On entry the return address is in ra (registers.lr) and
        // fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_RISCV64)
      if (codeRange->isThunk()) {
        // The FarJumpIsland sequence temporarily scrambles ra.
        // Don't unwind to caller.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      } else if (offsetFromEntry < PushedFP) {
        // On Riscv64 we rely on register state instead of state saved on
        // stack until the wasm::Frame is completely built.
        // On entry the return address is in ra (registers.lr) and
        // fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_ARM64)
      if (offsetFromEntry < PushedFP || codeRange->isThunk()) {
        // Constraints above ensure that this covers BeforePushRetAddr and
        // PushedRetAddr.
        //
        // On ARM64 we subtract the size of the Frame from SP and then store
        // values into the stack.  Execution can be interrupted at various
        // places in that sequence.  We rely on the register state for our
        // values.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_ARM)
      if (offsetFromEntry == BeforePushRetAddr || codeRange->isThunk()) {
        // The return address is still in lr and fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#endif
          if (offsetFromEntry == PushedRetAddr || codeRange->isThunk()) {
        // The return address has been pushed on the stack but fp still
        // points to the caller's fp.
        fixedPC = sp[0];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else if (offsetFromEntry == PushedFP) {
        // The full Frame has been pushed; fp is still the caller's fp.
        const auto* frame = Frame::fromUntaggedWasmExitFP(sp);
        MOZ_ASSERT(frame->rawCaller() == fp);
        fixedPC = frame->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#if defined(JS_CODEGEN_MIPS64)
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode <= codeRange->ret()) {
        // The fixedFP field of the Frame has been loaded into fp.
        // The ra and instance might also be loaded, but the Frame structure is
        // still on stack, so we can access the ra from there.
        MOZ_ASSERT(*sp == fp);
        fixedPC = Frame::fromUntaggedWasmExitFP(sp)->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#elif defined(JS_CODEGEN_RISCV64)
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode <= codeRange->ret()) {
        // The fixedFP field of the Frame has been loaded into fp.
        // The ra might also be loaded, but the Frame structure is still on
        // stack, so we can access the ra from there.
        MOZ_ASSERT(*sp == fp);
        fixedPC = Frame::fromUntaggedWasmExitFP(sp)->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#elif defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_LOONG64)
        // The stack pointer does not move until all values have
        // been restored so several cases can be coalesced here.
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode <= codeRange->ret()) {
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#else
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode < codeRange->ret()) {
        // The fixedFP field of the Frame has been popped into fp.
        fixedPC = sp[1];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else if (offsetInCode == codeRange->ret()) {
        // Both the instance and fixedFP fields have been popped and fp now
        // points to the caller's frame.
        fixedPC = sp[0];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#endif
      } else {
        if (IsSignatureCheckFail(offsetInCode, codeRange) &&
            CanUnwindSignatureCheck(fp)) {
          // Frame has been pushed and FP has been set.
          const auto* frame = Frame::fromUntaggedWasmExitFP(fp);
          fixedFP = frame->rawCaller();
          fixedPC = frame->returnAddress();
          AssertMatchesCallSite(fixedPC, fixedFP);
          break;
        }

        // If explicit unwind metadata exists for this pc, use it to recover
        // the caller's pc/fp from the appropriate registers or stack slots.
        const CodeRangeUnwindInfo* unwindInfo;
        if (codeBlock && GetUnwindInfo(codeBlock, codeRange, pc, &unwindInfo)) {
          switch (unwindInfo->unwindHow()) {
            case CodeRangeUnwindInfo::RestoreFpRa:
              fixedPC = (uint8_t*)registers.tempRA;
              fixedFP = (uint8_t*)registers.tempFP;
              break;
            case CodeRangeUnwindInfo::RestoreFp:
              fixedPC = sp[0];
              fixedFP = (uint8_t*)registers.tempFP;
              break;
            case CodeRangeUnwindInfo::UseFpLr:
              fixedPC = (uint8_t*)registers.lr;
              fixedFP = fp;
              break;
            case CodeRangeUnwindInfo::UseFp:
              fixedPC = sp[0];
              fixedFP = fp;
              break;
            default:
              MOZ_CRASH();
          }
          MOZ_ASSERT(fixedPC && fixedFP);
          break;
        }

        // Not in the prologue/epilogue.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
        break;
      }
      break;
    case CodeRange::TrapExit:
      // These code stubs execute after the prologue/epilogue have completed
      // so pc/fp contains the right values here.
      fixedPC = pc;
      fixedFP = fp;
      *unwoundCaller = false;
      AssertMatchesCallSite(Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
                            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      break;
    case CodeRange::InterpEntry:
      // The entry trampoline is the final frame in an wasm JitActivation. The
      // entry trampoline also doesn't GeneratePrologue/Epilogue so we can't
      // use the general unwinding logic above.
      break;
    case CodeRange::JitEntry:
      // There's a jit frame above the current one; we don't care about pc
      // since the Jit entry frame is a jit frame which can be considered as
      // an exit frame.
      if (offsetFromEntry < PushedFP) {
        // We haven't pushed the jit caller's frame pointer yet, thus the jit
        // frame is incomplete. During profiling frame iteration, it means that
        // the jit profiling frame iterator won't be able to unwind this frame;
        // drop it.
        return false;
      }
      if (offsetInCode >= codeRange->ret() - PoppedFPJitEntry &&
          offsetInCode <= codeRange->ret()) {
        // We've popped FP but still have to return. Similar to the
        // |offsetFromEntry < PushedFP| case above, the JIT frame is now
        // incomplete and we can't unwind.
        return false;
      }
      // Set fixedFP to the address of the JitFrameLayout on the stack.
      if (offsetFromEntry < SetFP) {
        fixedFP = reinterpret_cast<uint8_t*>(sp);
      } else {
        fixedFP = fp;
      }
      fixedPC = nullptr;
      break;
    case CodeRange::Throw:
      // The throw stub executes a small number of instructions before popping
      // the entire activation. To simplify testing, we simply pretend throw
      // stubs have already popped the entire stack.
      return false;
  }

  unwindState->code = code;
  unwindState->codeRange = codeRange;
  unwindState->fp = fixedFP;
  unwindState->pc = fixedPC;
  return true;
}
   1900 
// Construct by unwinding from an arbitrary register snapshot (e.g. taken by
// the profiler's sampler thread). When the activation has an exit FP set,
// that takes precedence over register-based unwinding.
ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation,
                                               const RegisterState& state)
    : code_(nullptr),
      codeRange_(nullptr),
      category_(Category::Other),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundJitCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::None) {
  // Let wasmExitFP take precedence to StartUnwinding when it is set since
  // during the body of an exit stub, the register state may not be valid
  // causing StartUnwinding() to abandon unwinding this activation.
  if (activation.hasWasmExitFP()) {
    exitReason_ = activation.wasmExitReason();
    initFromExitFP(activation.wasmExitFP());
    return;
  }

  bool unwoundCaller;
  UnwindState unwindState;
  if (!StartUnwinding(state, &unwindState, &unwoundCaller)) {
    // Nothing to iterate: leave the iterator done().
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(unwindState.codeRange);

  if (unwoundCaller) {
    callerFP_ = unwindState.fp;
    callerPC_ = unwindState.pc;
  } else {
    // The innermost frame was complete; read the caller out of its Frame.
    callerFP_ = Frame::fromUntaggedWasmExitFP(unwindState.fp)->rawCaller();
    callerPC_ = Frame::fromUntaggedWasmExitFP(unwindState.fp)->returnAddress();
  }

  code_ = unwindState.code;
  codeRange_ = unwindState.codeRange;
  stackAddress_ = state.sp;
  endStackAddress_ = state.sp;

  // Initialize the category if it's not already done.
  if (const CodeBlock* codeBlock = LookupCodeBlock(callerPC_)) {
    category_ = categoryFromCodeBlock(codeBlock->kind);
  }

  MOZ_ASSERT(!done());
}
   1949 
// Advance to the next (outer) frame. First consumes any pending exit reason
// (which represents a pseudo-frame), then steps through entry stubs and
// regular wasm frames until the activation's outermost frame is reached.
void ProfilingFrameIterator::operator++() {
  MOZ_ASSERT(!done());
  MOZ_ASSERT(!unwoundJitCallerFP_);

  if (!exitReason_.isNone()) {
    // The exit-reason pseudo-frame is consumed first; the wasm frame below it
    // becomes the current frame without any pointer movement.
    if (const CodeBlock* codeBlock = LookupCodeBlock(callerPC_)) {
      category_ = categoryFromCodeBlock(codeBlock->kind);
    } else {
      category_ = Category::Other;
    }
    exitReason_ = ExitReason::None();
    MOZ_ASSERT(codeRange_);
    MOZ_ASSERT(!done());
    return;
  }

  if (codeRange_->isInterpEntry()) {
    // Interpreter entry is the outermost frame: the iteration ends here.
    category_ = Category::Other;
    codeRange_ = nullptr;
    MOZ_ASSERT(done());
    return;
  }

  if (codeRange_->isJitEntry()) {
    // JIT entry: hand the caller FP off to the JIT profiling iterator.
    category_ = Category::Other;
    MOZ_ASSERT(callerFP_);
    unwoundJitCallerFP_ = callerFP_;
    callerPC_ = nullptr;
    callerFP_ = nullptr;
    codeRange_ = nullptr;
    MOZ_ASSERT(done());
    return;
  }

  MOZ_RELEASE_ASSERT(callerPC_);

  const CodeBlock* codeBlock = LookupCodeBlock(callerPC_, &codeRange_);
  code_ = codeBlock ? codeBlock->code : nullptr;

  if (!code_) {
    category_ = Category::Other;
    // The parent frame is an inlined wasm call, callerFP_ points to the fake
    // exit frame.
    MOZ_ASSERT(!codeRange_);
    AssertDirectJitCall(callerFP_);
    unwoundJitCallerFP_ = callerFP_;
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange_);

  if (codeRange_->isInterpEntry()) {
    // The caller is the interpreter entry: report it with no caller of its
    // own; the next ++ ends the iteration.
    category_ = Category::Other;
    callerPC_ = nullptr;
    callerFP_ = nullptr;
    MOZ_ASSERT(!done());
    return;
  }

  if (codeRange_->isJitEntry()) {
    // The caller is a JIT entry; handled on the next ++.
    category_ = Category::Other;
    MOZ_ASSERT(!done());
    return;
  }

  MOZ_ASSERT(code_ == &GetNearestEffectiveInstance(
                          Frame::fromUntaggedWasmExitFP(callerFP_))
                          ->code());

  category_ = categoryFromCodeBlock(codeBlock->kind);

  switch (codeRange_->kind()) {
    case CodeRange::Function:
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::TrapExit:
    case CodeRange::DebugStub:
    case CodeRange::RequestTierUpStub:
    case CodeRange::UpdateCallRefMetricsStub:
    case CodeRange::FarJumpIsland: {
      // Regular wasm frame: step the caller pointers up one Frame.
      stackAddress_ = callerFP_;
      const auto* frame = Frame::fromUntaggedWasmExitFP(callerFP_);
      callerPC_ = frame->returnAddress();
      AssertMatchesCallSite(callerPC_, frame->rawCaller());
      callerFP_ = frame->rawCaller();
      break;
    }
    case CodeRange::InterpEntry:
    case CodeRange::JitEntry:
      MOZ_CRASH("should have been guarded above");
    case CodeRange::Throw:
      MOZ_CRASH("code range doesn't have frame");
  }

  MOZ_ASSERT(!done());
}
   2048 
   2049 static const char* ThunkedNativeToDescription(SymbolicAddress func) {
   2050  MOZ_ASSERT(NeedsBuiltinThunk(func));
   2051  switch (func) {
   2052    case SymbolicAddress::HandleDebugTrap:
   2053    case SymbolicAddress::HandleRequestTierUp:
   2054    case SymbolicAddress::HandleThrow:
   2055    case SymbolicAddress::HandleTrap:
   2056    case SymbolicAddress::CallImport_General:
   2057    case SymbolicAddress::CoerceInPlace_ToInt32:
   2058    case SymbolicAddress::CoerceInPlace_ToNumber:
   2059    case SymbolicAddress::CoerceInPlace_ToBigInt:
   2060    case SymbolicAddress::BoxValue_Anyref:
   2061      MOZ_ASSERT(!NeedsBuiltinThunk(func),
   2062                 "not in sync with NeedsBuiltinThunk");
   2063      break;
   2064    case SymbolicAddress::ToInt32:
   2065      return "call to asm.js native ToInt32 coercion (in wasm)";
   2066    case SymbolicAddress::DivI64:
   2067      return "call to native i64.div_s (in wasm)";
   2068    case SymbolicAddress::UDivI64:
   2069      return "call to native i64.div_u (in wasm)";
   2070    case SymbolicAddress::ModI64:
   2071      return "call to native i64.rem_s (in wasm)";
   2072    case SymbolicAddress::UModI64:
   2073      return "call to native i64.rem_u (in wasm)";
   2074    case SymbolicAddress::TruncateDoubleToUint64:
   2075      return "call to native i64.trunc_f64_u (in wasm)";
   2076    case SymbolicAddress::TruncateDoubleToInt64:
   2077      return "call to native i64.trunc_f64_s (in wasm)";
   2078    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
   2079      return "call to native i64.trunc_sat_f64_u (in wasm)";
   2080    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
   2081      return "call to native i64.trunc_sat_f64_s (in wasm)";
   2082    case SymbolicAddress::Uint64ToDouble:
   2083      return "call to native f64.convert_i64_u (in wasm)";
   2084    case SymbolicAddress::Uint64ToFloat32:
   2085      return "call to native f32.convert_i64_u (in wasm)";
   2086    case SymbolicAddress::Int64ToDouble:
   2087      return "call to native f64.convert_i64_s (in wasm)";
   2088    case SymbolicAddress::Int64ToFloat32:
   2089      return "call to native f32.convert_i64_s (in wasm)";
   2090 #if defined(JS_CODEGEN_ARM)
   2091    case SymbolicAddress::aeabi_idivmod:
   2092      return "call to native i32.div_s (in wasm)";
   2093    case SymbolicAddress::aeabi_uidivmod:
   2094      return "call to native i32.div_u (in wasm)";
   2095 #endif
   2096    case SymbolicAddress::AllocateBigInt:
   2097      return "call to native newCell<BigInt, NoGC> (in wasm)";
   2098    case SymbolicAddress::ModD:
   2099      return "call to asm.js native f64 % (mod)";
   2100    case SymbolicAddress::SinNativeD:
   2101      return "call to asm.js native f64 Math.sin";
   2102    case SymbolicAddress::SinFdlibmD:
   2103      return "call to asm.js fdlibm f64 Math.sin";
   2104    case SymbolicAddress::CosNativeD:
   2105      return "call to asm.js native f64 Math.cos";
   2106    case SymbolicAddress::CosFdlibmD:
   2107      return "call to asm.js fdlibm f64 Math.cos";
   2108    case SymbolicAddress::TanNativeD:
   2109      return "call to asm.js native f64 Math.tan";
   2110    case SymbolicAddress::TanFdlibmD:
   2111      return "call to asm.js fdlibm f64 Math.tan";
   2112    case SymbolicAddress::ASinD:
   2113      return "call to asm.js native f64 Math.asin";
   2114    case SymbolicAddress::ACosD:
   2115      return "call to asm.js native f64 Math.acos";
   2116    case SymbolicAddress::ATanD:
   2117      return "call to asm.js native f64 Math.atan";
   2118    case SymbolicAddress::CeilD:
   2119      return "call to native f64.ceil (in wasm)";
   2120    case SymbolicAddress::CeilF:
   2121      return "call to native f32.ceil (in wasm)";
   2122    case SymbolicAddress::FloorD:
   2123      return "call to native f64.floor (in wasm)";
   2124    case SymbolicAddress::FloorF:
   2125      return "call to native f32.floor (in wasm)";
   2126    case SymbolicAddress::TruncD:
   2127      return "call to native f64.trunc (in wasm)";
   2128    case SymbolicAddress::TruncF:
   2129      return "call to native f32.trunc (in wasm)";
   2130    case SymbolicAddress::NearbyIntD:
   2131      return "call to native f64.nearest (in wasm)";
   2132    case SymbolicAddress::NearbyIntF:
   2133      return "call to native f32.nearest (in wasm)";
   2134    case SymbolicAddress::ExpD:
   2135      return "call to asm.js native f64 Math.exp";
   2136    case SymbolicAddress::LogD:
   2137      return "call to asm.js native f64 Math.log";
   2138    case SymbolicAddress::PowD:
   2139      return "call to asm.js native f64 Math.pow";
   2140    case SymbolicAddress::ATan2D:
   2141      return "call to asm.js native f64 Math.atan2";
   2142    case SymbolicAddress::ArrayMemMove:
   2143      return "call to native array.copy (data)";
   2144    case SymbolicAddress::ArrayRefsMove:
   2145      return "call to native array.copy (references)";
   2146    case SymbolicAddress::MemoryGrowM32:
   2147      return "call to native memory.grow m32 (in wasm)";
   2148    case SymbolicAddress::MemoryGrowM64:
   2149      return "call to native memory.grow m64 (in wasm)";
   2150    case SymbolicAddress::MemorySizeM32:
   2151      return "call to native memory.size m32 (in wasm)";
   2152    case SymbolicAddress::MemorySizeM64:
   2153      return "call to native memory.size m64 (in wasm)";
   2154    case SymbolicAddress::WaitI32M32:
   2155      return "call to native i32.wait m32 (in wasm)";
   2156    case SymbolicAddress::WaitI32M64:
   2157      return "call to native i32.wait m64 (in wasm)";
   2158    case SymbolicAddress::WaitI64M32:
   2159      return "call to native i64.wait m32 (in wasm)";
   2160    case SymbolicAddress::WaitI64M64:
   2161      return "call to native i64.wait m64 (in wasm)";
   2162    case SymbolicAddress::WakeM32:
   2163      return "call to native wake m32 (in wasm)";
   2164    case SymbolicAddress::WakeM64:
   2165      return "call to native wake m64 (in wasm)";
   2166    case SymbolicAddress::CoerceInPlace_JitEntry:
   2167      return "out-of-line coercion for jit entry arguments (in wasm)";
   2168    case SymbolicAddress::ReportV128JSCall:
   2169      return "jit call to v128 wasm function";
   2170    case SymbolicAddress::MemCopyM32:
   2171    case SymbolicAddress::MemCopySharedM32:
   2172      return "call to native memory.copy m32 function";
   2173    case SymbolicAddress::MemCopyM64:
   2174    case SymbolicAddress::MemCopySharedM64:
   2175      return "call to native memory.copy m64 function";
   2176    case SymbolicAddress::MemCopyAny:
   2177      return "call to native memory.copy any function";
   2178    case SymbolicAddress::DataDrop:
   2179      return "call to native data.drop function";
   2180    case SymbolicAddress::MemFillM32:
   2181    case SymbolicAddress::MemFillSharedM32:
   2182      return "call to native memory.fill m32 function";
   2183    case SymbolicAddress::MemFillM64:
   2184    case SymbolicAddress::MemFillSharedM64:
   2185      return "call to native memory.fill m64 function";
   2186    case SymbolicAddress::MemInitM32:
   2187      return "call to native memory.init m32 function";
   2188    case SymbolicAddress::MemInitM64:
   2189      return "call to native memory.init m64 function";
   2190    case SymbolicAddress::TableCopy:
   2191      return "call to native table.copy function";
   2192    case SymbolicAddress::TableFill:
   2193      return "call to native table.fill function";
   2194    case SymbolicAddress::MemDiscardM32:
   2195    case SymbolicAddress::MemDiscardSharedM32:
   2196      return "call to native memory.discard m32 function";
   2197    case SymbolicAddress::MemDiscardM64:
   2198    case SymbolicAddress::MemDiscardSharedM64:
   2199      return "call to native memory.discard m64 function";
   2200    case SymbolicAddress::ElemDrop:
   2201      return "call to native elem.drop function";
   2202    case SymbolicAddress::TableGet:
   2203      return "call to native table.get function";
   2204    case SymbolicAddress::TableGrow:
   2205      return "call to native table.grow function";
   2206    case SymbolicAddress::TableInit:
   2207      return "call to native table.init function";
   2208    case SymbolicAddress::TableSet:
   2209      return "call to native table.set function";
   2210    case SymbolicAddress::TableSize:
   2211      return "call to native table.size function";
   2212    case SymbolicAddress::RefFunc:
   2213      return "call to native ref.func function";
   2214    case SymbolicAddress::PostBarrierEdge:
   2215    case SymbolicAddress::PostBarrierEdgePrecise:
   2216    case SymbolicAddress::PostBarrierWholeCell:
   2217      return "call to native GC postbarrier (in wasm)";
   2218    case SymbolicAddress::ExceptionNew:
   2219      return "call to native exception new (in wasm)";
   2220    case SymbolicAddress::ThrowException:
   2221      return "call to native throw exception (in wasm)";
   2222    case SymbolicAddress::StructNewIL_true:
   2223    case SymbolicAddress::StructNewIL_false:
   2224    case SymbolicAddress::StructNewOOL_true:
   2225    case SymbolicAddress::StructNewOOL_false:
   2226      return "call to native struct.new (in wasm)";
   2227    case SymbolicAddress::ArrayNew_true:
   2228    case SymbolicAddress::ArrayNew_false:
   2229      return "call to native array.new (in wasm)";
   2230    case SymbolicAddress::ArrayNewData:
   2231      return "call to native array.new_data function";
   2232    case SymbolicAddress::ArrayNewElem:
   2233      return "call to native array.new_elem function";
   2234    case SymbolicAddress::ArrayInitData:
   2235      return "call to native array.init_data function";
   2236    case SymbolicAddress::ArrayInitElem:
   2237      return "call to native array.init_elem function";
   2238    case SymbolicAddress::ArrayCopy:
   2239      return "call to native array.copy function";
   2240    case SymbolicAddress::SlotsToAllocKindBytesTable:
   2241      MOZ_CRASH(
   2242          "symbolic address was not code and should not have appeared here");
   2243 #define VISIT_BUILTIN_FUNC(op, export, sa_name, ...) \
   2244  case SymbolicAddress::sa_name:                     \
   2245    return "call to native " #op " builtin (in wasm)";
   2246      FOR_EACH_BUILTIN_MODULE_FUNC(VISIT_BUILTIN_FUNC)
   2247 #undef VISIT_BUILTIN_FUNC
   2248 #ifdef ENABLE_WASM_JSPI
   2249    case SymbolicAddress::UpdateSuspenderState:
   2250      return "call to native update suspender state util";
   2251 #endif
   2252 #ifdef WASM_CODEGEN_DEBUG
   2253    case SymbolicAddress::PrintI32:
   2254    case SymbolicAddress::PrintPtr:
   2255    case SymbolicAddress::PrintF32:
   2256    case SymbolicAddress::PrintF64:
   2257    case SymbolicAddress::PrintText:
   2258 #endif
   2259    case SymbolicAddress::Limit:
   2260      break;
   2261  }
   2262  return "?";
   2263 }
   2264 
   2265 const char* ProfilingFrameIterator::label() const {
   2266  MOZ_ASSERT(!done());
   2267 
   2268  // Use the same string for both time inside and under so that the two
   2269  // entries will be coalesced by the profiler.
   2270  // Must be kept in sync with /tools/profiler/tests/test_asm.js
   2271  static const char importJitDescription[] = "fast exit trampoline (in wasm)";
   2272  static const char importInterpDescription[] =
   2273      "slow exit trampoline (in wasm)";
   2274  static const char builtinNativeDescription[] =
   2275      "fast exit trampoline to native (in wasm)";
   2276  static const char trapDescription[] = "trap handling (in wasm)";
   2277  static const char debugStubDescription[] = "debug trap handling (in wasm)";
   2278  static const char requestTierUpDescription[] = "tier-up request (in wasm)";
   2279  static const char updateCallRefMetricsDescription[] =
   2280      "update call_ref metrics (in wasm)";
   2281 
   2282  if (!exitReason_.isFixed()) {
   2283    return ThunkedNativeToDescription(exitReason_.symbolic());
   2284  }
   2285 
   2286  switch (exitReason_.fixed()) {
   2287    case ExitReason::Fixed::None:
   2288      break;
   2289    case ExitReason::Fixed::ImportJit:
   2290      return importJitDescription;
   2291    case ExitReason::Fixed::ImportInterp:
   2292      return importInterpDescription;
   2293    case ExitReason::Fixed::BuiltinNative:
   2294      return builtinNativeDescription;
   2295    case ExitReason::Fixed::Trap:
   2296      return trapDescription;
   2297    case ExitReason::Fixed::DebugStub:
   2298      return debugStubDescription;
   2299    case ExitReason::Fixed::RequestTierUp:
   2300      return requestTierUpDescription;
   2301  }
   2302 
   2303  switch (codeRange_->kind()) {
   2304    case CodeRange::Function:
   2305      return code_->profilingLabel(codeRange_->funcIndex());
   2306    case CodeRange::InterpEntry:
   2307      return "slow entry trampoline (in wasm)";
   2308    case CodeRange::JitEntry:
   2309      return "fast entry trampoline (in wasm)";
   2310    case CodeRange::ImportJitExit:
   2311      return importJitDescription;
   2312    case CodeRange::BuiltinThunk:
   2313      return builtinNativeDescription;
   2314    case CodeRange::ImportInterpExit:
   2315      return importInterpDescription;
   2316    case CodeRange::TrapExit:
   2317      return trapDescription;
   2318    case CodeRange::DebugStub:
   2319      return debugStubDescription;
   2320    case CodeRange::RequestTierUpStub:
   2321      return requestTierUpDescription;
   2322    case CodeRange::UpdateCallRefMetricsStub:
   2323      return updateCallRefMetricsDescription;
   2324    case CodeRange::FarJumpIsland:
   2325      return "interstitial (in wasm)";
   2326    case CodeRange::Throw:
   2327      MOZ_CRASH("does not have a frame");
   2328  }
   2329 
   2330  MOZ_CRASH("bad code range kind");
   2331 }
   2332 
// Returns the profiling category recorded for the current frame.  Only valid
// while the iterator still has frames left (i.e. !done()).
ProfilingFrameIterator::Category ProfilingFrameIterator::category() const {
  MOZ_ASSERT(!done());
  return category_;
}