tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-riscv64.cpp (19505B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/Bailouts.h"
      8 #include "jit/BaselineFrame.h"
      9 #include "jit/CalleeToken.h"
     10 #include "jit/JitFrames.h"
     11 #include "jit/JitRuntime.h"
     12 #ifdef JS_ION_PERF
     13 #  include "jit/PerfSpewer.h"
     14 #endif
     15 #include "jit/riscv64/SharedICRegisters-riscv64.h"
     16 #include "jit/VMFunctions.h"
     17 #include "vm/JitActivation.h"  // js::jit::JitActivation
     18 #include "vm/JSContext.h"
     19 
     20 #include "jit/MacroAssembler-inl.h"
     21 
     22 using namespace js;
     23 using namespace js::jit;
     24 
     25 // This file includes stubs for generating the JIT trampolines when there is no
     26 // JIT backend, and also includes implementations for assorted random things
     27 // which can't be implemented in headers.
     28 
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
// Used by the bailout and invalidation trampolines, which address the saved
// machine state as [base + register code].
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));
     34 
// Spill the machine state needed by Bailout(): ra first (it carries
// frameSize_ on entry), then every register, forming a BailoutStack on the
// stack. On return, |spArg| holds a pointer to that structure.
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
  // Push the frameSize_ stored in ra
  // See: CodeGeneratorRiscv64::generateOutOfLineCode()
  masm.push(ra);

  // Push registers such that we can access them from [base + code].
  masm.PushRegsInMask(AllRegs);

  // Put pointer to BailoutStack as first argument to the Bailout()
  masm.movePtr(StackPointer, spArg);
}
     46 
// Layout of the register-save area built by GeneratePrologue() and consumed
// by GenerateReturn(). Field offsets are used directly via offsetof() as
// store/load displacements, so this struct and those two functions must stay
// in sync; do not reorder fields without updating both.
struct EnterJITRegs {
  // Callee-saved floating point registers (RISC-V fs0-fs11).
  double fs11;
  double fs10;
  double fs9;
  double fs8;
  double fs7;
  double fs6;
  double fs5;
  double fs4;
  double fs3;
  double fs2;
  double fs1;
  double fs0;

  //  uintptr_t align;

  // non-volatile registers.
  uint64_t ra;
  uint64_t sp;
  uint64_t fp;
  uint64_t gp;
  uint64_t s11;
  uint64_t s10;
  uint64_t s9;
  uint64_t s8;
  uint64_t s7;
  uint64_t s6;
  uint64_t s5;
  uint64_t s4;
  uint64_t s3;
  uint64_t s2;
  uint64_t s1;
  // Save reg_vp(a7) on stack, use it after call jit code.
  uint64_t a7;
};
     82 
// Epilogue of the EnterJIT trampoline: reload every callee-saved register
// from the EnterJITRegs area at the top of the stack, free that area, and
// branch to the restored ra.
// NOTE(review): |returnCode| is currently unused (the call site passes
// ShortJump); the return value is expected to already be in place.
static void GenerateReturn(MacroAssembler& masm, int returnCode) {
  MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));

  // Restore non-volatile registers
  masm.ld(s1, StackPointer, offsetof(EnterJITRegs, s1));
  masm.ld(s2, StackPointer, offsetof(EnterJITRegs, s2));
  masm.ld(s3, StackPointer, offsetof(EnterJITRegs, s3));
  masm.ld(s4, StackPointer, offsetof(EnterJITRegs, s4));
  masm.ld(s5, StackPointer, offsetof(EnterJITRegs, s5));
  masm.ld(s6, StackPointer, offsetof(EnterJITRegs, s6));
  masm.ld(s7, StackPointer, offsetof(EnterJITRegs, s7));
  masm.ld(s8, StackPointer, offsetof(EnterJITRegs, s8));
  masm.ld(s9, StackPointer, offsetof(EnterJITRegs, s9));
  masm.ld(s10, StackPointer, offsetof(EnterJITRegs, s10));
  masm.ld(s11, StackPointer, offsetof(EnterJITRegs, s11));
  masm.ld(gp, StackPointer, offsetof(EnterJITRegs, gp));
  masm.ld(fp, StackPointer, offsetof(EnterJITRegs, fp));
  // sp is restored here; the following loads still use the old StackPointer
  // value as base only insofar as the restored sp equals it (it was saved
  // after reserveStack in GeneratePrologue).
  masm.ld(sp, StackPointer, offsetof(EnterJITRegs, sp));
  masm.ld(ra, StackPointer, offsetof(EnterJITRegs, ra));

  // Restore non-volatile floating point registers
  masm.fld(fs11, StackPointer, offsetof(EnterJITRegs, fs11));
  masm.fld(fs10, StackPointer, offsetof(EnterJITRegs, fs10));
  masm.fld(fs9, StackPointer, offsetof(EnterJITRegs, fs9));
  masm.fld(fs8, StackPointer, offsetof(EnterJITRegs, fs8));
  masm.fld(fs7, StackPointer, offsetof(EnterJITRegs, fs7));
  masm.fld(fs6, StackPointer, offsetof(EnterJITRegs, fs6));
  masm.fld(fs5, StackPointer, offsetof(EnterJITRegs, fs5));
  masm.fld(fs4, StackPointer, offsetof(EnterJITRegs, fs4));
  masm.fld(fs3, StackPointer, offsetof(EnterJITRegs, fs3));
  masm.fld(fs2, StackPointer, offsetof(EnterJITRegs, fs2));
  masm.fld(fs1, StackPointer, offsetof(EnterJITRegs, fs1));
  masm.fld(fs0, StackPointer, offsetof(EnterJITRegs, fs0));

  // Release the EnterJITRegs save area.
  masm.freeStack(sizeof(EnterJITRegs));

  // Jump back to the C++ caller through the restored return address.
  masm.branch(ra);
}
    121 
// Prologue of the EnterJIT trampoline: reserve an EnterJITRegs area on the
// stack and spill every callee-saved register into it, plus a7 (reg_vp),
// which is reloaded after the JIT call returns. Mirrors GenerateReturn().
static void GeneratePrologue(MacroAssembler& masm) {
  masm.reserveStack(sizeof(EnterJITRegs));

  // Callee-saved integer registers.
  masm.sd(s1, StackPointer, offsetof(EnterJITRegs, s1));
  masm.sd(s2, StackPointer, offsetof(EnterJITRegs, s2));
  masm.sd(s3, StackPointer, offsetof(EnterJITRegs, s3));
  masm.sd(s4, StackPointer, offsetof(EnterJITRegs, s4));
  masm.sd(s5, StackPointer, offsetof(EnterJITRegs, s5));
  masm.sd(s6, StackPointer, offsetof(EnterJITRegs, s6));
  masm.sd(s7, StackPointer, offsetof(EnterJITRegs, s7));
  masm.sd(s8, StackPointer, offsetof(EnterJITRegs, s8));
  masm.sd(s9, StackPointer, offsetof(EnterJITRegs, s9));
  masm.sd(s10, StackPointer, offsetof(EnterJITRegs, s10));
  masm.sd(s11, StackPointer, offsetof(EnterJITRegs, s11));
  masm.sd(gp, StackPointer, offsetof(EnterJITRegs, gp));
  masm.sd(fp, StackPointer, offsetof(EnterJITRegs, fp));
  // Note: sp is saved AFTER the reserveStack above, so GenerateReturn's
  // restored sp points at this save area.
  masm.sd(sp, StackPointer, offsetof(EnterJITRegs, sp));
  masm.sd(ra, StackPointer, offsetof(EnterJITRegs, ra));
  // a7 holds reg_vp; preserved so the result Value can be stored after the
  // JIT call clobbers argument registers.
  masm.sd(a7, StackPointer, offsetof(EnterJITRegs, a7));

  // Callee-saved floating point registers.
  masm.fsd(fs11, StackPointer, offsetof(EnterJITRegs, fs11));
  masm.fsd(fs10, StackPointer, offsetof(EnterJITRegs, fs10));
  masm.fsd(fs9, StackPointer, offsetof(EnterJITRegs, fs9));
  masm.fsd(fs8, StackPointer, offsetof(EnterJITRegs, fs8));
  masm.fsd(fs7, StackPointer, offsetof(EnterJITRegs, fs7));
  masm.fsd(fs6, StackPointer, offsetof(EnterJITRegs, fs6));
  masm.fsd(fs5, StackPointer, offsetof(EnterJITRegs, fs5));
  masm.fsd(fs4, StackPointer, offsetof(EnterJITRegs, fs4));
  masm.fsd(fs3, StackPointer, offsetof(EnterJITRegs, fs3));
  masm.fsd(fs2, StackPointer, offsetof(EnterJITRegs, fs2));
  masm.fsd(fs1, StackPointer, offsetof(EnterJITRegs, fs1));
  masm.fsd(fs0, StackPointer, offsetof(EnterJITRegs, fs0));
}
    155 
// Emit the bailout thunk: capture the machine state, call the C++ Bailout()
// function, then jump to the shared bailout tail with the BaselineBailoutInfo
// pointer in a2.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
  PushBailoutFrame(masm, a0);

  // Make space for Bailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movePtr(StackPointer, a1);

  // Call the bailout function.
  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(a2);
  masm.passABIArg(a0);
  masm.passABIArg(a1);
  masm.callWithABI<Fn, Bailout>(ABIType::General,
                                CheckUnsafeCallWithABI::DontCheckOther);

  // Get the bailoutInfo outparam.
  masm.pop(a2);

  // Remove both the bailout frame and the topmost Ion frame's stack.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
  masm.jump(bailoutTail);
}
    180 
    181 // Generates a trampoline for calling Jit compiled code from a C++ function.
    182 // The trampoline use the EnterJitCode signature, with the standard x64 fastcall
    183 // calling convention.
    184 void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
    185  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");
    186 
    187  enterJITOffset_ = startTrampolineCode(masm);
    188 
    189  const Register reg_code = IntArgReg0;
    190  const Register reg_argc = IntArgReg1;
    191  const Register reg_argv = IntArgReg2;
    192  const mozilla::DebugOnly<Register> reg_frame = IntArgReg3;
    193  const Register reg_token = IntArgReg4;
    194  const Register reg_chain = IntArgReg5;
    195  const Register reg_values = IntArgReg6;
    196  const Register reg_vp = IntArgReg7;
    197 
    198  MOZ_ASSERT(OsrFrameReg == reg_frame);
    199 
    200  GeneratePrologue(masm);
    201 
    202  // Save stack pointer as baseline frame.
    203  masm.movePtr(StackPointer, FramePointer);
    204 
    205  generateEnterJitShared(masm, reg_argc, reg_argv, reg_token, s1, s2, s3);
    206 
    207  // Push the descriptor.
    208  masm.unboxInt32(Address(reg_vp, 0), s3);
    209  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, s3, s3);
    210 
    211  CodeLabel returnLabel;
    212  Label oomReturnLabel;
    213  {
    214    // Handle Interpreter -> Baseline OSR.
    215    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    216    MOZ_ASSERT(!regs.has(FramePointer));
    217    regs.take(OsrFrameReg);
    218    regs.take(reg_code);
    219    MOZ_ASSERT(!regs.has(ReturnReg), "ReturnReg matches reg_code");
    220 
    221    Label notOsr;
    222    masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);
    223 
    224    Register numStackValues = reg_values;
    225    regs.take(numStackValues);
    226    Register scratch = regs.takeAny();
    227 
    228    // Push return address.
    229    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
    230    masm.ma_li(scratch, &returnLabel);
    231    masm.storePtr(scratch, Address(StackPointer, 0));
    232 
    233    // Push previous frame pointer.
    234    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
    235    masm.storePtr(FramePointer, Address(StackPointer, 0));
    236 
    237    // Reserve frame.
    238    Register framePtr = FramePointer;
    239    masm.movePtr(StackPointer, framePtr);
    240    masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
    241 
    242    Register framePtrScratch = regs.takeAny();
    243    masm.movePtr(sp, framePtrScratch);
    244 
    245    // Reserve space for locals and stack values.
    246    masm.slli(scratch, numStackValues, 3);
    247    masm.subPtr(scratch, StackPointer);
    248 
    249    // Enter exit frame.
    250    masm.reserveStack(3 * sizeof(uintptr_t));
    251    masm.storePtr(
    252        ImmWord(MakeFrameDescriptor(FrameType::BaselineJS)),
    253        Address(StackPointer, 2 * sizeof(uintptr_t)));  // Frame descriptor
    254    masm.storePtr(
    255        zero, Address(StackPointer, sizeof(uintptr_t)));  // fake return address
    256    masm.storePtr(FramePointer, Address(StackPointer, 0));
    257 
    258    // No GC things to mark, push a bare token.
    259    masm.loadJSContext(scratch);
    260    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
    261 
    262    masm.reserveStack(2 * sizeof(uintptr_t));
    263    masm.storePtr(framePtr,
    264                  Address(StackPointer, sizeof(uintptr_t)));  // BaselineFrame
    265    masm.storePtr(reg_code, Address(StackPointer, 0));        // jitcode
    266 
    267    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
    268                        uint32_t numStackValues);
    269    masm.setupUnalignedABICall(scratch);
    270    masm.passABIArg(framePtrScratch);  // BaselineFrame
    271    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
    272    masm.passABIArg(numStackValues);
    273    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
    274        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
    275 
    276    regs.add(OsrFrameReg);
    277    Register jitcode = regs.takeAny();
    278    masm.loadPtr(Address(StackPointer, 0), jitcode);
    279    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
    280    masm.freeStack(2 * sizeof(uintptr_t));
    281 
    282    Label error;
    283    masm.freeStack(ExitFrameLayout::SizeWithFooter());
    284    masm.branchIfFalseBool(ReturnReg, &error);
    285 
    286    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
    287    // if profiler instrumentation is enabled.
    288    {
    289      Label skipProfilingInstrumentation;
    290      AbsoluteAddress addressOfEnabled(
    291          cx->runtime()->geckoProfiler().addressOfEnabled());
    292      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
    293                    &skipProfilingInstrumentation);
    294      masm.profilerEnterFrame(framePtr, scratch);
    295      masm.bind(&skipProfilingInstrumentation);
    296    }
    297 
    298    masm.jump(jitcode);
    299 
    300    // OOM: load error value, discard return address and previous frame
    301    // pointer and return.
    302    masm.bind(&error);
    303    masm.movePtr(framePtr, StackPointer);
    304    masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    305    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    306    masm.jump(&oomReturnLabel);
    307 
    308    masm.bind(&notOsr);
    309    // Load the scope chain in R1.
    310    MOZ_ASSERT(R1.scratchReg() != reg_code);
    311    masm.ma_or(R1.scratchReg(), reg_chain, zero);
    312  }
    313  JitSpew(JitSpew_Codegen, "__Line__: %d", __LINE__);
    314  // The call will push the return address and frame pointer on the stack, thus
    315  // we check that the stack would be aligned once the call is complete.
    316  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
    317 
    318  // Call the function with pushing return address to stack.
    319  masm.callJitNoProfiler(reg_code);
    320 
    321  {
    322    // Interpreter -> Baseline OSR will return here.
    323    masm.bind(&returnLabel);
    324    masm.addCodeLabel(returnLabel);
    325    masm.bind(&oomReturnLabel);
    326  }
    327 
    328  // Discard arguments and padding. Set sp to the address of the EnterJITRegs
    329  // on the stack.
    330  masm.mov(FramePointer, StackPointer);
    331 
    332  // Store the returned value into the vp
    333  masm.ld(reg_vp, StackPointer, offsetof(EnterJITRegs, a7));
    334  masm.storeValue(JSReturnOperand, Address(reg_vp, 0));
    335  JitSpew(JitSpew_Codegen, "__Line__: %d", __LINE__);
    336  // Restore non-volatile registers and return.
    337  GenerateReturn(masm, ShortJump);
    338 }
    339 
    340 // static
    341 mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
    342 JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
    343  return mozilla::Nothing{};
    344 }
    345 
// Generate the invalidation thunk: capture the full machine state as an
// InvalidationBailoutStack, call InvalidationBailout(), and hand off to the
// shared bailout tail.
void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

  invalidatorOffset_ = startTrampolineCode(masm);

  // Stack has to be aligned here. If not, we will have to fix it.
  masm.checkStackAlignment();

  // Push registers such that we can access them from [base + code].
  masm.PushRegsInMask(AllRegs);

  // Pass pointer to InvalidationBailoutStack structure.
  masm.movePtr(StackPointer, a0);

  // Reserve place for BailoutInfo pointer. Two words to ensure alignment for
  // setupAlignedABICall.
  masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
  // Pass pointer to BailoutInfo
  masm.movePtr(StackPointer, a1);

  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupAlignedABICall();
  masm.passABIArg(a0);
  masm.passABIArg(a1);
  masm.callWithABI<Fn, InvalidationBailout>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

  // Pop the BailoutInfo outparam into a2.
  masm.pop(a2);

  // Pop the machine state and the dead frame.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
  masm.jump(bailoutTail);
}
    381 
// Generate the bailout handler entry point; the actual work is emitted by
// GenerateBailoutThunk() above.
void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
                                        Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");

  bailoutHandlerOffset_ = startTrampolineCode(masm);

  GenerateBailoutThunk(masm, bailoutTail);
}
    390 
// Generate the GC pre-write barrier trampoline for values of |type|. The
// fast path (emitPreBarrierFastPath) filters out cases that need no marking;
// the slow path saves volatile registers and calls into C++. The thing to
// mark is expected in PreBarrierReg (a1). Returns the trampoline's offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                        MIRType type) {
  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

  uint32_t offset = startTrampolineCode(masm);

  MOZ_ASSERT(PreBarrierReg == a1);
  Register temp1 = a0;
  Register temp2 = a2;
  Register temp3 = a3;
  // Preserve the temps the fast path is allowed to clobber.
  masm.push(temp1);
  masm.push(temp2);
  masm.push(temp3);

  Label noBarrier;
  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

  // Call into C++ to mark this GC thing.
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);

  // Save all volatile registers (the barrier must be transparent to callers).
  LiveRegisterSet save;
  save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                           FloatRegisterSet(FloatRegisters::VolatileMask));
  masm.push(ra);
  masm.PushRegsInMask(save);

  masm.movePtr(ImmPtr(cx->runtime()), a0);

  masm.setupUnalignedABICall(a2);
  masm.passABIArg(a0);
  masm.passABIArg(a1);  // the GC thing, already in PreBarrierReg
  masm.callWithABI(JitPreWriteBarrier(type));

  masm.PopRegsInMask(save);
  masm.ret();

  // Fast-path exit: nothing to mark, just restore temps and return.
  masm.bind(&noBarrier);
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);
  masm.abiret();

  return offset;
}
    437 
// Bind the shared bailout tail label and emit the common tail code; entered
// from the bailout/invalidation thunks with the BailoutInfo pointer in a2.
void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
                                         Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");

  masm.bind(bailoutTail);
  masm.generateBailoutTail(a1, a2);
}
    445 
// Generate a wrapper trampoline that lets JIT code call the C++ VM function
// |nativeFun| described by |f|: completes the exit frame, marshals the
// stack-passed JIT arguments into the native ABI, performs the call, checks
// the failure convention, loads any outparam, and returns to the JIT caller,
// popping the explicit arguments. Stores the code offset in *wrapperOffset.
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                   VMFunctionId id, const VMFunctionData& f,
                                   DynFn nativeFun, uint32_t* wrapperOffset) {
  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

  *wrapperOffset = startTrampolineCode(masm);

  // Avoid conflicts with argument registers while discarding the result after
  // the function call.
  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

  static_assert(
      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
      "Wrapper register set should be a superset of Volatile register set.");

  // The context is the first argument; a0 is the first argument register.
  Register cxreg = a0;
  regs.take(cxreg);

  // On link-register platforms, it is the responsibility of the VM *callee* to
  // push the return address, while the caller must ensure that the address
  // is stored in ra on entry. This allows the VM wrapper to work with both
  // direct calls and tail calls.
  masm.pushReturnAddress();

  // Push the frame pointer to finish the exit frame, then link it up.
  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);
  masm.loadJSContext(cxreg);
  masm.enterExitFrame(cxreg, regs.getAny(), id);

  // Reserve space for the outparameter.
  masm.reserveVMFunctionOutParamSpace(f);

  masm.setupUnalignedABICallDontSaveRestoreSP();
  masm.passABIArg(cxreg);

  // Explicit arguments start just above the exit frame.
  size_t argDisp = ExitFrameLayout::Size();

  // Copy any arguments.
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
        if (f.argPassedInFloatReg(explicitArg)) {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::Float64);
        } else {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        }
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::WordByRef:
        // Pass the address of the stack slot, not its contents.
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
      case VMFunctionData::DoubleByRef:
        MOZ_CRASH("NYI: riscv callVM should not be used with 128bits values.");
        break;
    }
  }

  // Copy the semi-implicit outparam, if any.
  // It is not a C++-abi outparam, which would get passed in the
  // outparam register, but a real parameter to the function, which
  // was stack-allocated above.
  const int32_t outParamOffset =
      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
  if (f.outParam != Type_Void) {
    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                                MoveOperand::Kind::EffectiveAddress),
                    ABIType::General);
  }

  masm.callWithABI(nativeFun, ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  switch (f.failType()) {
    case Type_Cell:
      masm.branchTestPtr(Assembler::Zero, a0, a0, masm.failureLabel());
      break;
    case Type_Bool:
      // Called functions return bools, which are 0/false and non-zero/true
      masm.branchIfFalseBool(a0, masm.failureLabel());
      break;
    case Type_Void:
      break;
    default:
      MOZ_CRASH("unknown failure kind");
  }

  // Load the outparam.
  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

  // Pop frame and restore frame pointer.
  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  // Return. Subtract sizeof(void*) for the frame pointer.
  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                  f.explicitStackSlots() * sizeof(void*) +
                  f.extraValuesToPop * sizeof(Value)));

  return true;
}