tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-mips64.cpp (20197B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "mozilla/DebugOnly.h"
      8 
      9 #include "jit/Bailouts.h"
     10 #include "jit/BaselineFrame.h"
     11 #include "jit/CalleeToken.h"
     12 #include "jit/JitFrames.h"
     13 #include "jit/JitRuntime.h"
     14 #include "jit/JitSpewer.h"
     15 #include "jit/mips-shared/SharedICHelpers-mips-shared.h"
     16 #include "jit/PerfSpewer.h"
     17 #include "jit/VMFunctions.h"
     18 #include "vm/JitActivation.h"  // js::jit::JitActivation
     19 #include "vm/JSContext.h"
     20 #include "vm/Realm.h"
     21 
     22 #include "jit/MacroAssembler-inl.h"
     23 
     24 using namespace js;
     25 using namespace js::jit;
     26 
      27 // All registers to save and restore. This includes the stack pointer, since we
      28 // use the ability to reference register values on the stack by index.
      29 static const LiveRegisterSet AllRegs =
      30    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
      31                    FloatRegisterSet(FloatRegisters::AllMask));
      32 
         // This file mixes uintptr_t and uint64_t freely (EnterJITRegs slots,
         // stack arithmetic below), so it only works on a 64-bit target.
      33 static_assert(sizeof(uintptr_t) == sizeof(uint64_t), "Not 32-bit clean.");
     34 
         // Layout of the register-save area that GeneratePrologue pushes and
         // GenerateReturn pops. Fields are addressed with offsetof(), so the
         // member order here *is* the on-stack layout (StackPointer points at
         // f31 after masm.reserveStack(sizeof(EnterJITRegs))).
      35 struct EnterJITRegs {
         // Non-volatile (callee-saved) floating point registers f24..f31.
      36  double f31;
      37  double f30;
      38  double f29;
      39  double f28;
      40  double f27;
      41  double f26;
      42  double f25;
      43  double f24;
      44 
         // Padding word; presumably keeps sizeof(EnterJITRegs) a multiple of 16
         // so the stack stays 16-byte aligned after the prologue -- TODO confirm.
      45  uintptr_t align;
      46 
      47  // non-volatile registers.
      48  uint64_t ra;
      49  uint64_t fp;
      50  uint64_t s7;
      51  uint64_t s6;
      52  uint64_t s5;
      53  uint64_t s4;
      54  uint64_t s3;
      55  uint64_t s2;
      56  uint64_t s1;
      57  uint64_t s0;
      58  // Save reg_vp(a7) on stack, use it after call jit code.
      59  uint64_t a7;
      60 };
     61 
         // Epilogue for the EnterJIT trampoline: restores every callee-saved
         // register from the EnterJITRegs area at StackPointer, frees that area,
         // and jumps through the restored ra.
         // NOTE(review): the returnCode argument is not referenced in the body;
         // it appears to be vestigial.
      62 static void GenerateReturn(MacroAssembler& masm, int returnCode) {
      63  MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));
      64 
      65  if (isLoongson()) {
           // On Loongson, gslq restores a pair of adjacent 64-bit slots with one
           // instruction; the second named register's offset addresses the pair.
      66    // Restore non-volatile registers
      67    masm.as_ld(s0, StackPointer, offsetof(EnterJITRegs, s0));
      68    masm.as_gslq(s1, s2, StackPointer, offsetof(EnterJITRegs, s2));
      69    masm.as_gslq(s3, s4, StackPointer, offsetof(EnterJITRegs, s4));
      70    masm.as_gslq(s5, s6, StackPointer, offsetof(EnterJITRegs, s6));
      71    masm.as_gslq(s7, fp, StackPointer, offsetof(EnterJITRegs, fp));
      72    masm.as_ld(ra, StackPointer, offsetof(EnterJITRegs, ra));
      73 
      74    // Restore non-volatile floating point registers
      75    masm.as_gslq(f24, f25, StackPointer, offsetof(EnterJITRegs, f25));
      76    masm.as_gslq(f26, f27, StackPointer, offsetof(EnterJITRegs, f27));
      77    masm.as_gslq(f28, f29, StackPointer, offsetof(EnterJITRegs, f29));
      78    masm.as_gslq(f30, f31, StackPointer, offsetof(EnterJITRegs, f31));
      79  } else {
           // Plain MIPS64: restore each register with an individual 64-bit load.
      80    // Restore non-volatile registers
      81    masm.as_ld(s0, StackPointer, offsetof(EnterJITRegs, s0));
      82    masm.as_ld(s1, StackPointer, offsetof(EnterJITRegs, s1));
      83    masm.as_ld(s2, StackPointer, offsetof(EnterJITRegs, s2));
      84    masm.as_ld(s3, StackPointer, offsetof(EnterJITRegs, s3));
      85    masm.as_ld(s4, StackPointer, offsetof(EnterJITRegs, s4));
      86    masm.as_ld(s5, StackPointer, offsetof(EnterJITRegs, s5));
      87    masm.as_ld(s6, StackPointer, offsetof(EnterJITRegs, s6));
      88    masm.as_ld(s7, StackPointer, offsetof(EnterJITRegs, s7));
      89    masm.as_ld(fp, StackPointer, offsetof(EnterJITRegs, fp));
      90    masm.as_ld(ra, StackPointer, offsetof(EnterJITRegs, ra));
      91 
      92    // Restore non-volatile floating point registers
      93    masm.as_ldc1(f24, StackPointer, offsetof(EnterJITRegs, f24));
      94    masm.as_ldc1(f25, StackPointer, offsetof(EnterJITRegs, f25));
      95    masm.as_ldc1(f26, StackPointer, offsetof(EnterJITRegs, f26));
      96    masm.as_ldc1(f27, StackPointer, offsetof(EnterJITRegs, f27));
      97    masm.as_ldc1(f28, StackPointer, offsetof(EnterJITRegs, f28));
      98    masm.as_ldc1(f29, StackPointer, offsetof(EnterJITRegs, f29));
      99    masm.as_ldc1(f30, StackPointer, offsetof(EnterJITRegs, f30));
     100    masm.as_ldc1(f31, StackPointer, offsetof(EnterJITRegs, f31));
     101  }
     102 
         // Pop the EnterJITRegs save area...
     103  masm.freeStack(sizeof(EnterJITRegs));
     104 
         // ...and return to the C++ caller through the restored return address.
     105  masm.branch(ra);
     106 }
    107 
         // Prologue for the EnterJIT trampoline: reserves an EnterJITRegs area on
         // the stack and saves all callee-saved registers (plus ra and the a7
         // argument register) into it, so GenerateReturn can restore them later.
     108 static void GeneratePrologue(MacroAssembler& masm) {
     109  masm.reserveStack(sizeof(EnterJITRegs));
     110 
     111  if (isLoongson()) {
           // gssq stores a pair of adjacent 64-bit slots in one instruction. The
           // first pair also stashes a7 (reg_vp) into its slot, which sits right
           // after s0 in EnterJITRegs.
     112    masm.as_gssq(a7, s0, StackPointer, offsetof(EnterJITRegs, s0));
     113    masm.as_gssq(s1, s2, StackPointer, offsetof(EnterJITRegs, s2));
     114    masm.as_gssq(s3, s4, StackPointer, offsetof(EnterJITRegs, s4));
     115    masm.as_gssq(s5, s6, StackPointer, offsetof(EnterJITRegs, s6));
     116    masm.as_gssq(s7, fp, StackPointer, offsetof(EnterJITRegs, fp));
     117    masm.as_sd(ra, StackPointer, offsetof(EnterJITRegs, ra));
     118 
     119    masm.as_gssq(f24, f25, StackPointer, offsetof(EnterJITRegs, f25));
     120    masm.as_gssq(f26, f27, StackPointer, offsetof(EnterJITRegs, f27));
     121    masm.as_gssq(f28, f29, StackPointer, offsetof(EnterJITRegs, f29));
     122    masm.as_gssq(f30, f31, StackPointer, offsetof(EnterJITRegs, f31));
     123    return;
     124  }
     125 
         // Plain MIPS64: save each register with an individual 64-bit store.
     126  masm.as_sd(s0, StackPointer, offsetof(EnterJITRegs, s0));
     127  masm.as_sd(s1, StackPointer, offsetof(EnterJITRegs, s1));
     128  masm.as_sd(s2, StackPointer, offsetof(EnterJITRegs, s2));
     129  masm.as_sd(s3, StackPointer, offsetof(EnterJITRegs, s3));
     130  masm.as_sd(s4, StackPointer, offsetof(EnterJITRegs, s4));
     131  masm.as_sd(s5, StackPointer, offsetof(EnterJITRegs, s5));
     132  masm.as_sd(s6, StackPointer, offsetof(EnterJITRegs, s6));
     133  masm.as_sd(s7, StackPointer, offsetof(EnterJITRegs, s7));
     134  masm.as_sd(fp, StackPointer, offsetof(EnterJITRegs, fp));
     135  masm.as_sd(ra, StackPointer, offsetof(EnterJITRegs, ra));
         // Save a7 (reg_vp): generateEnterJIT reloads it from this slot after
         // the JIT call to store the return value.
     136  masm.as_sd(a7, StackPointer, offsetof(EnterJITRegs, a7));
     137 
     138  masm.as_sdc1(f24, StackPointer, offsetof(EnterJITRegs, f24));
     139  masm.as_sdc1(f25, StackPointer, offsetof(EnterJITRegs, f25));
     140  masm.as_sdc1(f26, StackPointer, offsetof(EnterJITRegs, f26));
     141  masm.as_sdc1(f27, StackPointer, offsetof(EnterJITRegs, f27));
     142  masm.as_sdc1(f28, StackPointer, offsetof(EnterJITRegs, f28));
     143  masm.as_sdc1(f29, StackPointer, offsetof(EnterJITRegs, f29));
     144  masm.as_sdc1(f30, StackPointer, offsetof(EnterJITRegs, f30));
     145  masm.as_sdc1(f31, StackPointer, offsetof(EnterJITRegs, f31));
     146 }
    147 
     148 // Generates a trampoline for calling Jit compiled code from a C++ function.
     149 // The trampoline use the EnterJitCode signature, with the standard x64 fastcall
     150 // calling convention.
     151 void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
     152  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");
     153 
     154  enterJITOffset_ = startTrampolineCode(masm);
     155 
         // Incoming EnterJitCode arguments, in the integer argument registers.
         // Note reg_vp lives in a7, which GeneratePrologue also spills to the
         // EnterJITRegs::a7 slot so it survives the JIT call (reloaded below).
     156  const Register reg_code = IntArgReg0;
     157  const Register reg_argc = IntArgReg1;
     158  const Register reg_argv = IntArgReg2;
     159  const mozilla::DebugOnly<Register> reg_frame = IntArgReg3;
     160  const Register reg_token = IntArgReg4;
     161  const Register reg_chain = IntArgReg5;
     162  const Register reg_values = IntArgReg6;
     163  const Register reg_vp = IntArgReg7;
     164 
     165  MOZ_ASSERT(OsrFrameReg == reg_frame);
     166 
     167  GeneratePrologue(masm);
     168 
     169  // Save stack pointer as baseline frame.
     170  masm.movePtr(StackPointer, FramePointer);
     171 
     172  generateEnterJitShared(masm, reg_argc, reg_argv, reg_token, s0, s1, s2);
     173 
     174  // Push the descriptor.
     175  masm.unboxInt32(Address(reg_vp, 0), s3);
     176  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, s3, s3);
     177 
     178  CodeLabel returnLabel;
     179  Label oomReturnLabel;
     180  {
     181    // Handle Interpreter -> Baseline OSR.
     182    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
     183    MOZ_ASSERT(!regs.has(FramePointer));
     184    regs.take(OsrFrameReg);
     185    regs.take(reg_code);
     186 
           // OsrFrameReg == zero means this is a regular (non-OSR) entry.
     187    Label notOsr;
     188    masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);
     189 
     190    Register numStackValues = reg_values;
     191    regs.take(numStackValues);
     192    Register scratch = regs.takeAny();
     193 
     194    // Push return address.
     195    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
     196    masm.ma_li(scratch, &returnLabel);
     197    masm.storePtr(scratch, Address(StackPointer, 0));
     198 
     199    // Push previous frame pointer.
     200    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
     201    masm.storePtr(FramePointer, Address(StackPointer, 0));
     202 
     203    // Reserve frame.
     204    Register framePtr = FramePointer;
     205    masm.movePtr(StackPointer, framePtr);
     206    masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
     207 
           // Remember the BaselineFrame start; passed to InitBaselineFrameForOsr
           // below, after the stack pointer has moved further down.
     208    Register framePtrScratch = regs.takeAny();
     209    masm.movePtr(sp, framePtrScratch);
     210 
     211    // Reserve space for locals and stack values.
           // numStackValues << 3 == numStackValues * sizeof(Value) (8 bytes each).
     212    masm.ma_dsll(scratch, numStackValues, Imm32(3));
     213    masm.subPtr(scratch, StackPointer);
     214 
     215    // Enter exit frame.
     216    masm.reserveStack(3 * sizeof(uintptr_t));
     217    masm.storePtr(
     218        ImmWord(MakeFrameDescriptor(FrameType::BaselineJS)),
     219        Address(StackPointer, 2 * sizeof(uintptr_t)));  // Frame descriptor
     220    masm.storePtr(
     221        zero, Address(StackPointer, sizeof(uintptr_t)));  // fake return address
     222    masm.storePtr(FramePointer, Address(StackPointer, 0));
     223 
     224    // No GC things to mark, push a bare token.
     225    masm.loadJSContext(scratch);
     226    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);
     227 
           // Spill framePtr and reg_code across the ABI call below.
     228    masm.reserveStack(2 * sizeof(uintptr_t));
     229    masm.storePtr(framePtr,
     230                  Address(StackPointer, sizeof(uintptr_t)));  // BaselineFrame
     231    masm.storePtr(reg_code, Address(StackPointer, 0));        // jitcode
     232 
     233    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
     234                        uint32_t numStackValues);
     235    masm.setupUnalignedABICall(scratch);
     236    masm.passABIArg(framePtrScratch);  // BaselineFrame
     237    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
     238    masm.passABIArg(numStackValues);
     239    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
     240        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
     241 
           // Reload the spilled jitcode pointer and frame pointer.
     242    regs.add(OsrFrameReg);
     243    Register jitcode = regs.takeAny();
     244    masm.loadPtr(Address(StackPointer, 0), jitcode);
     245    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
     246    masm.freeStack(2 * sizeof(uintptr_t));
     247 
           // InitBaselineFrameForOsr returns false on failure (checked below).
     248    Label error;
     249    masm.freeStack(ExitFrameLayout::SizeWithFooter());
     250    masm.branchIfFalseBool(ReturnReg, &error);
     251 
     252    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
     253    // if profiler instrumentation is enabled.
     254    {
     255      Label skipProfilingInstrumentation;
     256      AbsoluteAddress addressOfEnabled(
     257          cx->runtime()->geckoProfiler().addressOfEnabled());
     258      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
     259                    &skipProfilingInstrumentation);
     260      masm.profilerEnterFrame(framePtr, scratch);
     261      masm.bind(&skipProfilingInstrumentation);
     262    }
     263 
     264    masm.jump(jitcode);
     265 
     266    // OOM: load error value, discard return address and previous frame
     267    // pointer and return.
     268    masm.bind(&error);
     269    masm.movePtr(framePtr, StackPointer);
     270    masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
     271    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
     272    masm.jump(&oomReturnLabel);
     273 
     274    masm.bind(&notOsr);
     275    // Load the scope chain in R1.
     276    MOZ_ASSERT(R1.scratchReg() != reg_code);
     277    masm.ma_move(R1.scratchReg(), reg_chain);
     278  }
     279 
     280  // The call will push the return address on the stack, thus we check that
     281  // the stack would be aligned once the call is complete.
     282  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));
     283 
     284  // Call the function with pushing return address to stack.
     285  masm.callJitNoProfiler(reg_code);
     286 
     287  {
     288    // Interpreter -> Baseline OSR will return here.
     289    masm.bind(&returnLabel);
     290    masm.addCodeLabel(returnLabel);
     291    masm.bind(&oomReturnLabel);
     292  }
     293 
     294  // Discard arguments and padding. Set sp to the address of the EnterJITRegs
     295  // on the stack.
     296  masm.mov(FramePointer, StackPointer);
     297 
     298  // Store the returned value into the vp
         // reg_vp (a7) was clobbered by the call; reload it from the slot that
         // GeneratePrologue saved it in.
     299  masm.as_ld(reg_vp, StackPointer, offsetof(EnterJITRegs, a7));
     300  masm.storeValue(JSReturnOperand, Address(reg_vp, 0));
     301 
     302  // Restore non-volatile registers and return.
     303  GenerateReturn(masm, ShortJump);
     304 }
    305 
    306 // static
    307 mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
    308 JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
    309  // Not supported, or not implemented yet.
    310  // TODO: Implement along with the corresponding stack-walker changes, in
    311  // coordination with the Gecko Profiler, see bug 1635987 and follow-ups.
    312  return mozilla::Nothing{};
    313 }
    314 
         // Generates the invalidation thunk: jumped to when an Ion frame is
         // invalidated. Captures the machine state, calls InvalidationBailout,
         // and hands off to the shared bailout tail.
     315 void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
     316  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");
     317 
     318  invalidatorOffset_ = startTrampolineCode(masm);
     319 
     320  // Stack has to be aligned here. If not, we will have to fix it.
     321  masm.checkStackAlignment();
     322 
     323  // Push registers such that we can access them from [base + code].
     324  masm.PushRegsInMask(AllRegs);
     325 
     326  // Pass pointer to InvalidationBailoutStack structure.
     327  masm.movePtr(StackPointer, a0);
     328 
     329  // Reserve place for BailoutInfo pointer. Two words to ensure alignment for
     330  // setupAlignedABICall.
     331  masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
     332  // Pass pointer to BailoutInfo
     333  masm.movePtr(StackPointer, a1);
     334 
     335  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
     336  masm.setupAlignedABICall();
     337  masm.passABIArg(a0);
     338  masm.passABIArg(a1);
     339  masm.callWithABI<Fn, InvalidationBailout>(
     340      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);
     341 
         // Pop the BailoutInfo* that InvalidationBailout wrote into the reserved
         // slot.
     342  masm.pop(a2);
     343 
     344  // Pop the machine state and the dead frame.
     345  masm.moveToStackPtr(FramePointer);
     346 
     347  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
     348  masm.jump(bailoutTail);
     349 }
    350 
     351 /* - When bailout is done via out of line code (lazy bailout).
     352 * Frame size is stored in $ra (look at
     353 * CodeGeneratorMIPS64::generateOutOfLineCode()) and thunk code should save it
     354 * on stack. Other difference is that members snapshotOffset_ and padding_ are
     355 * pushed to the stack by CodeGeneratorMIPS64::visitOutOfLineBailout().
     356 */
         // Builds the BailoutStack: pushes ra (holding frameSize_) plus the full
         // machine state, then puts a pointer to it in spArg for the C++ call.
     357 static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
     358  // Push the frameSize_ stored in ra
     359  // See: CodeGeneratorMIPS64::generateOutOfLineCode()
     360  masm.push(ra);
     361 
     362  // Push registers such that we can access them from [base + code].
     363  masm.PushRegsInMask(AllRegs);
     364 
     365  // Put pointer to BailoutStack as first argument to the Bailout()
     366  masm.movePtr(StackPointer, spArg);
     367 }
    368 
         // Emits the bailout thunk body: capture machine state, call the C++
         // Bailout() function, then jump to the shared bailout tail with the
         // resulting BailoutInfo pointer in a2.
     369 static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
     370  PushBailoutFrame(masm, a0);
     371 
     372  // Put pointer to BailoutInfo
         // Two words reserved to keep the stack aligned for setupAlignedABICall.
     373  static const uint32_t sizeOfBailoutInfo = sizeof(uintptr_t) * 2;
     374  masm.subPtr(Imm32(sizeOfBailoutInfo), StackPointer);
     375  masm.movePtr(StackPointer, a1);
     376 
     377  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
     378  masm.setupAlignedABICall();
     379  masm.passABIArg(a0);
     380  masm.passABIArg(a1);
     381  masm.callWithABI<Fn, Bailout>(ABIType::General,
     382                                CheckUnsafeCallWithABI::DontCheckOther);
     383 
     384  // Get BailoutInfo pointer
     385  masm.loadPtr(Address(StackPointer, 0), a2);
     386 
     387  // Remove both the bailout frame and the topmost Ion frame's stack.
     388  masm.moveToStackPtr(FramePointer);
     389 
     390  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
     391  masm.jump(bailoutTail);
     392 }
    393 
         // Generates the shared bailout handler trampoline; the actual body is
         // emitted by GenerateBailoutThunk above.
     394 void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
     395                                        Label* bailoutTail) {
     396  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");
     397 
     398  bailoutHandlerOffset_ = startTrampolineCode(masm);
     399 
     400  GenerateBailoutThunk(masm, bailoutTail);
     401 }
    402 
         // Generates the wrapper trampoline that lets JIT code call the C++ VM
         // function nativeFun (described by f): builds an exit frame, marshals
         // the arguments from the JIT stack into the native ABI, performs the
         // call, checks for failure, and returns to the JIT caller.
     403 bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
     404                                   VMFunctionId id, const VMFunctionData& f,
     405                                   DynFn nativeFun, uint32_t* wrapperOffset) {
     406  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");
     407 
     408  *wrapperOffset = startTrampolineCode(masm);
     409 
     410  // Avoid conflicts with argument registers while discarding the result after
     411  // the function call.
     412  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);
     413 
     414  static_assert(
     415      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
     416      "Wrapper register set should be a superset of Volatile register set.");
     417 
     418  // The context is the first argument; a0 is the first argument register.
     419  Register cxreg = a0;
     420  regs.take(cxreg);
     421 
     422  // On link-register platforms, it is the responsibility of the VM *callee* to
     423  // push the return address, while the caller must ensure that the address
     424  // is stored in ra on entry. This allows the VM wrapper to work with both
     425  // direct calls and tail calls.
     426  masm.pushReturnAddress();
     427 
     428  // Push the frame pointer to finish the exit frame, then link it up.
     429  masm.Push(FramePointer);
     430  masm.moveStackPtrTo(FramePointer);
     431  masm.loadJSContext(cxreg);
     432  masm.enterExitFrame(cxreg, regs.getAny(), id);
     433 
     434  // Reserve space for the outparameter.
     435  masm.reserveVMFunctionOutParamSpace(f);
     436  masm.setupUnalignedABICallDontSaveRestoreSP();
     437  masm.passABIArg(cxreg);
     438 
         // Offset of the first explicit argument, relative to FramePointer.
     439  size_t argDisp = ExitFrameLayout::Size();
     440 
     441  // Copy any arguments.
     442  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
     443    switch (f.argProperties(explicitArg)) {
     444      case VMFunctionData::WordByValue:
     445        if (f.argPassedInFloatReg(explicitArg)) {
     446          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::Float64);
     447        } else {
     448          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
     449        }
     450        argDisp += sizeof(void*);
     451        break;
     452      case VMFunctionData::WordByRef:
             // By-ref words are passed as the address of the stack slot.
     453        masm.passABIArg(MoveOperand(FramePointer, argDisp,
     454                                    MoveOperand::Kind::EffectiveAddress),
     455                        ABIType::General);
     456        argDisp += sizeof(void*);
     457        break;
     458      case VMFunctionData::DoubleByValue:
     459      case VMFunctionData::DoubleByRef:
     460        MOZ_CRASH("NYI: MIPS64 callVM should not be used with 128bits values.");
     461        break;
     462    }
     463  }
     464 
     465  // Copy the implicit outparam, if any.
         // The outparam slot lives below the exit frame footer.
     466  const int32_t outParamOffset =
     467      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
     468  if (f.outParam != Type_Void) {
     469    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
     470                                MoveOperand::Kind::EffectiveAddress),
     471                    ABIType::General);
     472  }
     473 
     474  masm.callWithABI(nativeFun, ABIType::General,
     475                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);
     476 
     477  // Test for failure.
     478  switch (f.failType()) {
     479    case Type_Cell:
             // A null cell pointer signals failure.
     480      masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel());
     481      break;
     482    case Type_Bool:
     483      // Called functions return bools, which are 0/false and non-zero/true
     484      masm.branchIfFalseBool(v0, masm.failureLabel());
     485      break;
     486    case Type_Void:
     487      break;
     488    default:
     489      MOZ_CRASH("unknown failure kind");
     490  }
     491 
     492  // Load the outparam.
     493  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));
     494 
     495  // Pop frame and restore frame pointer.
     496  masm.moveToStackPtr(FramePointer);
     497  masm.pop(FramePointer);
     498 
     499  // Return. Subtract sizeof(void*) for the frame pointer.
     500  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
     501                  f.explicitStackSlots() * sizeof(void*) +
     502                  f.extraValuesToPop * sizeof(Value)));
     503 
     504  return true;
     505 }
    506 
         // Generates the GC pre-write-barrier stub for values of the given
         // MIRType. Tries the inline fast path first; on the slow path it saves
         // all volatile registers and calls into C++ to mark the GC thing.
         // Returns the code offset of the stub. The thing to barrier arrives in
         // PreBarrierReg (a1).
     507 uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
     508                                        MIRType type) {
     509  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");
     510 
     511  uint32_t offset = startTrampolineCode(masm);
     512 
     513  MOZ_ASSERT(PreBarrierReg == a1);
     514  Register temp1 = a0;
     515  Register temp2 = a2;
     516  Register temp3 = a3;
         // Preserve the caller's a0/a2/a3 around the fast-path scratch usage.
     517  masm.push(temp1);
     518  masm.push(temp2);
     519  masm.push(temp3);
     520 
     521  Label noBarrier;
     522  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);
     523 
     524  // Call into C++ to mark this GC thing.
     525  masm.pop(temp3);
     526  masm.pop(temp2);
     527  masm.pop(temp1);
     528 
         // Save all volatile registers (plus ra) across the C++ call.
     529  LiveRegisterSet save;
     530  save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
     531                           FloatRegisterSet(FloatRegisters::VolatileMask));
     532  save.add(ra);
     533  masm.PushRegsInMask(save);
     534 
         // Arguments: the runtime (a0) and the thing to barrier (already in a1).
     535  masm.movePtr(ImmPtr(cx->runtime()), a0);
     536 
     537  masm.setupUnalignedABICall(a2);
     538  masm.passABIArg(a0);
     539  masm.passABIArg(a1);
     540  masm.callWithABI(JitPreWriteBarrier(type));
     541 
         // ra is excluded from the pop; presumably masm.ret() consumes its
         // stack slot to return -- TODO confirm.
     542  save.take(AnyRegister(ra));
     543  masm.PopRegsInMask(save);
     544  masm.ret();
     545 
         // Fast path taken: nothing to mark; restore scratches and return.
     546  masm.bind(&noBarrier);
     547  masm.pop(temp3);
     548  masm.pop(temp2);
     549  masm.pop(temp1);
     550  masm.abiret();
     551 
     552  return offset;
     553 }
    554 
         // Binds the shared bailout-tail label targeted by the invalidator and
         // bailout thunks, and emits the common tail code (BailoutInfo* in a2).
     555 void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
     556                                         Label* bailoutTail) {
     557  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");
     558 
     559  masm.bind(bailoutTail);
     560  masm.generateBailoutTail(a1, a2);
     561 }