tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-loong64.cpp (18756B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/Bailouts.h"
      8 #include "jit/BaselineFrame.h"
      9 #include "jit/CalleeToken.h"
     10 #include "jit/JitFrames.h"
     11 #include "jit/JitRuntime.h"
     12 #include "jit/loong64/SharedICHelpers-loong64.h"
     13 #include "jit/PerfSpewer.h"
     14 #include "jit/VMFunctions.h"
     15 #include "vm/JitActivation.h"  // js::jit::JitActivation
     16 #include "vm/JSContext.h"
     17 
     18 #include "jit/MacroAssembler-inl.h"
     19 
     20 using namespace js;
     21 using namespace js::jit;
     22 
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));

// The trampolines below store uintptr_t-sized slots with 64-bit loads/stores
// (ld.d/st.d), so this code is only correct on a 64-bit target.
static_assert(sizeof(uintptr_t) == sizeof(uint64_t), "Not 32-bit clean.");
// Layout of the register-save area pushed by GeneratePrologue and popped by
// GenerateReturn. Every field is addressed with offsetof(), so the declaration
// order fixes the stack layout; do not reorder fields without auditing both
// helpers.
struct EnterJITRegs {
  // Callee-saved floating point registers f24-f31.
  double f31;
  double f30;
  double f29;
  double f28;
  double f27;
  double f26;
  double f25;
  double f24;

  //  uintptr_t align;

  // non-volatile registers.
  uint64_t ra;
  uint64_t fp;
  uint64_t s8;
  uint64_t s7;
  uint64_t s6;
  uint64_t s5;
  uint64_t s4;
  uint64_t s3;
  uint64_t s2;
  uint64_t s1;
  uint64_t s0;
  // Save reg_vp(a7) on stack, use it after call jit code.
  uint64_t a7;
};
     58 
// Epilogue for generateEnterJIT: restores every register saved by
// GeneratePrologue (except a7, which the caller reloads itself), frees the
// EnterJITRegs save area and jumps to the restored return address.
// NOTE(review): returnCode is never read in this body; the sole caller passes
// ShortJump, but the value has no effect here.
static void GenerateReturn(MacroAssembler& masm, int returnCode) {
  // StackPointer must point exactly at the EnterJITRegs area.
  MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));

  // Restore non-volatile registers
  masm.as_ld_d(s0, StackPointer, offsetof(EnterJITRegs, s0));
  masm.as_ld_d(s1, StackPointer, offsetof(EnterJITRegs, s1));
  masm.as_ld_d(s2, StackPointer, offsetof(EnterJITRegs, s2));
  masm.as_ld_d(s3, StackPointer, offsetof(EnterJITRegs, s3));
  masm.as_ld_d(s4, StackPointer, offsetof(EnterJITRegs, s4));
  masm.as_ld_d(s5, StackPointer, offsetof(EnterJITRegs, s5));
  masm.as_ld_d(s6, StackPointer, offsetof(EnterJITRegs, s6));
  masm.as_ld_d(s7, StackPointer, offsetof(EnterJITRegs, s7));
  masm.as_ld_d(s8, StackPointer, offsetof(EnterJITRegs, s8));
  masm.as_ld_d(fp, StackPointer, offsetof(EnterJITRegs, fp));
  masm.as_ld_d(ra, StackPointer, offsetof(EnterJITRegs, ra));

  // Restore non-volatile floating point registers
  masm.as_fld_d(f24, StackPointer, offsetof(EnterJITRegs, f24));
  masm.as_fld_d(f25, StackPointer, offsetof(EnterJITRegs, f25));
  masm.as_fld_d(f26, StackPointer, offsetof(EnterJITRegs, f26));
  masm.as_fld_d(f27, StackPointer, offsetof(EnterJITRegs, f27));
  masm.as_fld_d(f28, StackPointer, offsetof(EnterJITRegs, f28));
  masm.as_fld_d(f29, StackPointer, offsetof(EnterJITRegs, f29));
  masm.as_fld_d(f30, StackPointer, offsetof(EnterJITRegs, f30));
  masm.as_fld_d(f31, StackPointer, offsetof(EnterJITRegs, f31));

  // Pop the save area and return to the C++ caller via the restored ra.
  masm.freeStack(sizeof(EnterJITRegs));

  masm.branch(ra);
}
     89 
// Prologue for generateEnterJIT: reserves an EnterJITRegs area on the stack
// and saves all callee-saved integer and floating point registers into it.
// Additionally saves a7 (reg_vp), which generateEnterJIT reloads after the
// JIT call to store the return value. Mirrored by GenerateReturn.
static void GeneratePrologue(MacroAssembler& masm) {
  masm.reserveStack(sizeof(EnterJITRegs));

  masm.as_st_d(s0, StackPointer, offsetof(EnterJITRegs, s0));
  masm.as_st_d(s1, StackPointer, offsetof(EnterJITRegs, s1));
  masm.as_st_d(s2, StackPointer, offsetof(EnterJITRegs, s2));
  masm.as_st_d(s3, StackPointer, offsetof(EnterJITRegs, s3));
  masm.as_st_d(s4, StackPointer, offsetof(EnterJITRegs, s4));
  masm.as_st_d(s5, StackPointer, offsetof(EnterJITRegs, s5));
  masm.as_st_d(s6, StackPointer, offsetof(EnterJITRegs, s6));
  masm.as_st_d(s7, StackPointer, offsetof(EnterJITRegs, s7));
  masm.as_st_d(s8, StackPointer, offsetof(EnterJITRegs, s8));
  masm.as_st_d(fp, StackPointer, offsetof(EnterJITRegs, fp));
  masm.as_st_d(ra, StackPointer, offsetof(EnterJITRegs, ra));
  masm.as_st_d(a7, StackPointer, offsetof(EnterJITRegs, a7));

  masm.as_fst_d(f24, StackPointer, offsetof(EnterJITRegs, f24));
  masm.as_fst_d(f25, StackPointer, offsetof(EnterJITRegs, f25));
  masm.as_fst_d(f26, StackPointer, offsetof(EnterJITRegs, f26));
  masm.as_fst_d(f27, StackPointer, offsetof(EnterJITRegs, f27));
  masm.as_fst_d(f28, StackPointer, offsetof(EnterJITRegs, f28));
  masm.as_fst_d(f29, StackPointer, offsetof(EnterJITRegs, f29));
  masm.as_fst_d(f30, StackPointer, offsetof(EnterJITRegs, f30));
  masm.as_fst_d(f31, StackPointer, offsetof(EnterJITRegs, f31));
}
    115 
// Generates the trampoline for calling JIT-compiled code from C++.
// The trampoline uses the EnterJitCode signature; all eight arguments arrive
// in the integer argument registers a0..a7 per the LoongArch64 calling
// convention.
void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");

  enterJITOffset_ = startTrampolineCode(masm);

  // EnterJitCode arguments, in argument-register order.
  const Register reg_code = IntArgReg0;
  const Register reg_argc = IntArgReg1;
  const Register reg_argv = IntArgReg2;
  const mozilla::DebugOnly<Register> reg_frame = IntArgReg3;
  const Register reg_token = IntArgReg4;
  const Register reg_chain = IntArgReg5;
  const Register reg_values = IntArgReg6;
  const Register reg_vp = IntArgReg7;

  MOZ_ASSERT(OsrFrameReg == reg_frame);

  // Save callee-saved registers (and a7/reg_vp) into an EnterJITRegs area.
  GeneratePrologue(masm);

  // Save stack pointer as baseline frame.
  masm.movePtr(StackPointer, FramePointer);

  // Push the JS arguments and the callee token (shared across platforms).
  generateEnterJitShared(masm, reg_argc, reg_argv, reg_token, s0, s1, s2);

  // Push the descriptor.
  masm.unboxInt32(Address(reg_vp, 0), s3);
  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, s3, s3);

  CodeLabel returnLabel;
  Label oomReturnLabel;
  {
    // Handle Interpreter -> Baseline OSR.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(FramePointer));
    regs.take(OsrFrameReg);
    regs.take(reg_code);
    MOZ_ASSERT(!regs.has(ReturnReg), "ReturnReg matches reg_code");

    // A null OsrFrameReg means this is a regular (non-OSR) entry.
    Label notOsr;
    masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);

    Register numStackValues = reg_values;
    regs.take(numStackValues);
    Register scratch = regs.takeAny();

    // Push return address.
    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
    masm.ma_li(scratch, &returnLabel);
    masm.storePtr(scratch, Address(StackPointer, 0));

    // Push previous frame pointer.
    masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
    masm.storePtr(FramePointer, Address(StackPointer, 0));

    // Reserve frame.
    Register framePtr = FramePointer;
    masm.movePtr(StackPointer, framePtr);
    masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);

    // Remember the BaselineFrame address before locals are reserved; it is
    // passed to InitBaselineFrameForOsr below.
    Register framePtrScratch = regs.takeAny();
    masm.movePtr(sp, framePtrScratch);

    // Reserve space for locals and stack values.
    masm.as_slli_d(scratch, numStackValues, 3);
    masm.subPtr(scratch, StackPointer);

    // Enter exit frame.
    masm.reserveStack(3 * sizeof(uintptr_t));
    masm.storePtr(
        ImmWord(MakeFrameDescriptor(FrameType::BaselineJS)),
        Address(StackPointer, 2 * sizeof(uintptr_t)));  // Frame descriptor
    masm.storePtr(
        zero, Address(StackPointer, sizeof(uintptr_t)));  // fake return address
    masm.storePtr(FramePointer, Address(StackPointer, 0));

    // No GC things to mark, push a bare token.
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

    // Spill values that must survive the ABI call below.
    masm.reserveStack(2 * sizeof(uintptr_t));
    masm.storePtr(framePtr,
                  Address(StackPointer, sizeof(uintptr_t)));  // BaselineFrame
    masm.storePtr(reg_code, Address(StackPointer, 0));        // jitcode

    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
                        uint32_t numStackValues);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(framePtrScratch);  // BaselineFrame
    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
    masm.passABIArg(numStackValues);
    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    // Reload the spilled values and discard the spill slots.
    regs.add(OsrFrameReg);
    Register jitcode = regs.takeAny();
    masm.loadPtr(Address(StackPointer, 0), jitcode);
    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
    masm.freeStack(2 * sizeof(uintptr_t));

    Label error;
    masm.freeStack(ExitFrameLayout::SizeWithFooter());
    masm.branchIfFalseBool(ReturnReg, &error);

    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
    // if profiler instrumentation is enabled.
    {
      Label skipProfilingInstrumentation;
      AbsoluteAddress addressOfEnabled(
          cx->runtime()->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                    &skipProfilingInstrumentation);
      masm.profilerEnterFrame(framePtr, scratch);
      masm.bind(&skipProfilingInstrumentation);
    }

    masm.jump(jitcode);

    // OOM: load error value, discard return address and previous frame
    // pointer and return.
    masm.bind(&error);
    masm.movePtr(framePtr, StackPointer);
    masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    masm.jump(&oomReturnLabel);

    masm.bind(&notOsr);
    // Load the scope chain in R1.
    MOZ_ASSERT(R1.scratchReg() != reg_code);
    masm.as_or(R1.scratchReg(), reg_chain, zero);
  }

  // The call will push the return address and frame pointer on the stack, thus
  // we check that the stack would be aligned once the call is complete.
  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));

  // Call the function with pushing return address to stack.
  masm.callJitNoProfiler(reg_code);

  {
    // Interpreter -> Baseline OSR will return here.
    masm.bind(&returnLabel);
    masm.addCodeLabel(returnLabel);
    masm.bind(&oomReturnLabel);
  }

  // Discard arguments and padding. Set sp to the address of the EnterJITRegs
  // on the stack.
  masm.mov(FramePointer, StackPointer);

  // Store the returned value into the vp slot saved by GeneratePrologue.
  masm.as_ld_d(reg_vp, StackPointer, offsetof(EnterJITRegs, a7));
  masm.storeValue(JSReturnOperand, Address(reg_vp, 0));

  // Restore non-volatile registers and return.
  GenerateReturn(masm, ShortJump);
}
    274 
    275 // static
    276 mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
    277 JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
    278  // Not supported, or not implemented yet.
    279  // TODO: Implement along with the corresponding stack-walker changes, in
    280  // coordination with the Gecko Profiler, see bug 1635987 and follow-ups.
    281  return mozilla::Nothing{};
    282 }
    283 
// Generates the trampoline entered when JIT code is invalidated: captures the
// full machine state on the stack and calls InvalidationBailout to build the
// bailout info consumed by the shared bailout tail.
void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

  invalidatorOffset_ = startTrampolineCode(masm);

  // Stack has to be aligned here. If not, we will have to fix it.
  masm.checkStackAlignment();

  // Push registers such that we can access them from [base + code].
  masm.PushRegsInMask(AllRegs);

  // Pass pointer to InvalidationBailoutStack structure.
  masm.movePtr(StackPointer, a0);

  // Reserve place for BailoutInfo pointer. Two words to ensure alignment for
  // setupAlignedABICall.
  masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
  // Pass pointer to BailoutInfo
  masm.movePtr(StackPointer, a1);

  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupAlignedABICall();
  masm.passABIArg(a0);
  masm.passABIArg(a1);
  masm.callWithABI<Fn, InvalidationBailout>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

  // Retrieve the BailoutInfo outparam.
  masm.pop(a2);

  // Pop the machine state and the dead frame.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
  masm.jump(bailoutTail);
}
    319 
/* - When bailout is done via out of line code (lazy bailout).
 * Frame size is stored in $ra (look at
 * CodeGeneratorLOONG64::generateOutOfLineCode()) and thunk code should save it
 * on stack. Another difference is that members snapshotOffset_ and padding_
 * are pushed to the stack by CodeGeneratorLOONG64::visitOutOfLineBailout().
 */
// Captures the machine state for a bailout: pushes the frame size (passed in
// ra by out-of-line bailout code), then all registers, and leaves a pointer to
// the resulting BailoutStack in spArg for the subsequent Bailout() call.
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
  // Push the frameSize_ stored in ra
  // See: CodeGeneratorLOONG64::generateOutOfLineCode()
  masm.push(ra);

  // Push registers such that we can access them from [base + code].
  masm.PushRegsInMask(AllRegs);

  // Put pointer to BailoutStack as first argument to the Bailout()
  masm.movePtr(StackPointer, spArg);
}
    337 
// Emits the common bailout thunk body: builds a BailoutStack, calls Bailout()
// to produce a BaselineBailoutInfo, then jumps to the shared bailout tail
// with that pointer in a2.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
  PushBailoutFrame(masm, a0);

  // Make space for Bailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movePtr(StackPointer, a1);

  // Call the bailout function.
  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(a2);
  masm.passABIArg(a0);
  masm.passABIArg(a1);
  masm.callWithABI<Fn, Bailout>(ABIType::General,
                                CheckUnsafeCallWithABI::DontCheckOther);

  // Get the bailoutInfo outparam.
  masm.pop(a2);

  // Remove both the bailout frame and the topmost Ion frame's stack.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
  masm.jump(bailoutTail);
}
    362 
// Generates the lazy-bailout entry point; the actual body is the shared
// bailout thunk emitted by GenerateBailoutThunk above.
void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
                                        Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");

  // Record the trampoline start offset before emitting any code.
  bailoutHandlerOffset_ = startTrampolineCode(masm);

  GenerateBailoutThunk(masm, bailoutTail);
}
    371 
// Generates the wrapper trampoline for one VM function: builds an exit frame,
// marshals the JS-stack arguments into the native ABI, calls nativeFun, checks
// for failure, loads any outparam, and returns to JIT code popping the
// callee's stack arguments. Returns true (code emission errors are detected
// by the caller via masm.oom()).
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                   VMFunctionId id, const VMFunctionData& f,
                                   DynFn nativeFun, uint32_t* wrapperOffset) {
  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

  *wrapperOffset = startTrampolineCode(masm);

  // Avoid conflicts with argument registers while discarding the result after
  // the function call.
  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

  static_assert(
      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
      "Wrapper register set should be a superset of Volatile register set.");

  // The context is the first argument; a0 is the first argument register.
  Register cxreg = a0;
  regs.take(cxreg);

  // On link-register platforms, it is the responsibility of the VM *callee* to
  // push the return address, while the caller must ensure that the address
  // is stored in ra on entry. This allows the VM wrapper to work with both
  // direct calls and tail calls.
  masm.pushReturnAddress();

  // Push the frame pointer to finish the exit frame, then link it up.
  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);
  masm.loadJSContext(cxreg);
  masm.enterExitFrame(cxreg, regs.getAny(), id);

  // Reserve space for the outparameter.
  masm.reserveVMFunctionOutParamSpace(f);

  masm.setupUnalignedABICallDontSaveRestoreSP();
  masm.passABIArg(cxreg);

  // Explicit arguments start just above the exit frame, addressed relative to
  // FramePointer.
  size_t argDisp = ExitFrameLayout::Size();

  // Copy any arguments.
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
        if (f.argPassedInFloatReg(explicitArg)) {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::Float64);
        } else {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        }
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::WordByRef:
        // Pass the address of the stack slot rather than its contents.
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
      case VMFunctionData::DoubleByRef:
        MOZ_CRASH(
            "NYI: LOONG64 callVM should not be used with 128bits values.");
        break;
    }
  }

  // Copy the implicit outparam, if any. The outparam space sits below the
  // exit footer, so its offset from FramePointer is negative.
  const int32_t outParamOffset =
      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
  if (f.outParam != Type_Void) {
    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                                MoveOperand::Kind::EffectiveAddress),
                    ABIType::General);
  }

  masm.callWithABI(nativeFun, ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  switch (f.failType()) {
    case Type_Cell:
      masm.branchTestPtr(Assembler::Zero, a0, a0, masm.failureLabel());
      break;
    case Type_Bool:
      // Called functions return bools, which are 0/false and non-zero/true
      masm.branchIfFalseBool(a0, masm.failureLabel());
      break;
    case Type_Void:
      break;
    default:
      MOZ_CRASH("unknown failure kind");
  }

  // Load the outparam.
  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

  // Pop frame and restore frame pointer.
  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  // Return. Subtract sizeof(void*) for the frame pointer.
  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                  f.explicitStackSlots() * sizeof(void*) +
                  f.extraValuesToPop * sizeof(Value)));

  return true;
}
    477 
// Generates the pre-write barrier stub for GC things of the given MIRType.
// The thing's address arrives in PreBarrierReg (a1). The fast path filters
// out cases that need no barrier; otherwise all volatile registers are saved
// and the C++ barrier is invoked. Returns the trampoline's start offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                        MIRType type) {
  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

  uint32_t offset = startTrampolineCode(masm);

  MOZ_ASSERT(PreBarrierReg == a1);
  Register temp1 = a0;
  Register temp2 = a2;
  Register temp3 = a3;
  // Preserve the temps the fast path is allowed to clobber.
  masm.push(temp1);
  masm.push(temp2);
  masm.push(temp3);

  // Jumps to noBarrier when no barrier is required.
  Label noBarrier;
  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

  // Call into C++ to mark this GC thing.
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);

  // Save all volatile registers (and ra) around the C++ call.
  LiveRegisterSet save;
  save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                           FloatRegisterSet(FloatRegisters::VolatileMask));
  masm.push(ra);
  masm.PushRegsInMask(save);

  masm.movePtr(ImmPtr(cx->runtime()), a0);

  // Call JitPreWriteBarrier(runtime, thing) with the thing still in a1.
  masm.setupUnalignedABICall(a2);
  masm.passABIArg(a0);
  masm.passABIArg(a1);
  masm.callWithABI(JitPreWriteBarrier(type));

  masm.PopRegsInMask(save);
  masm.ret();

  // Fast-path exit: restore temps and return without calling into C++.
  masm.bind(&noBarrier);
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);
  masm.abiret();

  return offset;
}
    524 
// Binds the shared bailout tail reached by the invalidator and the bailout
// thunk; both arrive here with the BaselineBailoutInfo pointer in a2.
void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
                                         Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");

  masm.bind(bailoutTail);
  masm.generateBailoutTail(a1, a2);
}
    531 }