tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-arm.cpp (19455B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/arm/SharedICHelpers-arm.h"
      8 #include "jit/Bailouts.h"
      9 #include "jit/BaselineFrame.h"
     10 #include "jit/CalleeToken.h"
     11 #include "jit/JitFrames.h"
     12 #include "jit/JitRuntime.h"
     13 #include "jit/JitSpewer.h"
     14 #include "jit/PerfSpewer.h"
     15 #include "jit/VMFunctions.h"
     16 #include "vm/JitActivation.h"  // js::jit::JitActivation
     17 #include "vm/JSContext.h"
     18 #include "vm/Realm.h"
     19 
     20 #include "jit/MacroAssembler-inl.h"
     21 
     22 using namespace js;
     23 using namespace js::jit;
     24 
// The set of non-volatile (callee-saved) double registers, d8-d15. The
// EnterJIT trampoline saves these on entry and restores them in
// GenerateReturn, so JIT'd code may clobber them freely.
static const FloatRegisterSet NonVolatileFloatRegs = FloatRegisterSet(
    (1ULL << FloatRegisters::d8) | (1ULL << FloatRegisters::d9) |
    (1ULL << FloatRegisters::d10) | (1ULL << FloatRegisters::d11) |
    (1ULL << FloatRegisters::d12) | (1ULL << FloatRegisters::d13) |
    (1ULL << FloatRegisters::d14) | (1ULL << FloatRegisters::d15));
     30 
// Emit the epilogue of the EnterJIT trampoline: undo the pushes performed by
// generateEnterJIT's prologue (in reverse order), place |returnCode| in r0 as
// the C++ return value, and return by popping the saved lr directly into pc.
static void GenerateReturn(MacroAssembler& masm, int returnCode) {
  // Restore non-volatile floating point registers.
  masm.transferMultipleByRuns(NonVolatileFloatRegs, IsLoad, StackPointer, IA);

  // Get rid of padding word.
  masm.addPtr(Imm32(sizeof(void*)), sp);

  // Set up return value
  masm.ma_mov(Imm32(returnCode), r0);

  // Pop and return
  masm.startDataTransferM(IsLoad, sp, IA, WriteBack);
  masm.transferReg(r4);
  masm.transferReg(r5);
  masm.transferReg(r6);
  masm.transferReg(r7);
  masm.transferReg(r8);
  masm.transferReg(r9);
  masm.transferReg(r10);
  masm.transferReg(r11);
  // r12 isn't saved, so it shouldn't be restored.
  // Popping the saved lr slot into pc performs the actual return.
  masm.transferReg(pc);
  masm.finishDataTransfer();
  masm.flushBuffer();
}
     56 
// Layout of the register-save area built by generateEnterJIT's prologue,
// ordered from low to high addresses. It must mirror the push sequence
// exactly (GPRs are pushed first, so they end up highest; the float
// registers are pushed last and end up lowest). generateEnterJIT computes
// sp-relative slot addresses with offsetof() into this struct, so any change
// here must be matched by a change to the pushes there.
struct EnterJITStack {
  // Non-volatile double registers d8-d15 (see NonVolatileFloatRegs).
  double d8;
  double d9;
  double d10;
  double d11;
  double d12;
  double d13;
  double d14;
  double d15;

  // Padding.
  void* padding;

  // Non-volatile registers.
  void* r4;
  void* r5;
  void* r6;
  void* r7;
  void* r8;
  void* r9;
  void* r10;
  void* r11;
  // The abi does not expect r12 (ip) to be preserved
  void* lr;

  // Arguments.
  // The first four C++ arguments arrive in registers:
  // code == r0
  // argc == r1
  // argv == r2
  // frame == r3
  // The remaining arguments live on the caller's stack, directly above the
  // saved lr slot.
  CalleeToken token;
  JSObject* scopeChain;
  size_t numStackValues;
  Value* vp;
};
     92 
/*
 * This method generates a trampoline for a c++ function with the following
 * signature:
 *   void enter(void* code, int argc, Value* argv, InterpreterFrame* fp,
 *              CalleeToken calleeToken, JSObject* scopeChain, Value* vp)
 *   ...using standard EABI calling convention
 */
void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");

  enterJITOffset_ = startTrampolineCode(masm);

  // sp-relative addresses of the stack-passed C++ arguments. These offsets
  // are valid once the full EnterJITStack (GPRs, padding, floats) has been
  // pushed below.
  const Address slot_token(sp, offsetof(EnterJITStack, token));
  const Address slot_vp(sp, offsetof(EnterJITStack, vp));

  // The 4th argument (InterpreterFrame* fp) arrives in r3, which doubles as
  // the OSR frame register.
  static_assert(OsrFrameReg == r3);

  // Save non-volatile registers. These must be saved by the trampoline,
  // rather than the JIT'd code, because they are scanned by the conservative
  // scanner.
  masm.startDataTransferM(IsStore, sp, DB, WriteBack);
  masm.transferReg(r4);   // [sp,0]
  masm.transferReg(r5);   // [sp,4]
  masm.transferReg(r6);   // [sp,8]
  masm.transferReg(r7);   // [sp,12]
  masm.transferReg(r8);   // [sp,16]
  masm.transferReg(r9);   // [sp,20]
  masm.transferReg(r10);  // [sp,24]
  masm.transferReg(r11);  // [sp,28]
  // The abi does not expect r12 (ip) to be preserved
  masm.transferReg(lr);  // [sp,32]
  // The 5th argument is located at [sp, 36]
  masm.finishDataTransfer();

  // Add padding word.
  masm.subPtr(Imm32(sizeof(void*)), sp);

  // Push the float registers.
  masm.transferMultipleByRuns(NonVolatileFloatRegs, IsStore, sp, DB);

  // Load calleeToken into r9.
  masm.loadPtr(slot_token, r9);

  // Save stack pointer (base of the EnterJITStack) so slots can still be
  // addressed after the JIT arguments are pushed.
  masm.movePtr(sp, r11);

  // Load the number of actual arguments into r10. vp[0] holds a boxed int32
  // with the actual argc.
  masm.loadPtr(slot_vp, r10);
  masm.unboxInt32(Address(r10, 0), r10);

  Register argcReg = r1;
  Register argvReg = r2;
  Register calleeTokenReg = r9;
  generateEnterJitShared(masm, argcReg, argvReg, calleeTokenReg, r4, r5, r6);

  // Push the frame descriptor.
  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, r10, r10);

  Label returnLabel;
  {
    // Handle Interpreter -> Baseline OSR. Taken only when a non-null
    // InterpreterFrame* was passed in OsrFrameReg (r3).
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(r11));
    regs.take(OsrFrameReg);
    regs.take(r0);  // jitcode
    MOZ_ASSERT(!regs.has(ReturnReg), "ReturnReg matches r0");

    const Address slot_numStackValues(r11,
                                      offsetof(EnterJITStack, numStackValues));

    Label notOsr;
    masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

    Register scratch = regs.takeAny();

    Register numStackValues = regs.takeAny();
    masm.load32(slot_numStackValues, numStackValues);

    // Write return address. On ARM, CodeLabel is only used for tableswitch,
    // so we can't use it here to get the return address. Instead, we use pc
    // + a fixed offset to a jump to returnLabel. The pc register holds pc +
    // 8, so we add the size of 2 instructions to skip the instructions
    // emitted by push and jump(&skipJump).
    {
      // Forbid pool/nop insertion so the 5-instruction sequence below keeps
      // its fixed layout (the pc arithmetic depends on it).
      AutoForbidPoolsAndNops afp(&masm, 5);
      Label skipJump;
      masm.mov(pc, scratch);
      masm.addPtr(Imm32(2 * sizeof(uint32_t)), scratch);
      masm.push(scratch);
      masm.jump(&skipJump);
      masm.jump(&returnLabel);
      masm.bind(&skipJump);
    }

    // Frame prologue.
    masm.push(FramePointer);
    masm.mov(sp, FramePointer);

    // Reserve frame.
    masm.subPtr(Imm32(BaselineFrame::Size()), sp);

    Register framePtrScratch = regs.takeAny();
    masm.touchFrameValues(numStackValues, scratch, framePtrScratch);
    masm.mov(sp, framePtrScratch);

    // Reserve space for locals and stack values.
    // scratch = numStackValues * sizeof(Value) (i.e. << 3).
    masm.ma_lsl(Imm32(3), numStackValues, scratch);
    masm.ma_sub(sp, scratch, sp);

    // Enter exit frame.
    masm.push(FrameDescriptor(FrameType::BaselineJS));
    masm.push(Imm32(0));  // Fake return address.
    masm.push(FramePointer);
    // No GC things to mark on the stack, push a bare token.
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

    masm.push(r0);  // jitcode

    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
                        uint32_t numStackValues);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(framePtrScratch);  // BaselineFrame
    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
    masm.passABIArg(numStackValues);
    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    Register jitcode = regs.takeAny();
    masm.pop(jitcode);

    MOZ_ASSERT(jitcode != ReturnReg);

    Label error;
    masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), sp);
    masm.branchIfFalseBool(ReturnReg, &error);

    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
    // if profiler instrumentation is enabled.
    {
      Label skipProfilingInstrumentation;
      AbsoluteAddress addressOfEnabled(
          cx->runtime()->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                    &skipProfilingInstrumentation);
      masm.profilerEnterFrame(FramePointer, scratch);
      masm.bind(&skipProfilingInstrumentation);
    }

    masm.jump(jitcode);

    // OOM: frame epilogue, load error value, discard return address and return.
    masm.bind(&error);
    masm.mov(FramePointer, sp);
    masm.pop(FramePointer);
    masm.addPtr(Imm32(sizeof(uintptr_t)), sp);  // Return address.
    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    masm.jump(&returnLabel);

    masm.bind(&notOsr);
    // Load the scope chain in R1.
    MOZ_ASSERT(R1.scratchReg() != r0);
    masm.loadPtr(Address(r11, offsetof(EnterJITStack, scopeChain)),
                 R1.scratchReg());
  }

  // The callee will push the return address and frame pointer on the stack,
  // thus we check that the stack would be aligned once the call is complete.
  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));

  // Call the function.
  masm.callJitNoProfiler(r0);

  // Interpreter -> Baseline OSR will return here.
  masm.bind(&returnLabel);

  // Discard arguments and padding. Set sp to the address of the EnterJITStack
  // on the stack.
  masm.mov(r11, sp);

  // Store the returned value into the slot_vp
  masm.loadPtr(slot_vp, r5);
  masm.storeValue(JSReturnOperand, Address(r5, 0));

  // Restore non-volatile registers and return (r0 = true).
  GenerateReturn(masm, true);
}
    280 
// static
// On ARM, register state recovery for C++ entry frames is not implemented;
// the profiler always receives Nothing for such frames.
mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
  // Not supported, or not implemented yet.
  // TODO: Implement along with the corresponding stack-walker changes, in
  // coordination with the Gecko Profiler, see bug 1635987 and follow-ups.
  return mozilla::Nothing{};
}
    289 
// Generate the invalidation thunk: snapshot the full machine state (all
// GPRs and double registers) onto the stack, call InvalidationBailout to
// build the bailout information, then jump to the shared bailout tail with
// the BaselineBailoutInfo pointer in r2.
void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
  // See large comment in x86's JitRuntime::generateInvalidator.

  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

  invalidatorOffset_ = startTrampolineCode(masm);

  // At this point, one of two things has happened:
  // 1) Execution has just returned from C code, which left the stack aligned
  // 2) Execution has just returned from Ion code, which left the stack
  // unaligned. The old return address should not matter, but we still want the
  // stack to be aligned, and there is no good reason to automatically align it
  // with a call to setupUnalignedABICall.
  masm.as_bic(sp, sp, Imm8(7));  // sp &= ~7: force 8-byte alignment.
  masm.startDataTransferM(IsStore, sp, DB, WriteBack);
  // We don't have to push everything, but this is likely easier.
  // Setting regs_.
  for (uint32_t i = 0; i < Registers::Total; i++) {
    masm.transferReg(Register::FromCode(i));
  }
  masm.finishDataTransfer();

  // Since our datastructures for stack inspection are compile-time fixed,
  // if there are only 16 double registers, then we need to reserve
  // space on the stack for the missing 16.
  if (FloatRegisters::ActualTotalPhys() != FloatRegisters::TotalPhys) {
    ScratchRegisterScope scratch(masm);
    int missingRegs =
        FloatRegisters::TotalPhys - FloatRegisters::ActualTotalPhys();
    masm.ma_sub(Imm32(missingRegs * sizeof(double)), sp, scratch);
  }

  masm.startFloatTransferM(IsStore, sp, DB, WriteBack);
  for (uint32_t i = 0; i < FloatRegisters::ActualTotalPhys(); i++) {
    masm.transferFloatReg(FloatRegister(i, FloatRegister::Double));
  }
  masm.finishFloatTransfer();

  // r0 = address of the register snapshot (InvalidationBailoutStack*).
  masm.ma_mov(sp, r0);
  // Reserve 8 bytes for the outparam to ensure alignment for
  // setupAlignedABICall.
  masm.reserveStack(sizeof(void*) * 2);
  masm.mov(sp, r1);  // r1 = address of the BaselineBailoutInfo* outparam.
  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupAlignedABICall();
  masm.passABIArg(r0);
  masm.passABIArg(r1);
  masm.callWithABI<Fn, InvalidationBailout>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

  masm.pop(r2);  // Get bailoutInfo outparam.

  // Pop the machine state and the dead frame.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
  masm.jump(bailoutTail);
}
    348 
// Push a complete machine-state snapshot (all GPRs, then all double
// registers) so jit::Bailout can read every register value. On exit, |spArg|
// holds the address of the snapshot (the new sp).
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
#ifdef ENABLE_WASM_SIMD
#  error "Needs more careful logic if SIMD is enabled"
#endif

  // STEP 1a: Save our register sets to the stack so Bailout() can read
  // everything.
  // sp % 8 == 0

  masm.startDataTransferM(IsStore, sp, DB, WriteBack);
  // We don't have to push everything, but this is likely easier.
  // Setting regs_.
  for (uint32_t i = 0; i < Registers::Total; i++) {
    masm.transferReg(Register::FromCode(i));
  }
  masm.finishDataTransfer();

  ScratchRegisterScope scratch(masm);

  // Since our datastructures for stack inspection are compile-time fixed,
  // if there are only 16 double registers, then we need to reserve
  // space on the stack for the missing 16.
  if (FloatRegisters::ActualTotalPhys() != FloatRegisters::TotalPhys) {
    int missingRegs =
        FloatRegisters::TotalPhys - FloatRegisters::ActualTotalPhys();
    masm.ma_sub(Imm32(missingRegs * sizeof(double)), sp, scratch);
  }
  masm.startFloatTransferM(IsStore, sp, DB, WriteBack);
  for (uint32_t i = 0; i < FloatRegisters::ActualTotalPhys(); i++) {
    masm.transferFloatReg(FloatRegister(i, FloatRegister::Double));
  }
  masm.finishFloatTransfer();

  // The current stack pointer is the first argument to jit::Bailout.
  masm.ma_mov(sp, spArg);
}
    385 
// Emit the common bailout path: snapshot the machine state, call
// jit::Bailout to construct the BaselineBailoutInfo, then hand off to the
// shared bailout tail with that pointer in r2.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
  PushBailoutFrame(masm, r0);  // r0 = BailoutStack* (register snapshot).

  // Make space for Bailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.mov(sp, r1);  // r1 = address of the BaselineBailoutInfo* outparam.
  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupAlignedABICall();

  masm.passABIArg(r0);
  masm.passABIArg(r1);

  masm.callWithABI<Fn, Bailout>(ABIType::General,
                                CheckUnsafeCallWithABI::DontCheckOther);
  masm.pop(r2);  // Get the bailoutInfo outparam.

  // Remove both the bailout frame and the topmost Ion frame's stack.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
  masm.jump(bailoutTail);
}
    408 
// Generate the bailout handler trampoline: record its start offset, then
// emit the shared bailout thunk, which ends by jumping to |bailoutTail|.
void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
                                        Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");

  bailoutHandlerOffset_ = startTrampolineCode(masm);

  GenerateBailoutThunk(masm, bailoutTail);
}
    417 
// Generate a wrapper trampoline that lets JIT code call the C++ VM function
// |nativeFun| described by |f|: it completes an exit frame, marshals the
// explicit arguments from the JS stack into the ABI call, reserves and later
// loads any implicit outparam, checks for failure, and returns to the JIT
// caller popping the callee-cleaned stack slots. |*wrapperOffset| receives
// the trampoline's code offset.
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                   VMFunctionId id, const VMFunctionData& f,
                                   DynFn nativeFun, uint32_t* wrapperOffset) {
  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

  *wrapperOffset = startTrampolineCode(masm);

  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

  static_assert(
      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
      "Wrapper register set must be a superset of Volatile register set.");

  // The context is the first argument; r0 is the first argument register.
  Register cxreg = r0;
  regs.take(cxreg);

  // On link-register platforms, it is the responsibility of the VM *callee* to
  // push the return address, while the caller must ensure that the address
  // is stored in lr on entry. This allows the VM wrapper to work with both
  // direct calls and tail calls.
  masm.pushReturnAddress();

  // Push the frame pointer to finish the exit frame, then link it up.
  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);
  masm.loadJSContext(cxreg);
  masm.enterExitFrame(cxreg, regs.getAny(), id);

  // Reserve space for the outparameter.
  masm.reserveVMFunctionOutParamSpace(f);

  masm.setupUnalignedABICallDontSaveRestoreSP();
  masm.passABIArg(cxreg);

  // Explicit arguments start just above the exit frame, addressed relative
  // to the frame pointer.
  size_t argDisp = ExitFrameLayout::Size();

  // Copy any arguments.
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
        masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
        // Values should be passed by reference, not by value, so we assert
        // that the argument is a double-precision float.
        MOZ_ASSERT(f.argPassedInFloatReg(explicitArg));
        masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::Float64);
        argDisp += sizeof(double);
        break;
      case VMFunctionData::WordByRef:
        // Pass the address of the word-sized slot.
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByRef:
        // Pass the address of the double; it occupies two stack words.
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += 2 * sizeof(void*);
        break;
    }
  }

  // Copy the implicit outparam, if any. The outparam slot lives below the
  // exit frame footer (negative offset from the frame pointer).
  const int32_t outParamOffset =
      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
  if (f.outParam != Type_Void) {
    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                                MoveOperand::Kind::EffectiveAddress),
                    ABIType::General);
  }

  masm.callWithABI(nativeFun, ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  switch (f.failType()) {
    case Type_Cell:
      // A null cell pointer signals failure.
      masm.branchTestPtr(Assembler::Zero, r0, r0, masm.failureLabel());
      break;
    case Type_Bool:
      masm.branchIfFalseBool(r0, masm.failureLabel());
      break;
    case Type_Void:
      break;
    default:
      MOZ_CRASH("unknown failure kind");
  }

  // Load the outparam.
  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

  // Until C++ code is instrumented against Spectre, prevent speculative
  // execution from returning any private data.
  if (f.returnsData() && JitOptions.spectreJitToCxxCalls) {
    masm.speculationBarrier();
  }

  // Pop frame and restore frame pointer.
  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  // Return. Subtract sizeof(void*) for the frame pointer.
  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                  f.explicitStackSlots() * sizeof(void*) +
                  f.extraValuesToPop * sizeof(Value)));

  return true;
}
    530 
// Generate the GC pre-write-barrier trampoline for values of |type|. The
// thing to barrier arrives in r1 (PreBarrierReg). A fast path may conclude
// no barrier is needed; otherwise all volatile registers are saved and
// JitPreWriteBarrier(runtime, thing) is called. Returns the trampoline's
// code offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                        MIRType type) {
  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

  uint32_t offset = startTrampolineCode(masm);

  masm.pushReturnAddress();

  static_assert(PreBarrierReg == r1);
  Register temp1 = r2;
  Register temp2 = r3;
  Register temp3 = r4;
  // Preserve the temps; the fast path below is free to clobber them.
  masm.push(temp1);
  masm.push(temp2);
  masm.push(temp3);

  Label noBarrier;
  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

  // Call into C++ to mark this GC thing.
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);

  // Save all volatile GPRs and double registers around the ABI call.
  LiveRegisterSet save;
  save.set() =
      RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                  FloatRegisterSet(FloatRegisters::VolatileDoubleMask));
  masm.PushRegsInMask(save);

  masm.movePtr(ImmPtr(cx->runtime()), r0);

  // Call JitPreWriteBarrier(runtime, thing); the thing is still in r1.
  masm.setupUnalignedABICall(r2);
  masm.passABIArg(r0);
  masm.passABIArg(r1);
  masm.callWithABI(JitPreWriteBarrier(type));
  masm.PopRegsInMask(save);
  masm.ret();

  // Fast path determined no barrier is needed: restore temps and return.
  masm.bind(&noBarrier);
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);
  masm.ret();

  return offset;
}
    578 
// Bind the shared bailout-tail label targeted by the invalidator and bailout
// thunks (which arrive with the BailoutInfo pointer in r2; see the jumps in
// generateInvalidator / GenerateBailoutThunk) and emit the tail itself.
void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
                                         Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");

  masm.bind(bailoutTail);
  masm.generateBailoutTail(r1, r2);
}