tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-x64.cpp (19584B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/Bailouts.h"
      8 #include "jit/BaselineFrame.h"
      9 #include "jit/CalleeToken.h"
     10 #include "jit/JitFrames.h"
     11 #include "jit/JitRuntime.h"
     12 #include "jit/PerfSpewer.h"
     13 #include "jit/VMFunctions.h"
     14 #include "jit/x64/SharedICRegisters-x64.h"
     15 #include "vm/JitActivation.h"  // js::jit::JitActivation
     16 #include "vm/JSContext.h"
     17 
     18 #include "jit/MacroAssembler-inl.h"
     19 
     20 using namespace js;
     21 using namespace js::jit;
     22 
     23 using mozilla::IsPowerOfTwo;
     24 
// This struct reflects the contents of the stack entry.
// Given a `CommonFrameLayout* frame`:
// - `frame->prevType()` should be `FrameType::CppToJSJit`.
// - Then EnterJITStackEntry starts at:
//     frame->callerFramePtr() + EnterJITStackEntry::offsetFromFP()
//     (the offset is negative, so this subtracts from the frame pointer)
//
// The field order mirrors, in reverse, the pushes performed by
// JitRuntime::generateEnterJIT: later pushes land at lower addresses, so the
// last-pushed field (`result`) comes first in the struct.
struct EnterJITStackEntry {
  // Offset from frame pointer to EnterJITStackEntry*.
  // `rbp` holds the saved frame pointer, which is the first push the
  // trampoline makes, so the struct begins offsetof(..., rbp) bytes below
  // the frame pointer.
  static constexpr int32_t offsetFromFP() {
    return -int32_t(offsetof(EnterJITStackEntry, rbp));
  }

  // Address where the trampoline stores the JS return Value (the `result`
  // out-pointer passed to EnterJIT).
  void* result;

#if defined(_WIN64)
  // xmm6-xmm15 are callee-saved under the Win64 calling convention, so the
  // trampoline must preserve them; this sub-struct matches the vmovdqa
  // save area it reserves.
  struct XMM {
    using XMM128 = char[16];
    XMM128 xmm6;
    XMM128 xmm7;
    XMM128 xmm8;
    XMM128 xmm9;
    XMM128 xmm10;
    XMM128 xmm11;
    XMM128 xmm12;
    XMM128 xmm13;
    XMM128 xmm14;
    XMM128 xmm15;
  } xmm;

  // 16-byte alignment for xmm registers above (vmovdqa requires aligned
  // addresses).
  uint64_t xmmPadding;

  // rsi/rdi are callee-saved on Win64 only (they are volatile on SysV).
  void* rsi;
  void* rdi;
#endif

  // Callee-saved general-purpose registers saved by the trampoline.
  void* r15;
  void* r14;
  void* r13;
  void* r12;
  void* rbx;
  void* rbp;

  // Pushed by CALL.
  void* rip;
};
     71 
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
// Consumed by DumpAllRegs below for the invalidation and bailout trampolines.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));
     77 
// Generates a trampoline for calling Jit compiled code from a C++ function.
// The trampoline uses the EnterJitCode signature, with the standard x64 fastcall
// calling convention.
void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");

  enterJITOffset_ = startTrampolineCode(masm);

  masm.assertStackAlignment(ABIStackAlignment,
                            -int32_t(sizeof(uintptr_t)) /* return address */);

  // First three integer arguments of EnterJitCode: code pointer, argc, argv.
  // The fourth (OsrFrameReg) is asserted to line up with IntArgReg3 below.
  const Register reg_code = IntArgReg0;
  const Register reg_argc = IntArgReg1;
  const Register reg_argv = IntArgReg2;
  static_assert(OsrFrameReg == IntArgReg3);

#if defined(_WIN64)
  // Win64 passes only four integer arguments in registers; the remaining
  // EnterJitCode arguments live on the caller's stack above the shadow space.
  const Operand token = Operand(rbp, 16 + ShadowStackSpace);
  const Operand scopeChain = Operand(rbp, 24 + ShadowStackSpace);
  const Operand numStackValuesAddr = Operand(rbp, 32 + ShadowStackSpace);
  const Operand result = Operand(rbp, 40 + ShadowStackSpace);
#else
  // SysV provides six integer argument registers; only the last two
  // arguments spill to the stack.
  const Register token = IntArgReg4;
  const Register scopeChain = IntArgReg5;
  const Operand numStackValuesAddr = Operand(rbp, 16 + ShadowStackSpace);
  const Operand result = Operand(rbp, 24 + ShadowStackSpace);
#endif

  // Note: the stack pushes below must match the fields in EnterJITStackEntry.

  // Save old stack frame pointer, set new stack frame pointer.
  masm.push(rbp);
  masm.mov(rsp, rbp);

  // Save non-volatile registers. These must be saved by the trampoline, rather
  // than by the JIT'd code, because they are scanned by the conservative
  // scanner.
  masm.push(rbx);
  masm.push(r12);
  masm.push(r13);
  masm.push(r14);
  masm.push(r15);
#if defined(_WIN64)
  masm.push(rdi);
  masm.push(rsi);

  // 16-byte alignment for vmovdqa: reserve the XMM save area plus 8 bytes of
  // padding (matches EnterJITStackEntry::xmmPadding).
  masm.subq(Imm32(sizeof(EnterJITStackEntry::XMM) + 8), rsp);

  // Spill the Win64 callee-saved xmm registers into the reserved area.
  masm.vmovdqa(xmm6, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm6)));
  masm.vmovdqa(xmm7, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm7)));
  masm.vmovdqa(xmm8, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm8)));
  masm.vmovdqa(xmm9, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm9)));
  masm.vmovdqa(xmm10, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm10)));
  masm.vmovdqa(xmm11, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm11)));
  masm.vmovdqa(xmm12, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm12)));
  masm.vmovdqa(xmm13, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm13)));
  masm.vmovdqa(xmm14, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm14)));
  masm.vmovdqa(xmm15, Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm15)));
#endif

  // Push address of return value.
  masm.push(result);

  // End of pushes reflected in EnterJITStackEntry, i.e. EnterJITStackEntry
  // starts at this rsp.

  // Copy the callee token into a non-argument register before the shared
  // argument-pushing code clobbers the argument registers.
  masm.movq(token, r12);
  generateEnterJitShared(masm, reg_argc, reg_argv, r12, r13, r14, r15);

  // Push the descriptor. reg_argc is reloaded (via the result slot, whose
  // low word is a boxed int32 argc) because the shared code above consumed
  // the original value.
  masm.movq(result, reg_argc);
  masm.unboxInt32(Operand(reg_argc, 0), reg_argc);
  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, reg_argc, reg_argc);

  CodeLabel returnLabel;
  Label oomReturnLabel;
  {
    // Handle Interpreter -> Baseline OSR.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(rbp));
    regs.take(OsrFrameReg);
    regs.take(reg_code);

    Register scratch = regs.takeAny();

    // A null OsrFrameReg means this is a regular (non-OSR) entry.
    Label notOsr;
    masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

    Register numStackValues = regs.takeAny();
    masm.movq(numStackValuesAddr, numStackValues);

    // Push return address
    masm.mov(&returnLabel, scratch);
    masm.push(scratch);

    // Frame prologue.
    masm.push(rbp);
    masm.mov(rsp, rbp);

    // Reserve frame.
    masm.subPtr(Imm32(BaselineFrame::Size()), rsp);

    Register framePtrScratch = regs.takeAny();
    masm.touchFrameValues(numStackValues, scratch, framePtrScratch);
    masm.mov(rsp, framePtrScratch);

    // Reserve space for locals and stack values (numStackValues * 8 bytes).
    Register valuesSize = regs.takeAny();
    masm.mov(numStackValues, valuesSize);
    masm.shll(Imm32(3), valuesSize);
    masm.subPtr(valuesSize, rsp);

    // Enter exit frame.
    masm.push(FrameDescriptor(FrameType::BaselineJS));
    masm.push(Imm32(0));  // Fake return address.
    masm.push(FramePointer);
    // No GC things to mark, push a bare token.
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

    regs.add(valuesSize);

    // Preserve reg_code across the VM call below.
    masm.push(reg_code);

    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
                        uint32_t numStackValues);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(framePtrScratch);  // BaselineFrame
    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
    masm.passABIArg(numStackValues);
    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    masm.pop(reg_code);

    MOZ_ASSERT(reg_code != ReturnReg);

    // Discard the fake exit frame; branch to error on a false return.
    Label error;
    masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), rsp);
    masm.branchIfFalseBool(ReturnReg, &error);

    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
    // if profiler instrumentation is enabled.
    {
      Label skipProfilingInstrumentation;
      AbsoluteAddress addressOfEnabled(
          cx->runtime()->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                    &skipProfilingInstrumentation);
      masm.profilerEnterFrame(rbp, scratch);
      masm.bind(&skipProfilingInstrumentation);
    }

    masm.jump(reg_code);

    // OOM: frame epilogue, load error value, discard return address and return.
    masm.bind(&error);
    masm.mov(rbp, rsp);
    masm.pop(rbp);
    masm.addPtr(Imm32(sizeof(uintptr_t)), rsp);  // Return address.
    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    masm.jump(&oomReturnLabel);

    masm.bind(&notOsr);
    masm.movq(scopeChain, R1.scratchReg());
  }

  // The call will push the return address and frame pointer on the stack, thus
  // we check that the stack would be aligned once the call is complete.
  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));

  // Call function.
  masm.callJitNoProfiler(reg_code);

  {
    // Interpreter -> Baseline OSR will return here.
    masm.bind(&returnLabel);
    masm.addCodeLabel(returnLabel);
    masm.bind(&oomReturnLabel);
  }

  // Discard arguments and padding. Set rsp to the address of the
  // EnterJITStackEntry on the stack.
  masm.lea(Operand(rbp, EnterJITStackEntry::offsetFromFP()), rsp);

  /*****************************************************************
  Place return value where it belongs, pop all saved registers
  *****************************************************************/
  masm.pop(r12);  // vp
  masm.storeValue(JSReturnOperand, Operand(r12, 0));

  // Restore non-volatile registers (exact mirror of the prologue pushes).
#if defined(_WIN64)
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm6)), xmm6);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm7)), xmm7);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm8)), xmm8);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm9)), xmm9);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm10)), xmm10);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm11)), xmm11);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm12)), xmm12);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm13)), xmm13);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm14)), xmm14);
  masm.vmovdqa(Operand(rsp, offsetof(EnterJITStackEntry::XMM, xmm15)), xmm15);

  masm.addq(Imm32(sizeof(EnterJITStackEntry::XMM) + 8), rsp);

  masm.pop(rsi);
  masm.pop(rdi);
#endif
  masm.pop(r15);
  masm.pop(r14);
  masm.pop(r13);
  masm.pop(r12);
  masm.pop(rbx);

  // Restore frame pointer and return.
  masm.pop(rbp);
  masm.ret();
}
    298 
    299 // static
    300 mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
    301 JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
    302  if (frameStackAddress->prevType() != FrameType::CppToJSJit) {
    303    // This is not a CppToJSJit frame, there are no C++ registers here.
    304    return mozilla::Nothing{};
    305  }
    306 
    307  // Compute pointer to start of EnterJITStackEntry on the stack.
    308  uint8_t* fp = frameStackAddress->callerFramePtr();
    309  auto* enterJITStackEntry = reinterpret_cast<EnterJITStackEntry*>(
    310      fp + EnterJITStackEntry::offsetFromFP());
    311 
    312  // Extract native function call registers.
    313  ::JS::ProfilingFrameIterator::RegisterState registerState;
    314  registerState.fp = enterJITStackEntry->rbp;
    315  registerState.pc = enterJITStackEntry->rip;
    316  // sp should be inside the caller's frame, so set sp to the value of the stack
    317  // pointer before the call to the EnterJit trampoline.
    318  registerState.sp = &enterJITStackEntry->rip + 1;
    319  // No lr in this world.
    320  registerState.lr = nullptr;
    321  return mozilla::Some(registerState);
    322 }
    323 
// Push AllRegs in a way that is compatible with RegisterDump, regardless of
// what PushRegsInMask might do to reduce the set size.
static void DumpAllRegs(MacroAssembler& masm) {
#ifdef ENABLE_WASM_SIMD
  masm.PushRegsInMask(AllRegs);
#else
  // When SIMD isn't supported, PushRegsInMask reduces the set of float
  // registers to be double-sized, while the RegisterDump expects each of
  // the float registers to have the maximal possible size
  // (Simd128DataSize). To work around this, we just spill the double
  // registers by hand here, using the register dump offset directly.
  for (GeneralRegisterBackwardIterator iter(AllRegs.gprs()); iter.more();
       ++iter) {
    masm.Push(*iter);
  }

  // Reserve the full-width FPU area, then store each double into its
  // RegisterDump slot.
  masm.reserveStack(sizeof(RegisterDump::FPUArray));
  for (FloatRegisterBackwardIterator iter(AllRegs.fpus()); iter.more();
       ++iter) {
    FloatRegister reg = *iter;
    Address spillAddress(StackPointer, reg.getRegisterDumpOffsetInBytes());
    masm.storeDouble(reg, spillAddress);
  }
#endif
}
    349 
// Generates the invalidation thunk: dumps the full register state, calls
// jit::InvalidationBailout, and tail-jumps to the shared bailout tail with
// the BaselineBailoutInfo pointer in r9.
void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

  // See explanatory comment in x86's JitRuntime::generateInvalidator.

  invalidatorOffset_ = startTrampolineCode(masm);

  // Push registers such that we can access them from [base + code].
  DumpAllRegs(masm);

  masm.movq(rsp, rax);  // Argument to jit::InvalidationBailout.

  // Make space for InvalidationBailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movq(rsp, rbx);

  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(rdx);
  masm.passABIArg(rax);
  masm.passABIArg(rbx);
  masm.callWithABI<Fn, InvalidationBailout>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

  masm.pop(r9);  // Get the bailoutInfo outparam.

  // Pop the machine state and the dead frame.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
  masm.jmp(bailoutTail);
}
    381 
// Dumps the full register state onto the stack (see DumpAllRegs) and leaves
// the resulting stack pointer — i.e. a pointer to the register dump — in
// `spArg` for the bailout VM call.
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
  // Push registers such that we can access them from [base + code].
  DumpAllRegs(masm);

  // Get the stack pointer into a register, pre-alignment.
  masm.movq(rsp, spArg);
}
    389 
// Emits the shared bailout sequence: dump registers, call jit::Bailout, then
// tail-jump to `bailoutTail` with the BaselineBailoutInfo pointer in r9.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
  PushBailoutFrame(masm, r8);

  // Make space for Bailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movq(rsp, r9);

  // Call the bailout function.
  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(rax);
  masm.passABIArg(r8);
  masm.passABIArg(r9);
  masm.callWithABI<Fn, Bailout>(ABIType::General,
                                CheckUnsafeCallWithABI::DontCheckOther);

  masm.pop(r9);  // Get the bailoutInfo outparam.

  // Remove both the bailout frame and the topmost Ion frame's stack.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
  masm.jmp(bailoutTail);
}
    413 
// Generates the generic bailout handler trampoline: records its code offset
// and emits the shared bailout thunk, which ends by jumping to `bailoutTail`.
void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
                                        Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");

  bailoutHandlerOffset_ = startTrampolineCode(masm);

  GenerateBailoutThunk(masm, bailoutTail);
}
    422 
// Generates an exit-frame wrapper that lets JIT code call the C++ VM function
// `nativeFun`, whose signature and calling details are described by `f`.
// On return, *wrapperOffset holds the trampoline's code offset. Always
// returns true.
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                   VMFunctionId id, const VMFunctionData& f,
                                   DynFn nativeFun, uint32_t* wrapperOffset) {
  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

  *wrapperOffset = startTrampolineCode(masm);

  // Avoid conflicts with argument registers while discarding the result after
  // the function call.
  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

  static_assert(
      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
      "Wrapper register set must be a superset of Volatile register set");

  // The context is the first argument.
  Register cxreg = IntArgReg0;
  regs.take(cxreg);

  // Stack is:
  //    ... frame ...
  //  +12 [args]
  //  +8  descriptor
  //  +0  returnAddress
  //
  // Push the frame pointer to finish the exit frame, then link it up.
  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);
  masm.loadJSContext(cxreg);
  masm.enterExitFrame(cxreg, regs.getAny(), id);

  // Reserve space for the outparameter.
  masm.reserveVMFunctionOutParamSpace(f);

  masm.setupUnalignedABICallDontSaveRestoreSP();
  masm.passABIArg(cxreg);

  // Explicit args start just above the exit frame, relative to FramePointer.
  size_t argDisp = ExitFrameLayout::Size();

  // Copy arguments. Each explicit arg is either passed by value (loaded from
  // the stack slot) or by reference (the slot's effective address is passed).
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
        if (f.argPassedInFloatReg(explicitArg)) {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::Float64);
        } else {
          masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        }
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::WordByRef:
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
      case VMFunctionData::DoubleByRef:
        MOZ_CRASH("NYI: x64 callVM should not be used with 128bits values.");
    }
  }

  // Copy the implicit outparam, if any. The outparam slot lives below the
  // exit footer (negative offset from FramePointer).
  const int32_t outParamOffset =
      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
  if (f.outParam != Type_Void) {
    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                                MoveOperand::Kind::EffectiveAddress),
                    ABIType::General);
  }

  masm.callWithABI(nativeFun, ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure. The C++ result comes back in rax (x64 return register).
  switch (f.failType()) {
    case Type_Cell:
      masm.branchTestPtr(Assembler::Zero, rax, rax, masm.failureLabel());
      break;
    case Type_Bool:
      masm.testb(rax, rax);
      masm.j(Assembler::Zero, masm.failureLabel());
      break;
    case Type_Void:
      break;
    default:
      MOZ_CRASH("unknown failure kind");
  }

  // Load the outparam.
  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

  // Until C++ code is instrumented against Spectre, prevent speculative
  // execution from returning any private data.
  if (f.returnsData() && JitOptions.spectreJitToCxxCalls) {
    masm.speculationBarrier();
  }

  // Pop frame and restore frame pointer.
  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  // Return. Subtract sizeof(void*) for the frame pointer.
  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                  f.explicitStackSlots() * sizeof(void*) +
                  f.extraValuesToPop * sizeof(Value)));

  return true;
}
    532 
// Generates the GC pre-write-barrier trampoline for values of the given
// MIRType. The barrier target is expected in PreBarrierReg (rdx). Returns
// the trampoline's code offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                        MIRType type) {
  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

  uint32_t offset = startTrampolineCode(masm);

  static_assert(PreBarrierReg == rdx);
  // Save the temps used by the fast path; both exit paths restore them.
  Register temp1 = rax;
  Register temp2 = rbx;
  Register temp3 = rcx;
  masm.push(temp1);
  masm.push(temp2);
  masm.push(temp3);

  // Fast path: jumps to noBarrier when no marking is required.
  Label noBarrier;
  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

  // Slow path. Call into C++ to mark this GC thing.
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);

  // Preserve all volatile registers around the C++ call.
  LiveRegisterSet regs =
      LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                      FloatRegisterSet(FloatRegisters::VolatileMask));
  masm.PushRegsInMask(regs);

  masm.mov(ImmPtr(cx->runtime()), rcx);

  // Call JitPreWriteBarrier(runtime, thing-in-rdx).
  masm.setupUnalignedABICall(rax);
  masm.passABIArg(rcx);
  masm.passABIArg(rdx);
  masm.callWithABI(JitPreWriteBarrier(type));

  masm.PopRegsInMask(regs);
  masm.ret();

  // Fast-path exit: no barrier needed, restore temps and return.
  masm.bind(&noBarrier);
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);
  masm.ret();

  return offset;
}
    578 
// Binds the shared `bailoutTail` label (the jump target of the invalidation
// and bailout thunks above) and emits the common bailout tail. Per those
// thunks, the BaselineBailoutInfo pointer arrives in r9; rdx is a scratch.
void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
                                         Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");

  masm.bind(bailoutTail);
  masm.generateBailoutTail(rdx, r9);
}