tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Trampoline-arm64.cpp (25406B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/arm64/SharedICHelpers-arm64.h"
      8 #include "jit/Bailouts.h"
      9 #include "jit/BaselineFrame.h"
     10 #include "jit/CalleeToken.h"
     11 #include "jit/JitFrames.h"
     12 #include "jit/JitRuntime.h"
     13 #include "jit/PerfSpewer.h"
     14 #include "jit/VMFunctions.h"
     15 #include "vm/JitActivation.h"  // js::jit::JitActivation
     16 #include "vm/JSContext.h"
     17 
     18 #include "jit/MacroAssembler-inl.h"
     19 
     20 using namespace js;
     21 using namespace js::jit;
     22 
/* This method generates a trampoline on ARM64 for a c++ function with
 * the following signature:
 *   bool blah(void* code, int argc, Value* argv,
 *             JSObject* scopeChain, Value* vp)
 *   ...using standard AArch64 calling convention
 *
 * The trampoline:
 *  - saves all callee-saved integer and FP registers,
 *  - builds a JIT frame (descriptor, callee token, `this`, the arguments
 *    padded with `undefined` up to the callee's formal argument count, and
 *    optionally `newTarget`),
 *  - handles Interpreter -> Baseline OSR entry when OsrFrameReg is non-null,
 *  - calls the JIT code and stores the result Value through reg_vp.
 */
void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
 AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");

 enterJITOffset_ = startTrampolineCode(masm);

 // Incoming arguments, in the AArch64 integer argument registers x0-x7.
 const Register reg_code = IntArgReg0;      // EnterJitData::jitcode.
 const Register reg_argc = IntArgReg1;      // EnterJitData::maxArgc.
 const Register reg_argv = IntArgReg2;      // EnterJitData::maxArgv.
 const Register reg_osrFrame = IntArgReg3;  // EnterJitData::osrFrame.
 const Register reg_callee = IntArgReg4;    // EnterJitData::calleeToken.
 const Register reg_scope = IntArgReg5;     // EnterJitData::scopeChain.
 const Register reg_osrNStack =
     IntArgReg6;                      // EnterJitData::osrNumStackValues.
 const Register reg_vp = IntArgReg7;  // Address of EnterJitData::result.

 static_assert(OsrFrameReg == IntArgReg3);

 // During the pushes below, use the normal stack pointer.
 masm.SetStackPointer64(sp);

 // Save return address and old frame pointer; set new frame pointer.
 masm.push(r30, r29);
 masm.moveStackPtrTo(r29);

 // Save callee-save integer registers.
 // Also save x7 (reg_vp) and x30 (lr), for use later.
 masm.push(r19, r20, r21, r22);
 masm.push(r23, r24, r25, r26);
 masm.push(r27, r28, r7, r30);

 // Save callee-save floating-point registers.
 // AArch64 ABI specifies that only the lower 64 bits must be saved.
 masm.push(d8, d9, d10, d11);
 masm.push(d12, d13, d14, d15);

#ifdef DEBUG
 // Emit stack canaries. They are checked again just before the saved
 // registers are restored on the way out, catching stack imbalances in
 // this trampoline.
 masm.movePtr(ImmWord(0xdeadd00d), r23);
 masm.movePtr(ImmWord(0xdeadd11d), r24);
 masm.push(r23, r24);
#endif

 // At this point we are 16-byte aligned.
 masm.assertStackAlignment(JitStackAlignment);

 // ARM64 expects the stack pointer to be 16-byte aligned whenever it
 // is used as a base register. This makes it awkward to build a
 // stack frame using incremental pushes. Therefore, unlike other
 // platforms, arm64 does not use generateEnterJitShared. Instead,
 // we compute the total size of the frame, adjust the stack pointer
 // a single time, and then initialize the contents of the frame.
 //
 // At this point:
 // - reg_argc contains the number of args passed (not including this)
 // - reg_argv points to the beginning of a contiguous array of arguments
 //   values, *not* including `this`. `this` is at argvReg[-1].
 // - reg_callee contains the callee token

 // Compute the number of args to push: for function callees this is
 // max(argc, callee->nargs()). Non-function (script) callees take the
 // notFunction path below and push no arguments.
 Label notFunction;
 Register actual_args = r19;
 masm.branchTest32(Assembler::NonZero, reg_callee, Imm32(CalleeTokenScriptBit),
                   &notFunction);
 masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), reg_callee, actual_args);
 masm.loadFunctionArgCount(actual_args, actual_args);
 masm.max32(actual_args, reg_argc, actual_args);

 // In addition to args, our stack frame needs space for the descriptor,
 // the calleeToken, `this`, and `newTarget`. We allocate `newTarget`
 // unconditionally; at worst it costs us a tiny bit of stack space.
 // Add these to frame_size and round up to an even number (so the frame
 // stays 16-byte aligned in 8-byte slots).
 Register frame_size = r20;
 Register scratch = r21;
 Register scratch2 = r22;
 uint32_t extraSlots = 4;
 masm.add32(Imm32(extraSlots + 1), actual_args, frame_size);
 masm.and32(Imm32(~1), frame_size);

 // Touch frame incrementally (a requirement for Windows).
 masm.touchFrameValues(frame_size, scratch, scratch2);

 // Allocate the stack frame. frame_size is in 8-byte slots up to here;
 // the shift converts it to bytes.
 masm.lshift32(Imm32(3), frame_size);
 masm.subFromStackPtr(frame_size);

 // Copy `this` through `argN` from reg_argv to the stack.
 // WARNING: destructively modifies reg_argv.
 // This section uses ARM instructions directly to get easy access
 // to post-increment loads/stores.
 ARMRegister dest(r23, 64);
 ARMRegister arg(scratch, 64);
 ARMRegister tmp_argc(scratch2, 64);
 ARMRegister argc(reg_argc, 64);
 ARMRegister argv(reg_argv, 64);
 // Skip the two slots reserved for the descriptor and the callee token.
 masm.Add(dest, sp, Operand(2 * sizeof(uintptr_t)));
 masm.Add(tmp_argc, argc, Operand(1));  // +1 for `this`.
 masm.Sub(argv, argv, Operand(sizeof(Value)));  // Point at `this`.

 Label argLoop;
 masm.bind(&argLoop);
 // Load an argument from argv, then increment argv by 8.
 masm.Ldr(arg, MemOperand(argv, Operand(8), vixl::PostIndex));
 // Store the argument to dest, then increment dest by 8.
 masm.Str(arg, MemOperand(dest, Operand(8), vixl::PostIndex));
 // Decrement tmp_argc and set the condition codes for the new value.
 masm.Subs(tmp_argc, tmp_argc, Operand(1));
 // Branch if arguments remain.
 masm.B(&argLoop, vixl::Condition::NonZero);

 // Fill any remaining arguments with `undefined`.
 // First compute the number of missing arguments.
 Label noUndef;
 const ARMRegister missing_args(scratch2, 64);
 masm.Subs(missing_args, ARMRegister(actual_args, 64), argc);
 masm.B(&noUndef, vixl::Condition::Zero);

 Label undefLoop;
 masm.Mov(arg, int64_t(UndefinedValue().asRawBits()));
 masm.bind(&undefLoop);
 // Store `undefined` to dest, then increment dest by 8.
 masm.Str(arg, MemOperand(dest, Operand(8), vixl::PostIndex));
 // Decrement missing_args and set the condition codes for the new value.
 masm.Subs(missing_args, missing_args, Operand(1));
 // Branch if missing arguments remain.
 masm.B(&undefLoop, vixl::Condition::NonZero);
 masm.bind(&noUndef);

 // Store newTarget if necessary. After the loops above, argv points one
 // Value past the last passed argument (presumably where the caller stores
 // newTarget -- confirm against EnterJitData) and dest points at the slot
 // reserved for it.
 Label doneArgs;
 masm.branchTest32(Assembler::Zero, reg_callee,
                   Imm32(CalleeToken_FunctionConstructing), &doneArgs);
 masm.Ldr(arg, MemOperand(argv));
 masm.Str(arg, MemOperand(dest));
 masm.jump(&doneArgs);
 masm.bind(&notFunction);

 // Non-functions have no arguments.
 // Allocate space for the callee token and the descriptor.
 const int32_t nonFunctionFrameSize = 2 * sizeof(uintptr_t);
 static_assert(nonFunctionFrameSize % JitStackAlignment == 0);
 masm.subFromStackPtr(Imm32(nonFunctionFrameSize));
 masm.bind(&doneArgs);

 // Store descriptor and callee token.
 // NOTE(review): the argument count for the descriptor is unboxed from
 // vp[0]; presumably the caller stores argc there before entry -- confirm.
 masm.unboxInt32(Address(reg_vp, 0), scratch);
 masm.makeFrameDescriptorForJitCall(FrameType::CppToJSJit, scratch, scratch);
 masm.Str(ARMRegister(scratch, 64), MemOperand(sp, 0));
 masm.Str(ARMRegister(reg_callee, 64), MemOperand(sp, sizeof(uintptr_t)));

 // We start using the PSP here.
 // TODO: convert the code below to use sp instead.
 masm.Mov(PseudoStackPointer64, sp);
 masm.SetStackPointer64(PseudoStackPointer64);

 masm.checkStackAlignment();

 Label osrReturnPoint;
 {
   // Check for Interpreter -> Baseline OSR.

   AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
   MOZ_ASSERT(!regs.has(FramePointer));
   regs.take(OsrFrameReg);
   regs.take(reg_code);
   regs.take(reg_osrNStack);
   MOZ_ASSERT(!regs.has(ReturnReg), "ReturnReg matches reg_code");

   // A null osrFrame means this is a regular (non-OSR) entry.
   Label notOsr;
   masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

   Register scratch = regs.takeAny();

   // Frame prologue: push osrReturnPoint as the return address, save the
   // caller's frame pointer, and establish the new one.
   masm.Adr(ARMRegister(scratch, 64), &osrReturnPoint);
   masm.push(scratch, FramePointer);
   masm.moveStackPtrTo(FramePointer);

   // Reserve frame.
   masm.subFromStackPtr(Imm32(BaselineFrame::Size()));

   Register framePtrScratch = regs.takeAny();
   masm.touchFrameValues(reg_osrNStack, scratch, framePtrScratch);
   masm.moveStackPtrTo(framePtrScratch);

   // Reserve space for locals and stack values.
   // scratch = num_stack_values * sizeof(Value).
   masm.Lsl(ARMRegister(scratch, 32), ARMRegister(reg_osrNStack, 32), 3);
   masm.subFromStackPtr(scratch);

   // Enter exit frame.
   masm.push(FrameDescriptor(FrameType::BaselineJS));
   masm.push(xzr);  // Push xzr for a fake return address.
   masm.push(FramePointer);
   // No GC things to mark: push a bare token.
   masm.loadJSContext(scratch);
   masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

   // Preserve the jitcode pointer across the VM call below.
   masm.push(reg_code);

   // Initialize the frame, including filling in the slots.
   using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
                       uint32_t numStackValues);
   masm.setupUnalignedABICall(r19);
   masm.passABIArg(framePtrScratch);  // BaselineFrame.
   masm.passABIArg(reg_osrFrame);     // InterpreterFrame.
   masm.passABIArg(reg_osrNStack);
   masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
       ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

   // Restore the jitcode pointer pushed above.
   masm.pop(scratch);
   MOZ_ASSERT(scratch != ReturnReg);

   masm.addToStackPtr(Imm32(ExitFrameLayout::SizeWithFooter()));

   Label error;
   masm.branchIfFalseBool(ReturnReg, &error);

   // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
   // if profiler instrumentation is enabled.
   {
     Label skipProfilingInstrumentation;
     AbsoluteAddress addressOfEnabled(
         cx->runtime()->geckoProfiler().addressOfEnabled());
     masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                   &skipProfilingInstrumentation);
     masm.profilerEnterFrame(FramePointer, regs.getAny());
     masm.bind(&skipProfilingInstrumentation);
   }

   // Jump into the baseline code (scratch holds the jitcode pointer).
   masm.jump(scratch);

   // OOM: frame epilogue, load error value, discard return address and return.
   masm.bind(&error);
   masm.moveToStackPtr(FramePointer);
   masm.pop(FramePointer);
   masm.addToStackPtr(Imm32(sizeof(uintptr_t)));  // Return address.
   masm.syncStackPtr();
   masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
   masm.B(&osrReturnPoint);

   masm.bind(&notOsr);
   // Non-OSR entry: pass the scope chain to the JIT code in R1_.
   masm.movePtr(reg_scope, R1_);
 }

 // The callee will push the return address and frame pointer on the stack,
 // thus we check that the stack would be aligned once the call is complete.
 masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));

 // Call function.
 // Since AArch64 doesn't have the pc register available, the callee must push
 // lr.
 masm.callJitNoProfiler(reg_code);

 // Interpreter -> Baseline OSR will return here.
 masm.bind(&osrReturnPoint);

 // Discard arguments and padding. Set sp to the address of the saved
 // registers. In debug builds we have to include the two stack canaries
 // checked below.
#ifdef DEBUG
 static constexpr size_t SavedRegSize = 22 * sizeof(void*);
#else
 static constexpr size_t SavedRegSize = 20 * sizeof(void*);
#endif
 masm.computeEffectiveAddress(Address(FramePointer, -int32_t(SavedRegSize)),
                              masm.getStackPointer());

 masm.syncStackPtr();
 masm.SetStackPointer64(sp);

#ifdef DEBUG
 // Check that canaries placed on function entry are still present.
 masm.pop(r24, r23);
 Label x23OK, x24OK;

 masm.branchPtr(Assembler::Equal, r23, ImmWord(0xdeadd00d), &x23OK);
 masm.breakpoint();
 masm.bind(&x23OK);

 masm.branchPtr(Assembler::Equal, r24, ImmWord(0xdeadd11d), &x24OK);
 masm.breakpoint();
 masm.bind(&x24OK);
#endif

 // Restore callee-save floating-point registers.
 masm.pop(d15, d14, d13, d12);
 masm.pop(d11, d10, d9, d8);

 // Restore callee-save integer registers.
 // Also restore x7 (reg_vp) and x30 (lr).
 masm.pop(r30, r7, r28, r27);
 masm.pop(r26, r25, r24, r23);
 masm.pop(r22, r21, r20, r19);

 // Store return value (in JSReturnReg = x2 to just-popped reg_vp).
 masm.storeValue(JSReturnOperand, Address(reg_vp, 0));

 // Restore old frame pointer.
 masm.pop(r29, r30);

 // Return using the value popped into x30.
 masm.abiret();

 // Reset stack pointer. This only resets the assembler's notion of the
 // stack pointer; no code is emitted after the return above.
 masm.SetStackPointer64(PseudoStackPointer64);
}
    335 
    336 // static
    337 mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
    338 JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
    339  // Not supported, or not implemented yet.
    340  // TODO: Implement along with the corresponding stack-walker changes, in
    341  // coordination with the Gecko Profiler, see bug 1635987 and follow-ups.
    342  return mozilla::Nothing{};
    343 }
    344 
// Pushes a complete RegisterDump onto the stack: all 32 general-purpose
// register slots (with zero stored for sp/x31 and the pseudo-SP x28),
// followed by every floating-point register. On return the stack pointer
// addresses the start of the dump's floating-point area.
static void PushRegisterDump(MacroAssembler& masm) {
 // All general registers except sp (31), lr (30), fp (29) and the
 // pseudo-SP (28), which get special handling below.
 const LiveRegisterSet First28GeneralRegisters = LiveRegisterSet(
     GeneralRegisterSet(Registers::AllMask &
                        ~(1 << 31 | 1 << 30 | 1 << 29 | 1 << 28)),
     FloatRegisterSet(FloatRegisters::NoneMask));

 const LiveRegisterSet AllFloatRegisters =
     LiveRegisterSet(GeneralRegisterSet(Registers::NoneMask),
                     FloatRegisterSet(FloatRegisters::AllMask));

 // Push all general-purpose registers.
 //
 // The ARM64 ABI does not treat SP as a normal register that can
 // be pushed. So pushing happens in two phases.
 //
 // Registers are pushed in reverse order of code.
 //
 // See block comment in MacroAssembler.h for further required invariants.

 // First, push the last four registers, passing zero for sp.
 // Zero is pushed for x28 and x31: the pseudo-SP and SP, respectively.
 masm.asVIXL().Push(xzr, x30, x29, xzr);

 // Second, push the first 28 registers that serve no special purpose.
 masm.PushRegsInMask(First28GeneralRegisters);

 // Finally, push all floating-point registers, completing the RegisterDump.
 masm.PushRegsInMask(AllFloatRegisters);
}
    374 
// Generates the invalidator trampoline, entered from an invalidated Ion
// frame. It captures the machine state as an InvalidationBailoutStack,
// calls InvalidationBailout, then jumps to the shared bailout tail with
// the BaselineBailoutInfo* in r2.
void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
 AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

 invalidatorOffset_ = startTrampolineCode(masm);

 // The InvalidationBailoutStack saved in r0 must be:
 // - osiPointReturnAddress_
 // - ionScript_  (pushed by CodeGeneratorARM64::generateInvalidateEpilogue())
 // - regs_  (pushed here)
 // - fpregs_  (pushed here) [=r0]
 PushRegisterDump(masm);
 masm.moveStackPtrTo(r0);

 // Reserve space for InvalidationBailout's bailoutInfo outparam: r1 is the
 // slot address, and the stack pointer is moved down onto it.
 masm.Sub(x1, masm.GetStackPointer64(), Operand(sizeof(void*)));
 masm.moveToStackPtr(r1);

 using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
 masm.setupUnalignedABICall(r10);
 masm.passABIArg(r0);
 masm.passABIArg(r1);

 masm.callWithABI<Fn, InvalidationBailout>(
     ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

 masm.pop(r2);  // Get the bailoutInfo outparam.

 // Pop the machine state and the dead frame by restoring the stack pointer
 // from the invalidated frame's frame pointer.
 masm.moveToStackPtr(FramePointer);

 // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
 masm.jump(bailoutTail);
}
    408 
// Helper for the bailout thunk: pushes a full RegisterDump and leaves the
// address of its base (the fpregs_ field, i.e. the start of a BailoutStack)
// in spArg.
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
 // This assumes no SIMD registers, as JS does not support SIMD.

 // The stack saved in spArg must be (higher entries have higher memory
 // addresses):
 // - snapshotOffset_
 // - frameSize_
 // - regs_
 // - fpregs_ (spArg + 0)
 PushRegisterDump(masm);
 masm.moveStackPtrTo(spArg);
}
    421 
// Emits the body shared by bailout entry points: captures the machine state
// as a BailoutStack, calls Bailout, and jumps to the shared bailout tail
// with the BaselineBailoutInfo* in r2.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
 PushBailoutFrame(masm, r0);

 // SP % 8 == 4
 // NOTE(review): the line above looks inherited from the 32-bit port;
 // verify whether it still describes the stack state on ARM64.
 // STEP 1c: Call the bailout function, giving a pointer to the
 //          structure we just blitted onto the stack.
 // Make space for the BaselineBailoutInfo* outparam.
 masm.reserveStack(sizeof(void*));
 masm.moveStackPtrTo(r1);

 using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
 masm.setupUnalignedABICall(r2);
 masm.passABIArg(r0);
 masm.passABIArg(r1);
 masm.callWithABI<Fn, Bailout>(ABIType::General,
                               CheckUnsafeCallWithABI::DontCheckOther);

 // Get the bailoutInfo outparam.
 masm.pop(r2);

 // Remove both the bailout frame and the topmost Ion frame's stack.
 masm.moveToStackPtr(FramePointer);

 // Jump to shared bailout tail. The BailoutInfo pointer has to be in r2.
 masm.jump(bailoutTail);
}
    448 
    449 void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
    450                                        Label* bailoutTail) {
    451  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");
    452 
    453  bailoutHandlerOffset_ = startTrampolineCode(masm);
    454 
    455  GenerateBailoutThunk(masm, bailoutTail);
    456 }
    457 
// Generates the JIT -> C++ wrapper trampoline for VM function |id| (its
// metadata in |f|, its native entry point |nativeFun|). The wrapper
// completes an exit frame on top of the JIT-pushed arguments, forwards
// those arguments to the native function per the C++ ABI, initializes and
// later loads the optional stack-allocated out-param, tests the failure
// convention for f.failType(), and returns to JIT code popping the call's
// arguments. The wrapper's code offset is stored in *wrapperOffset.
// Always returns true.
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                  VMFunctionId id, const VMFunctionData& f,
                                  DynFn nativeFun, uint32_t* wrapperOffset) {
 AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

 *wrapperOffset = startTrampolineCode(masm);

 // Avoid conflicts with argument registers while discarding the result after
 // the function call.
 AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

 static_assert(
     (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
     "Wrapper register set must be a superset of the Volatile register set.");

 // The first argument is the JSContext.
 Register reg_cx = IntArgReg0;
 regs.take(reg_cx);
 Register temp = regs.getAny();

 // On entry, the stack is:
 //   ... frame ...
 //  [args]
 //  descriptor
 //
 // Before we pass arguments (potentially pushing some of them on the stack),
 // we want:
 //  ... frame ...
 //  [args]
 //  descriptor           \
 //  return address       | <- exit frame
 //  saved frame pointer  /
 //  VM id                  <- exit frame footer
 //  [space for out-param, if necessary]]
 //  [alignment padding, if necessary]
 //
 // To minimize PSP overhead, we compute the final stack size and update the
 // stack pointer all in one go. Then we use the PSP to "push" the required
 // values into the pre-allocated stack space.
 size_t stackAdjustment = 0;

 // The descriptor was already pushed.
 stackAdjustment += ExitFrameLayout::SizeWithFooter() - sizeof(uintptr_t);
 stackAdjustment += f.sizeOfOutParamStackSlot();

 masm.SetStackPointer64(sp);

 // First, update the actual stack pointer to its final aligned value.
 masm.Sub(ARMRegister(temp, 64), masm.GetStackPointer64(),
          Operand(stackAdjustment));
 masm.And(sp, ARMRegister(temp, 64), ~(uint64_t(JitStackAlignment) - 1));

 // On link-register platforms, it is the responsibility of the VM *callee* to
 // push the return address, while the caller must ensure that the address
 // is stored in lr on entry. This allows the VM wrapper to work with both
 // direct calls and tail calls.
 masm.str(ARMRegister(lr, 64),
          MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));

 // Push the frame pointer using the PSP.
 masm.str(ARMRegister(FramePointer, 64),
          MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));

 // Because we've been moving the PSP as we fill in the frame, we can set the
 // frame pointer for this frame directly from the PSP.
 masm.movePtr(PseudoStackPointer, FramePointer);

 masm.loadJSContext(reg_cx);

 // Finish the exit frame. See MacroAssembler::enterExitFrame.

 // linkExitFrame: publish this frame as the activation's packed exit FP.
 masm.loadPtr(Address(reg_cx, JSContext::offsetOfActivation()), temp);
 masm.storePtr(FramePointer,
               Address(temp, JitActivation::offsetOfPackedExitFP()));

 // Push `ExitFrameType::VMFunction + VMFunctionId`
 uint32_t type = uint32_t(ExitFrameType::VMFunction) + uint32_t(id);
 masm.move32(Imm32(type), temp);
 masm.str(ARMRegister(temp, 64),
          MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));

 // If the out parameter is a handle, initialize it to empty.
 // See MacroAssembler::reserveVMFunctionOutParamSpace and PushEmptyRooted.
 if (f.outParam == Type_Handle) {
   switch (f.outParamRootType) {
     case VMFunctionData::RootNone:
       MOZ_CRASH("Handle must have root type");
     case VMFunctionData::RootObject:
     case VMFunctionData::RootString:
     case VMFunctionData::RootCell:
     case VMFunctionData::RootBigInt:
       // Pointer-like roots start out as null.
       masm.str(xzr, MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));
       break;
     case VMFunctionData::RootValue:
       masm.movePtr(ImmWord(UndefinedValue().asRawBits()), temp);
       masm.str(ARMRegister(temp, 64),
                MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));
       break;
     case VMFunctionData::RootId:
       masm.movePtr(ImmWord(JS::PropertyKey::Void().asRawBits()), temp);
       masm.str(ARMRegister(temp, 64),
                MemOperand(PseudoStackPointer64, -8, vixl::PreIndex));
   }
 }

 // Now that we've filled in the stack frame, synchronize the PSP with the
 // real stack pointer and return to PSP-mode while we pass arguments.
 masm.moveStackPtrTo(PseudoStackPointer);
 masm.SetStackPointer64(PseudoStackPointer64);

 MOZ_ASSERT(masm.framePushed() == 0);
 masm.setupAlignedABICall();
 masm.passABIArg(reg_cx);

 // Explicit arguments live above the exit frame, addressed off FramePointer.
 size_t argDisp = ExitFrameLayout::Size();

 // Copy arguments.
 for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
   switch (f.argProperties(explicitArg)) {
     case VMFunctionData::WordByValue:
       masm.passABIArg(
           MoveOperand(FramePointer, argDisp),
           (f.argPassedInFloatReg(explicitArg) ? ABIType::Float64
                                               : ABIType::General));
       argDisp += sizeof(void*);
       break;

     case VMFunctionData::WordByRef:
       // Pass the address of the stack slot rather than its contents.
       masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                   MoveOperand::Kind::EffectiveAddress),
                       ABIType::General);
       argDisp += sizeof(void*);
       break;

     case VMFunctionData::DoubleByValue:
     case VMFunctionData::DoubleByRef:
       MOZ_CRASH("NYI: AArch64 callVM should not be used with 128bit values.");
   }
 }

 // Copy the semi-implicit outparam, if any.
 // It is not a C++-abi outparam, which would get passed in the
 // outparam register, but a real parameter to the function, which
 // was stack-allocated above.
 const int32_t outParamOffset =
     -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
 if (f.outParam != Type_Void) {
   masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                               MoveOperand::Kind::EffectiveAddress),
                   ABIType::General);
 }

 masm.callWithABI(nativeFun, ABIType::General,
                  CheckUnsafeCallWithABI::DontCheckHasExitFrame);

 // Test for failure: a null cell or a false bool in r0 routes to the
 // shared failure path.
 switch (f.failType()) {
   case Type_Cell:
     masm.branchTestPtr(Assembler::Zero, r0, r0, masm.failureLabel());
     break;
   case Type_Bool:
     masm.branchIfFalseBool(r0, masm.failureLabel());
     break;
   case Type_Void:
     break;
   default:
     MOZ_CRASH("unknown failure kind");
 }

 // Load the outparam.
 masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

 // Until C++ code is instrumented against Spectre, prevent speculative
 // execution from returning any private data.
 if (f.returnsData() && JitOptions.spectreJitToCxxCalls) {
   masm.speculationBarrier();
 }

 // Pop frame and restore frame pointer. We call Mov here directly instead
 // of `moveToStackPtr` to avoid a syncStackPtr. The stack pointer will be
 // synchronized as part of retn, after adjusting the PSP.
 masm.Mov(masm.GetStackPointer64(), ARMRegister(FramePointer, 64));
 masm.pop(FramePointer);

 // Return. Subtract sizeof(void*) for the frame pointer.
 masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                 f.explicitStackSlots() * sizeof(void*) +
                 f.extraValuesToPop * sizeof(Value)));

 return true;
}
    650 
// Generates the GC pre-write-barrier trampoline for things of |type|.
// On entry the address of the cell being overwritten is in PreBarrierReg
// (r1). A fast path (emitPreBarrierFastPath) branches to noBarrier when no
// barrier work is needed; otherwise the trampoline calls into C++
// (JitPreWriteBarrier) with all volatile registers and lr preserved.
// Returns the trampoline's code offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                       MIRType type) {
 AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

 uint32_t offset = startTrampolineCode(masm);

 static_assert(PreBarrierReg == r1);
 Register temp1 = r2;
 Register temp2 = r3;
 Register temp3 = r4;
 // Save the temps that the fast path below is allowed to clobber.
 masm.push(temp1);
 masm.push(temp2);
 masm.push(temp3);

 Label noBarrier;
 masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

 // Call into C++ to mark this GC thing.
 masm.pop(temp3);
 masm.pop(temp2);
 masm.pop(temp1);

 LiveRegisterSet regs =
     LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                     FloatRegisterSet(FloatRegisters::VolatileMask));

 // Also preserve the return address.
 regs.add(lr);

 masm.PushRegsInMask(regs);

 // First C++ argument: the JSRuntime*.
 masm.movePtr(ImmPtr(cx->runtime()), r3);

 masm.setupUnalignedABICall(r0);
 masm.passABIArg(r3);
 masm.passABIArg(PreBarrierReg);
 masm.callWithABI(JitPreWriteBarrier(type));

 // Pop the volatile regs and restore LR.
 masm.PopRegsInMask(regs);
 masm.abiret();

 // Fast-path exit: no barrier needed. Restore the saved temps and return.
 masm.bind(&noBarrier);
 masm.pop(temp3);
 masm.pop(temp2);
 masm.pop(temp1);
 masm.abiret();

 return offset;
}
    701 
    702 void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
    703                                         Label* bailoutTail) {
    704  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");
    705 
    706  masm.bind(bailoutTail);
    707  masm.generateBailoutTail(r1, r2);
    708 }