Trampoline-x86.cpp (17107B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/MathAlgorithms.h"

#include "jit/Bailouts.h"
#include "jit/BaselineFrame.h"
#include "jit/BaselineJIT.h"
#include "jit/CalleeToken.h"
#include "jit/JitFrames.h"
#include "jit/JitRuntime.h"
#include "jit/JitSpewer.h"
#include "jit/PerfSpewer.h"
#include "jit/VMFunctions.h"
#include "jit/x86/SharedICHelpers-x86.h"
#include "vm/JitActivation.h"  // js::jit::JitActivation
#include "vm/JSContext.h"
#include "vm/Realm.h"
#ifdef MOZ_VTUNE
#  include "vtune/VTuneWrapper.h"
#endif

#include "jit/MacroAssembler-inl.h"
#include "vm/JSScript-inl.h"

using mozilla::IsPowerOfTwo;

using namespace js;
using namespace js::jit;

// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));

// ebp-relative offsets of the EnterJitCode arguments inside the trampoline
// emitted by generateEnterJIT. After the trampoline's prologue (push ebp;
// movl esp, ebp), [ebp + 0] holds the saved ebp and [ebp + 4] the caller's
// return address, so the first cdecl argument lives at 2 * sizeof(void*).
enum EnterJitEbpArgumentOffset {
  ARG_JITCODE = 2 * sizeof(void*),
  ARG_ARGC = 3 * sizeof(void*),
  ARG_ARGV = 4 * sizeof(void*),
  ARG_STACKFRAME = 5 * sizeof(void*),
  ARG_CALLEETOKEN = 6 * sizeof(void*),
  ARG_SCOPECHAIN = 7 * sizeof(void*),
  ARG_STACKVALUES = 8 * sizeof(void*),
  ARG_RESULT = 9 * sizeof(void*)
};

// Generates a trampoline for calling Jit compiled code from a C++ function.
// The trampoline use the EnterJitCode signature, with the standard cdecl
// calling convention.
// Generates the trampoline used by C++ to enter JIT-compiled code. It follows
// the EnterJitCode signature (standard cdecl calling convention), reading its
// arguments from ebp at the EnterJitEbpArgumentOffset offsets. It also
// contains the Interpreter -> Baseline OSR entry path, taken when the
// ARG_STACKFRAME slot (loaded into OsrFrameReg) is non-null.
void JitRuntime::generateEnterJIT(JSContext* cx, MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "JitRuntime::generateEnterJIT");

  enterJITOffset_ = startTrampolineCode(masm);

  masm.assertStackAlignment(ABIStackAlignment,
                            -int32_t(sizeof(uintptr_t)) /* return address */);

  // Save old stack frame pointer, set new stack frame pointer.
  masm.push(ebp);
  masm.movl(esp, ebp);

  // Save non-volatile registers. These must be saved by the trampoline,
  // rather than the JIT'd code, because they are scanned by the conservative
  // scanner.
  masm.push(ebx);
  masm.push(esi);
  masm.push(edi);

  Register reg_argc = eax;
  masm.loadPtr(Address(ebp, ARG_ARGC), reg_argc);

  Register reg_argv = ebx;
  masm.loadPtr(Address(ebp, ARG_ARGV), reg_argv);

  Register reg_token = edx;
  masm.loadPtr(Address(ebp, ARG_CALLEETOKEN), reg_token);

  // Shared code copies the arguments onto the JIT stack.
  generateEnterJitShared(masm, reg_argc, reg_argv, reg_token, ecx, esi, edi);

  // Push the descriptor. The argc for the descriptor is unboxed from the
  // first slot of the result Value (see the caller's EnterJitData layout —
  // NOTE(review): assumed from this unbox; confirm against the caller).
  masm.mov(Operand(ebp, ARG_RESULT), eax);
  masm.unboxInt32(Address(eax, 0x0), eax);
  masm.pushFrameDescriptorForJitCall(FrameType::CppToJSJit, eax, eax);

  // Load the InterpreterFrame address into the OsrFrameReg.
  // This address is also used for setting the constructing bit on all paths.
  masm.loadPtr(Address(ebp, ARG_STACKFRAME), OsrFrameReg);

  CodeLabel returnLabel;
  Label oomReturnLabel;
  {
    // Handle Interpreter -> Baseline OSR.
    AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
    MOZ_ASSERT(!regs.has(ebp));
    regs.take(OsrFrameReg);
    regs.take(ReturnReg);

    Register scratch = regs.takeAny();

    // A null interpreter frame means this is a plain JIT entry, not OSR.
    Label notOsr;
    masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

    Register numStackValues = regs.takeAny();
    masm.loadPtr(Address(ebp, ARG_STACKVALUES), numStackValues);

    Register jitcode = regs.takeAny();
    masm.loadPtr(Address(ebp, ARG_JITCODE), jitcode);

    // Push return address (bound at returnLabel, after the JIT call below).
    masm.mov(&returnLabel, scratch);
    masm.push(scratch);

    // Frame prologue.
    masm.push(ebp);
    masm.mov(esp, ebp);

    // Reserve frame.
    masm.subPtr(Imm32(BaselineFrame::Size()), esp);

    Register framePtrScratch = regs.takeAny();
    masm.touchFrameValues(numStackValues, scratch, framePtrScratch);
    masm.mov(esp, framePtrScratch);

    // Reserve space for locals and stack values:
    // numStackValues * 8 bytes (sizeof(Value)).
    masm.mov(numStackValues, scratch);
    masm.shll(Imm32(3), scratch);
    masm.subPtr(scratch, esp);

    // Enter exit frame.
    masm.push(FrameDescriptor(FrameType::BaselineJS));
    masm.push(Imm32(0));  // Fake return address.
    masm.push(FramePointer);
    // No GC things to mark on the stack, push a bare token.
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrame(scratch, scratch, ExitFrameType::Bare);

    // Preserve jitcode across the ABI call below.
    masm.push(jitcode);

    using Fn = bool (*)(BaselineFrame* frame, InterpreterFrame* interpFrame,
                        uint32_t numStackValues);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(framePtrScratch);  // BaselineFrame
    masm.passABIArg(OsrFrameReg);      // InterpreterFrame
    masm.passABIArg(numStackValues);
    masm.callWithABI<Fn, jit::InitBaselineFrameForOsr>(
        ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

    masm.pop(jitcode);

    MOZ_ASSERT(jitcode != ReturnReg);

    Label error;
    masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), esp);
    masm.branchIfFalseBool(ReturnReg, &error);

    // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
    // if profiler instrumentation is enabled.
    {
      Label skipProfilingInstrumentation;
      AbsoluteAddress addressOfEnabled(
          cx->runtime()->geckoProfiler().addressOfEnabled());
      masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
                    &skipProfilingInstrumentation);
      masm.profilerEnterFrame(ebp, scratch);
      masm.bind(&skipProfilingInstrumentation);
    }

    masm.jump(jitcode);

    // OOM: frame epilogue, load error value, discard return address and
    // return.
    masm.bind(&error);
    masm.mov(ebp, esp);
    masm.pop(ebp);
    masm.addPtr(Imm32(sizeof(uintptr_t)), esp);  // Return address.
    masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    masm.jump(&oomReturnLabel);

    // Non-OSR entry: load the scope chain argument for the callee.
    masm.bind(&notOsr);
    masm.loadPtr(Address(ebp, ARG_SCOPECHAIN), R1.scratchReg());
  }

  // The call will push the return address and frame pointer on the stack, thus
  // we check that the stack would be aligned once the call is complete.
  masm.assertStackAlignment(JitStackAlignment, 2 * sizeof(uintptr_t));

  /***************************************************************
      Call passed-in code, get return value and fill in the
      passed in return value pointer
  ***************************************************************/
  masm.call(Address(ebp, ARG_JITCODE));

  {
    // Interpreter -> Baseline OSR will return here.
    masm.bind(&returnLabel);
    masm.addCodeLabel(returnLabel);
    masm.bind(&oomReturnLabel);
  }

  // Restore the stack pointer so the stack looks like this:
  //  +20 ... arguments ...
  //  +16 <return>
  //  +12 ebp <- %ebp pointing here.
  //  +8  ebx
  //  +4  esi
  //  +0  edi <- %esp pointing here.
  masm.lea(Operand(ebp, -int32_t(3 * sizeof(void*))), esp);

  // Store the return value into the caller-provided Value* (ARG_RESULT).
  masm.loadPtr(Address(ebp, ARG_RESULT), eax);
  masm.storeValue(JSReturnOperand, Operand(eax, 0));

  /**************************************************************
      Return stack and registers to correct state
  **************************************************************/

  // Restore non-volatile registers.
  masm.pop(edi);
  masm.pop(esi);
  masm.pop(ebx);

  // Restore old stack frame pointer.
  masm.pop(ebp);
  masm.ret();
}

// static
mozilla::Maybe<::JS::ProfilingFrameIterator::RegisterState>
JitRuntime::getCppEntryRegisters(JitFrameLayout* frameStackAddress) {
  // Not supported, or not implemented yet.
  // TODO: Implement along with the corresponding stack-walker changes, in
  // coordination with the Gecko Profiler, see bug 1635987 and follow-ups.
  return mozilla::Nothing{};
}

// Push AllRegs in a way that is compatible with RegisterDump, regardless of
// what PushRegsInMask might do to reduce the set size.
static void DumpAllRegs(MacroAssembler& masm) {
#ifdef ENABLE_WASM_SIMD
  masm.PushRegsInMask(AllRegs);
#else
  // When SIMD isn't supported, PushRegsInMask reduces the set of float
  // registers to be double-sized, while the RegisterDump expects each of
  // the float registers to have the maximal possible size
  // (Simd128DataSize). To work around this, we just spill the double
  // registers by hand here, using the register dump offset directly.
  for (GeneralRegisterBackwardIterator iter(AllRegs.gprs()); iter.more();
       ++iter) {
    masm.Push(*iter);
  }

  masm.reserveStack(sizeof(RegisterDump::FPUArray));
  for (FloatRegisterBackwardIterator iter(AllRegs.fpus()); iter.more();
       ++iter) {
    FloatRegister reg = *iter;
    Address spillAddress(StackPointer, reg.getRegisterDumpOffsetInBytes());
    masm.storeDouble(reg, spillAddress);
  }
#endif
}

void JitRuntime::generateInvalidator(MacroAssembler& masm, Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateInvalidator");

  invalidatorOffset_ = startTrampolineCode(masm);

  // We do the minimum amount of work in assembly and shunt the rest
  // off to InvalidationBailout. Assembly does:
  //
  // - Push the machine state onto the stack.
  // - Call the InvalidationBailout routine with the stack pointer.
  // - Now that the frame has been bailed out, convert the invalidated
  //   frame into an exit frame.
  // - Do the normal check-return-code-and-thunk-to-the-interpreter dance.

  // Push registers such that we can access them from [base + code].
  DumpAllRegs(masm);

  masm.movl(esp, eax);  // Argument to jit::InvalidationBailout.

  // Make space for InvalidationBailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movl(esp, ebx);

  using Fn = bool (*)(InvalidationBailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(edx);
  masm.passABIArg(eax);
  masm.passABIArg(ebx);
  masm.callWithABI<Fn, InvalidationBailout>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckOther);

  masm.pop(ecx);  // Get bailoutInfo outparam.

  // Pop the machine state and the dead frame.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
  masm.jmp(bailoutTail);
}

// Pushes a full RegisterDump on the stack and leaves its address in spArg,
// forming the BailoutStack argument for jit::Bailout.
static void PushBailoutFrame(MacroAssembler& masm, Register spArg) {
  // Push registers such that we can access them from [base + code].
  DumpAllRegs(masm);

  // The current stack pointer is the first argument to jit::Bailout.
  masm.movl(esp, spArg);
}

// Emits the bailout thunk: dump registers, call jit::Bailout, then tail-jump
// to the shared bailout tail with the BaselineBailoutInfo pointer in ecx.
static void GenerateBailoutThunk(MacroAssembler& masm, Label* bailoutTail) {
  PushBailoutFrame(masm, eax);

  // Make space for Bailout's bailoutInfo outparam.
  masm.reserveStack(sizeof(void*));
  masm.movl(esp, ebx);

  // Call the bailout function.
  using Fn = bool (*)(BailoutStack* sp, BaselineBailoutInfo** info);
  masm.setupUnalignedABICall(ecx);
  masm.passABIArg(eax);
  masm.passABIArg(ebx);
  masm.callWithABI<Fn, Bailout>(ABIType::General,
                                CheckUnsafeCallWithABI::DontCheckOther);

  masm.pop(ecx);  // Get the bailoutInfo outparam.

  // Remove both the bailout frame and the topmost Ion frame's stack.
  masm.moveToStackPtr(FramePointer);

  // Jump to shared bailout tail. The BailoutInfo pointer has to be in ecx.
  masm.jmp(bailoutTail);
}

void JitRuntime::generateBailoutHandler(MacroAssembler& masm,
                                        Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutHandler");

  bailoutHandlerOffset_ = startTrampolineCode(masm);

  GenerateBailoutThunk(masm, bailoutTail);
}

// Generates a wrapper trampoline for the VM function described by |f|. The
// wrapper builds an exit frame, copies the JS-stack arguments into ABI
// argument slots, calls |nativeFun|, checks the failure return convention,
// loads any outparam, and returns while popping the callee-consumed stack.
bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm,
                                   VMFunctionId id, const VMFunctionData& f,
                                   DynFn nativeFun, uint32_t* wrapperOffset) {
  AutoCreatedBy acb(masm, "JitRuntime::generateVMWrapper");

  *wrapperOffset = startTrampolineCode(masm);

  // Avoid conflicts with argument registers while discarding the result after
  // the function call.
  AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

  static_assert(
      (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
      "Wrapper register set must be a superset of Volatile register set.");

  // The context is the first argument.
  Register cxreg = regs.takeAny();

  // Stack is:
  //  ... frame ...
  //  +8  [args]
  //  +4  descriptor
  //  +0  returnAddress
  //
  // Push the frame pointer to finish the exit frame, then link it up.
  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);
  masm.loadJSContext(cxreg);
  masm.enterExitFrame(cxreg, regs.getAny(), id);

  // Reserve space for the outparameter.
  masm.reserveVMFunctionOutParamSpace(f);

  masm.setupUnalignedABICallDontSaveRestoreSP();
  masm.passABIArg(cxreg);

  // Explicit arguments start just above the exit frame, at
  // FramePointer + ExitFrameLayout::Size().
  size_t argDisp = ExitFrameLayout::Size();

  // Copy arguments.
  for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
    switch (f.argProperties(explicitArg)) {
      case VMFunctionData::WordByValue:
        masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByValue:
        // We don't pass doubles in float registers on x86, so no need
        // to check for argPassedInFloatReg. A double occupies two word
        // slots on the JS stack.
        masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        argDisp += sizeof(void*);
        masm.passABIArg(MoveOperand(FramePointer, argDisp), ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::WordByRef:
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += sizeof(void*);
        break;
      case VMFunctionData::DoubleByRef:
        // Pass the address of the double; it still occupies two word
        // slots on the JS stack.
        masm.passABIArg(MoveOperand(FramePointer, argDisp,
                                    MoveOperand::Kind::EffectiveAddress),
                        ABIType::General);
        argDisp += 2 * sizeof(void*);
        break;
    }
  }

  // Copy the implicit outparam, if any. It lives below the exit frame footer.
  const int32_t outParamOffset =
      -int32_t(ExitFooterFrame::Size()) - f.sizeOfOutParamStackSlot();
  if (f.outParam != Type_Void) {
    masm.passABIArg(MoveOperand(FramePointer, outParamOffset,
                                MoveOperand::Kind::EffectiveAddress),
                    ABIType::General);
  }

  masm.callWithABI(nativeFun, ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  switch (f.failType()) {
    case Type_Cell:
      // Failure is signalled by a null pointer return.
      masm.branchTestPtr(Assembler::Zero, eax, eax, masm.failureLabel());
      break;
    case Type_Bool:
      // Failure is signalled by a false return.
      masm.testb(eax, eax);
      masm.j(Assembler::Zero, masm.failureLabel());
      break;
    case Type_Void:
      break;
    default:
      MOZ_CRASH("unknown failure kind");
  }

  // Load the outparam.
  masm.loadVMFunctionOutParam(f, Address(FramePointer, outParamOffset));

  // Until C++ code is instrumented against Spectre, prevent speculative
  // execution from returning any private data.
  if (f.returnsData() && JitOptions.spectreJitToCxxCalls) {
    masm.speculationBarrier();
  }

  // Pop frame and restore frame pointer.
  masm.moveToStackPtr(FramePointer);
  masm.pop(FramePointer);

  // Return. Subtract sizeof(void*) for the frame pointer.
  masm.retn(Imm32(sizeof(ExitFrameLayout) - sizeof(void*) +
                  f.explicitStackSlots() * sizeof(void*) +
                  f.extraValuesToPop * sizeof(Value)));

  return true;
}

// Generates the pre-write-barrier trampoline for GC things of |type|. The
// thing's address arrives in PreBarrierReg (edx). The fast path filters out
// cases needing no barrier; otherwise volatile registers are saved and the
// C++ barrier is invoked. Returns the trampoline's code offset.
uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                                        MIRType type) {
  AutoCreatedBy acb(masm, "JitRuntime::generatePreBarrier");

  uint32_t offset = startTrampolineCode(masm);

  static_assert(PreBarrierReg == edx);
  Register temp1 = eax;
  Register temp2 = ebx;
  Register temp3 = ecx;
  // Save the temps clobbered by the fast-path check.
  masm.push(temp1);
  masm.push(temp2);
  masm.push(temp3);

  Label noBarrier;
  masm.emitPreBarrierFastPath(type, temp1, temp2, temp3, &noBarrier);

  // Call into C++ to mark this GC thing.
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);

  // Save all volatile registers around the ABI call.
  LiveRegisterSet save;
  save.set() = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                           FloatRegisterSet(FloatRegisters::VolatileMask));
  masm.PushRegsInMask(save);

  masm.movl(ImmPtr(cx->runtime()), ecx);

  masm.setupUnalignedABICall(eax);
  masm.passABIArg(ecx);
  masm.passABIArg(edx);  // The GC thing's address (PreBarrierReg).
  masm.callWithABI(JitPreWriteBarrier(type));

  masm.PopRegsInMask(save);
  masm.ret();

  // Fast path: no barrier needed; restore temps and return.
  masm.bind(&noBarrier);
  masm.pop(temp3);
  masm.pop(temp2);
  masm.pop(temp1);
  masm.ret();

  return offset;
}

void JitRuntime::generateBailoutTailStub(MacroAssembler& masm,
                                         Label* bailoutTail) {
  AutoCreatedBy acb(masm, "JitRuntime::generateBailoutTailStub");

  masm.bind(bailoutTail);
  // edx = scratch, ecx = BaselineBailoutInfo* (set by the bailout thunks).
  masm.generateBailoutTail(edx, ecx);
}