MozSimulator-vixl.cpp (23097B)
// Copyright 2013, ARM Limited
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of ARM Limited nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 27 #include "mozilla/DebugOnly.h" 28 29 #include <cstring> 30 31 #include "jit/arm64/vixl/Debugger-vixl.h" 32 #include "jit/arm64/vixl/MozCachingDecoder.h" 33 #include "jit/arm64/vixl/Simulator-vixl.h" 34 #include "jit/IonTypes.h" 35 #include "js/UniquePtr.h" 36 #include "js/Utility.h" 37 #include "threading/LockGuard.h" 38 #include "vm/JSContext.h" 39 #include "vm/Runtime.h" 40 41 js::jit::SimulatorProcess* js::jit::SimulatorProcess::singleton_ = nullptr; 42 43 namespace vixl { 44 45 using mozilla::DebugOnly; 46 using js::jit::ABIFunctionType; 47 using js::jit::JitActivation; 48 using js::jit::SimulatorProcess; 49 50 Simulator::Simulator(Decoder* decoder, FILE* stream) 51 : stream_(nullptr) 52 , print_disasm_(nullptr) 53 , instrumentation_(nullptr) 54 , stack_(nullptr) 55 , stack_limit_(nullptr) 56 , decoder_(nullptr) 57 , oom_(false) 58 , single_stepping_(false) 59 , single_step_callback_(nullptr) 60 , single_step_callback_arg_(nullptr) 61 { 62 this->init(decoder, stream); 63 64 // If this environment variable is present, trace the executed instructions. 65 // (Very helpful for debugging code generation crashes.) 66 if (getenv("VIXL_TRACE")) { 67 set_trace_parameters(LOG_DISASM); 68 } 69 } 70 71 72 Simulator::~Simulator() { 73 js_free(stack_); 74 stack_ = nullptr; 75 76 // The decoder may outlive the simulator. 77 if (print_disasm_) { 78 decoder_->RemoveVisitor(print_disasm_); 79 js_delete(print_disasm_); 80 print_disasm_ = nullptr; 81 } 82 83 if (instrumentation_) { 84 decoder_->RemoveVisitor(instrumentation_); 85 js_delete(instrumentation_); 86 instrumentation_ = nullptr; 87 } 88 } 89 90 91 void Simulator::ResetState() { 92 // Reset the system registers. 93 nzcv_ = SimSystemRegister::DefaultValueFor(NZCV); 94 fpcr_ = SimSystemRegister::DefaultValueFor(FPCR); 95 96 // Reset registers to 0. 
97 pc_ = nullptr; 98 pc_modified_ = false; 99 for (unsigned i = 0; i < kNumberOfRegisters; i++) { 100 set_xreg(i, 0xbadbeef); 101 } 102 // Set FP registers to a value that is a NaN in both 32-bit and 64-bit FP. 103 uint64_t nan_bits = UINT64_C(0x7ff0dead7f8beef1); 104 VIXL_ASSERT(IsSignallingNaN(RawbitsToDouble(nan_bits & kDRegMask))); 105 VIXL_ASSERT(IsSignallingNaN(RawbitsToFloat(nan_bits & kSRegMask))); 106 for (unsigned i = 0; i < kNumberOfFPRegisters; i++) { 107 set_dreg_bits(i, nan_bits); 108 } 109 // Returning to address 0 exits the Simulator. 110 set_lr(kEndOfSimAddress); 111 } 112 113 114 void Simulator::init(Decoder* decoder, FILE* stream) { 115 // Ensure that shift operations act as the simulator expects. 116 VIXL_ASSERT((static_cast<int32_t>(-1) >> 1) == -1); 117 VIXL_ASSERT((static_cast<uint32_t>(-1) >> 1) == 0x7FFFFFFF); 118 119 instruction_stats_ = false; 120 121 // Set up the decoder. 122 decoder_ = decoder; 123 decoder_->AppendVisitor(this); 124 125 stream_ = stream; 126 print_disasm_ = js_new<PrintDisassembler>(stream_); 127 if (!print_disasm_) { 128 oom_ = true; 129 return; 130 } 131 set_coloured_trace(false); 132 trace_parameters_ = LOG_NONE; 133 134 ResetState(); 135 136 // Allocate and set up the simulator stack. 137 stack_ = js_pod_malloc<byte>(stack_size_); 138 if (!stack_) { 139 oom_ = true; 140 return; 141 } 142 stack_limit_ = stack_ + stack_protection_size_; 143 // Configure the starting stack pointer. 144 // - Find the top of the stack. 145 byte * tos = stack_ + stack_size_; 146 // - There's a protection region at both ends of the stack. 147 tos -= stack_protection_size_; 148 // - The stack pointer must be 16-byte aligned. 149 tos = AlignDown(tos, 16); 150 set_sp(tos); 151 152 // Set the sample period to 10, as the VIXL examples and tests are short. 
153 if (getenv("VIXL_STATS")) { 154 instrumentation_ = js_new<Instrument>("vixl_stats.csv", 10); 155 if (!instrumentation_) { 156 oom_ = true; 157 return; 158 } 159 } 160 161 // Print a warning about exclusive-access instructions, but only the first 162 // time they are encountered. This warning can be silenced using 163 // SilenceExclusiveAccessWarning(). 164 print_exclusive_access_warning_ = true; 165 } 166 167 168 Simulator* Simulator::Current() { 169 JSContext* cx = js::TlsContext.get(); 170 if (!cx) { 171 return nullptr; 172 } 173 JSRuntime* rt = cx->runtime(); 174 if (!rt) { 175 return nullptr; 176 } 177 if (!js::CurrentThreadCanAccessRuntime(rt)) { 178 return nullptr; 179 } 180 return cx->simulator(); 181 } 182 183 184 Simulator* Simulator::Create() { 185 Decoder *decoder = js_new<Decoder>(); 186 if (!decoder) 187 return nullptr; 188 189 // FIXME: This just leaks the Decoder object for now, which is probably OK. 190 // FIXME: We should free it at some point. 191 // FIXME: Note that it can't be stored in the SimulatorRuntime due to lifetime conflicts. 192 js::UniquePtr<Simulator> sim; 193 if (getenv("USE_DEBUGGER") != nullptr) { 194 sim.reset(js_new<Debugger>(decoder, stdout)); 195 } else { 196 sim.reset(js_new<Simulator>(decoder, stdout)); 197 } 198 199 // Check if Simulator:init ran out of memory. 200 if (sim && sim->oom()) { 201 return nullptr; 202 } 203 204 #ifdef JS_CACHE_SIMULATOR_ARM64 205 // Register the simulator in the Simulator process to handle cache flushes 206 // across threads. 
207 js::jit::AutoLockSimulatorCache alsc; 208 if (!SimulatorProcess::registerSimulator(sim.get())) { 209 return nullptr; 210 } 211 #endif 212 213 return sim.release(); 214 } 215 216 217 void Simulator::Destroy(Simulator* sim) { 218 #ifdef JS_CACHE_SIMULATOR_ARM64 219 if (sim) { 220 js::jit::AutoLockSimulatorCache alsc; 221 SimulatorProcess::unregisterSimulator(sim); 222 } 223 #endif 224 225 js_delete(sim); 226 } 227 228 229 void Simulator::ExecuteInstruction() { 230 // The program counter should always be aligned. 231 VIXL_ASSERT(IsWordAligned(pc_)); 232 #ifdef JS_CACHE_SIMULATOR_ARM64 233 if (pendingCacheRequests) { 234 // We're here emulating the behavior of the membarrier carried over on 235 // real hardware does; see syscalls to membarrier in MozCpu-vixl.cpp. 236 // There's a slight difference that the simulator is not being 237 // interrupted: instead, we effectively run the icache flush request 238 // before executing the next instruction, which is close enough and 239 // sufficient for our use case. 
240 js::jit::AutoLockSimulatorCache alsc; 241 FlushICache(); 242 } 243 #endif 244 decoder_->Decode(pc_); 245 increment_pc(); 246 } 247 248 249 uintptr_t Simulator::stackLimit() const { 250 return reinterpret_cast<uintptr_t>(stack_limit_); 251 } 252 253 254 uintptr_t* Simulator::addressOfStackLimit() { 255 return (uintptr_t*)&stack_limit_; 256 } 257 258 259 bool Simulator::overRecursed(uintptr_t newsp) const { 260 if (newsp == 0) { 261 newsp = get_sp(); 262 } 263 return newsp <= stackLimit(); 264 } 265 266 267 bool Simulator::overRecursedWithExtra(uint32_t extra) const { 268 uintptr_t newsp = get_sp() - extra; 269 return newsp <= stackLimit(); 270 } 271 272 273 JS::ProfilingFrameIterator::RegisterState 274 Simulator::registerState() 275 { 276 JS::ProfilingFrameIterator::RegisterState state; 277 state.pc = (uint8_t*) get_pc(); 278 state.fp = (uint8_t*) get_fp(); 279 state.lr = (uint8_t*) get_lr(); 280 state.sp = (uint8_t*) get_sp(); 281 return state; 282 } 283 284 int64_t Simulator::call(uint8_t* entry, int argument_count, ...) { 285 va_list parameters; 286 va_start(parameters, argument_count); 287 288 // First eight arguments passed in registers. 289 VIXL_ASSERT(argument_count <= 8); 290 // This code should use the type of the called function 291 // (with templates, like the callVM machinery), but since the 292 // number of called functions is miniscule, their types have been 293 // divined from the number of arguments. 294 if (argument_count == 8) { 295 // EnterJitData::jitcode. 296 set_xreg(0, va_arg(parameters, int64_t)); 297 // EnterJitData::maxArgc. 298 set_xreg(1, va_arg(parameters, unsigned)); 299 // EnterJitData::maxArgv. 300 set_xreg(2, va_arg(parameters, int64_t)); 301 // EnterJitData::osrFrame. 302 set_xreg(3, va_arg(parameters, int64_t)); 303 // EnterJitData::calleeToken. 304 set_xreg(4, va_arg(parameters, int64_t)); 305 // EnterJitData::scopeChain. 306 set_xreg(5, va_arg(parameters, int64_t)); 307 // EnterJitData::osrNumStackValues. 
308 set_xreg(6, va_arg(parameters, unsigned)); 309 // Address of EnterJitData::result. 310 set_xreg(7, va_arg(parameters, int64_t)); 311 } else if (argument_count == 2) { 312 // EntryArg* args 313 set_xreg(0, va_arg(parameters, int64_t)); 314 // uint8_t* GlobalData 315 set_xreg(1, va_arg(parameters, int64_t)); 316 } else if (argument_count == 1) { // irregexp 317 // InputOutputData& data 318 set_xreg(0, va_arg(parameters, int64_t)); 319 } else if (argument_count == 0) { // testsJit.cpp 320 // accept. 321 } else { 322 MOZ_CRASH("Unknown number of arguments"); 323 } 324 325 va_end(parameters); 326 327 // Call must transition back to native code on exit. 328 VIXL_ASSERT(get_lr() == int64_t(kEndOfSimAddress)); 329 330 // Execute the simulation. 331 DebugOnly<int64_t> entryStack = get_sp(); 332 RunFrom((Instruction*)entry); 333 DebugOnly<int64_t> exitStack = get_sp(); 334 VIXL_ASSERT(entryStack == exitStack); 335 336 int64_t result = xreg(0); 337 if (getenv("USE_DEBUGGER")) { 338 printf("LEAVE\n"); 339 } 340 return result; 341 } 342 343 344 // When the generated code calls a VM function (masm.callWithABI) we need to 345 // call that function instead of trying to execute it with the simulator 346 // (because it's x64 code instead of AArch64 code). We do that by redirecting the VM 347 // call to a svc (Supervisor Call) instruction that is handled by the 348 // simulator. We write the original destination of the jump just at a known 349 // offset from the svc instruction so the simulator knows what to call. 
// A Redirection pairs a host-native function pointer with an embedded `svc`
// instruction. Generated code jumps to addressOfSvcInstruction(); when the
// simulator executes that svc it recovers the owning Redirection (at a fixed
// offset from the instruction, see FromSvcInstruction) and calls the native
// function on the host instead.
class Redirection
{
    friend class Simulator;

    // Private: instances are created only through Get() (via placement new)
    // and live forever on the process-wide singly-linked redirection list.
    // Caller must hold the simulator cache lock (Get() takes it).
    Redirection(void* nativeFunction, ABIFunctionType type)
      : nativeFunction_(nativeFunction),
        type_(type),
        next_(nullptr)
    {
        next_ = SimulatorProcess::redirection();
        SimulatorProcess::setRedirection(this);

        // Encode an `svc #kCallRtRedirected` into svcInstruction_ so that
        // executing it traps into Simulator::VisitException -> SVC handling.
        Instruction* instr = (Instruction*)(&svcInstruction_);
        vixl::Assembler::svc(instr, kCallRtRedirected);
    }

  public:
    void* addressOfSvcInstruction() { return &svcInstruction_; }
    void* nativeFunction() const { return nativeFunction_; }
    ABIFunctionType type() const { return type_; }

    // Find the existing Redirection for `nativeFunction`, or create one.
    // Takes the simulator cache lock; crashes (OOM-unsafe) on allocation
    // failure rather than returning nullptr.
    static Redirection* Get(void* nativeFunction, ABIFunctionType type) {
        js::jit::AutoLockSimulatorCache alsr;

        // TODO: Store srt_ in the simulator for this assertion.
        // VIXL_ASSERT_IF(pt->simulator(), pt->simulator()->srt_ == srt);

        Redirection* current = SimulatorProcess::redirection();
        for (; current != nullptr; current = current->next_) {
            if (current->nativeFunction_ == nativeFunction) {
                VIXL_ASSERT(current->type() == type);
                return current;
            }
        }

        // Note: we can't use js_new here because the constructor is private.
        js::AutoEnterOOMUnsafeRegion oomUnsafe;
        Redirection* redir = js_pod_malloc<Redirection>(1);
        if (!redir)
            oomUnsafe.crash("Simulator redirection");
        new(redir) Redirection(nativeFunction, type);
        return redir;
    }

    // Inverse of addressOfSvcInstruction(): map the address of an embedded
    // svc instruction back to its enclosing Redirection via offsetof.
    static const Redirection* FromSvcInstruction(const Instruction* svcInstruction) {
        const uint8_t* addrOfSvc = reinterpret_cast<const uint8_t*>(svcInstruction);
        const uint8_t* addrOfRedirection = addrOfSvc - offsetof(Redirection, svcInstruction_);
        return reinterpret_cast<const Redirection*>(addrOfRedirection);
    }

  private:
    void* nativeFunction_;   // Host function to invoke.
    uint32_t svcInstruction_; // The encoded `svc` the JIT jumps to.
    ABIFunctionType type_;   // Signature used to dispatch the host call.
    Redirection* next_;      // Next entry in the process-wide list.
};




// Return a code address the JIT can call in place of `nativeFunction`; the
// returned address is the Redirection's embedded svc instruction.
void* Simulator::RedirectNativeFunction(void* nativeFunction, ABIFunctionType type) {
    Redirection* redirection = Redirection::Get(nativeFunction, type);
    return redirection->addressOfSvcInstruction();
}

// Handle exception-generating instructions: wasm traps (undefined
// instruction pattern), BRK/HLT breakpoints and pseudo-ops, and the SVC
// pseudo-instructions the JIT uses for native calls and sp bookkeeping.
void Simulator::VisitException(const Instruction* instr) {
  if (instr->InstructionBits() == UNDEFINED_INST_PATTERN) {
    // Give the wasm signal-handler machinery a chance to treat this as a
    // recoverable trap; otherwise it is a genuine unreachable.
    uint8_t* newPC;
    if (js::wasm::HandleIllegalInstruction(registerState(), &newPC)) {
      set_pc((Instruction*)newPC);
      return;
    }
    DoUnreachable(instr);
  }

  switch (instr->Mask(ExceptionMask)) {
    case BRK: {
      // Pass the BRK immediate through to the host breakpoint handler.
      int lowbit = ImmException_offset;
      int highbit = ImmException_offset + ImmException_width - 1;
      HostBreakpoint(instr->Bits(highbit, lowbit));
      break;
    }
    case HLT:
      switch (instr->ImmException()) {
        case kTraceOpcode:
          DoTrace(instr);
          return;
        case kLogOpcode:
          DoLog(instr);
          return;
        case kPrintfOpcode:
          DoPrintf(instr);
          return;
        default:
          HostBreakpoint();
          return;
      }
    case SVC:
      // The SVC instruction is hijacked by the JIT as a pseudo-instruction
      // causing the Simulator to execute host-native code for callWithABI.
      switch (instr->ImmException()) {
        case kCallRtRedirected:
          VisitCallRedirection(instr);
          return;
        case kMarkStackPointer: {
          // Push the current sp so a later kCheckStackPointer can assert
          // that it was restored.
          js::AutoEnterOOMUnsafeRegion oomUnsafe;
          if (!spStack_.append(get_sp()))
            oomUnsafe.crash("tracking stack for ARM64 simulator");
          return;
        }
        case kCheckStackPointer: {
          DebugOnly<int64_t> current = get_sp();
          DebugOnly<int64_t> expected = spStack_.popCopy();
          VIXL_ASSERT(current == expected);
          return;
        }
        default:
          VIXL_UNIMPLEMENTED();
      }
      break;
    default:
      VIXL_UNIMPLEMENTED();
  }
}


// Return-value helpers used by the generated ABI dispatch code below: place
// the host call's result in the AArch64 return register (w0/x0/s0/d0).
void Simulator::setGPR32Result(int32_t result) {
    set_wreg(0, result);
}


void Simulator::setGPR64Result(int64_t result) {
    set_xreg(0, result);
}


void Simulator::setFP32Result(float result) {
    set_sreg(0, result);
}


void Simulator::setFP64Result(double result) {
    set_dreg(0, result);
}

// Macro-generated prototypes for the per-signature trampolines used by the
// dispatch switch in VisitCallRedirection.
ABI_FUNCTION_TYPE_SIM_PROTOTYPES

// Simulator support for callWithABI().
// Recover the Redirection from the trapping svc instruction, marshal the
// simulated argument registers into host values, call the native function,
// store its result, then clobber all caller-saved registers (with poison)
// and assert every callee-saved register and sp are preserved, emulating the
// AArch64 procedure call standard.
void
Simulator::VisitCallRedirection(const Instruction* instr)
{
  VIXL_ASSERT(instr->Mask(ExceptionMask) == SVC);
  VIXL_ASSERT(instr->ImmException() == kCallRtRedirected);

  const Redirection* redir = Redirection::FromSvcInstruction(instr);
  uintptr_t nativeFn = reinterpret_cast<uintptr_t>(redir->nativeFunction());

  // Stack must be aligned prior to the call.
  // FIXME: It's actually our job to perform the alignment...
  //VIXL_ASSERT((xreg(31, Reg31IsStackPointer) & (StackAlignment - 1)) == 0);

  // Used to assert that callee-saved registers are preserved.
  DebugOnly<int64_t> x19 = xreg(19);
  DebugOnly<int64_t> x20 = xreg(20);
  DebugOnly<int64_t> x21 = xreg(21);
  DebugOnly<int64_t> x22 = xreg(22);
  DebugOnly<int64_t> x23 = xreg(23);
  DebugOnly<int64_t> x24 = xreg(24);
  DebugOnly<int64_t> x25 = xreg(25);
  DebugOnly<int64_t> x26 = xreg(26);
  DebugOnly<int64_t> x27 = xreg(27);
  DebugOnly<int64_t> x28 = xreg(28);
  DebugOnly<int64_t> x29 = xreg(29);
  DebugOnly<int64_t> savedSP = get_sp();

#ifdef DEBUG
  // Snapshot the callee-saved FP registers (v8-v15) for the same check.
  qreg_t qregs[kNumberOfCalleeSavedFPRegisters] = {};
  for (unsigned i = 0; i < kNumberOfCalleeSavedFPRegisters; i++) {
    qregs[i] = qreg(kFirstCalleeSavedFPRegisterIndex + i);
  }
#endif

  // Get the SP for reading stack arguments
  int64_t* sp = reinterpret_cast<int64_t*>(get_sp());
  // Remember LR for returning from the "call".
  int64_t savedLR = xreg(30);

  // Allow recursive Simulator calls: returning from the call must stop
  // the simulation and transition back to native Simulator code.
  set_xreg(30, int64_t(kEndOfSimAddress));

  // Store argument register values in local variables for ease of use below.
  int64_t x0 = xreg(0);
  int64_t x1 = xreg(1);
  int64_t x2 = xreg(2);
  int64_t x3 = xreg(3);
  int64_t x4 = xreg(4);
  int64_t x5 = xreg(5);
  int64_t x6 = xreg(6);
  int64_t x7 = xreg(7);
  double d0 = dreg(0);
  double d1 = dreg(1);
  double d2 = dreg(2);
  double d3 = dreg(3);
  float s0 = sreg(0);
  float s1 = sreg(1);
  float s2 = sreg(2);
  float s3 = sreg(3);
  float s4 = sreg(4);

  if (single_stepping_) {
    single_step_callback_(single_step_callback_arg_, this, nullptr);
  }

  // Dispatch the call and set the return value. The macro expands to one
  // case per ABIFunctionType; each casts nativeFn, calls it with the locals
  // captured above (x0..x7, d0..d3, s0..s4, sp), and stores the result via
  // the set*Result helpers.
  switch (redir->type()) {
    ABI_FUNCTION_TYPE_ARM64_SIM_DISPATCH

    default:
      MOZ_CRASH("Unknown function type.");
  }

  if (single_stepping_) {
    single_step_callback_(single_step_callback_arg_, this, nullptr);
  }

  // Nuke the volatile registers. x0-x7 are used as result registers, but except
  // for x0, none are used in the above signatures.
  for (int i = 1; i <= 18; i++) {
    // Code feed 1 bad data
    set_xreg(i, int64_t(0xc0defeed1badda7a));
  }

  // Assert that callee-saved registers are unchanged.
  VIXL_ASSERT(xreg(19) == x19);
  VIXL_ASSERT(xreg(20) == x20);
  VIXL_ASSERT(xreg(21) == x21);
  VIXL_ASSERT(xreg(22) == x22);
  VIXL_ASSERT(xreg(23) == x23);
  VIXL_ASSERT(xreg(24) == x24);
  VIXL_ASSERT(xreg(25) == x25);
  VIXL_ASSERT(xreg(26) == x26);
  VIXL_ASSERT(xreg(27) == x27);
  VIXL_ASSERT(xreg(28) == x28);
  VIXL_ASSERT(xreg(29) == x29);

  // Assert that the stack is unchanged.
  VIXL_ASSERT(savedSP == get_sp());

  constexpr qreg_t code_feed_1bad_data = {
    0xc0, 0xde, 0xfe, 0xed, 0x1b, 0xad, 0xda, 0x7a,
    0xc0, 0xde, 0xfe, 0xed, 0x1b, 0xad, 0xda, 0x7a,
  };

  // v0-v7 are used as argument and result registers. We're currently only using
  // v0 as an output register, so clobber the remaining registers.
  for (unsigned i = 1; i < kFirstCalleeSavedFPRegisterIndex; i++) {
    set_qreg(i, code_feed_1bad_data);
  }

  // Bottom 64 bits of v8-v15 are callee preserved.
  for (unsigned i = 0; i < kNumberOfCalleeSavedFPRegisters; i++) {
    qreg_t r = qreg(kFirstCalleeSavedFPRegisterIndex + i);

    // Assert callee-saved register halves are unchanged.
    VIXL_ASSERT(std::memcmp(&r.val, &qregs[i].val, sizeof(int64_t)) == 0);

    // Clobber high 64 bits.
    std::memcpy(&r.val[sizeof(int64_t)], &code_feed_1bad_data.val,
                sizeof(int64_t));
    set_qreg(kFirstCalleeSavedFPRegisterIndex + i, r);
  }

  // v16-v31 are temporary registers and caller preserved.
  constexpr unsigned kFirstTempFPRegisterIndex =
      kFirstCalleeSavedFPRegisterIndex + kNumberOfCalleeSavedFPRegisters;
  for (unsigned i = kFirstTempFPRegisterIndex; i < kNumberOfVRegisters; i++) {
    set_qreg(i, code_feed_1bad_data);
  }

  // Simulate a return.
  set_lr(savedLR);
  set_pc((Instruction*)savedLR);
  if (getenv("USE_DEBUGGER"))
    printf("SVCRET\n");
}

#ifdef JS_CACHE_SIMULATOR_ARM64
// Apply all icache-flush requests queued for this simulator (by this thread
// or others) to the caching decoder. Caller must hold the simulator cache
// lock (see ExecuteInstruction).
void
Simulator::FlushICache()
{
  // Flush the caches recorded by the current thread as well as what got
  // recorded from other threads before this call.
  auto& vec = SimulatorProcess::getICacheFlushes(this);
  for (auto& flush : vec) {
    decoder_->FlushICache(flush.start, flush.length);
  }
  vec.clear();
  pendingCacheRequests = false;
}

// Decode with a per-page cache of already-decoded instructions: a hit
// dispatches straight to the recorded visitor; a miss records the decode
// result (via setDecodePtr) while delegating to the base Decoder.
void CachingDecoder::Decode(const Instruction* instr) {
  InstDecodedKind state;
  if (lastPage_ && lastPage_->contains(instr)) {
    // Fast path: same page as the previous instruction.
    state = lastPage_->decode(instr);
  } else {
    uintptr_t key = SinglePageDecodeCache::PageStart(instr);
    ICacheMap::AddPtr p = iCache_.lookupForAdd(key);
    if (p) {
      lastPage_ = p->value();
      state = lastPage_->decode(instr);
    } else {
      // First time this page is seen: allocate a fresh page cache.
      js::AutoEnterOOMUnsafeRegion oomUnsafe;
      SinglePageDecodeCache* newPage = js_new<SinglePageDecodeCache>(instr);
      if (!newPage || !iCache_.add(p, key, newPage)) {
        oomUnsafe.crash("Simulator SinglePageDecodeCache");
      }
      lastPage_ = newPage;
      state = InstDecodedKind::NotDecodedYet;
    }
  }

  switch (state) {
    case InstDecodedKind::NotDecodedYet: {
      // Full decode; the decoder records the visitor into the page cache so
      // the next execution of this instruction takes a CASE branch below.
      cachingDecoder_.setDecodePtr(lastPage_->decodePtr(instr));
      this->Decoder::Decode(instr);
      break;
    }
#define CASE(A)                  \
    case InstDecodedKind::A: {   \
      Visit##A(instr);           \
      break;                     \
    }

    VISITOR_LIST(CASE)
#undef CASE
  }
}

// Invalidate cached decodes for [start, start + size): each covered
// instruction's page entry is reset so it will be re-decoded next time.
void CachingDecoder::FlushICache(void* start, size_t size) {
  MOZ_ASSERT(uintptr_t(start) % vixl::kInstructionSize == 0);
  MOZ_ASSERT(size % vixl::kInstructionSize == 0);
  const uint8_t* it = reinterpret_cast<const uint8_t*>(start);
  const uint8_t* end = it + size;
  SinglePageDecodeCache* last = nullptr;
  for (; it < end; it += vixl::kInstructionSize) {
    auto instr = reinterpret_cast<const Instruction*>(it);
    if (last && last->contains(instr)) {
      last->clearDecode(instr);
    } else {
      // Pages never seen by Decode() have no cache entry; nothing to clear.
      uintptr_t key = SinglePageDecodeCache::PageStart(instr);
      ICacheMap::Ptr p = iCache_.lookup(key);
      if (p) {
        last = p->value();
        last->clearDecode(instr);
      }
    }
  }
}
#endif

} // namespace vixl

namespace js {
namespace jit {

#ifdef JS_CACHE_SIMULATOR_ARM64
// Queue an icache-flush range for every registered simulator. Caller must
// hold the simulator cache lock.
void SimulatorProcess::recordICacheFlush(void* start, size_t length) {
  singleton_->lock_.assertOwnedByCurrentThread();
  AutoEnterOOMUnsafeRegion oomUnsafe;
  ICacheFlush range{start, length};
  for (auto& s : singleton_->pendingFlushes_) {
    if (!s.records.append(range)) {
      oomUnsafe.crash("Simulator recordFlushICache");
    }
  }
}

// Mark every registered simulator as having pending cache requests, so each
// applies its queued flushes before executing its next instruction.
void SimulatorProcess::membarrier() {
  singleton_->lock_.assertOwnedByCurrentThread();
  for (auto& s : singleton_->pendingFlushes_) {
    s.thread->pendingCacheRequests = true;
  }
}

// Return the pending flush list belonging to `sim`; crashes if `sim` was
// never registered. Caller must hold the simulator cache lock.
SimulatorProcess::ICacheFlushes& SimulatorProcess::getICacheFlushes(Simulator* sim) {
  singleton_->lock_.assertOwnedByCurrentThread();
  for (auto& s : singleton_->pendingFlushes_) {
    if (s.thread == sim) {
      return s.records;
    }
  }
  MOZ_CRASH("Simulator is not registered in the SimulatorProcess");
}

// Add `sim` (with an empty flush list) to the process-wide registry.
// Returns false on OOM. Caller must hold the simulator cache lock.
bool SimulatorProcess::registerSimulator(Simulator* sim) {
  singleton_->lock_.assertOwnedByCurrentThread();
  ICacheFlushes empty;
  SimFlushes simFlushes{sim, std::move(empty)};
  return singleton_->pendingFlushes_.append(std::move(simFlushes));
}

// Remove `sim` from the registry; crashes if it was never registered.
// Caller must hold the simulator cache lock.
void SimulatorProcess::unregisterSimulator(Simulator* sim) {
  singleton_->lock_.assertOwnedByCurrentThread();
  for (auto& s : singleton_->pendingFlushes_) {
    if (s.thread == sim) {
      singleton_->pendingFlushes_.erase(&s);
      return;
    }
  }
  MOZ_CRASH("Simulator is not registered in the SimulatorProcess");
}
#endif // JS_CACHE_SIMULATOR_ARM64

} // namespace jit
} // namespace js

// Accessor for the per-context simulator instance.
vixl::Simulator* JSContext::simulator() const {
  return simulator_;
}