BaselineIC.cpp (84973B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #include "jit/BaselineIC.h" 8 9 #include "mozilla/DebugOnly.h" 10 #include "mozilla/Sprintf.h" 11 12 #include "jstypes.h" 13 14 #include "builtin/Eval.h" 15 #include "jit/BaselineCacheIRCompiler.h" 16 #include "jit/CacheIRGenerator.h" 17 #include "jit/CacheIRHealth.h" 18 #include "jit/JitFrames.h" 19 #include "jit/JitRuntime.h" 20 #include "jit/JitSpewer.h" 21 #include "jit/Linker.h" 22 #include "jit/PerfSpewer.h" 23 #include "jit/SharedICHelpers.h" 24 #include "jit/SharedICRegisters.h" 25 #include "jit/StubFolding.h" 26 #include "jit/VMFunctions.h" 27 #include "js/Conversions.h" 28 #include "js/friend/ErrorMessages.h" // JSMSG_* 29 #include "vm/BytecodeIterator.h" 30 #include "vm/BytecodeLocation.h" 31 #include "vm/BytecodeUtil.h" 32 #include "vm/EqualityOperations.h" 33 #include "vm/JSFunction.h" 34 #include "vm/JSScript.h" 35 #include "vm/Opcodes.h" 36 #include "vm/PortableBaselineInterpret.h" 37 #include "vm/TypeofEqOperand.h" // TypeofEqOperand 38 #ifdef MOZ_VTUNE 39 # include "vtune/VTuneWrapper.h" 40 #endif 41 42 #include "jit/MacroAssembler-inl.h" 43 #include "jit/SharedICHelpers-inl.h" 44 #include "jit/VMFunctionList-inl.h" 45 #include "vm/BytecodeIterator-inl.h" 46 #include "vm/BytecodeLocation-inl.h" 47 #include "vm/EnvironmentObject-inl.h" 48 #include "vm/Interpreter-inl.h" 49 #include "vm/JSScript-inl.h" 50 51 using mozilla::DebugOnly; 52 53 namespace js { 54 namespace jit { 55 56 // Class used to emit all Baseline IC fallback code when initializing the 57 // JitRuntime. 
// Compiler that emits the shared fallback trampolines for every Baseline IC
// kind into |code|. One instance is used while initializing the JitRuntime.
class MOZ_RAII FallbackICCodeCompiler final {
  BaselineICFallbackCode& code;  // Receives offsets of the emitted trampolines.
  MacroAssembler& masm;

  JSContext* cx;
  // True between enterStubFrame/assumeStubFrame and leaveStubFrame; affects
  // how pushStubPayload locates the baseline frame pointer.
  bool inStubFrame_ = false;

#ifdef DEBUG
  bool entersStubFrame_ = false;
  uint32_t framePushedAtEnterStubFrame_ = 0;
#endif

  // Shared emitters parameterized by op flavor (see emit_* wrappers below).
  [[nodiscard]] bool emitCall(bool isSpread, bool isConstructing);
  [[nodiscard]] bool emitGetElem(bool hasReceiver);
  [[nodiscard]] bool emitGetProp(bool hasReceiver);

 public:
  FallbackICCodeCompiler(JSContext* cx, BaselineICFallbackCode& code,
                         MacroAssembler& masm)
      : code(code), masm(masm), cx(cx) {}

  // Declare one emit_<Kind>() per BaselineICFallbackKind.
#define DEF_METHOD(kind) [[nodiscard]] bool emit_##kind();
  IC_BASELINE_FALLBACK_CODE_KIND_LIST(DEF_METHOD)
#undef DEF_METHOD

  void pushCallArguments(MacroAssembler& masm,
                         AllocatableGeneralRegisterSet regs, Register argcReg,
                         bool isConstructing);

  // Push a payload specialized per compiler needed to execute stubs.
  void PushStubPayload(MacroAssembler& masm, Register scratch);
  void pushStubPayload(MacroAssembler& masm, Register scratch);

  // Emits a tail call to a VMFunction wrapper.
  [[nodiscard]] bool tailCallVMInternal(MacroAssembler& masm, VMFunctionId id);

  template <typename Fn, Fn fn>
  [[nodiscard]] bool tailCallVM(MacroAssembler& masm);

  // Emits a normal (non-tail) call to a VMFunction wrapper.
  [[nodiscard]] bool callVMInternal(MacroAssembler& masm, VMFunctionId id);

  template <typename Fn, Fn fn>
  [[nodiscard]] bool callVM(MacroAssembler& masm);

  // A stub frame is used when a stub wants to call into the VM without
  // performing a tail call. This is required for the return address
  // to pc mapping to work.
  void enterStubFrame(MacroAssembler& masm, Register scratch);
  void assumeStubFrame();
  void leaveStubFrame(MacroAssembler& masm);
};

// Returns the general-purpose registers an IC may freely use, after reserving
// the per-architecture pinned registers, ICStubReg, and the first |numInputs|
// value operand registers (R0/R1).
AllocatableGeneralRegisterSet BaselineICAvailableGeneralRegs(size_t numInputs) {
  AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
  MOZ_ASSERT(!regs.has(FramePointer));
#if defined(JS_CODEGEN_ARM)
  MOZ_ASSERT(!regs.has(ICTailCallReg));
  regs.take(BaselineSecondScratchReg);
#elif defined(JS_CODEGEN_MIPS64)
  MOZ_ASSERT(!regs.has(ICTailCallReg));
  MOZ_ASSERT(!regs.has(CallReg));
#elif defined(JS_CODEGEN_ARM64)
  MOZ_ASSERT(!regs.has(PseudoStackPointer));
  MOZ_ASSERT(!regs.has(RealStackPointer));
  MOZ_ASSERT(!regs.has(ICTailCallReg));
#endif
  regs.take(ICStubReg);

  switch (numInputs) {
    case 0:
      break;
    case 1:
      regs.take(R0);
      break;
    case 2:
      regs.take(R0);
      regs.take(R1);
      break;
    default:
      MOZ_CRASH("Invalid numInputs");
  }

  return regs;
}

// Map a fallback stub back to the bytecode pc it belongs to, using the pc
// offset stored in the stub.
static jsbytecode* StubOffsetToPc(const ICFallbackStub* stub,
                                  const JSScript* script) {
  return script->offsetToPC(stub->pcOffset());
}

#ifdef JS_JITSPEW
// Spew a diagnostic line (script location, warm-up count, stub count, and a
// printf-style message) each time a fallback stub is hit.
void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...)
{
  if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
    RootedScript script(cx, GetTopJitJSScript(cx));
    jsbytecode* pc = StubOffsetToPc(stub, script);

    // Format the caller's message into a fixed-size buffer; truncation is
    // acceptable for spew output.
    char fmtbuf[100];
    va_list args;
    va_start(args, fmt);
    (void)VsprintfLiteral(fmtbuf, fmt, args);
    va_end(args);

    JitSpew(
        JitSpew_BaselineICFallback,
        "Fallback hit for (%s:%u:%u) (pc=%zu,line=%u,uses=%u,stubs=%zu): %s",
        script->filename(), script->lineno(), script->column().oneOriginValue(),
        script->pcToOffset(pc), PCToLineNumber(script, pc),
        script->getWarmUpCount(), stub->numOptimizedStubs(), fmtbuf);
  }
}
#endif  // JS_JITSPEW

// Trace all CacheIR stubs in this entry's chain. The chain is terminated by
// |fallbackStub|, which is not traced.
void ICEntry::trace(JSTracer* trc, ICFallbackStub* fallbackStub) {
  ICStub* stub = firstStub();

  // Trace CacheIR stubs.
  while (stub != fallbackStub) {
    stub->toCacheIRStub()->trace(trc);
    stub = stub->toCacheIRStub()->next();
  }

  // Fallback stubs use runtime-wide trampoline code we don't need to trace.
  MOZ_ASSERT(stub->usesTrampolineCode());
}

// Weak-trace the stub chain; returns false if any stub was unlinked because
// it held a weak pointer to a dead GC thing.
bool ICEntry::traceWeak(JSTracer* trc, ICFallbackStub* fallbackStub) {
  // Trace CacheIR stubs and remove those containing weak pointers to dead GC
  // things. Prebarriers are not necessary because this happens as part of GC.

  ICStub* stub = firstStub();
  ICCacheIRStub* prev = nullptr;
  bool allSurvived = true;
  while (stub != fallbackStub) {
    ICCacheIRStub* cacheIRStub = stub->toCacheIRStub();
    if (!cacheIRStub->traceWeak(trc)) {
      // Dead: splice this stub out of the chain (no barrier needed in GC).
      fallbackStub->unlinkStubUnbarriered(this, prev, cacheIRStub);
      allSurvived = false;
    } else {
      prev = cacheIRStub;
    }

    stub = cacheIRStub->next();
    MOZ_ASSERT_IF(prev, prev->next() == stub);
  }

  // Clear the folded stubs flag if we know for sure that there are none
  // left. The flag will remain set if we have removed all folded stubs but
  // other stubs remain.
  if (fallbackStub->numOptimizedStubs() == 0 &&
      fallbackStub->mayHaveFoldedStub()) {
    fallbackStub->clearMayHaveFoldedStub();
  }

#ifdef DEBUG
  // Verify the bookkeeping: the ICState's stub count must match the number of
  // stubs actually left on the chain.
  size_t count = 0;
  for (ICStub* stub = firstStub(); stub != fallbackStub;
       stub = stub->toCacheIRStub()->next()) {
    count++;
  }
  MOZ_ASSERT(count == fallbackStub->state().numOptimizedStubs());
#endif

  return allSurvived;
}

// constexpr table mapping JSOp to BaselineICFallbackKind. Each value in the
// table is either a fallback kind or a sentinel value (NoICValue) indicating
// the JSOp is not a JOF_IC op.
class MOZ_STATIC_CLASS OpToFallbackKindTable {
  static_assert(sizeof(BaselineICFallbackKind) == sizeof(uint8_t));
  uint8_t table_[JSOP_LIMIT] = {};

  constexpr void setKind(JSOp op, BaselineICFallbackKind kind) {
    MOZ_ASSERT(uint8_t(kind) != NoICValue);
    table_[size_t(op)] = uint8_t(kind);
  }

 public:
  // Sentinel: Count is one past the last real kind, so it can't collide.
  static constexpr uint8_t NoICValue = uint8_t(BaselineICFallbackKind::Count);

  uint8_t lookup(JSOp op) const { return table_[size_t(op)]; }

  constexpr OpToFallbackKindTable() {
    // Default every op to "no IC"; the setKind calls below opt ops in.
    for (size_t i = 0; i < JSOP_LIMIT; i++) {
      table_[i] = NoICValue;
    }

    setKind(JSOp::Not, BaselineICFallbackKind::ToBool);
    setKind(JSOp::And, BaselineICFallbackKind::ToBool);
    setKind(JSOp::Or, BaselineICFallbackKind::ToBool);
    setKind(JSOp::JumpIfTrue, BaselineICFallbackKind::ToBool);
    setKind(JSOp::JumpIfFalse, BaselineICFallbackKind::ToBool);

    setKind(JSOp::BitNot, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Pos, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Neg, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Inc, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::Dec, BaselineICFallbackKind::UnaryArith);
    setKind(JSOp::ToNumeric, BaselineICFallbackKind::UnaryArith);

    setKind(JSOp::BitOr, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::BitXor, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::BitAnd, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Lsh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Rsh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Ursh, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Add, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Sub, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Mul, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Div, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Mod, BaselineICFallbackKind::BinaryArith);
    setKind(JSOp::Pow, BaselineICFallbackKind::BinaryArith);

    setKind(JSOp::Eq, BaselineICFallbackKind::Compare);
    setKind(JSOp::Ne, BaselineICFallbackKind::Compare);
    setKind(JSOp::Lt, BaselineICFallbackKind::Compare);
    setKind(JSOp::Le, BaselineICFallbackKind::Compare);
    setKind(JSOp::Gt, BaselineICFallbackKind::Compare);
    setKind(JSOp::Ge, BaselineICFallbackKind::Compare);
    setKind(JSOp::StrictEq, BaselineICFallbackKind::Compare);
    setKind(JSOp::StrictNe, BaselineICFallbackKind::Compare);

    setKind(JSOp::NewArray, BaselineICFallbackKind::NewArray);

    setKind(JSOp::NewObject, BaselineICFallbackKind::NewObject);
    setKind(JSOp::NewInit, BaselineICFallbackKind::NewObject);

    setKind(JSOp::Lambda, BaselineICFallbackKind::Lambda);

    setKind(JSOp::InitElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitHiddenElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitLockedElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::InitElemInc, BaselineICFallbackKind::SetElem);
    setKind(JSOp::SetElem, BaselineICFallbackKind::SetElem);
    setKind(JSOp::StrictSetElem, BaselineICFallbackKind::SetElem);

    setKind(JSOp::InitProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitLockedProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitHiddenProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::InitGLexical, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetProp, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::SetGName, BaselineICFallbackKind::SetProp);
    setKind(JSOp::StrictSetGName, BaselineICFallbackKind::SetProp);

    setKind(JSOp::GetProp, BaselineICFallbackKind::GetProp);
    setKind(JSOp::GetBoundName, BaselineICFallbackKind::GetProp);

    setKind(JSOp::GetPropSuper, BaselineICFallbackKind::GetPropSuper);

    setKind(JSOp::GetElem, BaselineICFallbackKind::GetElem);

    setKind(JSOp::GetElemSuper, BaselineICFallbackKind::GetElemSuper);

    setKind(JSOp::In, BaselineICFallbackKind::In);

    setKind(JSOp::HasOwn, BaselineICFallbackKind::HasOwn);

    setKind(JSOp::CheckPrivateField, BaselineICFallbackKind::CheckPrivateField);

    setKind(JSOp::GetName, BaselineICFallbackKind::GetName);
    setKind(JSOp::GetGName, BaselineICFallbackKind::GetName);

    setKind(JSOp::BindName, BaselineICFallbackKind::BindName);
    setKind(JSOp::BindUnqualifiedName, BaselineICFallbackKind::BindName);
    setKind(JSOp::BindUnqualifiedGName, BaselineICFallbackKind::BindName);

    setKind(JSOp::GetIntrinsic, BaselineICFallbackKind::LazyConstant);
    setKind(JSOp::BuiltinObject, BaselineICFallbackKind::LazyConstant);
    setKind(JSOp::ImportMeta, BaselineICFallbackKind::LazyConstant);

    // Non-constructing call flavors all share the Call fallback.
    setKind(JSOp::Call, BaselineICFallbackKind::Call);
    setKind(JSOp::CallContent, BaselineICFallbackKind::Call);
    setKind(JSOp::CallIgnoresRv, BaselineICFallbackKind::Call);
    setKind(JSOp::CallIter, BaselineICFallbackKind::Call);
    setKind(JSOp::CallContentIter, BaselineICFallbackKind::Call);
    setKind(JSOp::Eval, BaselineICFallbackKind::Call);
    setKind(JSOp::StrictEval, BaselineICFallbackKind::Call);

    setKind(JSOp::SuperCall, BaselineICFallbackKind::CallConstructing);
    setKind(JSOp::New, BaselineICFallbackKind::CallConstructing);
    setKind(JSOp::NewContent, BaselineICFallbackKind::CallConstructing);

    setKind(JSOp::SpreadCall, BaselineICFallbackKind::SpreadCall);
    setKind(JSOp::SpreadEval, BaselineICFallbackKind::SpreadCall);
    setKind(JSOp::StrictSpreadEval, BaselineICFallbackKind::SpreadCall);

    setKind(JSOp::SpreadSuperCall,
            BaselineICFallbackKind::SpreadCallConstructing);
    setKind(JSOp::SpreadNew, BaselineICFallbackKind::SpreadCallConstructing);

    setKind(JSOp::Instanceof, BaselineICFallbackKind::InstanceOf);

    setKind(JSOp::Typeof, BaselineICFallbackKind::TypeOf);
    setKind(JSOp::TypeofExpr, BaselineICFallbackKind::TypeOf);

    setKind(JSOp::TypeofEq, BaselineICFallbackKind::TypeOfEq);

    setKind(JSOp::ToPropertyKey, BaselineICFallbackKind::ToPropertyKey);

    setKind(JSOp::Iter, BaselineICFallbackKind::GetIterator);

    setKind(JSOp::OptimizeSpreadCall,
            BaselineICFallbackKind::OptimizeSpreadCall);

    setKind(JSOp::Rest, BaselineICFallbackKind::Rest);

    setKind(JSOp::CloseIter, BaselineICFallbackKind::CloseIter);
    setKind(JSOp::OptimizeGetIterator,
            BaselineICFallbackKind::OptimizeGetIterator);

    setKind(JSOp::GetImport, BaselineICFallbackKind::GetImport);
  }
};

// Singleton instance, built entirely at compile time.
static constexpr OpToFallbackKindTable FallbackKindTable;

// Allocate and initialize one ICEntry + ICFallbackStub pair for every JOF_IC
// op in |script|, wiring each fallback stub to the shared trampoline for its
// kind.
void ICScript::initICEntries(JSContext* cx, JSScript* script) {
  MOZ_ASSERT(cx->zone()->jitZone());
  MOZ_ASSERT(jit::IsBaselineInterpreterEnabled() ||
             jit::IsPortableBaselineInterpreterEnabled());

  MOZ_ASSERT(numICEntries() == script->numICEntries());

  // Index of the next ICEntry to initialize.
  uint32_t icEntryIndex = 0;

  const BaselineICFallbackCode& fallbackCode =
      cx->runtime()->jitRuntime()->baselineICFallbackCode();

  // For JOF_IC ops: initialize ICEntries and fallback stubs.
  for (BytecodeLocation loc : js::AllBytecodesIterable(script)) {
    JSOp op = loc.getOp();

    // Assert the frontend stored the correct IC index in jump target ops.
    MOZ_ASSERT_IF(BytecodeIsJumpTarget(op), loc.icIndex() == icEntryIndex);

    uint8_t tableValue = FallbackKindTable.lookup(op);

    if (tableValue == OpToFallbackKindTable::NoICValue) {
      MOZ_ASSERT(!BytecodeOpHasIC(op),
                 "Missing entry in OpToFallbackKindTable for JOF_IC op");
      continue;
    }

    MOZ_ASSERT(BytecodeOpHasIC(op),
               "Unexpected fallback kind for non-JOF_IC op");

    BaselineICFallbackKind kind = BaselineICFallbackKind(tableValue);
    // Under the portable baseline interpreter the "code" pointer is a
    // portable fallback handler rather than jitted trampoline code.
    TrampolinePtr stubCode =
#ifdef ENABLE_PORTABLE_BASELINE_INTERP
        !jit::IsPortableBaselineInterpreterEnabled()
            ? fallbackCode.addr(kind)
            : TrampolinePtr(js::pbl::GetPortableFallbackStub(kind));
#else
        fallbackCode.addr(kind);
#endif

    // Initialize the ICEntry and ICFallbackStub.
    uint32_t offset = loc.bytecodeToOffset(script);
    ICEntry& entryRef = this->icEntry(icEntryIndex);
    ICFallbackStub* stub = fallbackStub(icEntryIndex);
    icEntryIndex++;
    // Placement-new into preallocated storage owned by this ICScript.
    new (&entryRef) ICEntry(stub);
    new (stub) ICFallbackStub(offset, stubCode);
  }

  // Assert all ICEntries have been initialized.
  MOZ_ASSERT(icEntryIndex == numICEntries());
}

// Returns true if the IC kind for |op| can use polymorphic type data (only
// the ToBool, TypeOf and TypeOfEq fallbacks).
bool ICSupportsPolymorphicTypeData(JSOp op) {
  MOZ_ASSERT(BytecodeOpHasIC(op));
  BaselineICFallbackKind kind =
      BaselineICFallbackKind(FallbackKindTable.lookup(op));
  switch (kind) {
    case BaselineICFallbackKind::ToBool:
    case BaselineICFallbackKind::TypeOf:
    case BaselineICFallbackKind::TypeOfEq:
      return true;
    default:
      return false;
  }
}

bool ICCacheIRStub::makesGCCalls() const { return stubInfo()->makesGCCalls(); }

void ICFallbackStub::trackNotAttached() { state().trackNotAttached(); }

// When we enter a baseline fallback stub, if a Warp compilation
// exists that transpiled that IC, we notify that compilation. This
// helps the bailout code tell whether a bailing instruction hoisted
// by LICM would have been executed anyway.
static void MaybeNotifyWarp(JSScript* script, ICFallbackStub* stub) {
  if (stub->state().usedByTranspiler() && script->hasIonScript()) {
    script->ionScript()->noteBaselineFallback();
  }
}

// Trace this stub's jit code (if any) plus all GC things referenced from its
// CacheIR stub data.
void ICCacheIRStub::trace(JSTracer* trc) {
  if (hasJitCode()) {
    JitCode* stubJitCode = jitCode();
    TraceManuallyBarrieredEdge(trc, &stubJitCode, "baseline-ic-stub-code");
  }

  TraceCacheIRStub(trc, this, stubInfo());
}

bool ICCacheIRStub::traceWeak(JSTracer* trc) {
  return TraceWeakCacheIRStub(trc, this, stubInfo());
}

// If the ICState wants to transition (e.g. toward megamorphic), first try to
// fold similar stubs, then discard the attached stubs on an actual
// transition (optionally emitting a CacheIR health report first).
static void MaybeTransition(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub) {
  if (stub->state().shouldTransition()) {
    if (!TryFoldingStubs(cx, stub, frame->script(), frame->icScript())) {
      // Folding failure is non-fatal; just recover and continue.
      cx->recoverFromOutOfMemory();
    }
    if (stub->state().maybeTransition()) {
      ICEntry* icEntry = frame->icScript()->icEntryForStub(stub);
#ifdef JS_CACHEIR_SPEW
      if (cx->spewer().enabled(cx, frame->script(),
                               SpewChannel::CacheIRHealthReport)) {
        CacheIRHealth cih;
        RootedScript script(cx,
 frame->script());
        cih.healthReportForIC(cx, icEntry, stub, script,
                              SpewContext::Transition);
      }
#endif
      stub->discardStubs(cx->zone(), icEntry);
    }
  }
}

// This helper handles ICState updates/transitions while attaching CacheIR
// stubs.
template <typename IRGenerator, typename... Args>
static void TryAttachStub(const char* name, JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, Args&&... args) {
  MaybeTransition(cx, frame, stub);

  if (stub->state().canAttachStub()) {
    RootedScript script(cx, frame->script());
    ICScript* icScript = frame->icScript();
    jsbytecode* pc = StubOffsetToPc(stub, script);
    bool attached = false;
    // Forward the caller's operands to the kind-specific IR generator.
    IRGenerator gen(cx, script, pc, stub->state(), std::forward<Args>(args)...);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICAttachResult result =
            AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                      script, icScript, stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached %s CacheIR stub", name);
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        // Deferred attachment is only handled by specialized fallbacks
        // (e.g. DoSetElemFallback), never by this generic helper.
        MOZ_ASSERT_UNREACHABLE("Not expected in generic TryAttachStub");
        break;
    }
    if (!attached) {
      stub->trackNotAttached();
    }
  }
}

void ICFallbackStub::unlinkStub(Zone* zone, ICEntry* icEntry,
                                ICCacheIRStub* prev, ICCacheIRStub* stub) {
  // We are removing edges from ICStub to gcthings. Perform a barrier to let the
  // GC know about those edges.
  PreWriteBarrier(zone, stub);

  unlinkStubUnbarriered(icEntry, prev, stub);
}

// Splice |stub| out of the entry's singly-linked stub chain. |prev| is the
// stub preceding it, or nullptr if |stub| is the first stub.
void ICFallbackStub::unlinkStubUnbarriered(ICEntry* icEntry,
                                           ICCacheIRStub* prev,
                                           ICCacheIRStub* stub) {
  if (prev) {
    MOZ_ASSERT(prev->next() == stub);
    prev->setNext(stub->next());
  } else {
    MOZ_ASSERT(icEntry->firstStub() == stub);
    icEntry->setFirstStub(stub->next());
  }

  state_.trackUnlinkedStub();

#ifdef DEBUG
  // Poison stub code to ensure we don't call this stub again. However, if
  // this stub can make calls, a pointer to it may be stored in a stub frame
  // on the stack, so we can't touch the stubCode_ or GC will crash when
  // tracing this pointer.
  if (!stub->makesGCCalls()) {
    stub->stubCode_ = (uint8_t*)0xbad;
  }
#endif
}

// Unlink every CacheIR stub ahead of this fallback stub. Since we always
// remove the current head, |prev| is nullptr on each iteration.
void ICFallbackStub::discardStubs(Zone* zone, ICEntry* icEntry) {
  ICStub* stub = icEntry->firstStub();
  while (stub != this) {
    unlinkStub(zone, icEntry, /* prev = */ nullptr, stub->toCacheIRStub());
    stub = stub->toCacheIRStub()->next();
  }
  clearMayHaveFoldedStub();
}

// Set up the assembler state shared by every fallback trampoline.
static void InitMacroAssemblerForICStub(StackMacroAssembler& masm) {
#ifndef JS_USE_LINK_REGISTER
  // The first value contains the return address,
  // which we pull into ICTailCallReg for tail calls.
  masm.adjustFrame(sizeof(intptr_t));
#endif
#ifdef JS_CODEGEN_ARM
  masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif
}

// Emit a tail call to the VM wrapper for |id|; the wrapper is responsible
// for popping the function's explicit stack arguments (argSize bytes).
bool FallbackICCodeCompiler::tailCallVMInternal(MacroAssembler& masm,
                                                VMFunctionId id) {
  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
  const VMFunctionData& fun = GetVMFunction(id);
  uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
  EmitBaselineTailCallVM(code, masm, argSize);
  return true;
}

// Emit a non-tail VM call; requires an active stub frame (see
// enterStubFrame).
bool FallbackICCodeCompiler::callVMInternal(MacroAssembler& masm,
                                            VMFunctionId id) {
  MOZ_ASSERT(inStubFrame_);

  TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);

  EmitBaselineCallVM(code, masm);
  return true;
}

// Typed convenience wrapper: resolve |fn| to its VMFunctionId and call it.
template <typename Fn, Fn fn>
bool FallbackICCodeCompiler::callVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  return callVMInternal(masm, id);
}

// Typed convenience wrapper: resolve |fn| to its VMFunctionId and tail-call
// it.
template <typename Fn, Fn fn>
bool FallbackICCodeCompiler::tailCallVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  return tailCallVMInternal(masm, id);
}

// Push a stub frame so the stub can make non-tail VM calls.
void FallbackICCodeCompiler::enterStubFrame(MacroAssembler& masm,
                                            Register scratch) {
  EmitBaselineEnterStubFrame(masm, scratch);
#ifdef DEBUG
  framePushedAtEnterStubFrame_ = masm.framePushed();
#endif

  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;
#endif
}

// Mark that a stub frame already exists without emitting code. Used at
// bailout resume points, where the frame was reconstructed externally.
void FallbackICCodeCompiler::assumeStubFrame() {
  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;

  // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
  // be the stub frame layout and the pushed ICStub* so that assertions don't
  // fail in leaveStubFrame
  framePushedAtEnterStubFrame_ =
      BaselineStubFrameLayout::Size() + sizeof(ICStub*);
#endif
}
// Tear down the stub frame pushed by enterStubFrame/assumeStubFrame.
void FallbackICCodeCompiler::leaveStubFrame(MacroAssembler& masm) {
  MOZ_ASSERT(entersStubFrame_ && inStubFrame_);
  inStubFrame_ = false;

#ifdef DEBUG
  masm.setFramePushed(framePushedAtEnterStubFrame_);
#endif
  EmitBaselineLeaveStubFrame(masm);
}

// Push the BaselineFrame pointer VM fallbacks take as their frame argument.
// Inside a stub frame the baseline FP must first be loaded from the stub
// frame's saved slot.
void FallbackICCodeCompiler::pushStubPayload(MacroAssembler& masm,
                                             Register scratch) {
  if (inStubFrame_) {
    masm.loadPtr(Address(FramePointer, 0), scratch);
    masm.pushBaselineFramePtr(scratch, scratch);
  } else {
    masm.pushBaselineFramePtr(FramePointer, scratch);
  }
}

// Like pushStubPayload, but also accounts for the push in the tracked frame
// size.
void FallbackICCodeCompiler::PushStubPayload(MacroAssembler& masm,
                                             Register scratch) {
  pushStubPayload(masm, scratch);
  masm.adjustFrame(sizeof(intptr_t));
}

//
// ToBool_Fallback
//

// Fallback for the ToBool IC (JSOp::Not/And/Or/JumpIfTrue/JumpIfFalse):
// try to attach a CacheIR stub, then compute ToBoolean(arg).
bool DoToBoolFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                      HandleValue arg, MutableHandleValue ret) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "ToBool");

  TryAttachStub<ToBoolIRGenerator>("ToBool", cx, frame, stub, arg);

  bool cond = ToBoolean(arg);
  ret.setBoolean(cond);

  return true;
}

// Trampoline: forward the value in R0 to DoToBoolFallback via a tail call.
bool FallbackICCodeCompiler::emit_ToBool() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Push arguments.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoToBoolFallback>(masm);
}

//
// GetElem_Fallback
//

// Fallback for JSOp::GetElem: try to attach a CacheIR stub, then perform the
// generic element-get (lhs[rhs] with lhs also as receiver).
bool DoGetElemFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, HandleValue lhs, HandleValue rhs,
                       MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "GetElem");

#ifdef DEBUG
  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  MOZ_ASSERT(JSOp(*pc) == JSOp::GetElem);
#endif

  TryAttachStub<GetPropIRGenerator>("GetElem", cx, frame, stub,
                                    CacheKind::GetElem, lhs, rhs, lhs);

  if (!GetElementOperation(cx, lhs, rhs, res)) {
    return false;
  }

  return true;
}

// Fallback for JSOp::GetElemSuper: like DoGetElemFallback, but the property
// lookup happens on |lhs| (the super base) while |receiver| is the |this|
// value.
bool DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame,
                            ICFallbackStub* stub, HandleValue lhs,
                            HandleValue rhs, HandleValue receiver,
                            MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());

  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetElemSuper(%s)", CodeName(op));

  MOZ_ASSERT(op == JSOp::GetElemSuper);

  // |lhs| is [[HomeObject]].[[Prototype]] which must be an Object or null.
  MOZ_ASSERT(lhs.isObjectOrNull());

  int lhsIndex = -1;
  RootedObject lhsObj(
      cx, ToObjectFromStackForPropertyAccess(cx, lhs, lhsIndex, rhs));
  if (!lhsObj) {
    return false;
  }

  TryAttachStub<GetPropIRGenerator>("GetElemSuper", cx, frame, stub,
                                    CacheKind::GetElemSuper, lhs, rhs,
                                    receiver);

  return GetObjectElementOperation(cx, op, lhsObj, receiver, rhs, res);
}

// Shared trampoline for GetElem / GetElemSuper. When |hasReceiver| is true,
// the super variant's extra receiver value is passed through as well.
bool FallbackICCodeCompiler::emitGetElem(bool hasReceiver) {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Super property getters use a |this| that differs from base object
  if (hasReceiver) {
    // State: receiver in R0, index in R1, obj on the stack

    // Ensure stack is fully synced for the expression decompiler.
    // We need: receiver, index, obj
    masm.pushValue(R0);
    masm.pushValue(R1);
    masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 2));

    // Push arguments.
    masm.pushValue(R0);  // Receiver
    masm.pushValue(R1);  // Index
    masm.pushValue(Address(masm.getStackPointer(), sizeof(Value) * 5));  // Obj
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn =
        bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                 HandleValue, HandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetElemSuperFallback>(masm)) {
      return false;
    }
  } else {
    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

    using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*,
                        HandleValue, HandleValue, MutableHandleValue);
    if (!tailCallVM<Fn, DoGetElemFallback>(masm)) {
      return false;
    }
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
  assumeStubFrame();
  if (hasReceiver) {
    code.initBailoutReturnOffset(BailoutReturnKind::GetElemSuper,
                                 masm.currentOffset());
  } else {
    code.initBailoutReturnOffset(BailoutReturnKind::GetElem,
                                 masm.currentOffset());
  }

  leaveStubFrame(masm);

  EmitReturnFromIC(masm);
  return true;
}

bool FallbackICCodeCompiler::emit_GetElem() {
  return emitGetElem(/* hasReceiver = */ false);
}

bool FallbackICCodeCompiler::emit_GetElemSuper() {
  return emitGetElem(/* hasReceiver = */ true);
}

// Fallback for the SetElem family (SetElem/StrictSetElem/InitElem/
// InitHiddenElem/InitLockedElem/InitElemInc). Unlike the generic
// TryAttachStub path, this handles deferred (add-slot) attachment after the
// operation runs. |stack| (may be null) points at the synced stack values so
// the object pushed for the decompiler can be replaced with the rhs.
bool DoSetElemFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, Value* stack, HandleValue objv,
                       HandleValue index, HandleValue rhs) {
  using DeferType = SetPropIRGenerator::DeferType;

  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  RootedScript script(cx, frame->script());
  RootedScript outerScript(cx, script);
  jsbytecode* pc = StubOffsetToPc(stub, script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "SetElem(%s)", CodeName(JSOp(*pc)));

  MOZ_ASSERT(op == JSOp::SetElem || op == JSOp::StrictSetElem ||
             op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
             op == JSOp::InitLockedElem || op == JSOp::InitElemInc);

  // The object is the third value from the top of the synced stack.
  int objvIndex = -3;
  RootedObject obj(
      cx, ToObjectFromStackForPropertyAccess(cx, objv, objvIndex, index));
  if (!obj) {
    return false;
  }

  // Remember the pre-operation shape for the deferred add-slot path below.
  Rooted<Shape*> oldShape(cx, obj->shape());

  DeferType deferType = DeferType::None;
  bool attached = false;

  MaybeTransition(cx, frame, stub);

  // First attach attempt, before the set/init operation runs.
  if (stub->state().canAttachStub()) {
    ICScript* icScript = frame->icScript();
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state(),
                           objv, index, rhs);
    switch (gen.tryAttachStub()) {
      case AttachDecision::Attach: {
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        break;
      case AttachDecision::TemporarilyUnoptimizable:
        // Treat as attached so we don't count this as a failure below.
        attached = true;
        break;
      case AttachDecision::Deferred:
        // Attachment must wait until after the operation (add-slot case).
        deferType = gen.deferType();
        MOZ_ASSERT(deferType != DeferType::None);
        break;
    }
    if (deferType == DeferType::None && !attached) {
      stub->trackNotAttached();
    }
  }

  // Perform the actual operation for this op flavor.
  if (op == JSOp::InitElem || op == JSOp::InitHiddenElem ||
      op == JSOp::InitLockedElem) {
    if (!InitElemOperation(cx, pc, obj, index, rhs)) {
      return false;
    }
  } else if (op == JSOp::InitElemInc) {
    if (!InitElemIncOperation(cx, obj.as<ArrayObject>(), index.toInt32(),
                              rhs)) {
      return false;
    }
  } else {
    if (!SetObjectElementWithReceiver(cx, obj, index, rhs, objv,
                                      JSOp(*pc) == JSOp::StrictSetElem)) {
      return false;
    }
  }

  if (stack) {
    // Overwrite the object on the stack (pushed for the decompiler) with the
    // rhs.
    MOZ_ASSERT(stack[2] == objv);
    stack[2] = rhs;
  }

  if (attached) {
    return true;
  }

  // The SetObjectElement call might have entered this IC recursively, so try
  // to transition.
  MaybeTransition(cx, frame, stub);

  bool canAttachStub = stub->state().canAttachStub();

  // Second, deferred attach attempt: now that the slot exists, attach an
  // add-slot stub keyed on the pre-operation shape.
  if (deferType != DeferType::None && canAttachStub) {
    SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state(),
                           objv, index, rhs);

    MOZ_ASSERT(deferType == DeferType::AddSlot);
    AttachDecision decision = gen.tryAttachAddSlotStub(oldShape);

    switch (decision) {
      case AttachDecision::Attach: {
        ICScript* icScript = frame->icScript();
        ICAttachResult result = AttachBaselineCacheIRStub(
            cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript,
            stub, gen.stubName());
        if (result == ICAttachResult::Attached) {
          attached = true;
          JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub");
        }
      } break;
      case AttachDecision::NoAction:
        gen.trackAttached(IRGenerator::NotAttached);
        break;
      case AttachDecision::TemporarilyUnoptimizable:
      case AttachDecision::Deferred:
        MOZ_ASSERT_UNREACHABLE("Invalid attach result");
        break;
    }
    if (!attached) {
      stub->trackNotAttached();
    }
  }

  return true;
}

// Trampoline for the SetElem family: syncs object/index/rhs for the
// decompiler, then tail-calls DoSetElemFallback with a pointer to the synced
// values.
bool FallbackICCodeCompiler::emit_SetElem() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // State: R0: object, R1: index, stack: rhs.
  // For the decompiler, the stack has to be: object, index, rhs,
  // so we push the index, then overwrite the rhs Value with R0
  // and push the rhs value.
  masm.pushValue(R1);
  masm.loadValue(Address(masm.getStackPointer(), sizeof(Value)), R1);
  masm.storeValue(R0, Address(masm.getStackPointer(), sizeof(Value)));
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);  // RHS

  // Push index. On x86 and ARM two push instructions are emitted so use a
  // separate register to store the old stack pointer.
983 masm.moveStackPtrTo(R1.scratchReg()); 984 masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value))); 985 masm.pushValue(R0); // Object. 986 987 // Push pointer to stack values, so that the stub can overwrite the object 988 // (pushed for the decompiler) with the rhs. 989 masm.computeEffectiveAddress( 990 Address(masm.getStackPointer(), 3 * sizeof(Value)), R0.scratchReg()); 991 masm.push(R0.scratchReg()); 992 993 masm.push(ICStubReg); 994 pushStubPayload(masm, R0.scratchReg()); 995 996 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*, 997 HandleValue, HandleValue, HandleValue); 998 return tailCallVM<Fn, DoSetElemFallback>(masm); 999 } 1000 1001 // 1002 // In_Fallback 1003 // 1004 1005 bool DoInFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub, 1006 HandleValue key, HandleValue objValue, 1007 MutableHandleValue res) { 1008 stub->incrementEnteredCount(); 1009 MaybeNotifyWarp(frame->outerScript(), stub); 1010 FallbackICSpew(cx, stub, "In"); 1011 1012 if (!objValue.isObject()) { 1013 ReportInNotObjectError(cx, key, objValue); 1014 return false; 1015 } 1016 1017 TryAttachStub<HasPropIRGenerator>("In", cx, frame, stub, CacheKind::In, key, 1018 objValue); 1019 1020 RootedObject obj(cx, &objValue.toObject()); 1021 bool cond = false; 1022 if (!OperatorIn(cx, key, obj, &cond)) { 1023 return false; 1024 } 1025 res.setBoolean(cond); 1026 1027 return true; 1028 } 1029 1030 bool FallbackICCodeCompiler::emit_In() { 1031 EmitRestoreTailCallReg(masm); 1032 1033 // Sync for the decompiler. 1034 masm.pushValue(R0); 1035 masm.pushValue(R1); 1036 1037 // Push arguments. 
1038 masm.pushValue(R1); 1039 masm.pushValue(R0); 1040 masm.push(ICStubReg); 1041 pushStubPayload(masm, R0.scratchReg()); 1042 1043 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue, 1044 HandleValue, MutableHandleValue); 1045 return tailCallVM<Fn, DoInFallback>(masm); 1046 } 1047 1048 // 1049 // HasOwn_Fallback 1050 // 1051 1052 bool DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub, 1053 HandleValue keyValue, HandleValue objValue, 1054 MutableHandleValue res) { 1055 stub->incrementEnteredCount(); 1056 MaybeNotifyWarp(frame->outerScript(), stub); 1057 FallbackICSpew(cx, stub, "HasOwn"); 1058 1059 TryAttachStub<HasPropIRGenerator>("HasOwn", cx, frame, stub, 1060 CacheKind::HasOwn, keyValue, objValue); 1061 1062 bool found; 1063 if (!HasOwnProperty(cx, objValue, keyValue, &found)) { 1064 return false; 1065 } 1066 1067 res.setBoolean(found); 1068 return true; 1069 } 1070 1071 bool FallbackICCodeCompiler::emit_HasOwn() { 1072 EmitRestoreTailCallReg(masm); 1073 1074 // Sync for the decompiler. 1075 masm.pushValue(R0); 1076 masm.pushValue(R1); 1077 1078 // Push arguments. 
1079 masm.pushValue(R1); 1080 masm.pushValue(R0); 1081 masm.push(ICStubReg); 1082 pushStubPayload(masm, R0.scratchReg()); 1083 1084 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue, 1085 HandleValue, MutableHandleValue); 1086 return tailCallVM<Fn, DoHasOwnFallback>(masm); 1087 } 1088 1089 // 1090 // CheckPrivate_Fallback 1091 // 1092 1093 bool DoCheckPrivateFieldFallback(JSContext* cx, BaselineFrame* frame, 1094 ICFallbackStub* stub, HandleValue objValue, 1095 HandleValue keyValue, MutableHandleValue res) { 1096 stub->incrementEnteredCount(); 1097 MaybeNotifyWarp(frame->outerScript(), stub); 1098 1099 jsbytecode* pc = StubOffsetToPc(stub, frame->script()); 1100 1101 FallbackICSpew(cx, stub, "CheckPrivateField"); 1102 1103 MOZ_ASSERT(keyValue.isSymbol() && keyValue.toSymbol()->isPrivateName()); 1104 1105 TryAttachStub<CheckPrivateFieldIRGenerator>("CheckPrivate", cx, frame, stub, 1106 CacheKind::CheckPrivateField, 1107 keyValue, objValue); 1108 1109 bool result; 1110 if (!CheckPrivateFieldOperation(cx, pc, objValue, keyValue, &result)) { 1111 return false; 1112 } 1113 1114 res.setBoolean(result); 1115 return true; 1116 } 1117 1118 bool FallbackICCodeCompiler::emit_CheckPrivateField() { 1119 EmitRestoreTailCallReg(masm); 1120 1121 // Sync for the decompiler. 1122 masm.pushValue(R0); 1123 masm.pushValue(R1); 1124 1125 // Push arguments. 
1126 masm.pushValue(R1); 1127 masm.pushValue(R0); 1128 masm.push(ICStubReg); 1129 pushStubPayload(masm, R0.scratchReg()); 1130 1131 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue, 1132 HandleValue, MutableHandleValue); 1133 return tailCallVM<Fn, DoCheckPrivateFieldFallback>(masm); 1134 } 1135 1136 // 1137 // GetName_Fallback 1138 // 1139 1140 bool DoGetNameFallback(JSContext* cx, BaselineFrame* frame, 1141 ICFallbackStub* stub, HandleObject envChain, 1142 MutableHandleValue res) { 1143 stub->incrementEnteredCount(); 1144 MaybeNotifyWarp(frame->outerScript(), stub); 1145 1146 RootedScript script(cx, frame->script()); 1147 jsbytecode* pc = StubOffsetToPc(stub, script); 1148 mozilla::DebugOnly<JSOp> op = JSOp(*pc); 1149 FallbackICSpew(cx, stub, "GetName(%s)", CodeName(JSOp(*pc))); 1150 1151 MOZ_ASSERT(op == JSOp::GetName || op == JSOp::GetGName); 1152 1153 Rooted<PropertyName*> name(cx, script->getName(pc)); 1154 1155 TryAttachStub<GetNameIRGenerator>("GetName", cx, frame, stub, envChain, name); 1156 1157 static_assert(JSOpLength_GetGName == JSOpLength_GetName, 1158 "Otherwise our check for JSOp::Typeof isn't ok"); 1159 if (IsTypeOfNameOp(JSOp(pc[JSOpLength_GetGName]))) { 1160 if (!GetEnvironmentName<GetNameMode::TypeOf>(cx, envChain, name, res)) { 1161 return false; 1162 } 1163 } else { 1164 if (!GetEnvironmentName<GetNameMode::Normal>(cx, envChain, name, res)) { 1165 return false; 1166 } 1167 } 1168 1169 return true; 1170 } 1171 1172 bool FallbackICCodeCompiler::emit_GetName() { 1173 static_assert(R0 == JSReturnOperand); 1174 1175 EmitRestoreTailCallReg(masm); 1176 1177 masm.push(R0.scratchReg()); 1178 masm.push(ICStubReg); 1179 pushStubPayload(masm, R0.scratchReg()); 1180 1181 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject, 1182 MutableHandleValue); 1183 return tailCallVM<Fn, DoGetNameFallback>(masm); 1184 } 1185 1186 // 1187 // BindName_Fallback 1188 // 1189 1190 bool DoBindNameFallback(JSContext* cx, 
BaselineFrame* frame, 1191 ICFallbackStub* stub, HandleObject envChain, 1192 MutableHandleValue res) { 1193 stub->incrementEnteredCount(); 1194 MaybeNotifyWarp(frame->outerScript(), stub); 1195 1196 jsbytecode* pc = StubOffsetToPc(stub, frame->script()); 1197 JSOp op = JSOp(*pc); 1198 FallbackICSpew(cx, stub, "BindName(%s)", CodeName(JSOp(*pc))); 1199 1200 MOZ_ASSERT(op == JSOp::BindName || op == JSOp::BindUnqualifiedName || 1201 op == JSOp::BindUnqualifiedGName); 1202 1203 Rooted<PropertyName*> name(cx, frame->script()->getName(pc)); 1204 1205 TryAttachStub<BindNameIRGenerator>("BindName", cx, frame, stub, envChain, 1206 name); 1207 1208 JSObject* env; 1209 if (op == JSOp::BindName) { 1210 env = LookupNameWithGlobalDefault(cx, name, envChain); 1211 } else { 1212 env = LookupNameUnqualified(cx, name, envChain); 1213 } 1214 if (!env) { 1215 return false; 1216 } 1217 1218 res.setObject(*env); 1219 return true; 1220 } 1221 1222 bool FallbackICCodeCompiler::emit_BindName() { 1223 static_assert(R0 == JSReturnOperand); 1224 1225 EmitRestoreTailCallReg(masm); 1226 1227 masm.push(R0.scratchReg()); 1228 masm.push(ICStubReg); 1229 pushStubPayload(masm, R0.scratchReg()); 1230 1231 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject, 1232 MutableHandleValue); 1233 return tailCallVM<Fn, DoBindNameFallback>(masm); 1234 } 1235 1236 // 1237 // LazyConstant_Fallback 1238 // 1239 1240 bool DoLazyConstantFallback(JSContext* cx, BaselineFrame* frame, 1241 ICFallbackStub* stub, MutableHandleValue res) { 1242 stub->incrementEnteredCount(); 1243 MaybeNotifyWarp(frame->outerScript(), stub); 1244 1245 RootedScript script(cx, frame->script()); 1246 jsbytecode* pc = StubOffsetToPc(stub, script); 1247 JSOp op = JSOp(*pc); 1248 FallbackICSpew(cx, stub, "LazyConstant(%s)", CodeName(JSOp(*pc))); 1249 1250 MOZ_ASSERT(op == JSOp::GetIntrinsic || op == JSOp::BuiltinObject || 1251 op == JSOp::ImportMeta); 1252 1253 if (op == JSOp::GetIntrinsic) { 1254 if 
(!GetIntrinsicOperation(cx, script, pc, res)) { 1255 return false; 1256 } 1257 } else if (op == JSOp::BuiltinObject) { 1258 auto kind = BuiltinObjectKind(GET_UINT8(pc)); 1259 JSObject* builtinObject = BuiltinObjectOperation(cx, kind); 1260 if (!builtinObject) { 1261 return false; 1262 } 1263 res.setObject(*builtinObject); 1264 } else { 1265 JSObject* metaObject = ImportMetaOperation(cx, script); 1266 if (!metaObject) { 1267 return false; 1268 } 1269 res.setObject(*metaObject); 1270 } 1271 1272 TryAttachStub<LazyConstantIRGenerator>("LazyConstant", cx, frame, stub, res); 1273 1274 return true; 1275 } 1276 1277 bool FallbackICCodeCompiler::emit_LazyConstant() { 1278 EmitRestoreTailCallReg(masm); 1279 1280 masm.push(ICStubReg); 1281 pushStubPayload(masm, R0.scratchReg()); 1282 1283 using Fn = 1284 bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue); 1285 return tailCallVM<Fn, DoLazyConstantFallback>(masm); 1286 } 1287 1288 // 1289 // GetProp_Fallback 1290 // 1291 1292 bool DoGetPropFallback(JSContext* cx, BaselineFrame* frame, 1293 ICFallbackStub* stub, HandleValue val, 1294 MutableHandleValue res) { 1295 stub->incrementEnteredCount(); 1296 MaybeNotifyWarp(frame->outerScript(), stub); 1297 1298 RootedScript script(cx, frame->script()); 1299 jsbytecode* pc = StubOffsetToPc(stub, script); 1300 JSOp op = JSOp(*pc); 1301 FallbackICSpew(cx, stub, "GetProp(%s)", CodeName(op)); 1302 1303 MOZ_ASSERT(op == JSOp::GetProp || op == JSOp::GetBoundName); 1304 1305 Rooted<PropertyName*> name(cx, script->getName(pc)); 1306 RootedValue idVal(cx, StringValue(name)); 1307 1308 TryAttachStub<GetPropIRGenerator>("GetProp", cx, frame, stub, 1309 CacheKind::GetProp, val, idVal, val); 1310 1311 if (op == JSOp::GetBoundName) { 1312 RootedObject env(cx, &val.toObject()); 1313 RootedId id(cx, NameToId(name)); 1314 return GetNameBoundInEnvironment(cx, env, id, res); 1315 } 1316 1317 MOZ_ASSERT(op == JSOp::GetProp); 1318 if (!GetProperty(cx, val, name, res)) { 1319 return 
false; 1320 } 1321 1322 return true; 1323 } 1324 1325 bool DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, 1326 ICFallbackStub* stub, HandleValue receiver, 1327 HandleValue val, MutableHandleValue res) { 1328 stub->incrementEnteredCount(); 1329 MaybeNotifyWarp(frame->outerScript(), stub); 1330 1331 RootedScript script(cx, frame->script()); 1332 jsbytecode* pc = StubOffsetToPc(stub, script); 1333 FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName(JSOp(*pc))); 1334 1335 MOZ_ASSERT(JSOp(*pc) == JSOp::GetPropSuper); 1336 1337 Rooted<PropertyName*> name(cx, script->getName(pc)); 1338 RootedValue idVal(cx, StringValue(name)); 1339 1340 // |val| is [[HomeObject]].[[Prototype]] which must be an Object or null. 1341 MOZ_ASSERT(val.isObjectOrNull()); 1342 1343 int valIndex = -1; 1344 RootedObject valObj( 1345 cx, ToObjectFromStackForPropertyAccess(cx, val, valIndex, name)); 1346 if (!valObj) { 1347 return false; 1348 } 1349 1350 TryAttachStub<GetPropIRGenerator>("GetPropSuper", cx, frame, stub, 1351 CacheKind::GetPropSuper, val, idVal, 1352 receiver); 1353 1354 if (!GetProperty(cx, valObj, receiver, name, res)) { 1355 return false; 1356 } 1357 1358 return true; 1359 } 1360 1361 bool FallbackICCodeCompiler::emitGetProp(bool hasReceiver) { 1362 static_assert(R0 == JSReturnOperand); 1363 1364 EmitRestoreTailCallReg(masm); 1365 1366 // Super property getters use a |this| that differs from base object 1367 if (hasReceiver) { 1368 // Push arguments. 1369 masm.pushValue(R0); 1370 masm.pushValue(R1); 1371 masm.push(ICStubReg); 1372 masm.pushBaselineFramePtr(FramePointer, R0.scratchReg()); 1373 1374 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, 1375 HandleValue, HandleValue, MutableHandleValue); 1376 if (!tailCallVM<Fn, DoGetPropSuperFallback>(masm)) { 1377 return false; 1378 } 1379 } else { 1380 // Ensure stack is fully synced for the expression decompiler. 1381 masm.pushValue(R0); 1382 1383 // Push arguments. 
1384 masm.pushValue(R0); 1385 masm.push(ICStubReg); 1386 masm.pushBaselineFramePtr(FramePointer, R0.scratchReg()); 1387 1388 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, 1389 HandleValue, MutableHandleValue); 1390 if (!tailCallVM<Fn, DoGetPropFallback>(masm)) { 1391 return false; 1392 } 1393 } 1394 1395 // This is the resume point used when bailout rewrites call stack to undo 1396 // Ion inlined frames. The return address pushed onto reconstructed stack 1397 // will point here. 1398 assumeStubFrame(); 1399 if (hasReceiver) { 1400 code.initBailoutReturnOffset(BailoutReturnKind::GetPropSuper, 1401 masm.currentOffset()); 1402 } else { 1403 code.initBailoutReturnOffset(BailoutReturnKind::GetProp, 1404 masm.currentOffset()); 1405 } 1406 1407 leaveStubFrame(masm); 1408 1409 EmitReturnFromIC(masm); 1410 return true; 1411 } 1412 1413 bool FallbackICCodeCompiler::emit_GetProp() { 1414 return emitGetProp(/* hasReceiver = */ false); 1415 } 1416 1417 bool FallbackICCodeCompiler::emit_GetPropSuper() { 1418 return emitGetProp(/* hasReceiver = */ true); 1419 } 1420 1421 // 1422 // SetProp_Fallback 1423 // 1424 1425 bool DoSetPropFallback(JSContext* cx, BaselineFrame* frame, 1426 ICFallbackStub* stub, Value* stack, HandleValue lhs, 1427 HandleValue rhs) { 1428 using DeferType = SetPropIRGenerator::DeferType; 1429 1430 stub->incrementEnteredCount(); 1431 MaybeNotifyWarp(frame->outerScript(), stub); 1432 1433 RootedScript script(cx, frame->script()); 1434 jsbytecode* pc = StubOffsetToPc(stub, script); 1435 JSOp op = JSOp(*pc); 1436 FallbackICSpew(cx, stub, "SetProp(%s)", CodeName(op)); 1437 1438 MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp || 1439 op == JSOp::SetName || op == JSOp::StrictSetName || 1440 op == JSOp::SetGName || op == JSOp::StrictSetGName || 1441 op == JSOp::InitProp || op == JSOp::InitLockedProp || 1442 op == JSOp::InitHiddenProp || op == JSOp::InitGLexical); 1443 1444 Rooted<PropertyName*> name(cx, script->getName(pc)); 1445 
RootedId id(cx, NameToId(name)); 1446 1447 int lhsIndex = stack ? -2 : JSDVG_IGNORE_STACK; 1448 RootedObject obj(cx, 1449 ToObjectFromStackForPropertyAccess(cx, lhs, lhsIndex, id)); 1450 if (!obj) { 1451 return false; 1452 } 1453 Rooted<Shape*> oldShape(cx, obj->shape()); 1454 1455 DeferType deferType = DeferType::None; 1456 bool attached = false; 1457 MaybeTransition(cx, frame, stub); 1458 1459 if (stub->state().canAttachStub()) { 1460 RootedValue idVal(cx, StringValue(name)); 1461 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state(), 1462 lhs, idVal, rhs); 1463 switch (gen.tryAttachStub()) { 1464 case AttachDecision::Attach: { 1465 ICScript* icScript = frame->icScript(); 1466 ICAttachResult result = AttachBaselineCacheIRStub( 1467 cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript, 1468 stub, gen.stubName()); 1469 if (result == ICAttachResult::Attached) { 1470 attached = true; 1471 JitSpew(JitSpew_BaselineIC, " Attached SetProp CacheIR stub"); 1472 } 1473 } break; 1474 case AttachDecision::NoAction: 1475 break; 1476 case AttachDecision::TemporarilyUnoptimizable: 1477 attached = true; 1478 break; 1479 case AttachDecision::Deferred: 1480 deferType = gen.deferType(); 1481 MOZ_ASSERT(deferType != DeferType::None); 1482 break; 1483 } 1484 if (deferType == DeferType::None && !attached) { 1485 stub->trackNotAttached(); 1486 } 1487 } 1488 1489 if (op == JSOp::InitProp || op == JSOp::InitLockedProp || 1490 op == JSOp::InitHiddenProp) { 1491 if (!InitPropertyOperation(cx, pc, obj, name, rhs)) { 1492 return false; 1493 } 1494 } else if (op == JSOp::SetName || op == JSOp::StrictSetName || 1495 op == JSOp::SetGName || op == JSOp::StrictSetGName) { 1496 if (!SetNameOperation(cx, script, pc, obj, rhs)) { 1497 return false; 1498 } 1499 } else if (op == JSOp::InitGLexical) { 1500 ExtensibleLexicalEnvironmentObject* lexicalEnv; 1501 if (script->hasNonSyntacticScope()) { 1502 lexicalEnv = &NearestEnclosingExtensibleLexicalEnvironment( 1503 
frame->environmentChain()); 1504 } else { 1505 lexicalEnv = &cx->global()->lexicalEnvironment(); 1506 } 1507 InitGlobalLexicalOperation(cx, lexicalEnv, script, pc, rhs); 1508 } else { 1509 MOZ_ASSERT(op == JSOp::SetProp || op == JSOp::StrictSetProp); 1510 1511 ObjectOpResult result; 1512 if (!SetProperty(cx, obj, id, rhs, lhs, result) || 1513 !result.checkStrictModeError(cx, obj, id, op == JSOp::StrictSetProp)) { 1514 return false; 1515 } 1516 } 1517 1518 if (stack) { 1519 // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS. 1520 MOZ_ASSERT(stack[1] == lhs); 1521 stack[1] = rhs; 1522 } 1523 1524 if (attached) { 1525 return true; 1526 } 1527 1528 // The SetProperty call might have entered this IC recursively, so try 1529 // to transition. 1530 MaybeTransition(cx, frame, stub); 1531 1532 bool canAttachStub = stub->state().canAttachStub(); 1533 1534 if (deferType != DeferType::None && canAttachStub) { 1535 RootedValue idVal(cx, StringValue(name)); 1536 SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state(), 1537 lhs, idVal, rhs); 1538 1539 MOZ_ASSERT(deferType == DeferType::AddSlot); 1540 AttachDecision decision = gen.tryAttachAddSlotStub(oldShape); 1541 1542 switch (decision) { 1543 case AttachDecision::Attach: { 1544 ICScript* icScript = frame->icScript(); 1545 ICAttachResult result = AttachBaselineCacheIRStub( 1546 cx, gen.writerRef(), gen.cacheKind(), frame->script(), icScript, 1547 stub, gen.stubName()); 1548 if (result == ICAttachResult::Attached) { 1549 attached = true; 1550 JitSpew(JitSpew_BaselineIC, " Attached SetElem CacheIR stub"); 1551 } 1552 } break; 1553 case AttachDecision::NoAction: 1554 gen.trackAttached(IRGenerator::NotAttached); 1555 break; 1556 case AttachDecision::TemporarilyUnoptimizable: 1557 case AttachDecision::Deferred: 1558 MOZ_ASSERT_UNREACHABLE("Invalid attach result"); 1559 break; 1560 } 1561 if (!attached) { 1562 stub->trackNotAttached(); 1563 } 1564 } 1565 1566 return true; 1567 } 1568 1569 bool 
FallbackICCodeCompiler::emit_SetProp() {
  static_assert(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  // Overwrite the RHS value on top of the stack with the object, then push
  // the RHS in R1 on top of that.
  masm.storeValue(R0, Address(masm.getStackPointer(), 0));
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);

  // Push pointer to stack values, so that the stub can overwrite the object
  // (pushed for the decompiler) with the RHS.
  masm.computeEffectiveAddress(
      Address(masm.getStackPointer(), 2 * sizeof(Value)), R0.scratchReg());
  masm.push(R0.scratchReg());

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*,
                      HandleValue, HandleValue);
  if (!tailCallVM<Fn, DoSetPropFallback>(masm)) {
    return false;
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
1602 assumeStubFrame(); 1603 code.initBailoutReturnOffset(BailoutReturnKind::SetProp, 1604 masm.currentOffset()); 1605 1606 leaveStubFrame(masm); 1607 EmitReturnFromIC(masm); 1608 1609 return true; 1610 } 1611 1612 // 1613 // Call_Fallback 1614 // 1615 1616 bool DoCallFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub, 1617 uint32_t argc, Value* vp, MutableHandleValue res) { 1618 stub->incrementEnteredCount(); 1619 MaybeNotifyWarp(frame->outerScript(), stub); 1620 1621 RootedScript script(cx, frame->script()); 1622 jsbytecode* pc = StubOffsetToPc(stub, script); 1623 JSOp op = JSOp(*pc); 1624 FallbackICSpew(cx, stub, "Call(%s)", CodeName(op)); 1625 1626 MOZ_ASSERT(argc == GET_ARGC(pc)); 1627 bool constructing = 1628 (op == JSOp::New || op == JSOp::NewContent || op == JSOp::SuperCall); 1629 bool ignoresReturnValue = (op == JSOp::CallIgnoresRv); 1630 1631 // Ensure vp array is rooted - we may GC in here. 1632 size_t numValues = argc + 2 + constructing; 1633 RootedExternalValueArray vpRoot(cx, numValues, vp); 1634 1635 CallArgs callArgs = CallArgsFromSp(argc + constructing, vp + numValues, 1636 constructing, ignoresReturnValue); 1637 RootedValue callee(cx, vp[0]); 1638 RootedValue newTarget(cx, constructing ? callArgs.newTarget() : NullValue()); 1639 1640 // Transition stub state to megamorphic or generic if warranted. 1641 MaybeTransition(cx, frame, stub); 1642 1643 bool canAttachStub = stub->state().canAttachStub(); 1644 bool handled = false; 1645 1646 // Only bother to try optimizing JSOp::Call with CacheIR if the chain is still 1647 // allowed to attach stubs. 
1648 if (canAttachStub) { 1649 HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2); 1650 CallIRGenerator gen(cx, script, pc, stub->state(), frame, argc, callee, 1651 callArgs.thisv(), newTarget, args); 1652 switch (gen.tryAttachStub()) { 1653 case AttachDecision::NoAction: 1654 break; 1655 case AttachDecision::Attach: { 1656 ICScript* icScript = frame->icScript(); 1657 ICAttachResult result = 1658 AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(), 1659 script, icScript, stub, gen.stubName()); 1660 if (result == ICAttachResult::Attached) { 1661 handled = true; 1662 JitSpew(JitSpew_BaselineIC, " Attached Call CacheIR stub"); 1663 } 1664 } break; 1665 case AttachDecision::TemporarilyUnoptimizable: 1666 handled = true; 1667 break; 1668 case AttachDecision::Deferred: 1669 MOZ_CRASH("No deferred Call stubs"); 1670 } 1671 if (!handled) { 1672 stub->trackNotAttached(); 1673 } 1674 } 1675 1676 if (constructing) { 1677 if (!ConstructFromStack(cx, callArgs)) { 1678 return false; 1679 } 1680 res.set(callArgs.rval()); 1681 } else if ((op == JSOp::Eval || op == JSOp::StrictEval) && 1682 cx->global()->valueIsEval(callee)) { 1683 if (!DirectEval(cx, callArgs.get(0), res)) { 1684 return false; 1685 } 1686 } else { 1687 MOZ_ASSERT(op == JSOp::Call || op == JSOp::CallContent || 1688 op == JSOp::CallIgnoresRv || op == JSOp::CallIter || 1689 op == JSOp::CallContentIter || op == JSOp::Eval || 1690 op == JSOp::StrictEval); 1691 if ((op == JSOp::CallIter || op == JSOp::CallContentIter) && 1692 callee.isPrimitive()) { 1693 MOZ_ASSERT(argc == 0, "thisv must be on top of the stack"); 1694 ReportValueError(cx, JSMSG_NOT_ITERABLE, -1, callArgs.thisv(), nullptr); 1695 return false; 1696 } 1697 1698 if (!CallFromStack(cx, callArgs)) { 1699 return false; 1700 } 1701 1702 res.set(callArgs.rval()); 1703 } 1704 1705 return true; 1706 } 1707 1708 bool DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame, 1709 ICFallbackStub* stub, Value* vp, 1710 
MutableHandleValue res) { 1711 stub->incrementEnteredCount(); 1712 MaybeNotifyWarp(frame->outerScript(), stub); 1713 1714 RootedScript script(cx, frame->script()); 1715 jsbytecode* pc = StubOffsetToPc(stub, script); 1716 JSOp op = JSOp(*pc); 1717 bool constructing = (op == JSOp::SpreadNew || op == JSOp::SpreadSuperCall); 1718 FallbackICSpew(cx, stub, "SpreadCall(%s)", CodeName(op)); 1719 1720 // Ensure vp array is rooted - we may GC in here. 1721 RootedExternalValueArray vpRoot(cx, 3 + constructing, vp); 1722 1723 RootedValue callee(cx, vp[0]); 1724 RootedValue thisv(cx, vp[1]); 1725 RootedValue arr(cx, vp[2]); 1726 RootedValue newTarget(cx, constructing ? vp[3] : NullValue()); 1727 1728 // Transition stub state to megamorphic or generic if warranted. 1729 MaybeTransition(cx, frame, stub); 1730 1731 // Try attaching a call stub. 1732 bool handled = false; 1733 if (op != JSOp::SpreadEval && op != JSOp::StrictSpreadEval && 1734 stub->state().canAttachStub()) { 1735 // Try CacheIR first: 1736 Rooted<ArrayObject*> aobj(cx, &arr.toObject().as<ArrayObject>()); 1737 MOZ_ASSERT(IsPackedArray(aobj)); 1738 1739 HandleValueArray args = HandleValueArray::fromMarkedLocation( 1740 aobj->length(), aobj->getDenseElements()); 1741 CallIRGenerator gen(cx, script, pc, stub->state(), frame, 1, callee, thisv, 1742 newTarget, args); 1743 switch (gen.tryAttachStub()) { 1744 case AttachDecision::NoAction: 1745 break; 1746 case AttachDecision::Attach: { 1747 ICScript* icScript = frame->icScript(); 1748 ICAttachResult result = 1749 AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(), 1750 script, icScript, stub, gen.stubName()); 1751 1752 if (result == ICAttachResult::Attached) { 1753 handled = true; 1754 JitSpew(JitSpew_BaselineIC, " Attached Spread Call CacheIR stub"); 1755 } 1756 } break; 1757 case AttachDecision::TemporarilyUnoptimizable: 1758 handled = true; 1759 break; 1760 case AttachDecision::Deferred: 1761 MOZ_ASSERT_UNREACHABLE("No deferred optimizations for spread 
calls"); 1762 break; 1763 } 1764 if (!handled) { 1765 stub->trackNotAttached(); 1766 } 1767 } 1768 1769 return SpreadCallOperation(cx, script, pc, thisv, callee, arr, newTarget, 1770 res); 1771 } 1772 1773 void FallbackICCodeCompiler::pushCallArguments( 1774 MacroAssembler& masm, AllocatableGeneralRegisterSet regs, Register argcReg, 1775 bool isConstructing) { 1776 MOZ_ASSERT(!regs.has(argcReg)); 1777 1778 // argPtr initially points to the last argument. 1779 Register argPtr = regs.takeAny(); 1780 masm.mov(FramePointer, argPtr); 1781 1782 // Skip 3 pointers pushed on top of the arguments: the frame descriptor, 1783 // return address, and old frame pointer. 1784 size_t valueOffset = BaselineStubFrameLayout::Size(); 1785 1786 // We have to push |this|, callee, new.target (if constructing) and argc 1787 // arguments. Handle the number of Values we know statically first. 1788 1789 size_t numNonArgValues = 2 + isConstructing; 1790 for (size_t i = 0; i < numNonArgValues; i++) { 1791 masm.pushValue(Address(argPtr, valueOffset)); 1792 valueOffset += sizeof(Value); 1793 } 1794 1795 // If there are no arguments we're done. 1796 Label done; 1797 masm.branchTest32(Assembler::Zero, argcReg, argcReg, &done); 1798 1799 // Push argc Values. 1800 Label loop; 1801 Register count = regs.takeAny(); 1802 masm.addPtr(Imm32(valueOffset), argPtr); 1803 masm.move32(argcReg, count); 1804 masm.bind(&loop); 1805 { 1806 masm.pushValue(Address(argPtr, 0)); 1807 masm.addPtr(Imm32(sizeof(Value)), argPtr); 1808 1809 masm.branchSub32(Assembler::NonZero, Imm32(1), count, &loop); 1810 } 1811 masm.bind(&done); 1812 } 1813 1814 bool FallbackICCodeCompiler::emitCall(bool isSpread, bool isConstructing) { 1815 static_assert(R0 == JSReturnOperand); 1816 1817 // Values are on the stack left-to-right. Calling convention wants them 1818 // right-to-left so duplicate them on the stack in reverse order. 1819 // |this| and callee are pushed last. 
1820 1821 AllocatableGeneralRegisterSet regs = BaselineICAvailableGeneralRegs(0); 1822 1823 if (MOZ_UNLIKELY(isSpread)) { 1824 // Push a stub frame so that we can perform a non-tail call. 1825 enterStubFrame(masm, R1.scratchReg()); 1826 1827 // Use FramePointer instead of StackPointer because it's not affected by 1828 // the stack pushes below. 1829 1830 // newTarget 1831 uint32_t valueOffset = BaselineStubFrameLayout::Size(); 1832 if (isConstructing) { 1833 masm.pushValue(Address(FramePointer, valueOffset)); 1834 valueOffset += sizeof(Value); 1835 } 1836 1837 // array 1838 masm.pushValue(Address(FramePointer, valueOffset)); 1839 valueOffset += sizeof(Value); 1840 1841 // this 1842 masm.pushValue(Address(FramePointer, valueOffset)); 1843 valueOffset += sizeof(Value); 1844 1845 // callee 1846 masm.pushValue(Address(FramePointer, valueOffset)); 1847 valueOffset += sizeof(Value); 1848 1849 masm.push(masm.getStackPointer()); 1850 masm.push(ICStubReg); 1851 1852 PushStubPayload(masm, R0.scratchReg()); 1853 1854 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, Value*, 1855 MutableHandleValue); 1856 if (!callVM<Fn, DoSpreadCallFallback>(masm)) { 1857 return false; 1858 } 1859 1860 leaveStubFrame(masm); 1861 EmitReturnFromIC(masm); 1862 1863 // SpreadCall is not yet supported in Ion, so do not generate asmcode for 1864 // bailout. 1865 return true; 1866 } 1867 1868 // Push a stub frame so that we can perform a non-tail call. 1869 enterStubFrame(masm, R1.scratchReg()); 1870 1871 regs.take(R0.scratchReg()); // argc. 
1872 1873 pushCallArguments(masm, regs, R0.scratchReg(), isConstructing); 1874 1875 masm.push(masm.getStackPointer()); 1876 masm.push(R0.scratchReg()); 1877 masm.push(ICStubReg); 1878 1879 PushStubPayload(masm, R0.scratchReg()); 1880 1881 using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, uint32_t, 1882 Value*, MutableHandleValue); 1883 if (!callVM<Fn, DoCallFallback>(masm)) { 1884 return false; 1885 } 1886 1887 leaveStubFrame(masm); 1888 EmitReturnFromIC(masm); 1889 1890 // This is the resume point used when bailout rewrites call stack to undo 1891 // Ion inlined frames. The return address pushed onto reconstructed stack 1892 // will point here. 1893 assumeStubFrame(); 1894 1895 MOZ_ASSERT(!isSpread); 1896 1897 if (isConstructing) { 1898 code.initBailoutReturnOffset(BailoutReturnKind::New, masm.currentOffset()); 1899 } else { 1900 code.initBailoutReturnOffset(BailoutReturnKind::Call, masm.currentOffset()); 1901 } 1902 1903 // Load passed-in ThisV into R1 just in case it's needed. Need to do this 1904 // before we leave the stub frame since that info will be lost. 1905 // Current stack: [...., ThisV, CalleeToken, Descriptor ] 1906 size_t thisvOffset = 1907 JitFrameLayout::offsetOfThis() - JitFrameLayout::bytesPoppedAfterCall(); 1908 masm.loadValue(Address(masm.getStackPointer(), thisvOffset), R1); 1909 1910 leaveStubFrame(masm); 1911 1912 // If this is a |constructing| call, if the callee returns a non-object, we 1913 // replace it with the |this| object passed in. 
1914 if (isConstructing) { 1915 static_assert(JSReturnOperand == R0); 1916 Label skipThisReplace; 1917 1918 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace); 1919 masm.moveValue(R1, R0); 1920 #ifdef DEBUG 1921 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace); 1922 masm.assumeUnreachable("Failed to return object in constructing call."); 1923 #endif 1924 masm.bind(&skipThisReplace); 1925 } 1926 1927 EmitReturnFromIC(masm); 1928 return true; 1929 } 1930 1931 bool FallbackICCodeCompiler::emit_Call() { 1932 return emitCall(/* isSpread = */ false, /* isConstructing = */ false); 1933 } 1934 1935 bool FallbackICCodeCompiler::emit_CallConstructing() { 1936 return emitCall(/* isSpread = */ false, /* isConstructing = */ true); 1937 } 1938 1939 bool FallbackICCodeCompiler::emit_SpreadCall() { 1940 return emitCall(/* isSpread = */ true, /* isConstructing = */ false); 1941 } 1942 1943 bool FallbackICCodeCompiler::emit_SpreadCallConstructing() { 1944 return emitCall(/* isSpread = */ true, /* isConstructing = */ true); 1945 } 1946 1947 // 1948 // GetIterator_Fallback 1949 // 1950 1951 bool DoGetIteratorFallback(JSContext* cx, BaselineFrame* frame, 1952 ICFallbackStub* stub, HandleValue value, 1953 MutableHandleValue res) { 1954 stub->incrementEnteredCount(); 1955 MaybeNotifyWarp(frame->outerScript(), stub); 1956 FallbackICSpew(cx, stub, "GetIterator"); 1957 1958 TryAttachStub<GetIteratorIRGenerator>("GetIterator", cx, frame, stub, value); 1959 1960 PropertyIteratorObject* iterObj = ValueToIterator(cx, value); 1961 if (!iterObj) { 1962 return false; 1963 } 1964 1965 res.setObject(*iterObj); 1966 return true; 1967 } 1968 1969 bool FallbackICCodeCompiler::emit_GetIterator() { 1970 EmitRestoreTailCallReg(masm); 1971 1972 // Sync stack for the decompiler. 
  // Keep the operand on the stack for the expression decompiler.
  masm.pushValue(R0);

  // Push VM call arguments: value, stub, then the frame payload.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoGetIteratorFallback>(masm);
}

//
// OptimizeSpreadCall_Fallback
//

// Fallback for the OptimizeSpreadCall IC: tries to attach an optimized
// CacheIR stub for the spread argument, then performs the generic
// OptimizeSpreadCall operation.
bool DoOptimizeSpreadCallFallback(JSContext* cx, BaselineFrame* frame,
                                  ICFallbackStub* stub, HandleValue value,
                                  MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "OptimizeSpreadCall");

  TryAttachStub<OptimizeSpreadCallIRGenerator>("OptimizeSpreadCall", cx, frame,
                                               stub, value);

  return OptimizeSpreadCall(cx, value, res);
}

// Trampoline: restore the tail-call register, push the VM wrapper's
// arguments, and tail-call into DoOptimizeSpreadCallFallback.
bool FallbackICCodeCompiler::emit_OptimizeSpreadCall() {
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoOptimizeSpreadCallFallback>(masm);
}

//
// InstanceOf_Fallback
//

// Fallback for JSOp::Instanceof: runs the generic instanceof protocol and
// attaches a CacheIR stub when the RHS is a plain JSFunction.
bool DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, HandleValue lhs,
                          HandleValue rhs, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "InstanceOf");

  // `x instanceof rhs` requires rhs to be an object; anything else is a
  // TypeError.
  if (!rhs.isObject()) {
    ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, nullptr);
    return false;
  }

  RootedObject obj(cx, &rhs.toObject());
  bool cond = false;
  if (!InstanceofOperator(cx, obj, lhs, &cond)) {
    return false;
  }

  res.setBoolean(cond);

  if (!obj->is<JSFunction>()) {
    // Non-function RHS is not optimizable here; ensure we've recorded at
    // least one failure, so we can detect there was a
    // non-optimizable case
    if (!stub->state().hasFailures()) {
      stub->trackNotAttached();
    }
    return true;
  }

  // Attach after the operation succeeded, using the original operands.
  TryAttachStub<InstanceOfIRGenerator>("InstanceOf", cx, frame, stub, lhs, obj);
  return true;
}

bool FallbackICCodeCompiler::emit_InstanceOf() {
  EmitRestoreTailCallReg(masm);

  // Sync stack for the decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments in reverse order: rhs, lhs, stub, frame payload.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoInstanceOfFallback>(masm);
}

//
// TypeOf_Fallback
//

// Fallback for JSOp::Typeof: returns the canonical type-name string and
// tries to attach an optimized stub for the operand's type.
bool DoTypeOfFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                      HandleValue val, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "TypeOf");

  TryAttachStub<TypeOfIRGenerator>("TypeOf", cx, frame, stub, val);

  JSType type = js::TypeOfValue(val);
  RootedString string(cx, TypeName(type, cx->names()));
  res.setString(string);
  return true;
}

bool FallbackICCodeCompiler::emit_TypeOf() {
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoTypeOfFallback>(masm);
}

//
// TypeOfEq_Fallback
//

// Fallback for JSOp::TypeofEq: compares `typeof val` against the type baked
// into the bytecode operand (with == or !=).
bool DoTypeOfEqFallback(JSContext* cx, BaselineFrame* frame,
                        ICFallbackStub* stub, HandleValue val,
                        MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "TypeOfEq");
  // Decode the TypeofEq operand: it packs both the JSType to compare
  // against and the compare op (Eq or Ne) into a single uint8.
  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  auto operand = TypeofEqOperand::fromRawValue(GET_UINT8(pc));
  JSType type = operand.type();
  JSOp compareOp = operand.compareOp();

  TryAttachStub<TypeOfEqIRGenerator>("TypeOfEq", cx, frame, stub, val, type,
                                     compareOp);

  bool result = js::TypeOfValue(val) == type;
  if (compareOp == JSOp::Ne) {
    result = !result;
  }
  res.setBoolean(result);
  return true;
}

bool FallbackICCodeCompiler::emit_TypeOfEq() {
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoTypeOfEqFallback>(masm);
}

//
// ToPropertyKey_Fallback
//

// Fallback for ToPropertyKey: converts the value to a property key,
// attaching an optimized stub first when possible.
bool DoToPropertyKeyFallback(JSContext* cx, BaselineFrame* frame,
                             ICFallbackStub* stub, HandleValue val,
                             MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "ToPropertyKey");

  TryAttachStub<ToPropertyKeyIRGenerator>("ToPropertyKey", cx, frame, stub,
                                          val);

  return ToPropertyKeyOperation(cx, val, res);
}

bool FallbackICCodeCompiler::emit_ToPropertyKey() {
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoToPropertyKeyFallback>(masm);
}

//
// Rest_Fallback
//

// Fallback for JSOp::Rest: materializes the rest parameter as a dense array
// holding the actual arguments beyond the declared formals.
bool DoRestFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                    MutableHandleValue res) {
  // The rest parameter itself is the last formal, so exclude it.
  unsigned numFormals = frame->numFormalArgs() - 1;
  unsigned numActuals = frame->numActualArgs();
  unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
  Value* rest = frame->argv() + numFormals;

  ArrayObject* obj = NewDenseCopiedArray(cx, numRest, rest);
  if (!obj) {
    return false;
  }
  res.setObject(*obj);
  return true;
}

bool FallbackICCodeCompiler::emit_Rest() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoRestFallback>(masm);
}

//
// UnaryArith_Fallback
//

// Fallback for the unary arithmetic ops (BitNot, Pos, Neg, Inc, Dec,
// ToNumeric). Performs the operation, then attaches a stub using both the
// original operand and the computed result.
bool DoUnaryArithFallback(JSContext* cx, BaselineFrame* frame,
                          ICFallbackStub* stub, HandleValue val,
                          MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName(op));

  switch (op) {
    case JSOp::BitNot: {
      res.set(val);
      if (!BitNot(cx, res, res)) {
        return false;
      }
      break;
    }
    case JSOp::Pos: {
      res.set(val);
      if (!ToNumber(cx, res)) {
        return false;
      }
      break;
    }
    case JSOp::Neg: {
      res.set(val);
      if (!NegOperation(cx, res, res)) {
        return false;
      }
      break;
    }
    case JSOp::Inc: {
      if (!IncOperation(cx, val, res)) {
        return false;
      }
      break;
    }
    case JSOp::Dec: {
      if (!DecOperation(cx, val, res)) {
        return false;
      }
      break;
    }
    case JSOp::ToNumeric: {
      res.set(val);
      if (!ToNumeric(cx, res)) {
        return false;
      }
      break;
    }
    default:
      MOZ_CRASH("Unexpected op");
  }
  // All unary arith ops produce a number or BigInt.
  MOZ_ASSERT(res.isNumeric());

  TryAttachStub<UnaryArithIRGenerator>("UnaryArith", cx, frame, stub, op, val,
                                       res);
  return true;
}

bool
FallbackICCodeCompiler::emit_UnaryArith() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  masm.pushValue(R0);

  // Push arguments.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoUnaryArithFallback>(masm);
}

//
// BinaryArith_Fallback
//

// Fallback for the binary arithmetic and bitwise ops. Dispatches on the
// bytecode op, performs the generic operation, then tries to attach an
// optimized stub using the *original* operand values.
bool DoBinaryArithFallback(JSContext* cx, BaselineFrame* frame,
                           ICFallbackStub* stub, HandleValue lhs,
                           HandleValue rhs, MutableHandleValue ret) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  JSOp op = JSOp(*pc);
  FallbackICSpew(
      cx, stub, "CacheIRBinaryArith(%s,%d,%d)", CodeName(op),
      int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
      int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

  // Don't pass lhs/rhs directly, we need the original values when
  // generating stubs.
  RootedValue lhsCopy(cx, lhs);
  RootedValue rhsCopy(cx, rhs);

  // Perform the arith operation.
  switch (op) {
    case JSOp::Add:
      // Do an add.
      if (!AddValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::Sub:
      if (!SubValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::Mul:
      if (!MulValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::Div:
      if (!DivValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::Mod:
      if (!ModValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::Pow:
      if (!PowValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    case JSOp::BitOr: {
      if (!BitOr(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    case JSOp::BitXor: {
      if (!BitXor(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    case JSOp::BitAnd: {
      if (!BitAnd(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    case JSOp::Lsh: {
      if (!BitLsh(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    case JSOp::Rsh: {
      if (!BitRsh(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    case JSOp::Ursh: {
      if (!UrshValues(cx, &lhsCopy, &rhsCopy, ret)) {
        return false;
      }
      break;
    }
    default:
      MOZ_CRASH("Unhandled baseline arith op");
  }

  // Attach with the untouched operands plus the result.
  TryAttachStub<BinaryArithIRGenerator>("BinaryArith", cx, frame, stub, op, lhs,
                                        rhs, ret);
  return true;
}

bool FallbackICCodeCompiler::emit_BinaryArith() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  // rhs first so lhs ends up closer to the stub/frame arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoBinaryArithFallback>(masm);
}

//
// Compare_Fallback
//
// Fallback for the relational and (strict/loose) equality ops. Computes the
// boolean result generically, then tries to attach an optimized stub with
// the original operands.
bool DoCompareFallback(JSContext* cx, BaselineFrame* frame,
                       ICFallbackStub* stub, HandleValue lhs, HandleValue rhs,
                       MutableHandleValue ret) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  JSOp op = JSOp(*pc);

  FallbackICSpew(cx, stub, "Compare(%s)", CodeName(op));

  // Don't pass lhs/rhs directly, we need the original values when
  // generating stubs.
  RootedValue lhsCopy(cx, lhs);
  RootedValue rhsCopy(cx, rhs);

  // Perform the compare operation.
  bool out;
  switch (op) {
    case JSOp::Lt:
      if (!LessThan(cx, &lhsCopy, &rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::Le:
      if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::Gt:
      if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::Ge:
      if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::Eq:
      if (!js::LooselyEqual(cx, lhsCopy, rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::Ne:
      // Ne is implemented as negated loose equality.
      if (!js::LooselyEqual(cx, lhsCopy, rhsCopy, &out)) {
        return false;
      }
      out = !out;
      break;
    case JSOp::StrictEq:
      if (!js::StrictlyEqual(cx, lhsCopy, rhsCopy, &out)) {
        return false;
      }
      break;
    case JSOp::StrictNe:
      // StrictNe is implemented as negated strict equality.
      if (!js::StrictlyEqual(cx, lhsCopy, rhsCopy, &out)) {
        return false;
      }
      out = !out;
      break;
    default:
      MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
      return false;
  }

  ret.setBoolean(out);

  TryAttachStub<CompareIRGenerator>("Compare", cx, frame, stub, op, lhs, rhs);
  return true;
}

bool FallbackICCodeCompiler::emit_Compare() {
  static_assert(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      HandleValue, MutableHandleValue);
  return tailCallVM<Fn, DoCompareFallback>(masm);
}

//
// NewArray_Fallback
//

// Fallback for JSOp::NewArray: allocates the array, then tries to attach a
// stub that caches the allocation site/template.
bool DoNewArrayFallback(JSContext* cx, BaselineFrame* frame,
                        ICFallbackStub* stub, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "NewArray");

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());

  uint32_t length = GET_UINT32(pc);
  MOZ_ASSERT(length <= INT32_MAX,
             "the bytecode emitter must fail to compile code that would "
             "produce a length exceeding int32_t range");

  Rooted<ArrayObject*> array(cx, NewArrayOperation(cx, length));
  if (!array) {
    return false;
  }

  TryAttachStub<NewArrayIRGenerator>("NewArray", cx, frame, stub, JSOp(*pc),
                                     array, frame);

  res.setObject(*array);
  return true;
}

bool FallbackICCodeCompiler::emit_NewArray() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);  // stub.
  // Push the BaselineFrame pointer directly (no stub payload helper here).
  masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoNewArrayFallback>(masm);
}

//
// NewObject_Fallback
//
// Fallback for JSOp::NewObject: allocates the object, then tries to attach
// a stub that caches the allocation site/template.
bool DoNewObjectFallback(JSContext* cx, BaselineFrame* frame,
                         ICFallbackStub* stub, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "NewObject");

  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);

  RootedObject obj(cx, NewObjectOperation(cx, script, pc));
  if (!obj) {
    return false;
  }

  TryAttachStub<NewObjectIRGenerator>("NewObject", cx, frame, stub, JSOp(*pc),
                                      obj, frame);

  res.setObject(*obj);
  return true;
}

bool FallbackICCodeCompiler::emit_NewObject() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);  // stub.
  pushStubPayload(masm, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoNewObjectFallback>(masm);
}

//
// Lambda_Fallback
//

// Fallback for JSOp::Lambda: tries to attach a stub for the closure
// creation, then clones the function with the current environment chain.
// Note: the attach attempt happens *before* the clone is created.
bool DoLambdaFallback(JSContext* cx, BaselineFrame* frame, ICFallbackStub* stub,
                      MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "Lambda");

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());

  Rooted<JSFunction*> fun(cx, frame->script()->getFunction(pc));
  Rooted<JSObject*> env(cx, frame->environmentChain());

  TryAttachStub<LambdaIRGenerator>("Lambda", cx, frame, stub, JSOp(*pc), fun,
                                   frame);

  JSObject* clone = Lambda(cx, fun, env);
  if (!clone) {
    return false;
  }

  res.setObject(*clone);
  return true;
}

bool FallbackICCodeCompiler::emit_Lambda() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);
  masm.pushBaselineFramePtr(FramePointer, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoLambdaFallback>(masm);
}

//
// CloseIter_Fallback
//

// Fallback for JSOp::CloseIter: closes the iterator with the completion
// kind encoded in the bytecode, attaching a stub first when possible.
bool DoCloseIterFallback(JSContext* cx, BaselineFrame* frame,
                         ICFallbackStub* stub, HandleObject iter) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "CloseIter");

  jsbytecode* pc = StubOffsetToPc(stub, frame->script());
  CompletionKind kind = CompletionKind(GET_UINT8(pc));

  TryAttachStub<CloseIterIRGenerator>("CloseIter", cx, frame, stub, iter, kind);

  return CloseIterOperation(cx, iter, kind);
}

bool FallbackICCodeCompiler::emit_CloseIter() {
  EmitRestoreTailCallReg(masm);

  // The iterator object arrives in R0's scratch register.
  masm.push(R0.scratchReg());
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleObject);
  return tailCallVM<Fn, DoCloseIterFallback>(masm);
}

//
// OptimizeGetIterator_Fallback
//

// Fallback for the OptimizeGetIterator IC: reports (as a boolean) whether
// the iteration can use the optimized path, attaching a stub when possible.
bool DoOptimizeGetIteratorFallback(JSContext* cx, BaselineFrame* frame,
                                   ICFallbackStub* stub, HandleValue value,
                                   MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "OptimizeGetIterator");

  TryAttachStub<OptimizeGetIteratorIRGenerator>("OptimizeGetIterator", cx,
                                                frame, stub, value);

  bool result = OptimizeGetIterator(value, cx);
  res.setBoolean(result);
  return true;
}

bool FallbackICCodeCompiler::emit_OptimizeGetIterator() {
  EmitRestoreTailCallReg(masm);

  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn = bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, HandleValue,
                      MutableHandleValue);
  return tailCallVM<Fn, DoOptimizeGetIteratorFallback>(masm);
}

//
// GetImport_Fallback
//

// Fallback for JSOp::GetImport: reads the imported binding for the current
// pc, attaching a stub first when possible.
bool DoGetImportFallback(JSContext* cx, BaselineFrame* frame,
                         ICFallbackStub* stub, MutableHandleValue res) {
  stub->incrementEnteredCount();
  MaybeNotifyWarp(frame->outerScript(), stub);
  FallbackICSpew(cx, stub, "GetImport");

  RootedObject envChain(cx, frame->environmentChain());
  RootedScript script(cx, frame->script());
  jsbytecode* pc = StubOffsetToPc(stub, script);

  TryAttachStub<GetImportIRGenerator>("GetImport", cx, frame, stub);

  return GetImportOperation(cx, envChain, script, pc, res);
}

bool FallbackICCodeCompiler::emit_GetImport() {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  using Fn =
      bool (*)(JSContext*, BaselineFrame*, ICFallbackStub*, MutableHandleValue);
  return tailCallVM<Fn, DoGetImportFallback>(masm);
}

// Emits every Baseline IC fallback trampoline (one per entry in
// IC_BASELINE_FALLBACK_CODE_KIND_LIST) into a single JitCode blob and
// records each trampoline's start offset in `fallbackCode`.
bool JitRuntime::generateBaselineICFallbackCode(JSContext* cx) {
  TempAllocator temp(&cx->tempLifoAlloc());
  StackMacroAssembler masm(cx, temp);
  PerfSpewerRangeRecorder rangeRecorder(masm);
  AutoCreatedBy acb(masm, "JitRuntime::generateBaselineICFallbackCode");

  BaselineICFallbackCode& fallbackCode = baselineICFallbackCode_.ref();
  FallbackICCodeCompiler compiler(cx, fallbackCode, masm);

  JitSpew(JitSpew_Codegen, "# Emitting Baseline IC fallback code");

// For each fallback kind: record its code offset, emit its stub body, and
// register the offset and a perf-spewer range for it.
#define EMIT_CODE(kind)                                            \
  {                                                                \
    AutoCreatedBy acb(masm, "kind=" #kind);                        \
    uint32_t offset = startTrampolineCode(masm);                   \
    InitMacroAssemblerForICStub(masm);                             \
    if (!compiler.emit_##kind()) {                                 \
      return false;                                                \
    }                                                              \
    fallbackCode.initOffset(BaselineICFallbackKind::kind, offset); \
    rangeRecorder.recordOffset("BaselineICFallback: " #kind);      \
  }
  IC_BASELINE_FALLBACK_CODE_KIND_LIST(EMIT_CODE)
#undef EMIT_CODE

  Linker linker(masm);
  JitCode* code = linker.newCode(cx, CodeKind::Other);
  if (!code) {
    return false;
  }

  rangeRecorder.collectRangesForJitCode(code);

#ifdef MOZ_VTUNE
  vtune::MarkStub(code, "BaselineICFallback");
#endif

  fallbackCode.initCode(code);
  return true;
}

}  // namespace jit
}  // namespace js