tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Ion.cpp (86316B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/Ion.h"
      8 
      9 #include "mozilla/CheckedInt.h"
     10 #include "mozilla/DebugOnly.h"
     11 #include "mozilla/IntegerPrintfMacros.h"
     12 #include "mozilla/MemoryReporting.h"
     13 
     14 #include "gc/GCContext.h"
     15 #include "gc/PublicIterators.h"
     16 #include "jit/AliasAnalysis.h"
     17 #include "jit/AlignmentMaskAnalysis.h"
     18 #include "jit/AutoWritableJitCode.h"
     19 #include "jit/BacktrackingAllocator.h"
     20 #include "jit/BaselineFrame.h"
     21 #include "jit/BaselineJIT.h"
     22 #include "jit/BranchHinting.h"
     23 #include "jit/CodeGenerator.h"
     24 #include "jit/CompileInfo.h"
     25 #include "jit/DominatorTree.h"
     26 #include "jit/EdgeCaseAnalysis.h"
     27 #include "jit/EffectiveAddressAnalysis.h"
     28 #include "jit/ExecutableAllocator.h"
     29 #include "jit/FoldLinearArithConstants.h"
     30 #include "jit/InlineScriptTree.h"
     31 #include "jit/InstructionReordering.h"
     32 #include "jit/Invalidation.h"
     33 #include "jit/InvalidationScriptSet.h"
     34 #include "jit/IonAnalysis.h"
     35 #include "jit/IonCompileTask.h"
     36 #include "jit/IonIC.h"
     37 #include "jit/IonOptimizationLevels.h"
     38 #include "jit/IonScript.h"
     39 #include "jit/JitcodeMap.h"
     40 #include "jit/JitFrames.h"
     41 #include "jit/JitRuntime.h"
     42 #include "jit/JitSpewer.h"
     43 #include "jit/JitZone.h"
     44 #include "jit/LICM.h"
     45 #include "jit/Linker.h"
     46 #include "jit/LIR.h"
     47 #include "jit/Lowering.h"
     48 #include "jit/PerfSpewer.h"
     49 #include "jit/RangeAnalysis.h"
     50 #include "jit/ScalarReplacement.h"
     51 #include "jit/ScriptFromCalleeToken.h"
     52 #include "jit/SimpleAllocator.h"
     53 #include "jit/Sink.h"
     54 #include "jit/UnrollLoops.h"
     55 #include "jit/ValueNumbering.h"
     56 #include "jit/WarpBuilder.h"
     57 #include "jit/WarpOracle.h"
     58 #include "jit/WasmBCE.h"
     59 #include "js/Printf.h"
     60 #include "js/UniquePtr.h"
     61 #include "util/Memory.h"
     62 #include "util/WindowsWrapper.h"
     63 #include "vm/HelperThreads.h"
     64 #include "vm/Realm.h"
     65 #ifdef MOZ_VTUNE
     66 #  include "vtune/VTuneWrapper.h"
     67 #endif
     68 
     69 #include "gc/GC-inl.h"
     70 #include "gc/StableCellHasher-inl.h"
     71 #include "jit/InlineScriptTree-inl.h"
     72 #include "jit/MacroAssembler-inl.h"
     73 #include "jit/SafepointIndex-inl.h"
     74 #include "vm/GeckoProfiler-inl.h"
     75 #include "vm/JSContext-inl.h"
     76 #include "vm/JSScript-inl.h"
     77 #include "vm/Realm-inl.h"
     78 
     79 #if defined(ANDROID)
     80 #  include <sys/system_properties.h>
     81 #endif
     82 
     83 using mozilla::CheckedInt;
     84 using mozilla::DebugOnly;
     85 
     86 using namespace js;
     87 using namespace js::jit;
     88 
JitRuntime::~JitRuntime() {
  // All off-thread compilation bookkeeping must already be drained: no
  // finished tasks awaiting processing, and an empty lazy-link list whose
  // size counter agrees with it.
  MOZ_ASSERT(numFinishedOffThreadTasks_ == 0);
  MOZ_ASSERT(ionLazyLinkListSize_ == 0);
  MOZ_ASSERT(ionLazyLinkList_.ref().isEmpty());

  MOZ_ASSERT(ionFreeTaskBatch_.ref().empty());

  // By this point, the jitcode global table should be empty.
  MOZ_ASSERT_IF(jitcodeGlobalTable_, jitcodeGlobalTable_->empty());
  js_delete(jitcodeGlobalTable_.ref());

  // interpreterEntryMap should be cleared out during finishRoots()
  MOZ_ASSERT_IF(interpreterEntryMap_, interpreterEntryMap_->empty());
  js_delete(interpreterEntryMap_.ref());

  js_delete(jitHintsMap_.ref());
}
    106 
// Begin a new trampoline in `masm`'s buffer: flush pending code, pad up to
// CodeAlignment, reset the frame-pushed counter, and return the offset at
// which the next trampoline's code will start.
uint32_t JitRuntime::startTrampolineCode(MacroAssembler& masm) {
  AutoCreatedBy acb(masm, "startTrampolineCode");

  // Guard against control flow accidentally falling through from the code
  // emitted before this trampoline.
  masm.assumeUnreachable("Shouldn't get here");
  masm.flushBuffer();
  masm.haltingAlign(CodeAlignment);
  masm.setFramePushed(0);
  return masm.currentOffset();
}
    116 
// One-time setup of runtime-wide JIT state: trampolines, baseline IC
// fallback code, the jitcode global table, the optional hints and
// interpreter-entry maps, and the baseline interpreter. Returns false on
// failure (OOM); partial initialization is left to the destructor to clean
// up.
bool JitRuntime::initialize(JSContext* cx) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));

  // Runtime-wide code is shared across compartments and so is allocated in
  // the atoms zone (see also ensureDebugTrapHandler).
  AutoAllocInAtomsZone az(cx);
  JitContext jctx(cx);

  if (!generateTrampolines(cx)) {
    return false;
  }

  if (!generateBaselineICFallbackCode(cx)) {
    return false;
  }

  jitcodeGlobalTable_ = cx->new_<JitcodeGlobalTable>();
  if (!jitcodeGlobalTable_) {
    return false;
  }

  // The hints map is optional and can be disabled via JitOptions.
  if (!JitOptions.disableJitHints) {
    jitHintsMap_ = cx->new_<JitHintsMap>();
    if (!jitHintsMap_) {
      return false;
    }
  }

  // Likewise the interpreter-entry trampoline map is only needed when that
  // feature is enabled.
  if (JitOptions.emitInterpreterEntryTrampoline) {
    interpreterEntryMap_ = cx->new_<EntryTrampolineMap>();
    if (!interpreterEntryMap_) {
      return false;
    }
  }

  if (!GenerateBaselineInterpreter(cx, baselineInterpreter_)) {
    return false;
  }

  // Initialize the jitCodeRaw of the Runtime's canonical SelfHostedLazyScript
  // to point to the interpreter trampoline.
  cx->runtime()->selfHostedLazyScript.ref().jitCodeRaw_ =
      interpreterStub().value;

  return true;
}
    161 
// Emit every shared trampoline into a single MacroAssembler buffer, link the
// result into one JitCode allocation (trampolineCode_), and record per-stub
// offsets for the profiler. Returns false on OOM.
bool JitRuntime::generateTrampolines(JSContext* cx) {
  TempAllocator temp(&cx->tempLifoAlloc());
  StackMacroAssembler masm(cx, temp);
  PerfSpewerRangeRecorder rangeRecorder(masm);

  // The bailout tail label is shared by several of the stubs below.
  Label bailoutTail;
  JitSpew(JitSpew_Codegen, "# Emitting bailout tail stub");
  generateBailoutTailStub(masm, &bailoutTail);

  JitSpew(JitSpew_Codegen, "# Emitting bailout handler");
  generateBailoutHandler(masm, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: Bailout");

  JitSpew(JitSpew_Codegen, "# Emitting invalidator");
  generateInvalidator(masm, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: Invalidator");

  JitSpew(JitSpew_Codegen, "# Emitting EnterJIT sequence");
  generateEnterJIT(cx, masm);
  rangeRecorder.recordOffset("Trampoline: EnterJIT");

  // One pre-barrier stub per traced MIRType; each records its start offset
  // in the corresponding member.
  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Value");
  valuePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Value);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Value");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for String");
  stringPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::String);
  rangeRecorder.recordOffset("Trampoline: PreBarrier String");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Object");
  objectPreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Object);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Object");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Shape");
  shapePreBarrierOffset_ = generatePreBarrier(cx, masm, MIRType::Shape);
  rangeRecorder.recordOffset("Trampoline: PreBarrier Shape");

  JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for WasmAnyRef");
  wasmAnyRefPreBarrierOffset_ =
      generatePreBarrier(cx, masm, MIRType::WasmAnyRef);
  rangeRecorder.recordOffset("Trampoline: PreBarrier WasmAnyRef");

  JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
  generateLazyLinkStub(masm);
  rangeRecorder.recordOffset("Trampoline: LazyLinkStub");

  JitSpew(JitSpew_Codegen, "# Emitting interpreter stub");
  generateInterpreterStub(masm);
  rangeRecorder.recordOffset("Trampoline: Interpreter");

  JitSpew(JitSpew_Codegen, "# Emitting double-to-int32-value stub");
  generateDoubleToInt32ValueStub(masm);
  rangeRecorder.recordOffset("Trampoline: DoubleToInt32ValueStub");

  JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
  if (!generateVMWrappers(cx, masm, rangeRecorder)) {
    return false;
  }

  JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
  Label profilerExitTail;
  generateProfilerExitFrameTailStub(masm, &profilerExitTail);
  rangeRecorder.recordOffset("Trampoline: ProfilerExitFrameTailStub");

  JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
  generateExceptionTailStub(masm, &profilerExitTail, &bailoutTail);
  rangeRecorder.recordOffset("Trampoline: ExceptionTailStub");

  JitSpew(JitSpew_Codegen, "# Emitting Ion generic call stub");
  generateIonGenericCallStub(masm, IonGenericCallKind::Call);
  rangeRecorder.recordOffset("Trampoline: IonGenericCall");

  JitSpew(JitSpew_Codegen, "# Emitting Ion generic construct stub");
  generateIonGenericCallStub(masm, IonGenericCallKind::Construct);
  rangeRecorder.recordOffset("Trampoline: IonGenericConstruct");

  JitSpew(JitSpew_Codegen, "# Emitting trampoline natives");
  TrampolineNativeJitEntryOffsets nativeOffsets;
  generateTrampolineNatives(masm, nativeOffsets, rangeRecorder);

  // Link everything emitted above into a single executable allocation.
  Linker linker(masm);
  trampolineCode_ = linker.newCode(cx, CodeKind::Other);
  if (!trampolineCode_) {
    return false;
  }

  rangeRecorder.collectRangesForJitCode(trampolineCode_);
#ifdef MOZ_VTUNE
  vtune::MarkStub(trampolineCode_, "Trampolines");
#endif

  // Initialize TrampolineNative JitEntry array.
  for (size_t i = 0; i < size_t(TrampolineNative::Count); i++) {
    TrampolineNative native = TrampolineNative(i);
    uint32_t offset = nativeOffsets[native];
    MOZ_ASSERT(offset > 0 && offset < trampolineCode_->instructionsSize());
    trampolineNativeJitEntries_[native] = trampolineCode_->raw() + offset;
  }

  return true;
}
    263 
// Lazily generate (and cache) the debug trap handler for `kind`. Returns
// true if the handler exists after the call; false signals generation
// failure (null handler).
bool JitRuntime::ensureDebugTrapHandler(JSContext* cx,
                                        DebugTrapHandlerKind kind) {
  if (debugTrapHandlers_[kind]) {
    return true;
  }

  // JitRuntime code stubs are shared across compartments and have to
  // be allocated in the atoms zone.
  mozilla::Maybe<AutoAllocInAtomsZone> az;
  if (!cx->zone()->isAtomsZone()) {
    az.emplace(cx);
  }
  debugTrapHandlers_[kind] = generateDebugTrapHandler(cx, kind);
  return debugTrapHandlers_[kind];
}
    279 
// Accessor for the list of Ion compilations awaiting lazy linking.
// Main-thread only, as asserted.
JitRuntime::IonCompileTaskList& JitRuntime::ionLazyLinkList(JSRuntime* rt) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  return ionLazyLinkList_.ref();
}
    285 
// Unlink `task` from the lazy-link list, keeping the cached size counter in
// sync (verified by the trailing assertion).
void JitRuntime::ionLazyLinkListRemove(JSRuntime* rt,
                                       jit::IonCompileTask* task) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
  MOZ_ASSERT(ionLazyLinkListSize_ > 0);

  task->removeFrom(ionLazyLinkList(rt));
  ionLazyLinkListSize_--;

  MOZ_ASSERT(ionLazyLinkList(rt).isEmpty() == (ionLazyLinkListSize_ == 0));
}
    298 
// Push `task` onto the front of the lazy-link list and bump the cached size
// counter.
void JitRuntime::ionLazyLinkListAdd(JSRuntime* rt, jit::IonCompileTask* task) {
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt),
             "Should only be mutated by the main thread.");
  MOZ_ASSERT(rt == task->script()->runtimeFromMainThread());
  ionLazyLinkList(rt).insertFront(task);
  ionLazyLinkListSize_++;
}
    306 
    307 uint8_t* JitRuntime::allocateIonOsrTempData(size_t size) {
    308  MOZ_ASSERT(size > 0);
    309 
    310  uint8_t* prevBuffer = ionOsrTempData_.ref().get();
    311  size_t prevSize = ionOsrTempDataSize_.ref();
    312  MOZ_ASSERT((prevSize > 0) == !!prevBuffer);
    313 
    314  // Reuse the previous buffer if possible.
    315  if (prevSize >= size) {
    316    return prevBuffer;
    317  }
    318 
    319  // Allocate or resize the buffer.
    320  uint8_t* buffer = js_pod_realloc<uint8_t>(prevBuffer, prevSize, size);
    321  if (!buffer) {
    322    // ionOsrTempData_ is still valid.
    323    return nullptr;
    324  }
    325  // ionOsrTempData_ is no longer valid.
    326  (void)ionOsrTempData_.ref().release();
    327  ionOsrTempData_.ref().reset(buffer);
    328  ionOsrTempDataSize_ = size;
    329  return buffer;
    330 }
    331 
// Drop the cached OSR scratch buffer and reset its recorded size.
void JitRuntime::freeIonOsrTempData() {
  ionOsrTempData_.ref().reset();
  ionOsrTempDataSize_ = 0;
}
    336 
    337 static bool LinkCodeGen(JSContext* cx, CodeGenerator* codegen,
    338                        HandleScript script) {
    339  if (!codegen->link(cx)) {
    340    return false;
    341  }
    342 
    343  // Record Ion compile time in glean.
    344  if (mozilla::TimeDuration compileTime = codegen->getCompilationTime()) {
    345    cx->metrics().ION_COMPILE_TIME(compileTime);
    346  }
    347 
    348  return true;
    349 }
    350 
// Link a compilation produced by a background helper-thread task. Returns
// false if the task carries no codegen (compilation failed off-thread) or
// if linking itself fails.
static bool LinkBackgroundCodeGen(JSContext* cx, IonCompileTask* task) {
  CodeGenerator* codegen = task->backgroundCodegen();
  if (!codegen) {
    return false;
  }

  // Linking happens on the main thread and needs an active JitContext.
  JitContext jctx(cx);
  RootedScript script(cx, task->script());
  return LinkCodeGen(cx, codegen, script);
}
    361 
// Finish an off-thread Ion compilation for `calleeScript`: detach the
// pending task from the BaselineScript and the lazy-link list, link it (GC
// suppressed), then hand the task off for freeing. Link failure is
// swallowed deliberately; see the comment below.
void jit::LinkIonScript(JSContext* cx, HandleScript calleeScript) {
  // Get the pending IonCompileTask from the script.
  MOZ_ASSERT(calleeScript->hasBaselineScript());
  IonCompileTask* task =
      calleeScript->baselineScript()->pendingIonCompileTask();
  calleeScript->baselineScript()->removePendingIonCompileTask(cx->runtime(),
                                                              calleeScript);

  // Remove from pending.
  cx->runtime()->jitRuntime()->ionLazyLinkListRemove(cx->runtime(), task);

  {
    gc::AutoSuppressGC suppressGC(cx);
    if (!LinkBackgroundCodeGen(cx, task)) {
      // Silently ignore OOM during code generation. The assembly code
      // doesn't have code to handle it after linking happened. So it's
      // not OK to throw a catchable exception from there.
      cx->clearPendingException();
    }
  }

  // Whether linking succeeded or not, the task is done: queue it for
  // freeing.
  AutoStartIonFreeTask freeTask(cx->runtime()->jitRuntime());
  FinishOffThreadTask(cx->runtime(), freeTask, task);
}
    386 
// Entry point used by the lazy-link stub: link the pending Ion compilation
// for the script on top of the activation and return the code address to
// (re-)enter.
uint8_t* jit::LazyLinkTopActivation(JSContext* cx,
                                    LazyLinkExitFrameLayout* frame) {
  RootedScript calleeScript(
      cx, ScriptFromCalleeToken(frame->jsFrame()->calleeToken()));

  LinkIonScript(cx, calleeScript);

  MOZ_ASSERT(calleeScript->hasBaselineScript());
  MOZ_ASSERT(calleeScript->jitCodeRaw());

  return calleeScript->jitCodeRaw();
}
    399 
/* static */
// Trace all JitCode cells in the atoms zone as roots.
void JitRuntime::TraceAtomZoneRoots(JSTracer* trc) {
  // Shared stubs are allocated in the atoms zone, so do not iterate over
  // the atoms heap after it has been "finished".
  if (trc->runtime()->atomsAreFinished()) {
    return;
  }

  Zone* zone = trc->runtime()->atomsZone();
  for (auto i = zone->cellIterUnsafe<JitCode>(); !i.done(); i.next()) {
    // Copy to a local so we can pass a JitCode** to TraceRoot.
    JitCode* code = i;
    TraceRoot(trc, &code, "wrapper");
  }
}
    414 
    415 /* static */
    416 bool JitRuntime::MarkJitcodeGlobalTableIteratively(GCMarker* marker) {
    417  if (marker->runtime()->hasJitRuntime() &&
    418      marker->runtime()->jitRuntime()->hasJitcodeGlobalTable()) {
    419    return marker->runtime()
    420        ->jitRuntime()
    421        ->getJitcodeGlobalTable()
    422        ->markIteratively(marker);
    423  }
    424  return false;
    425 }
    426 
    427 /* static */
    428 void JitRuntime::TraceWeakJitcodeGlobalTable(JSRuntime* rt, JSTracer* trc) {
    429  if (rt->hasJitRuntime() && rt->jitRuntime()->hasJitcodeGlobalTable()) {
    430    rt->jitRuntime()->getJitcodeGlobalTable()->traceWeak(rt, trc);
    431  }
    432 }
    433 
    434 bool JitZone::addInlinedCompilation(const IonScriptKey& ionScriptKey,
    435                                    JSScript* inlined) {
    436  MOZ_ASSERT(inlined != ionScriptKey.script());
    437 
    438  auto p = inlinedCompilations_.lookupForAdd(inlined);
    439  if (p) {
    440    auto& compilations = p->value();
    441    if (!compilations.empty() && compilations.back() == ionScriptKey) {
    442      return true;
    443    }
    444    return compilations.append(ionScriptKey);
    445  }
    446 
    447  IonScriptKeyVector compilations;
    448  if (!compilations.append(ionScriptKey)) {
    449    return false;
    450  }
    451  return inlinedCompilations_.add(p, inlined, std::move(compilations));
    452 }
    453 
// Queue invalidation for everything affected by `script`: its own IonScript
// (if any) and every compilation that inlined it. Also cancels any
// in-flight off-thread compile and delays recompilation.
void jit::AddPendingInvalidation(IonScriptKeyVector& invalid,
                                 JSScript* script) {
  MOZ_ASSERT(script);

  CancelOffThreadIonCompile(script);

  // Let the script warm up again before attempting another compile.
  script->resetWarmUpCounterToDelayIonCompilation();

  JitScript* jitScript = script->maybeJitScript();
  if (!jitScript) {
    // Never JIT-compiled: nothing to invalidate.
    return;
  }

  // Appending must not fail silently here, so crash with diagnostics on OOM
  // instead of returning an error.
  auto addPendingInvalidation = [&invalid](const IonScriptKey& ionScriptKey) {
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!invalid.append(ionScriptKey)) {
      // BUG 1536159: For diagnostics, compute the size of the failed
      // allocation. This presumes the vector growth strategy is to double. This
      // is only used for crash reporting so not a problem if we get it wrong.
      size_t allocSize = 2 * sizeof(IonScriptKey) * invalid.capacity();
      oomUnsafe.crash(allocSize, "Could not update IonScriptKeyVector");
    }
  };

  // Trigger invalidation of the IonScript.
  if (jitScript->hasIonScript()) {
    IonScriptKey ionScriptKey(script, jitScript->ionScript()->compilationId());
    addPendingInvalidation(ionScriptKey);
  }

  // Trigger invalidation of any callers inlining this script.
  auto* inlinedCompilations =
      script->zone()->jitZone()->maybeInlinedCompilations(script);
  if (inlinedCompilations) {
    for (const auto& ionScriptKey : *inlinedCompilations) {
      addPendingInvalidation(ionScriptKey);
    }
    script->zone()->jitZone()->removeInlinedCompilations(script);
  }
}
    495 
    496 IonScript* IonScriptKey::maybeIonScriptToInvalidate() const {
    497  // This must be called either on the main thread or when sweeping WeakCaches
    498  // off-thread.
    499  MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsGCSweeping());
    500 
    501 #ifdef DEBUG
    502  // Make sure this is not called under CodeGenerator::link (before the
    503  // corresponding IonScript is created).
    504  auto* jitZone = script_->zoneFromAnyThread()->jitZone();
    505  MOZ_ASSERT_IF(jitZone->currentCompilationId(),
    506                jitZone->currentCompilationId().ref() != id_);
    507 #endif
    508 
    509  if (!script_->hasIonScript() ||
    510      script_->ionScript()->compilationId() != id_) {
    511    return nullptr;
    512  }
    513 
    514  return script_->ionScript();
    515 }
    516 
    517 bool IonScriptKey::traceWeak(JSTracer* trc) {
    518  // Sweep the IonScriptKey if either the script is dead or the IonScript has
    519  // been invalidated.
    520 
    521  if (!TraceManuallyBarrieredWeakEdge(trc, &script_, "IonScriptKey::script")) {
    522    return false;
    523  }
    524 
    525  return maybeIonScriptToInvalidate() != nullptr;
    526 }
    527 
// Weakly trace all JitZone members that reference GC things, dropping
// entries whose referents died.
void JitZone::traceWeak(JSTracer* trc, Zone* zone) {
  MOZ_ASSERT(this == zone->jitZone());

  for (WeakHeapPtr<JitCode*>& stub : stubs_) {
    TraceWeakEdge(trc, &stub, "JitZone::stubs_");
  }

  baselineCacheIRStubCodes_.traceWeak(trc);
  inlinedCompilations_.traceWeak(trc);

  TraceWeakEdge(trc, &lastStubFoldingBailoutInner_,
                "JitZone::lastStubFoldingBailoutInner_");
  TraceWeakEdge(trc, &lastStubFoldingBailoutOuter_,
                "JitZone::lastStubFoldingBailoutOuter_");
}
    543 
// Memory-reporting hook: add this JitZone's malloc'd bytes to *jitZone,
// executable code sizes to *code, and CacheIR stub space to *cacheIRStubs.
void JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                     JS::CodeSizes* code, size_t* jitZone,
                                     size_t* cacheIRStubs) const {
  *jitZone += mallocSizeOf(this);
  *jitZone +=
      baselineCacheIRStubCodes_.shallowSizeOfExcludingThis(mallocSizeOf);
  *jitZone += ionCacheIRStubInfoSet_.shallowSizeOfExcludingThis(mallocSizeOf);

  execAlloc().addSizeOfCode(code);

  *cacheIRStubs += stubSpace_.sizeOfExcludingThis(mallocSizeOf);
}
    556 
// Store the back-pointer from the executable header to its JitCode cell.
void JitCodeHeader::init(JitCode* jitCode) {
  // As long as JitCode isn't moveable, we can avoid tracing this and
  // mutating executable data.
  MOZ_ASSERT(!gc::IsMovableKind(gc::AllocKind::JITCODE));
  jitCode_ = jitCode;
}
    563 
// Create a JitCode cell describing `totalSize` bytes of already-allocated
// executable memory at `code`, of which the leading `headerSize` bytes hold
// the JitCodeHeader. On failure the executable allocation is released back
// to `pool` on the caller's behalf.
template <AllowGC allowGC>
JitCode* JitCode::New(JSContext* cx, uint8_t* code, uint32_t totalSize,
                      uint32_t headerSize, ExecutablePool* pool,
                      CodeKind kind) {
  uint32_t bufferSize = totalSize - headerSize;
  JitCode* codeObj =
      cx->newCell<JitCode, allowGC>(code, bufferSize, headerSize, pool, kind);
  if (!codeObj) {
    // The caller already allocated `totalSize` bytes of executable memory.
    pool->release(totalSize, kind);
    return nullptr;
  }

  cx->zone()->incJitMemory(totalSize);

  return codeObj;
}
    581 
    582 template JitCode* JitCode::New<CanGC>(JSContext* cx, uint8_t* code,
    583                                      uint32_t bufferSize, uint32_t headerSize,
    584                                      ExecutablePool* pool, CodeKind kind);
    585 
    586 template JitCode* JitCode::New<NoGC>(JSContext* cx, uint8_t* code,
    587                                     uint32_t bufferSize, uint32_t headerSize,
    588                                     ExecutablePool* pool, CodeKind kind);
    589 
// Copy the assembled instructions and relocation tables from `masm` into
// this cell's executable memory, then resolve code labels in place.
void JitCode::copyFrom(MacroAssembler& masm) {
  // Store the JitCode pointer in the JitCodeHeader so we can recover the
  // gcthing from relocation tables.
  JitCodeHeader::FromExecutable(raw())->init(this);

  insnSize_ = masm.instructionsSize();
  masm.executableCopy(raw());

  jumpRelocTableBytes_ = masm.jumpRelocationTableBytes();
  masm.copyJumpRelocationTable(raw() + jumpRelocTableOffset());

  dataRelocTableBytes_ = masm.dataRelocationTableBytes();
  masm.copyDataRelocationTable(raw() + dataRelocTableOffset());

  masm.processCodeLabels(raw());
}
    606 
    607 void JitCode::traceChildren(JSTracer* trc) {
    608  // Note that we cannot mark invalidated scripts, since we've basically
    609  // corrupted the code stream by injecting bailouts.
    610  if (invalidated()) {
    611    return;
    612  }
    613 
    614  if (jumpRelocTableBytes_) {
    615    uint8_t* start = raw() + jumpRelocTableOffset();
    616    CompactBufferReader reader(start, start + jumpRelocTableBytes_);
    617    MacroAssembler::TraceJumpRelocations(trc, this, reader);
    618  }
    619  if (dataRelocTableBytes_) {
    620    uint8_t* start = raw() + dataRelocTableOffset();
    621    CompactBufferReader reader(start, start + dataRelocTableBytes_);
    622    MacroAssembler::TraceDataRelocations(trc, this, reader);
    623  }
    624 }
    625 
// GC finalizer: record the executable range for batched poisoning, then
// release the memory back to its ExecutablePool and update zone accounting.
void JitCode::finalize(JS::GCContext* gcx) {
  // If this jitcode had a bytecode map, it must have already been removed.
#ifdef DEBUG
  JSRuntime* rt = gcx->runtime();
  if (hasBytecodeMap_) {
    MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
    MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw()));
  }
#endif

#ifdef MOZ_VTUNE
  vtune::UnmarkCode(this);
#endif

  MOZ_ASSERT(pool_);

  // With W^X JIT code, reprotecting memory for each JitCode instance is
  // slow, so we record the ranges and poison them later all at once. It's
  // safe to ignore OOM here, it just means we won't poison the code.
  if (gcx->appendJitPoisonRange(JitPoisonRange(pool_, raw() - headerSize_,
                                               headerSize_ + bufferSize_))) {
    // The queued poison range keeps the pool alive until it is processed.
    pool_->addRef();
  }
  // The memory is about to be released; drop our pointer into it.
  setHeaderPtr(nullptr);

  pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));
  zone()->decJitMemory(headerSize_ + bufferSize_);

  pool_ = nullptr;
}
    656 
// Constructor only records the fixed-size fields; the trailing
// variable-length tables are laid out by IonScript::New.
IonScript::IonScript(IonCompilationId compilationId, uint32_t localSlotsSize,
                     uint32_t argumentSlotsSize, uint32_t frameSize)
    : localSlotsSize_(localSlotsSize),
      argumentSlotsSize_(argumentSlotsSize),
      frameSize_(frameSize),
      compilationId_(compilationId) {}
    663 
// Allocate an IonScript together with all of its trailing variable-length
// tables in a single malloc'd block. The offset computations below define
// the layout and must agree with the accessors asserted at the end; the
// ordering of sections is chosen so each section's required alignment is
// satisfied (see the alignment asserts).
IonScript* IonScript::New(JSContext* cx, IonCompilationId compilationId,
                          uint32_t localSlotsSize, uint32_t argumentSlotsSize,
                          uint32_t frameSize, size_t snapshotsListSize,
                          size_t snapshotsRVATableSize, size_t recoversSize,
                          size_t constants, size_t nurseryObjects,
                          size_t safepointIndices, size_t osiIndices,
                          size_t icEntries, size_t runtimeSize,
                          size_t safepointsSize) {
  if (snapshotsListSize >= MAX_BUFFER_SIZE) {
    ReportOutOfMemory(cx);
    return nullptr;
  }

  // Verify the hardcoded sizes in header are accurate.
  static_assert(SizeOf_OsiIndex == sizeof(OsiIndex),
                "IonScript has wrong size for OsiIndex");
  static_assert(SizeOf_SafepointIndex == sizeof(SafepointIndex),
                "IonScript has wrong size for SafepointIndex");

  // Sum all section sizes with overflow checking.
  CheckedInt<Offset> allocSize = sizeof(IonScript);
  allocSize += CheckedInt<Offset>(constants) * sizeof(HeapPtr<Value>);
  allocSize += CheckedInt<Offset>(runtimeSize);
  allocSize += CheckedInt<Offset>(nurseryObjects) * sizeof(HeapPtr<JSObject*>);
  allocSize += CheckedInt<Offset>(osiIndices) * sizeof(OsiIndex);
  allocSize += CheckedInt<Offset>(safepointIndices) * sizeof(SafepointIndex);
  allocSize += CheckedInt<Offset>(icEntries) * sizeof(uint32_t);
  allocSize += CheckedInt<Offset>(safepointsSize);
  allocSize += CheckedInt<Offset>(snapshotsListSize);
  allocSize += CheckedInt<Offset>(snapshotsRVATableSize);
  allocSize += CheckedInt<Offset>(recoversSize);

  if (!allocSize.isValid()) {
    ReportAllocationOverflow(cx);
    return nullptr;
  }

  void* raw = cx->pod_malloc<uint8_t>(allocSize.value());
  // Note: a null `raw` trivially passes this alignment check; the null case
  // is handled just below.
  MOZ_ASSERT(uintptr_t(raw) % alignof(IonScript) == 0);
  if (!raw) {
    return nullptr;
  }
  IonScript* script = new (raw)
      IonScript(compilationId, localSlotsSize, argumentSlotsSize, frameSize);

  // Lay out the sections, recording each section's start offset and
  // default-initializing the GC-pointer tables.
  Offset offsetCursor = sizeof(IonScript);

  MOZ_ASSERT(offsetCursor % alignof(HeapPtr<Value>) == 0);
  script->initElements<HeapPtr<Value>>(offsetCursor, constants);
  script->constantTableOffset_ = offsetCursor;
  offsetCursor += constants * sizeof(HeapPtr<Value>);

  MOZ_ASSERT(offsetCursor % alignof(uint64_t) == 0);
  script->runtimeDataOffset_ = offsetCursor;
  offsetCursor += runtimeSize;

  MOZ_ASSERT(offsetCursor % alignof(HeapPtr<JSObject*>) == 0);
  script->initElements<HeapPtr<JSObject*>>(offsetCursor, nurseryObjects);
  script->nurseryObjectsOffset_ = offsetCursor;
  offsetCursor += nurseryObjects * sizeof(HeapPtr<JSObject*>);

  MOZ_ASSERT(offsetCursor % alignof(OsiIndex) == 0);
  script->osiIndexOffset_ = offsetCursor;
  offsetCursor += osiIndices * sizeof(OsiIndex);

  MOZ_ASSERT(offsetCursor % alignof(SafepointIndex) == 0);
  script->safepointIndexOffset_ = offsetCursor;
  offsetCursor += safepointIndices * sizeof(SafepointIndex);

  MOZ_ASSERT(offsetCursor % alignof(uint32_t) == 0);
  script->icIndexOffset_ = offsetCursor;
  offsetCursor += icEntries * sizeof(uint32_t);

  // The remaining sections are byte buffers with no alignment requirement.
  script->safepointsOffset_ = offsetCursor;
  offsetCursor += safepointsSize;

  script->snapshotsOffset_ = offsetCursor;
  offsetCursor += snapshotsListSize;

  script->rvaTableOffset_ = offsetCursor;
  offsetCursor += snapshotsRVATableSize;

  script->recoversOffset_ = offsetCursor;
  offsetCursor += recoversSize;

  script->allocBytes_ = offsetCursor;

  // Cross-check the layout against the size accessors.
  MOZ_ASSERT(script->numConstants() == constants);
  MOZ_ASSERT(script->runtimeSize() == runtimeSize);
  MOZ_ASSERT(script->numNurseryObjects() == nurseryObjects);
  MOZ_ASSERT(script->numOsiIndices() == osiIndices);
  MOZ_ASSERT(script->numSafepointIndices() == safepointIndices);
  MOZ_ASSERT(script->numICs() == icEntries);
  MOZ_ASSERT(script->safepointsSize() == safepointsSize);
  MOZ_ASSERT(script->snapshotsListSize() == snapshotsListSize);
  MOZ_ASSERT(script->snapshotsRVATableSize() == snapshotsRVATableSize);
  MOZ_ASSERT(script->recoversSize() == recoversSize);
  MOZ_ASSERT(script->endOffset() == offsetCursor);

  return script;
}
    764 
// Trace all strong GC references held by this IonScript: its code, constant
// pool, nursery objects, and IC data.
void IonScript::trace(JSTracer* trc) {
  if (method_) {
    TraceEdge(trc, &method_, "method");
  }

  for (size_t i = 0; i < numConstants(); i++) {
    TraceEdge(trc, &getConstant(i), "constant");
  }

  for (size_t i = 0; i < numNurseryObjects(); i++) {
    TraceEdge(trc, &nurseryObjects()[i], "nursery-object");
  }

  // Trace caches so that the JSScript pointer can be updated if moved.
  for (size_t i = 0; i < numICs(); i++) {
    getICFromIndex(i).trace(trc, this);
  }
}
    783 
// Intentionally a no-op: IonScript holds no weak pointers today.
void IonScript::traceWeak(JSTracer* trc) {
  // IonICs do not currently contain weak pointers. If this is added then they
  // should be traced here.
}
    788 
/* static */
// Forward to the generic GC pre-write barrier for this IonScript.
void IonScript::preWriteBarrier(Zone* zone, IonScript* ionScript) {
  PreWriteBarrier(zone, ionScript);
}
    793 
    794 void IonScript::copySnapshots(const SnapshotWriter* writer) {
    795  MOZ_ASSERT(writer->listSize() == snapshotsListSize());
    796  memcpy(offsetToPointer<uint8_t>(snapshotsOffset()), writer->listBuffer(),
    797         snapshotsListSize());
    798 
    799  MOZ_ASSERT(snapshotsRVATableSize());
    800  MOZ_ASSERT(writer->RVATableSize() == snapshotsRVATableSize());
    801  memcpy(offsetToPointer<uint8_t>(rvaTableOffset()), writer->RVATableBuffer(),
    802         snapshotsRVATableSize());
    803 }
    804 
    805 void IonScript::copyRecovers(const RecoverWriter* writer) {
    806  MOZ_ASSERT(writer->size() == recoversSize());
    807  memcpy(offsetToPointer<uint8_t>(recoversOffset()), writer->buffer(),
    808         recoversSize());
    809 }
    810 
    811 void IonScript::copySafepoints(const SafepointWriter* writer) {
    812  MOZ_ASSERT(writer->size() == safepointsSize());
    813  memcpy(offsetToPointer<uint8_t>(safepointsOffset()), writer->buffer(),
    814         safepointsSize());
    815 }
    816 
    817 void IonScript::copyConstants(const Value* vp) {
    818  for (size_t i = 0; i < numConstants(); i++) {
    819    constants()[i].init(vp[i]);
    820  }
    821 }
    822 
    823 void IonScript::copySafepointIndices(const CodegenSafepointIndex* si) {
    824  // Convert CodegenSafepointIndex to more compact form.
    825  SafepointIndex* table = safepointIndices();
    826  for (size_t i = 0; i < numSafepointIndices(); ++i) {
    827    table[i] = SafepointIndex(si[i]);
    828  }
    829 }
    830 
// Bulk-copy the OSI index table into this script's trailing data.
void IonScript::copyOsiIndices(const OsiIndex* oi) {
  memcpy(osiIndices(), oi, numOsiIndices() * sizeof(OsiIndex));
}
    834 
// Copy runtimeSize() bytes of runtime data into this script's trailing data.
void IonScript::copyRuntimeData(const uint8_t* data) {
  memcpy(runtimeData(), data, runtimeSize());
}
    838 
    839 void IonScript::copyICEntries(const uint32_t* icEntries) {
    840  memcpy(icIndex(), icEntries, numICs() * sizeof(uint32_t));
    841 
    842  // Update the codeRaw_ field in the ICs now that we know the code address.
    843  for (size_t i = 0; i < numICs(); i++) {
    844    getICFromIndex(i).resetCodeRaw(this);
    845  }
    846 }
    847 
    848 const SafepointIndex* IonScript::getSafepointIndex(uint32_t disp) const {
    849  MOZ_ASSERT(numSafepointIndices() > 0);
    850 
    851  const SafepointIndex* table = safepointIndices();
    852  if (numSafepointIndices() == 1) {
    853    MOZ_ASSERT(disp == table[0].displacement());
    854    return &table[0];
    855  }
    856 
    857  size_t minEntry = 0;
    858  size_t maxEntry = numSafepointIndices() - 1;
    859  uint32_t min = table[minEntry].displacement();
    860  uint32_t max = table[maxEntry].displacement();
    861 
    862  // Raise if the element is not in the list.
    863  MOZ_ASSERT(min <= disp && disp <= max);
    864 
    865  // Approximate the location of the FrameInfo.
    866  size_t guess = (disp - min) * (maxEntry - minEntry) / (max - min) + minEntry;
    867  uint32_t guessDisp = table[guess].displacement();
    868 
    869  if (table[guess].displacement() == disp) {
    870    return &table[guess];
    871  }
    872 
    873  // Doing a linear scan from the guess should be more efficient in case of
    874  // small group which are equally distributed on the code.
    875  //
    876  // such as:  <...      ...    ...  ...  .   ...    ...>
    877  if (guessDisp > disp) {
    878    while (--guess >= minEntry) {
    879      guessDisp = table[guess].displacement();
    880      MOZ_ASSERT(guessDisp >= disp);
    881      if (guessDisp == disp) {
    882        return &table[guess];
    883      }
    884    }
    885  } else {
    886    while (++guess <= maxEntry) {
    887      guessDisp = table[guess].displacement();
    888      MOZ_ASSERT(guessDisp <= disp);
    889      if (guessDisp == disp) {
    890        return &table[guess];
    891      }
    892    }
    893  }
    894 
    895  MOZ_CRASH("displacement not found.");
    896 }
    897 
    898 const OsiIndex* IonScript::getOsiIndex(uint32_t disp) const {
    899  const OsiIndex* end = osiIndices() + numOsiIndices();
    900  for (const OsiIndex* it = osiIndices(); it != end; ++it) {
    901    if (it->returnPointDisplacement() == disp) {
    902      return it;
    903    }
    904  }
    905 
    906  MOZ_CRASH("Failed to find OSI point return address");
    907 }
    908 
// Overload: translate a return address inside this script's code into an
// OsiIndex by converting it to a displacement from the code start.
const OsiIndex* IonScript::getOsiIndex(uint8_t* retAddr) const {
  JitSpew(JitSpew_IonInvalidate, "IonScript %p has method %p raw %p",
          (void*)this, (void*)method(), method()->raw());

  MOZ_ASSERT(containsCodeAddress(retAddr));
  uint32_t disp = retAddr - method()->raw();
  return getOsiIndex(disp);
}
    917 
void IonScript::Destroy(JS::GCContext* gcx, IonScript* script) {
  // Destroy the HeapPtrs to ensure there are no pointers into the IonScript's
  // nursery objects list or constants list in the store buffer. Because this
  // can be called during sweeping when discarding JIT code, we have to lock the
  // store buffer when we find a pointer that's (still) in the nursery.
  mozilla::Maybe<gc::AutoLockStoreBuffer> lock;
  for (size_t i = 0, len = script->numNurseryObjects(); i < len; i++) {
    JSObject* obj = script->nurseryObjects()[i];
    // Take the lock lazily, on the first nursery pointer encountered.
    if (lock.isNothing() && IsInsideNursery(obj)) {
      lock.emplace(gcx->runtimeFromAnyThread());
    }
    script->nurseryObjects()[i].~HeapPtr<JSObject*>();
  }
  for (size_t i = 0, len = script->numConstants(); i < len; i++) {
    Value v = script->getConstant(i);
    // Same lazy-locking scheme for nursery-allocated GC things in constants.
    if (lock.isNothing() && v.isGCThing() && IsInsideNursery(v.toGCThing())) {
      lock.emplace(gcx->runtimeFromAnyThread());
    }
    script->getConstant(i).~HeapPtr<Value>();
  }

  // This allocation is tracked by JSScript::setIonScriptImpl.
  gcx->deleteUntracked(script);
}
    942 
// JS::DeletePolicy specialization: route destruction through
// IonScript::Destroy so the HeapPtr members are torn down correctly.
void JS::DeletePolicy<js::jit::IonScript>::operator()(
    const js::jit::IonScript* script) {
  IonScript::Destroy(rt_->gcContext(), const_cast<IonScript*>(script));
}
    947 
    948 void IonScript::purgeICs(Zone* zone) {
    949  for (size_t i = 0; i < numICs(); i++) {
    950    getICFromIndex(i).reset(zone, this);
    951  }
    952 }
    953 
    954 namespace js {
    955 namespace jit {
    956 
// Run the MIR optimization pipeline over the graph owned by |mir|. Returns
// false on OOM or if the compilation was cancelled; callers must treat false
// as a failed compilation. The pass order is significant: each pass is
// followed by a spew/coherency check and a cancellation check so that
// helper-thread compilations can be interrupted between passes.
bool OptimizeMIR(MIRGenerator* mir) {
  MIRGraph& graph = mir->graph();

  if (mir->shouldCancel("Start")) {
    return false;
  }

  mir->spewPass("BuildSSA");
  AssertBasicGraphCoherency(graph);

  if (JitSpewEnabled(JitSpew_MIRExpressions)) {
    JitSpewCont(JitSpew_MIRExpressions, "\n");
    DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
                       "BuildSSA (== input to OptimizeMIR)");
  }

  if (!JitOptions.disablePruning && !mir->compilingWasm()) {
    JitSpewCont(JitSpew_Prune, "\n");
    if (!PruneUnusedBranches(mir, graph)) {
      return false;
    }
    mir->spewPass("Prune Unused Branches");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Prune Unused Branches")) {
      return false;
    }
  }

  {
    bool dummy;
    if (!FoldEmptyBlocks(graph, &dummy)) {
      return false;
    }
    mir->spewPass("Fold Empty Blocks");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Empty Blocks")) {
      return false;
    }
  }

  // Remove trivially dead resume point operands before folding tests, so the
  // latter pass can optimize more aggressively.
  if (!mir->compilingWasm()) {
    if (!EliminateTriviallyDeadResumePointOperands(mir, graph)) {
      return false;
    }
    mir->spewPass("Eliminate trivially dead resume point operands");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Eliminate trivially dead resume point operands")) {
      return false;
    }
  }

  {
    if (!FoldTests(graph)) {
      return false;
    }
    mir->spewPass("Fold Tests");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Tests")) {
      return false;
    }
  }

  {
    if (!SplitCriticalEdges(graph)) {
      return false;
    }
    mir->spewPass("Split Critical Edges");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Split Critical Edges")) {
      return false;
    }
  }

  {
    RenumberBlocks(graph);
    mir->spewPass("Renumber Blocks");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Renumber Blocks")) {
      return false;
    }
  }

  {
    if (!BuildDominatorTree(mir, graph)) {
      return false;
    }
    // No spew: graph not changed.

    if (mir->shouldCancel("Dominator Tree")) {
      return false;
    }
  }

  {
    // Aggressive phi elimination must occur before any code elimination. If the
    // script contains a try-statement, we only compiled the try block and not
    // the catch or finally blocks, so in this case it's also invalid to use
    // aggressive phi elimination.
    Observability observability = graph.hasTryBlock()
                                      ? ConservativeObservability
                                      : AggressiveObservability;
    if (!EliminatePhis(mir, graph, observability)) {
      return false;
    }
    mir->spewPass("Eliminate phis");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Eliminate phis")) {
      return false;
    }

    if (!BuildPhiReverseMapping(graph)) {
      return false;
    }
    AssertExtendedGraphCoherency(graph);
    // No spew: graph not changed.

    if (mir->shouldCancel("Phi reverse mapping")) {
      return false;
    }
  }

  if (!JitOptions.disableRecoverIns &&
      mir->optimizationInfo().scalarReplacementEnabled() &&
      !JitOptions.disableObjectKeysScalarReplacement) {
    JitSpewCont(JitSpew_Escape, "\n");
    if (!ReplaceObjectKeys(mir, graph)) {
      return false;
    }
    mir->spewPass("Replace ObjectKeys");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Replace ObjectKeys")) {
      return false;
    }
  }

  if (!mir->compilingWasm() && !JitOptions.disableIteratorIndices) {
    if (!OptimizeIteratorIndices(mir, graph)) {
      return false;
    }
    mir->spewPass("Iterator Indices");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Iterator Indices")) {
      return false;
    }
  }

  if (!JitOptions.disableRecoverIns &&
      mir->optimizationInfo().scalarReplacementEnabled()) {
    JitSpewCont(JitSpew_Escape, "\n");
    if (!ScalarReplacement(mir, graph)) {
      return false;
    }
    mir->spewPass("Scalar Replacement");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Scalar Replacement")) {
      return false;
    }
  }

  if (!mir->compilingWasm()) {
    if (!ApplyTypeInformation(mir, graph)) {
      return false;
    }
    mir->spewPass("Apply types");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Apply types")) {
      return false;
    }
  }

  if (mir->compilingWasm()) {
    if (!TrackWasmRefTypes(graph)) {
      return false;
    }
    mir->spewPass("Track Wasm ref types");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Track Wasm ref types")) {
      return false;
    }
  }

  if (mir->optimizationInfo().amaEnabled()) {
    AlignmentMaskAnalysis ama(graph);
    if (!ama.analyze()) {
      return false;
    }
    mir->spewPass("Alignment Mask Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Alignment Mask Analysis")) {
      return false;
    }
  }

  // The value numberer is created here (even if GVN is disabled) because it is
  // reused later, e.g. for UCE after range analysis and after loop unrolling.
  ValueNumberer gvn(mir, graph);

  // Alias analysis is required for LICM and GVN so that we don't move
  // loads across stores. We also use alias information when removing
  // redundant shapeguards.
  if (mir->optimizationInfo().licmEnabled() ||
      mir->optimizationInfo().gvnEnabled() ||
      mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
    {
      AliasAnalysis analysis(mir, graph);
      JitSpewCont(JitSpew_Alias, "\n");
      if (!analysis.analyze()) {
        return false;
      }

      mir->spewPass("Alias analysis");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("Alias analysis")) {
        return false;
      }
    }

    if (!mir->compilingWasm()) {
      // Eliminating dead resume point operands requires basic block
      // instructions to be numbered. Reuse the numbering computed during
      // alias analysis.
      if (!EliminateDeadResumePointOperands(mir, graph)) {
        return false;
      }

      mir->spewPass("Eliminate dead resume point operands");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("Eliminate dead resume point operands")) {
        return false;
      }
    }
  }

  if (mir->optimizationInfo().gvnEnabled()) {
    JitSpewCont(JitSpew_GVN, "\n");
    if (!gvn.run(ValueNumberer::UpdateAliasAnalysis)) {
      return false;
    }
    mir->spewPass("GVN");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("GVN")) {
      return false;
    }
  }

  if (mir->branchHintingEnabled()) {
    JitSpewCont(JitSpew_BranchHint, "\n");
    if (!BranchHinting(mir, graph)) {
      return false;
    }
    mir->spewPass("BranchHinting");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("BranchHinting")) {
      return false;
    }
  }

  // LICM can hoist instructions from conditional branches and
  // trigger bailouts. Disable it if bailing out of a hoisted
  // instruction has previously invalidated this script.
  if (mir->licmEnabled()) {
    JitSpewCont(JitSpew_LICM, "\n");
    if (!LICM(mir, graph)) {
      return false;
    }
    mir->spewPass("LICM");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("LICM")) {
      return false;
    }
  }

  // The RangeAnalysis instance outlives the conditional below because it is
  // also used later by removeUnnecessaryBitops.
  RangeAnalysis r(mir, graph);
  if (mir->optimizationInfo().rangeAnalysisEnabled()) {
    JitSpewCont(JitSpew_Range, "\n");
    if (!r.addBetaNodes()) {
      return false;
    }
    mir->spewPass("Beta");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("RA Beta")) {
      return false;
    }

    if (!r.analyze() || !r.addRangeAssertions()) {
      return false;
    }
    mir->spewPass("Range Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Range Analysis")) {
      return false;
    }

    if (!r.removeBetaNodes()) {
      return false;
    }
    mir->spewPass("De-Beta");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("RA De-Beta")) {
      return false;
    }

    if (mir->optimizationInfo().gvnEnabled()) {
      bool shouldRunUCE = false;
      if (!r.prepareForUCE(&shouldRunUCE)) {
        return false;
      }
      mir->spewPass("RA check UCE");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("RA check UCE")) {
        return false;
      }

      if (shouldRunUCE) {
        if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis)) {
          return false;
        }
        mir->spewPass("UCE After RA");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("UCE After RA")) {
          return false;
        }
      }
    }

    if (mir->optimizationInfo().autoTruncateEnabled()) {
      if (!r.truncate()) {
        return false;
      }
      mir->spewPass("Truncate Doubles");
      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("Truncate Doubles")) {
        return false;
      }
    }
  }

  if (!JitOptions.disableRecoverIns) {
    JitSpewCont(JitSpew_Sink, "\n");
    if (!Sink(mir, graph)) {
      return false;
    }
    mir->spewPass("Sink");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Sink")) {
      return false;
    }
  }

  if (!JitOptions.disableRecoverIns &&
      mir->optimizationInfo().rangeAnalysisEnabled()) {
    JitSpewCont(JitSpew_Range, "\n");
    if (!r.removeUnnecessaryBitops()) {
      return false;
    }
    mir->spewPass("Remove Unnecessary Bitops");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Remove Unnecessary Bitops")) {
      return false;
    }
  }

  {
    JitSpewCont(JitSpew_FLAC, "\n");
    if (!FoldLinearArithConstants(mir, graph)) {
      return false;
    }
    mir->spewPass("Fold Linear Arithmetic Constants");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Fold Linear Arithmetic Constants")) {
      return false;
    }
  }

  // EAA, but only for wasm; it appears to be of minimal benefit for JS inputs.
  if (mir->compilingWasm() && mir->optimizationInfo().eaaEnabled()) {
    EffectiveAddressAnalysis eaa(mir, graph);
    JitSpewCont(JitSpew_EAA, "\n");
    if (!eaa.analyze()) {
      return false;
    }
    mir->spewPass("Effective Address Analysis");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Effective Address Analysis")) {
      return false;
    }
  }

  // BCE marks bounds checks as dead, so do BCE before DCE.
  if (mir->compilingWasm()) {
    JitSpewCont(JitSpew_WasmBCE, "\n");
    if (!EliminateBoundsChecks(mir, graph)) {
      return false;
    }
    mir->spewPass("Redundant Bounds Check Elimination");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("BCE")) {
      return false;
    }
  }

  {
    if (!EliminateDeadCode(mir, graph)) {
      return false;
    }
    mir->spewPass("DCE");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("DCE")) {
      return false;
    }
  }

  if (!JitOptions.disableMarkLoadsUsedAsPropertyKeys && !mir->compilingWasm()) {
    JitSpewCont(JitSpew_MarkLoadsUsedAsPropertyKeys, "\n");
    if (!MarkLoadsUsedAsPropertyKeys(graph)) {
      return false;
    }
    if (mir->shouldCancel("MarkLoadsUsedAsPropertyKeys")) {
      return false;
    }
  }

  if (mir->optimizationInfo().instructionReorderingEnabled() &&
      !mir->outerInfo().hadReorderingBailout()) {
    if (!ReorderInstructions(mir, graph)) {
      return false;
    }
    mir->spewPass("Reordering");

    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Reordering")) {
      return false;
    }
  }

  // Make loops contiguous. We do this after GVN/UCE and range analysis,
  // which can remove CFG edges, exposing more blocks that can be moved.
  {
    if (!MakeLoopsContiguous(graph)) {
      return false;
    }
    mir->spewPass("Make loops contiguous");
    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Make loops contiguous")) {
      return false;
    }
  }
  AssertExtendedGraphCoherency(graph, /* underValueNumberer = */ false,
                               /* force = */ true);

  // Unroll and/or peel loops
  if (mir->compilingWasm() && JS::Prefs::wasm_unroll_loops()) {
    bool loopsChanged;
    if (!UnrollLoops(mir, graph, &loopsChanged)) {
      return false;
    }

    mir->spewPass("Unroll loops");

    AssertExtendedGraphCoherency(graph);

    if (mir->shouldCancel("Unroll loops")) {
      return false;
    }

    if (loopsChanged) {
      // Rerun GVN in the hope that unrolling exposed more optimization
      // opportunities.
      if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis)) {
        return false;
      }
      // And tidy up any empty blocks.
      bool blocksFolded;
      if (!FoldEmptyBlocks(graph, &blocksFolded)) {
        return false;
      }
      if (blocksFolded) {
        // Redo the dominator tree.
        ClearDominatorTree(graph);
        if (!BuildDominatorTree(mir, graph)) {
          return false;
        }
      }

      AssertExtendedGraphCoherency(graph);

      if (mir->shouldCancel("Rerun GVN after loop unrolling")) {
        return false;
      }
    }
  }

  // Remove unreachable blocks created by MBasicBlock::NewFakeLoopPredecessor
  // to ensure every loop header has two predecessors. (This only happens due
  // to OSR.)  After this point, it is no longer possible to build the
  // dominator tree.
  if (!mir->compilingWasm() && graph.osrBlock()) {
    graph.removeFakeLoopPredecessors();
    mir->spewPass("Remove fake loop predecessors");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Remove fake loop predecessors")) {
      return false;
    }
  }

  // Passes after this point must not move instructions; these analyses
  // depend on knowing the final order in which instructions will execute.

  if (mir->optimizationInfo().edgeCaseAnalysisEnabled()) {
    EdgeCaseAnalysis edgeCaseAnalysis(mir, graph);
    if (!edgeCaseAnalysis.analyzeLate()) {
      return false;
    }
    mir->spewPass("Edge Case Analysis (Late)");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Edge Case Analysis (Late)")) {
      return false;
    }
  }

  if (mir->optimizationInfo().eliminateRedundantChecksEnabled()) {
    // Note: check elimination has to run after all other passes that move
    // instructions. Since check uses are replaced with the actual index,
    // code motion after this pass could incorrectly move a load or store
    // before its bounds check.
    if (!EliminateRedundantChecks(graph)) {
      return false;
    }
    mir->spewPass("Bounds Check Elimination");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Bounds Check Elimination")) {
      return false;
    }
  }

  if (mir->optimizationInfo().eliminateRedundantShapeGuardsEnabled()) {
    if (!EliminateRedundantShapeGuards(graph)) {
      return false;
    }
    mir->spewPass("Shape Guard Elimination");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Shape Guard Elimination")) {
      return false;
    }
  }

  // Run the GC Barrier Elimination pass after instruction reordering, to
  // ensure we don't move instructions that can trigger GC between stores we
  // optimize here.
  if (mir->optimizationInfo().eliminateRedundantGCBarriersEnabled()) {
    if (!EliminateRedundantGCBarriers(graph)) {
      return false;
    }
    mir->spewPass("GC Barrier Elimination");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("GC Barrier Elimination")) {
      return false;
    }
  }

  if (!mir->compilingWasm() && !mir->outerInfo().hadUnboxFoldingBailout()) {
    if (!FoldLoadsWithUnbox(mir, graph)) {
      return false;
    }
    mir->spewPass("FoldLoadsWithUnbox");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("FoldLoadsWithUnbox")) {
      return false;
    }
  }

  if (!mir->compilingWasm()) {
    if (!AddKeepAliveInstructions(graph)) {
      return false;
    }
    mir->spewPass("Add KeepAlive Instructions");
    AssertGraphCoherency(graph);

    if (mir->shouldCancel("Add KeepAlive Instructions")) {
      return false;
    }
  }

  AssertGraphCoherency(graph, /* force = */ true);

  if (JitSpewEnabled(JitSpew_MIRExpressions)) {
    JitSpewCont(JitSpew_MIRExpressions, "\n");
    DumpMIRExpressions(JitSpewPrinter(), graph, mir->outerInfo(),
                       "BeforeLIR (== result of OptimizeMIR)");
  }

  return true;
}
   1588 
// Lower the optimized MIR graph to LIR and run register allocation.
// Returns nullptr on OOM or cancellation. The LIRGraph is allocated in the
// MIRGenerator's LifoAlloc, so it shares the compilation's lifetime.
LIRGraph* GenerateLIR(MIRGenerator* mir) {
  MIRGraph& graph = mir->graph();

  LIRGraph* lir = mir->alloc().lifoAlloc()->new_<LIRGraph>(&graph);
  if (!lir || !lir->init()) {
    return nullptr;
  }

  LIRGenerator lirgen(mir, graph, *lir);
  {
    if (!lirgen.generate()) {
      return nullptr;
    }
    mir->spewPass("Generate LIR");

    if (mir->shouldCancel("Generate LIR")) {
      return nullptr;
    }
  }

#ifdef DEBUG
  // Record the allocation state before register allocation so it can be
  // cross-checked afterwards (only when full debug checks are enabled).
  AllocationIntegrityState integrity(*lir);
  if (JitOptions.fullDebugChecks) {
    if (!integrity.record()) {
      return nullptr;
    }
  }
#endif

  // Run the register allocator selected by the optimization level.
  IonRegisterAllocator allocator = mir->optimizationInfo().registerAllocator();
  switch (allocator) {
    case RegisterAllocator_Backtracking: {
      BacktrackingAllocator regalloc(mir, &lirgen, *lir);
      if (!regalloc.go()) {
        return nullptr;
      }
      mir->spewPass("Allocate Registers [Backtracking]", &regalloc);
      break;
    }
    case RegisterAllocator_Simple: {
      SimpleAllocator regalloc(mir, &lirgen, *lir);
      if (!regalloc.go()) {
        return nullptr;
      }
      mir->spewPass("Allocate Registers [Simple]");
      break;
    }
    default:
      MOZ_CRASH("Bad regalloc");
  }

#ifdef DEBUG
  if (JitOptions.fullDebugChecks) {
    if (!integrity.check()) {
      return nullptr;
    }
  }
#endif

  if (mir->shouldCancel("Allocate Registers")) {
    return nullptr;
  }

  return lir;
}
   1654 
   1655 static CodeGenerator* GenerateCode(MIRGenerator* mir, LIRGraph* lir,
   1656                                   const WarpSnapshot* snapshot) {
   1657  auto codegen = MakeUnique<CodeGenerator>(mir, lir);
   1658  if (!codegen) {
   1659    return nullptr;
   1660  }
   1661 
   1662  if (!codegen->generate(snapshot)) {
   1663    return nullptr;
   1664  }
   1665 
   1666  return codegen.release();
   1667 }
   1668 
// Run the full Ion back end (MIR build, optimization, LIR, code generation)
// for a Warp snapshot. Returns nullptr on failure or cancellation.
CodeGenerator* CompileBackEnd(MIRGenerator* mir, WarpSnapshot* snapshot) {
  // Everything in CompileBackEnd can potentially run on a helper thread.
  AutoEnterIonBackend enter;
  AutoSpewEndFunction spewEndFunction(mir);
  mozilla::TimeStamp compileStartTime = mozilla::TimeStamp::Now();

  // Build MIR from the snapshot. The WarpCompilation is scoped so its
  // temporary state is released before optimization starts.
  {
    WarpCompilation comp(mir->alloc());
    WarpBuilder builder(*snapshot, *mir, &comp);
    if (!builder.build()) {
      return nullptr;
    }
  }

  if (!OptimizeMIR(mir)) {
    return nullptr;
  }

  LIRGraph* lir = GenerateLIR(mir);
  if (!lir) {
    return nullptr;
  }

  CodeGenerator* codegen = GenerateCode(mir, lir, snapshot);
  if (codegen) {
    // Record the wall-clock time spent on this compilation.
    codegen->setCompilationTime(mozilla::TimeStamp::Now() - compileStartTime);
  }
  return codegen;
}
   1698 
// Create a Warp snapshot of |script| via the WarpOracle; this captures the
// inputs the back end (see CompileBackEnd) consumes.
static AbortReasonOr<WarpSnapshot*> CreateWarpSnapshot(JSContext* cx,
                                                       MIRGenerator* mirGen,
                                                       HandleScript script) {
  // Suppress GC during compilation.
  gc::AutoSuppressGC suppressGC(cx);

  mirGen->spewBeginFunction(script);

  WarpOracle oracle(cx, *mirGen, script);

  AbortReasonOr<WarpSnapshot*> result = oracle.createSnapshot();

  // On failure only Alloc/Error/Disable abort reasons are possible; on
  // success the snapshot pointer is non-null.
  MOZ_ASSERT_IF(result.isErr(), result.unwrapErr() == AbortReason::Alloc ||
                                    result.unwrapErr() == AbortReason::Error ||
                                    result.unwrapErr() == AbortReason::Disable);
  MOZ_ASSERT_IF(!result.isErr(), result.unwrap());

  return result;
}
   1718 
   1719 UniquePtr<LifoAlloc> JitRuntime::tryReuseIonLifoAlloc() {
   1720  // Try to reuse the LifoAlloc of a finished Ion compilation task for a new
   1721  // Ion compilation. If there are multiple tasks, we pick the one with the
   1722  // largest LifoAlloc.
   1723 
   1724  auto& batch = ionFreeTaskBatch_.ref();
   1725  IonCompileTask* bestTask = nullptr;
   1726  size_t bestTaskIndex = 0;
   1727  size_t bestTaskSize = 0;
   1728 
   1729  for (size_t i = 0, len = batch.length(); i < len; i++) {
   1730    IonCompileTask* task = batch[i];
   1731    if (task->alloc().lifoAlloc()->isHuge()) {
   1732      // Ignore 'huge' LifoAllocs. This avoids keeping a lot of memory alive and
   1733      // also avoids freeing all LifoAlloc memory (instead of reusing it) in
   1734      // freeAllIfHugeAndUnused.
   1735      continue;
   1736    }
   1737    size_t taskSize = task->alloc().lifoAlloc()->computedSizeOfExcludingThis();
   1738    if (!bestTask || taskSize >= bestTaskSize) {
   1739      bestTask = task;
   1740      bestTaskIndex = i;
   1741      bestTaskSize = taskSize;
   1742    }
   1743  }
   1744 
   1745  if (bestTask) {
   1746    batch.erase(&batch[bestTaskIndex]);
   1747    return FreeIonCompileTaskAndReuseLifoAlloc(bestTask);
   1748  }
   1749 
   1750  return nullptr;
   1751 }
   1752 
// Compile |script| with Ion, preferably on a helper thread, otherwise
// synchronously on the main thread. |osrPc| is the loop-head bytecode to
// enter at for OSR, or nullptr for a normal entry-point compilation.
// Returns AbortReason::NoAbort when an off-thread compilation was started or
// a main-thread compilation finished and linked successfully.
static AbortReason IonCompile(JSContext* cx, HandleScript script,
                              jsbytecode* osrPc) {
  cx->check(script);

  if (!cx->zone()->ensureJitZoneExists(cx)) {
    return AbortReason::Error;
  }

  // Reuse the LifoAlloc of a finished compilation task when possible,
  // otherwise create a fresh one. Everything below (temp allocator, graph,
  // compile info, MIR generator) lives in this LifoAlloc.
  UniquePtr<LifoAlloc> alloc =
      cx->runtime()->jitRuntime()->tryReuseIonLifoAlloc();
  if (!alloc) {
    alloc = cx->make_unique<LifoAlloc>(TempAllocator::PreferredLifoChunkSize,
                                       js::MallocArena);
    if (!alloc) {
      return AbortReason::Error;
    }
  }

  TempAllocator* temp = alloc->new_<TempAllocator>(alloc.get());
  if (!temp) {
    return AbortReason::Alloc;
  }

  MIRGraph* graph = alloc->new_<MIRGraph>(temp);
  if (!graph) {
    return AbortReason::Alloc;
  }

  // Root of the inlining tree: this script itself, with no caller.
  InlineScriptTree* inlineScriptTree =
      InlineScriptTree::New(temp, nullptr, nullptr, script);
  if (!inlineScriptTree) {
    return AbortReason::Alloc;
  }

  CompileInfo* info =
      alloc->new_<CompileInfo>(CompileRuntime::get(cx->runtime()), script,
                               osrPc, script->needsArgsObj(), inlineScriptTree);
  if (!info) {
    return AbortReason::Alloc;
  }

  const OptimizationInfo* optimizationInfo =
      IonOptimizations.get(OptimizationLevel::Normal);
  const JitCompileOptions options(cx);

  MIRGenerator* mirGen =
      alloc->new_<MIRGenerator>(CompileRealm::get(cx->realm()), options, temp,
                                graph, info, optimizationInfo);
  if (!mirGen) {
    return AbortReason::Alloc;
  }

  // Reset the MIR generator's tracker on every exit path. This guard is
  // released below once an off-thread task takes ownership of the data.
  auto clearDependencies =
      mozilla::MakeScopeExit([mirGen]() { mirGen->tracker.reset(); });

  MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
  MOZ_ASSERT(!script->hasIonScript());
  MOZ_ASSERT(script->canIonCompile());

  if (osrPc) {
    script->jitScript()->setHadIonOSR();
  }

  // Snapshot the script (and its IC data) on the main thread; the snapshot
  // is what the (possibly off-thread) back end consumes.
  AbortReasonOr<WarpSnapshot*> result = CreateWarpSnapshot(cx, mirGen, script);
  if (result.isErr()) {
    return result.unwrapErr();
  }
  WarpSnapshot* snapshot = result.unwrap();

  // If possible, compile the script off thread.
  if (options.offThreadCompilationAvailable()) {
    JitSpew(JitSpew_IonSyncLogs,
            "Can't log script %s:%u:%u"
            ". (Compiled on background thread.)",
            script->filename(), script->lineno(),
            script->column().oneOriginValue());

    IonCompileTask* task = alloc->new_<IonCompileTask>(cx, *mirGen, snapshot);
    if (!task) {
      return AbortReason::Alloc;
    }

    AutoLockHelperThreadState lock;
    if (!StartOffThreadIonCompile(task, lock)) {
      JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
      mirGen->spewEndFunction();
      return AbortReason::Alloc;
    }

    script->jitScript()->setIsIonCompilingOffThread(script);

    // The allocator and associated data will be destroyed after being
    // processed in the finishedOffThreadCompilations list.
    (void)alloc.release();
    clearDependencies.release();

    return AbortReason::NoAbort;
  }

  // Off-thread compilation is unavailable: run the back end synchronously
  // here and link the generated code right away.
  bool succeeded = false;
  {
    gc::AutoSuppressGC suppressGC(cx);
    JitContext jctx(cx);
    UniquePtr<CodeGenerator> codegen(CompileBackEnd(mirGen, snapshot));
    if (!codegen) {
      JitSpew(JitSpew_IonAbort, "Failed during back-end compilation.");
      if (cx->isExceptionPending()) {
        return AbortReason::Error;
      }
      return AbortReason::Disable;
    }

    succeeded = LinkCodeGen(cx, codegen.get(), script);
  }

  if (succeeded) {
    return AbortReason::NoAbort;
  }
  // A pending exception means a real error; otherwise disable Ion for this
  // script.
  if (cx->isExceptionPending()) {
    return AbortReason::Error;
  }
  return AbortReason::Disable;
}
   1876 
// Debug-only sanity checks that |frame| (a Baseline frame) satisfies the
// preconditions for tiering up to Ion. Compiles to nothing in release builds.
static void AssertBaselineFrameCanEnterIon(JSContext* cx,
                                           BaselineFrame* frame) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));
  // Eval frames are rejected by CanIonCompileScript, so they must never get
  // here.
  MOZ_ASSERT(!frame->isEvalFrame());
  MOZ_ASSERT(frame->script()->canIonCompile());
  MOZ_ASSERT(!frame->script()->isIonCompilingOffThread());

  // Baseline has the same limit for the number of actual arguments, so if we
  // entered Baseline we can also enter Ion.
  MOZ_ASSERT_IF(frame->isFunctionFrame(),
                !TooManyActualArguments(frame->numActualArgs()));

  // The number of formal arguments is checked in CanIonCompileScript. The
  // Baseline JIT shouldn't attempt to tier up if that returns false.
  MOZ_ASSERT_IF(frame->isFunctionFrame(),
                !TooManyFormalArguments(frame->numFormalArgs()));
}
   1894 
   1895 static bool ScriptIsTooLarge(JSContext* cx, JSScript* script) {
   1896  if (!JitOptions.limitScriptSize) {
   1897    return false;
   1898  }
   1899 
   1900  size_t numLocalsAndArgs = NumLocalsAndArgs(script);
   1901 
   1902  bool canCompileOffThread = OffThreadCompilationAvailable(cx);
   1903  size_t maxScriptSize = canCompileOffThread
   1904                             ? JitOptions.ionMaxScriptSize
   1905                             : JitOptions.ionMaxScriptSizeMainThread;
   1906  size_t maxLocalsAndArgs = canCompileOffThread
   1907                                ? JitOptions.ionMaxLocalsAndArgs
   1908                                : JitOptions.ionMaxLocalsAndArgsMainThread;
   1909 
   1910  if (script->length() > maxScriptSize || numLocalsAndArgs > maxLocalsAndArgs) {
   1911    JitSpew(JitSpew_IonAbort,
   1912            "Script too large (%zu bytes) (%zu locals/args) @ %s:%u:%u",
   1913            script->length(), numLocalsAndArgs, script->filename(),
   1914            script->lineno(), script->column().oneOriginValue());
   1915    return true;
   1916  }
   1917 
   1918  return false;
   1919 }
   1920 
// Checks every script-level condition for Ion compilation. Each failed check
// (other than a previously set disable flag) permanently disables Ion for
// |script| via disableIon() and returns false. Check order determines which
// abort reason is spewed when several apply.
bool CanIonCompileScript(JSContext* cx, JSScript* script) {
  if (!script->canIonCompile()) {
    return false;
  }

  if (script->isForEval()) {
    // Eval frames are not yet supported. Fixing this will require adding
    // support for the eval frame's environment chain, also for bailouts.
    // Additionally, JSOp::GlobalOrEvalDeclInstantiation in WarpBuilder
    // currently doesn't support eval scripts. See bug 1996190.
    JitSpew(JitSpew_IonAbort, "eval script");
    script->disableIon();
    return false;
  }

  if (script->isAsync() && script->isModule()) {
    // Async modules are not supported (bug 1996189).
    JitSpew(JitSpew_IonAbort, "async module");
    script->disableIon();
    return false;
  }

  if (script->hasNonSyntacticScope() && !script->function()) {
    // Support functions with a non-syntactic global scope but not other
    // scripts. For global scripts, WarpBuilder currently uses the global
    // object as scope chain, and this is not valid when the script has a
    // non-syntactic global scope.
    JitSpew(JitSpew_IonAbort, "has non-syntactic global scope");
    script->disableIon();
    return false;
  }

  if (script->function() &&
      TooManyFormalArguments(script->function()->nargs())) {
    JitSpew(JitSpew_IonAbort, "too many formal arguments");
    script->disableIon();
    return false;
  }

  // ScriptIsTooLarge spews the abort reason itself.
  if (ScriptIsTooLarge(cx, script)) {
    script->disableIon();
    return false;
  }

  return true;
}
   1967 
   1968 static MethodStatus Compile(JSContext* cx, HandleScript script,
   1969                            BaselineFrame* osrFrame, jsbytecode* osrPc) {
   1970  MOZ_ASSERT(jit::IsIonEnabled(cx));
   1971  MOZ_ASSERT(jit::IsBaselineJitEnabled(cx));
   1972 
   1973  MOZ_ASSERT(script->hasBaselineScript());
   1974  MOZ_ASSERT(!script->baselineScript()->hasPendingIonCompileTask());
   1975  MOZ_ASSERT(!script->hasIonScript());
   1976 
   1977  AutoGeckoProfilerEntry pseudoFrame(
   1978      cx, "Ion script compilation",
   1979      JS::ProfilingCategoryPair::JS_IonCompilation);
   1980 
   1981  if (script->isDebuggee() || (osrFrame && osrFrame->isDebuggee())) {
   1982    JitSpew(JitSpew_IonAbort, "debugging");
   1983    return Method_Skipped;
   1984  }
   1985 
   1986  if (!CanIonCompileScript(cx, script)) {
   1987    JitSpew(JitSpew_IonAbort, "Aborted compilation of %s:%u:%u",
   1988            script->filename(), script->lineno(),
   1989            script->column().oneOriginValue());
   1990    return Method_CantCompile;
   1991  }
   1992 
   1993  OptimizationLevel optimizationLevel =
   1994      IonOptimizations.levelForScript(cx, script, osrPc);
   1995  if (optimizationLevel == OptimizationLevel::DontCompile) {
   1996    return Method_Skipped;
   1997  }
   1998 
   1999  MOZ_ASSERT(optimizationLevel == OptimizationLevel::Normal);
   2000 
   2001  if (!CanLikelyAllocateMoreExecutableMemory()) {
   2002    script->resetWarmUpCounterToDelayIonCompilation();
   2003    return Method_Skipped;
   2004  }
   2005 
   2006  MOZ_ASSERT(!script->hasIonScript());
   2007 
   2008  AbortReason reason = IonCompile(cx, script, osrPc);
   2009  if (reason == AbortReason::Error) {
   2010    MOZ_ASSERT(cx->isExceptionPending());
   2011    return Method_Error;
   2012  }
   2013 
   2014  if (reason == AbortReason::Disable) {
   2015    return Method_CantCompile;
   2016  }
   2017 
   2018  if (reason == AbortReason::Alloc) {
   2019    ReportOutOfMemory(cx);
   2020    return Method_Error;
   2021  }
   2022 
   2023  // Compilation succeeded or we invalidated right away or an inlining/alloc
   2024  // abort
   2025  if (script->hasIonScript()) {
   2026    return Method_Compiled;
   2027  }
   2028  return Method_Skipped;
   2029 }
   2030 
   2031 }  // namespace jit
   2032 }  // namespace js
   2033 
   2034 bool jit::OffThreadCompilationAvailable(JSContext* cx) {
   2035  // Even if off thread compilation is enabled, compilation must still occur
   2036  // on the main thread in some cases.
   2037  //
   2038  // Require cpuCount > 1 so that Ion compilation jobs and active-thread
   2039  // execution are not competing for the same resources.
   2040  return cx->runtime()->canUseOffthreadIonCompilation() &&
   2041         GetHelperThreadCPUCount() > 1 && CanUseExtraThreads();
   2042 }
   2043 
// Decide whether execution described by |state| can enter Ion code,
// compiling (or linking a finished off-thread compilation) as needed.
MethodStatus jit::CanEnterIon(JSContext* cx, RunState& state) {
  MOZ_ASSERT(jit::IsIonEnabled(cx));

  HandleScript script = state.script();
  MOZ_ASSERT(!script->hasIonScript());

  // Skip if the script has been disabled.
  if (!script->canIonCompile()) {
    return Method_Skipped;
  }

  // Skip if the script is being compiled off thread.
  if (script->isIonCompilingOffThread()) {
    return Method_Skipped;
  }

  // Calls with too many actual arguments can never run in Ion; forbid
  // compilation permanently.
  if (state.isInvoke()) {
    InvokeState& invoke = *state.asInvoke();

    if (TooManyActualArguments(invoke.args().length())) {
      JitSpew(JitSpew_IonAbort, "too many actual args");
      ForbidCompilation(cx, script);
      return Method_CantCompile;
    }
  }

  // If --ion-eager is used, compile with Baseline first, so that we
  // can directly enter IonMonkey.
  if (JitOptions.eagerIonCompilation() && !script->hasBaselineScript()) {
    MethodStatus status =
        CanEnterBaselineMethod<BaselineTier::Compiler>(cx, state);
    if (status != Method_Compiled) {
      return status;
    }
    // Bytecode analysis may forbid compilation for a script.
    if (!script->canIonCompile()) {
      return Method_CantCompile;
    }
  }

  // Ion can only tier up from an existing BaselineScript.
  if (!script->hasBaselineScript()) {
    return Method_Skipped;
  }

  MOZ_ASSERT(!script->isIonCompilingOffThread());
  MOZ_ASSERT(script->canIonCompile());

  // Attempt compilation. Returns Method_Compiled if already compiled.
  MethodStatus status = Compile(cx, script, /* osrFrame = */ nullptr,
                                /* osrPc = */ nullptr);
  if (status != Method_Compiled) {
    if (status == Method_CantCompile) {
      ForbidCompilation(cx, script);
    }
    return status;
  }

  // An off-thread compilation may have finished; link its result now. If
  // linking didn't produce an IonScript, report the compilation as skipped.
  if (state.script()->baselineScript()->hasPendingIonCompileTask()) {
    LinkIonScript(cx, state.script());
    if (!state.script()->hasIonScript()) {
      return jit::Method_Skipped;
    }
  }

  return Method_Compiled;
}
   2110 
   2111 static MethodStatus BaselineCanEnterAtEntry(JSContext* cx, HandleScript script,
   2112                                            BaselineFrame* frame) {
   2113  AssertBaselineFrameCanEnterIon(cx, frame);
   2114  MOZ_ASSERT(!script->hasIonScript());
   2115  MOZ_ASSERT(frame->isFunctionFrame());
   2116 
   2117  if (script->baselineScript()->hasPendingIonCompileTask()) {
   2118    LinkIonScript(cx, script);
   2119    if (script->hasIonScript()) {
   2120      return Method_Compiled;
   2121    }
   2122  }
   2123 
   2124  // Attempt compilation. Returns Method_Compiled if already compiled.
   2125  MethodStatus status = Compile(cx, script, frame, nullptr);
   2126  if (status != Method_Compiled) {
   2127    if (status == Method_CantCompile) {
   2128      ForbidCompilation(cx, script);
   2129    }
   2130    return status;
   2131  }
   2132 
   2133  return Method_Compiled;
   2134 }
   2135 
   2136 // Decide if a transition from baseline execution to Ion code should occur.
   2137 // May compile or recompile the target JSScript.
static MethodStatus BaselineCanEnterAtBranch(JSContext* cx, HandleScript script,
                                             BaselineFrame* osrFrame,
                                             jsbytecode* pc) {
  AssertBaselineFrameCanEnterIon(cx, osrFrame);
  MOZ_ASSERT((JSOp)*pc == JSOp::LoopHead);

  // Optionally ignore on user request (--no-osr / JitOptions.osr).
  if (!JitOptions.osr) {
    return Method_Skipped;
  }

  // Check if the jitcode still needs to get linked and do this
  // to have a valid IonScript.
  if (script->baselineScript()->hasPendingIonCompileTask()) {
    LinkIonScript(cx, script);
  }

  // By default a recompilation doesn't happen on osr mismatch.
  // Decide if we want to force a recompilation if this happens too much.
  if (script->hasIonScript()) {
    if (pc == script->ionScript()->osrPc()) {
      return Method_Compiled;
    }

    // Tolerate a bounded number of OSR pc mismatches before forcing a
    // recompile (unless eager compilation is on).
    uint32_t count = script->ionScript()->incrOsrPcMismatchCounter();
    if (count <= JitOptions.osrPcMismatchesBeforeRecompile &&
        !JitOptions.eagerIonCompilation()) {
      return Method_Skipped;
    }

    // Throw away the existing IonScript so Compile below can build a new one
    // with the right osrPc.
    JitSpew(JitSpew_IonScripts, "Forcing OSR Mismatch Compilation");
    Invalidate(cx, script);
  }

  // Attempt compilation.
  // - Returns Method_Compiled if the right ionscript is present
  //   (Meaning it was present or a sequantial compile finished)
  // - Returns Method_Skipped if pc doesn't match
  //   (This means a background thread compilation with that pc could have
  //   started or not.)
  MethodStatus status = Compile(cx, script, osrFrame, pc);
  if (status != Method_Compiled) {
    if (status == Method_CantCompile) {
      ForbidCompilation(cx, script);
    }
    return status;
  }

  // Return the compilation was skipped when the osr pc wasn't adjusted.
  // This can happen when there was still an IonScript available and a
  // background compilation started, but hasn't finished yet.
  // Or when we didn't force a recompile.
  if (script->hasIonScript() && pc != script->ionScript()->osrPc()) {
    return Method_Skipped;
  }

  return Method_Compiled;
}
   2196 
   2197 static bool IonCompileScriptForBaseline(JSContext* cx, BaselineFrame* frame,
   2198                                        jsbytecode* pc) {
   2199  MOZ_ASSERT(IsIonEnabled(cx));
   2200 
   2201  RootedScript script(cx, frame->script());
   2202  bool isLoopHead = JSOp(*pc) == JSOp::LoopHead;
   2203 
   2204  // The Baseline JIT code checks for Ion disabled or compiling off-thread.
   2205  MOZ_ASSERT(script->canIonCompile());
   2206  MOZ_ASSERT(!script->isIonCompilingOffThread());
   2207 
   2208  // If Ion script exists, but PC is not at a loop entry, then Ion will be
   2209  // entered for this script at an appropriate LOOPENTRY or the next time this
   2210  // function is called.
   2211  if (script->hasIonScript() && !isLoopHead) {
   2212    JitSpew(JitSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
   2213    // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
   2214    // TODO: Clear all optimized stubs.
   2215    // TODO: Add a ion-script-already-exists checker stub.
   2216    return true;
   2217  }
   2218 
   2219  // Ensure that Ion-compiled code is available.
   2220  JitSpew(JitSpew_BaselineOSR,
   2221          "WarmUpCounter for %s:%u:%u reached %d at pc %p, trying to switch to "
   2222          "Ion!",
   2223          script->filename(), script->lineno(),
   2224          script->column().oneOriginValue(), (int)script->getWarmUpCount(),
   2225          (void*)pc);
   2226 
   2227  MethodStatus stat;
   2228  if (isLoopHead) {
   2229    JitSpew(JitSpew_BaselineOSR, "  Compile at loop head!");
   2230    stat = BaselineCanEnterAtBranch(cx, script, frame, pc);
   2231  } else if (frame->isFunctionFrame()) {
   2232    JitSpew(JitSpew_BaselineOSR,
   2233            "  Compile function from top for later entry!");
   2234    stat = BaselineCanEnterAtEntry(cx, script, frame);
   2235  } else {
   2236    return true;
   2237  }
   2238 
   2239  if (stat == Method_Error) {
   2240    JitSpew(JitSpew_BaselineOSR, "  Compile with Ion errored!");
   2241    return false;
   2242  }
   2243 
   2244  if (stat == Method_CantCompile) {
   2245    MOZ_ASSERT(!script->canIonCompile());
   2246    JitSpew(JitSpew_BaselineOSR, "  Can't compile with Ion!");
   2247  } else if (stat == Method_Skipped) {
   2248    JitSpew(JitSpew_BaselineOSR, "  Skipped compile with Ion!");
   2249  } else if (stat == Method_Compiled) {
   2250    JitSpew(JitSpew_BaselineOSR, "  Compiled with Ion!");
   2251  } else {
   2252    MOZ_CRASH("Invalid MethodStatus!");
   2253  }
   2254 
   2255  return true;
   2256 }
   2257 
   2258 bool jit::IonCompileScriptForBaselineAtEntry(JSContext* cx,
   2259                                             BaselineFrame* frame) {
   2260  JSScript* script = frame->script();
   2261  return IonCompileScriptForBaseline(cx, frame, script->code());
   2262 }
   2263 
   2264 /* clang-format off */
   2265 // The following data is kept in a temporary heap-allocated buffer, stored in
   2266 // JitRuntime (high memory addresses at top, low at bottom):
   2267 //
   2268 //     +----->+=================================+  --      <---- High Address
   2269 //     |      |                                 |   |
   2270 //     |      |     ...BaselineFrame...         |   |-- Copy of BaselineFrame + stack values
   2271 //     |      |                                 |   |
   2272 //     |      +---------------------------------+   |
   2273 //     |      |                                 |   |
   2274 //     |      |     ...Locals/Stack...          |   |
   2275 //     |      |                                 |   |
   2276 //     |      +=================================+  --
   2277 //     |      |     Padding(Maybe Empty)        |
   2278 //     |      +=================================+  --
   2279 //     +------|-- baselineFrame                 |   |-- IonOsrTempData
   2280 //            |   jitcode                       |   |
   2281 //            +=================================+  --      <---- Low Address
   2282 //
   2283 // A pointer to the IonOsrTempData is returned.
   2284 /* clang-format on */
   2285 
// Allocate and populate the IonOsrTempData buffer used to transfer a
// Baseline frame's state into Ion code at a loop head (layout in the diagram
// above). Returns nullptr (after reporting OOM) on allocation failure.
static IonOsrTempData* PrepareOsrTempData(JSContext* cx, BaselineFrame* frame,
                                          uint32_t frameSize, void* jitcode) {
  uint32_t numValueSlots = frame->numValueSlots(frameSize);

  // Calculate the amount of space to allocate:
  //      BaselineFrame space:
  //          (sizeof(Value) * numValueSlots)
  //        + sizeof(BaselineFrame)
  //
  //      IonOsrTempData space:
  //          sizeof(IonOsrTempData)

  size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numValueSlots;
  size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

  // Both regions are rounded up to Value alignment so the frame copy is
  // Value-aligned within the buffer.
  size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                      AlignBytes(ionOsrTempDataSpace, sizeof(Value));

  JitRuntime* jrt = cx->runtime()->jitRuntime();
  uint8_t* buf = jrt->allocateIonOsrTempData(totalSpace);
  if (!buf) {
    ReportOutOfMemory(cx);
    return nullptr;
  }

  // Placement-new the IonOsrTempData header at the low end of the buffer.
  IonOsrTempData* info = new (buf) IonOsrTempData();
  info->jitcode = jitcode;

  // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
  // |this| are not copied but left on the stack: the Baseline and Ion frame
  // share the same frame prefix and Ion won't clobber these values. Note
  // that info->baselineFrame will point to the *end* of the frame data, like
  // the frame pointer register in baseline frames.
  uint8_t* frameStart =
      (uint8_t*)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
  info->baselineFrame = frameStart + frameSpace;

  memcpy(frameStart, (uint8_t*)frame - numValueSlots * sizeof(Value),
         frameSpace);

  JitSpew(JitSpew_BaselineOSR, "Allocated IonOsrTempData at %p", info);
  JitSpew(JitSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

  // All done.
  return info;
}
   2332 
   2333 bool jit::IonCompileScriptForBaselineOSR(JSContext* cx, BaselineFrame* frame,
   2334                                         uint32_t frameSize, jsbytecode* pc,
   2335                                         IonOsrTempData** infoPtr) {
   2336  MOZ_ASSERT(infoPtr);
   2337  *infoPtr = nullptr;
   2338 
   2339  MOZ_ASSERT(frame->debugFrameSize() == frameSize);
   2340  MOZ_ASSERT(JSOp(*pc) == JSOp::LoopHead);
   2341 
   2342  if (!IonCompileScriptForBaseline(cx, frame, pc)) {
   2343    return false;
   2344  }
   2345 
   2346  JSScript* script = frame->script();
   2347  if (!script->hasIonScript() || script->ionScript()->osrPc() != pc ||
   2348      frame->isDebuggee()) {
   2349    return true;
   2350  }
   2351 
   2352  IonScript* ion = script->ionScript();
   2353  MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled() ==
   2354             ion->hasProfilingInstrumentation());
   2355  MOZ_ASSERT(ion->osrPc() == pc);
   2356 
   2357  ion->resetOsrPcMismatchCounter();
   2358 
   2359  JitSpew(JitSpew_BaselineOSR, "  OSR possible!");
   2360  void* jitcode = ion->method()->raw() + ion->osrEntryOffset();
   2361 
   2362  // Prepare the temporary heap copy of the fake InterpreterFrame and actual
   2363  // args list.
   2364  JitSpew(JitSpew_BaselineOSR, "Got jitcode.  Preparing for OSR into ion.");
   2365  IonOsrTempData* info = PrepareOsrTempData(cx, frame, frameSize, jitcode);
   2366  if (!info) {
   2367    return false;
   2368  }
   2369 
   2370  *infoPtr = info;
   2371  return true;
   2372 }
   2373 
// Walk all JS JIT frames in |activations| and invalidate every Ion frame
// whose IonScript has been marked invalid (or every Ion frame when
// |invalidateAll| is set), patching each frame's OSI point so that returning
// into it runs the invalidation epilogue instead.
static void InvalidateActivation(JS::GCContext* gcx,
                                 const JitActivationIterator& activations,
                                 bool invalidateAll) {
  JitSpew(JitSpew_IonInvalidate, "BEGIN invalidating activation");

#ifdef CHECK_OSIPOINT_REGISTERS
  // Patched frames won't return through the normal path the register checks
  // expect, so disable them for this activation.
  if (JitOptions.checkOsiPointRegisters) {
    activations->asJit()->setCheckRegs(false);
  }
#endif

  size_t frameno = 1;

  for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter, ++frameno) {
    const JSJitFrameIter& frame = iter.frame();
    MOZ_ASSERT_IF(frameno == 1,
                  frame.isExitFrame() || frame.type() == FrameType::Bailout);

#ifdef JS_JITSPEW
    // Spew-only frame description; has no effect on the invalidation itself.
    switch (frame.type()) {
      case FrameType::Exit:
        JitSpew(JitSpew_IonInvalidate, "#%zu exit frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::BaselineJS:
      case FrameType::IonJS:
      case FrameType::Bailout: {
        MOZ_ASSERT(frame.isScripted());
        const char* type = "Unknown";
        if (frame.isIonJS()) {
          type = "Optimized";
        } else if (frame.isBaselineJS()) {
          type = "Baseline";
        } else if (frame.isBailoutJS()) {
          type = "Bailing";
        }
        JSScript* script = frame.maybeForwardedScript();
        JitSpew(JitSpew_IonInvalidate,
                "#%zu %s JS frame @ %p, %s:%u:%u (fun: %p, script: %p, pc %p)",
                frameno, type, frame.fp(), script->maybeForwardedFilename(),
                script->lineno(), script->column().oneOriginValue(),
                frame.maybeCallee(), script, frame.resumePCinCurrentFrame());
        break;
      }
      case FrameType::BaselineStub:
        JitSpew(JitSpew_IonInvalidate, "#%zu baseline stub frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::BaselineInterpreterEntry:
        JitSpew(JitSpew_IonInvalidate,
                "#%zu baseline interpreter entry frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::TrampolineNative:
        JitSpew(JitSpew_IonInvalidate, "#%zu TrampolineNative frame @ %p",
                frameno, frame.fp());
        break;
      case FrameType::IonICCall:
        JitSpew(JitSpew_IonInvalidate, "#%zu ion IC call frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::CppToJSJit:
        JitSpew(JitSpew_IonInvalidate, "#%zu entry frame @ %p", frameno,
                frame.fp());
        break;
      case FrameType::WasmToJSJit:
        JitSpew(JitSpew_IonInvalidate, "#%zu wasm frames @ %p", frameno,
                frame.fp());
        break;
    }
#endif  // JS_JITSPEW

    // Only Ion-compiled frames can need OSI patching.
    if (!frame.isIonScripted()) {
      continue;
    }

    // See if the frame has already been invalidated.
    if (frame.checkInvalidation()) {
      continue;
    }

    JSScript* script = frame.maybeForwardedScript();
    if (!script->hasIonScript()) {
      continue;
    }

    if (!invalidateAll && !script->ionScript()->invalidated()) {
      continue;
    }

    IonScript* ionScript = script->ionScript();

    // Purge ICs before we mark this script as invalidated. This will
    // prevent lastJump_ from appearing to be a bogus pointer, just
    // in case anyone tries to read it.
    ionScript->purgeICs(script->zone());

    // This frame needs to be invalidated. We do the following:
    //
    // 1. Increment the reference counter to keep the ionScript alive
    //    for the invalidation bailout or for the exception handler.
    // 2. Determine safepoint that corresponds to the current call.
    // 3. From safepoint, get distance to the OSI-patchable offset.
    // 4. From the IonScript, determine the distance between the
    //    call-patchable offset and the invalidation epilogue.
    // 5. Patch the OSI point with a call-relative to the
    //    invalidation epilogue.
    //
    // The code generator ensures that there's enough space for us
    // to patch in a call-relative operation at each invalidation
    // point.
    //
    // Note: you can't simplify this mechanism to "just patch the
    // instruction immediately after the call" because things may
    // need to move into a well-defined register state (using move
    // instructions after the call) in to capture an appropriate
    // snapshot after the call occurs.

    ionScript->incrementInvalidationCount();

    JitCode* ionCode = ionScript->method();

    // We're about to remove edges from the JSScript to GC things embedded in
    // the JitCode. Perform a barrier to let the GC know about those edges.
    PreWriteBarrier(script->zone(), ionCode, [](JSTracer* trc, JitCode* code) {
      code->traceChildren(trc);
    });

    ionCode->setInvalidated();

    // Don't adjust OSI points in a bailout path.
    if (frame.isBailoutJS()) {
      continue;
    }

    // Write the delta (from the return address offset to the
    // IonScript pointer embedded into the invalidation epilogue)
    // where the safepointed call instruction used to be. We rely on
    // the call sequence causing the safepoint being >= the size of
    // a uint32, which is checked during safepoint index
    // construction.
    AutoWritableJitCode awjc(ionCode);
    const SafepointIndex* si =
        ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
    CodeLocationLabel dataLabelToMunge(frame.resumePCinCurrentFrame());
    ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -
                      (frame.resumePCinCurrentFrame() - ionCode->raw());
    Assembler::PatchWrite_Imm32(dataLabelToMunge, Imm32(delta));

    CodeLocationLabel osiPatchPoint =
        SafepointReader::InvalidationPatchPoint(ionScript, si);
    CodeLocationLabel invalidateEpilogue(
        ionCode, CodeOffset(ionScript->invalidateEpilogueOffset()));

    JitSpew(
        JitSpew_IonInvalidate,
        "   ! Invalidate ionScript %p (inv count %zu) -> patching osipoint %p",
        ionScript, ionScript->invalidationCount(), (void*)osiPatchPoint.raw());
    Assembler::PatchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
  }

  JitSpew(JitSpew_IonInvalidate, "END invalidating activation");
}
   2537 
   2538 void jit::InvalidateAll(JS::GCContext* gcx, Zone* zone) {
   2539  // The caller should previously have cancelled off thread compilation.
   2540  MOZ_ASSERT(!HasOffThreadIonCompile(zone));
   2541  if (zone->isAtomsZone()) {
   2542    return;
   2543  }
   2544  JSContext* cx = TlsContext.get();
   2545  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
   2546    if (iter->compartment()->zone() == zone) {
   2547      JitSpew(JitSpew_IonInvalidate, "Invalidating all frames for GC");
   2548      InvalidateActivation(gcx, iter, true);
   2549    }
   2550  }
   2551 }
   2552 
   2553 static void ClearIonScriptAfterInvalidation(JSContext* cx, JSScript* script,
   2554                                            IonScript* ionScript,
   2555                                            bool resetUses) {
   2556  // Null out the JitScript's IonScript pointer. The caller is responsible for
   2557  // destroying the IonScript using the invalidation count mechanism.
   2558  DebugOnly<IonScript*> clearedIonScript =
   2559      script->jitScript()->clearIonScript(cx->gcContext(), script);
   2560  MOZ_ASSERT(clearedIonScript == ionScript);
   2561 
   2562  // Wait for the scripts to get warm again before doing another
   2563  // compile, unless we are recompiling *because* a script got hot
   2564  // (resetUses is false).
   2565  if (resetUses) {
   2566    script->resetWarmUpCounterToDelayIonCompilation();
   2567  }
   2568 }
   2569 
// Invalidate every IonScript listed in |invalid|: patch any Ion frames on the
// stack that run those scripts so they bail out, then drop each script's
// JitScript->IonScript edge once no invalidated frame still references it.
void jit::Invalidate(JSContext* cx, const IonScriptKeyVector& invalid,
                     bool resetUses, bool cancelOffThread) {
  JitSpew(JitSpew_IonInvalidate, "Start invalidation.");

  // Add an invalidation reference to all invalidated IonScripts to indicate
  // to the traversal which frames have been invalidated.
  size_t numInvalidations = 0;
  for (const auto& ionScriptKey : invalid) {
    JSScript* script = ionScriptKey.script();
    if (cancelOffThread) {
      CancelOffThreadIonCompile(script);
    }

    // The key may no longer resolve to an IonScript (e.g. the script was
    // already invalidated or recompiled since the key was recorded); such
    // entries are simply skipped.
    IonScript* ionScript = ionScriptKey.maybeIonScriptToInvalidate();
    if (!ionScript) {
      continue;
    }

    JitSpew(JitSpew_IonInvalidate, " Invalidate %s:%u:%u, IonScript %p",
            script->filename(), script->lineno(),
            script->column().oneOriginValue(), ionScript);

    // Keep the ion script alive during the invalidation and flag this
    // ionScript as being invalidated.  This increment is removed by the
    // loop after the calls to InvalidateActivation.
    ionScript->incrementInvalidationCount();
    numInvalidations++;
  }

  // Nothing resolved to a live IonScript; there are no frames to patch.
  if (!numInvalidations) {
    JitSpew(JitSpew_IonInvalidate, " No IonScript invalidation.");
    return;
  }

  // Walk every JIT activation and patch frames belonging to the IonScripts
  // flagged above (their invalidation count is now non-zero).
  JS::GCContext* gcx = cx->gcContext();
  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
    InvalidateActivation(gcx, iter, false);
  }

  // Drop the references added above. If a script was never active, its
  // IonScript will be immediately destroyed. Otherwise, it will be held live
  // until its last invalidated frame is destroyed.
  for (const auto& ionScriptKey : invalid) {
    IonScript* ionScript = ionScriptKey.maybeIonScriptToInvalidate();
    if (!ionScript) {
      continue;
    }

    if (ionScript->invalidationCount() == 1) {
      // decrementInvalidationCount will destroy the IonScript so null out
      // jitScript->ionScript_ now. We don't want to do this unconditionally
      // because maybeIonScriptToInvalidate depends on script->ionScript() (we
      // would leak the IonScript if |invalid| contains duplicates).
      ClearIonScriptAfterInvalidation(cx, ionScriptKey.script(), ionScript,
                                      resetUses);
    }

    ionScript->decrementInvalidationCount(gcx);
    numInvalidations--;
  }

  // Make sure we didn't leak references by invalidating the same IonScript
  // multiple times in the above loop.
  MOZ_ASSERT(!numInvalidations);

  // Finally, null out jitScript->ionScript_ for IonScripts that are still on
  // the stack.
  for (const auto& ionScriptKey : invalid) {
    if (IonScript* ionScript = ionScriptKey.maybeIonScriptToInvalidate()) {
      ClearIonScriptAfterInvalidation(cx, ionScriptKey.script(), ionScript,
                                      resetUses);
    }
  }
}
   2644 
   2645 void jit::IonScript::invalidate(JSContext* cx, JSScript* script, bool resetUses,
   2646                                const char* reason) {
   2647  // Note: we could short circuit here if we already invalidated this
   2648  // IonScript, but jit::Invalidate also cancels off-thread compilations of
   2649  // |script|.
   2650  MOZ_RELEASE_ASSERT(invalidated() || script->ionScript() == this);
   2651 
   2652  JitSpew(JitSpew_IonInvalidate, " Invalidate IonScript %p: %s", this, reason);
   2653 
   2654  // IonScriptKeyVector has inline space for at least one element.
   2655  IonScriptKeyVector list;
   2656  MOZ_RELEASE_ASSERT(list.reserve(1));
   2657  list.infallibleEmplaceBack(script, compilationId());
   2658 
   2659  Invalidate(cx, list, resetUses, true);
   2660 }
   2661 
   2662 void jit::Invalidate(JSContext* cx, JSScript* script, bool resetUses,
   2663                     bool cancelOffThread) {
   2664  MOZ_ASSERT(script->hasIonScript());
   2665 
   2666  if (cx->runtime()->geckoProfiler().enabled()) {
   2667    // Register invalidation with profiler.
   2668    // Format of event payload string:
   2669    //      "<filename>:<lineno>"
   2670 
   2671    // Get the script filename, if any, and its length.
   2672    const char* filename = script->filename();
   2673    if (filename == nullptr) {
   2674      filename = "<unknown>";
   2675    }
   2676 
   2677    // Construct the descriptive string.
   2678    UniqueChars buf = JS_smprintf("%s:%u:%u", filename, script->lineno(),
   2679                                  script->column().oneOriginValue());
   2680 
   2681    // Ignore the event on allocation failure.
   2682    if (buf) {
   2683      cx->runtime()->geckoProfiler().markEvent("Invalidate", buf.get());
   2684    }
   2685  }
   2686 
   2687  // IonScriptKeyVector has inline space for at least one element.
   2688  IonScriptKeyVector scripts;
   2689  MOZ_ASSERT(script->hasIonScript());
   2690  MOZ_RELEASE_ASSERT(scripts.reserve(1));
   2691  scripts.infallibleEmplaceBack(script, script->ionScript()->compilationId());
   2692 
   2693  Invalidate(cx, scripts, resetUses, cancelOffThread);
   2694 }
   2695 
   2696 void jit::FinishInvalidation(JS::GCContext* gcx, JSScript* script) {
   2697  if (!script->hasIonScript()) {
   2698    return;
   2699  }
   2700 
   2701  // In all cases, null out jitScript->ionScript_ to avoid re-entry.
   2702  IonScript* ion = script->jitScript()->clearIonScript(gcx, script);
   2703 
   2704  // If this script has Ion code on the stack, invalidated() will return
   2705  // true. In this case we have to wait until destroying it.
   2706  if (!ion->invalidated()) {
   2707    jit::IonScript::Destroy(gcx, ion);
   2708  }
   2709 }
   2710 
   2711 void jit::ForbidCompilation(JSContext* cx, JSScript* script) {
   2712  JitSpew(JitSpew_IonAbort, "Disabling Ion compilation of script %s:%u:%u",
   2713          script->filename(), script->lineno(),
   2714          script->column().oneOriginValue());
   2715 
   2716  CancelOffThreadIonCompile(script);
   2717 
   2718  if (script->hasIonScript()) {
   2719    Invalidate(cx, script, false);
   2720  }
   2721 
   2722  script->disableIon();
   2723 }
   2724 
   2725 size_t jit::SizeOfIonData(JSScript* script,
   2726                          mozilla::MallocSizeOf mallocSizeOf) {
   2727  size_t result = 0;
   2728 
   2729  if (script->hasIonScript()) {
   2730    result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
   2731  }
   2732 
   2733  return result;
   2734 }
   2735 
// Out-of-line definitions for TempAllocator's LifoAlloc sizing constants.
// If you change these, please also change the comment in TempAllocator.
/* static */ const size_t TempAllocator::BallastSize = 16 * 1024;
/* static */ const size_t TempAllocator::PreferredLifoChunkSize = 32 * 1024;