tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

IonCacheIRCompiler.cpp (83721B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "jit/IonCacheIRCompiler.h"
      8 #include "mozilla/Maybe.h"
      9 
     10 #include <algorithm>
     11 
     12 #include "jit/CacheIRCompiler.h"
     13 #include "jit/CacheIRWriter.h"
     14 #include "jit/IonIC.h"
     15 #include "jit/JitcodeMap.h"
     16 #include "jit/JitFrames.h"
     17 #include "jit/JitRuntime.h"
     18 #include "jit/JitZone.h"
     19 #include "jit/JSJitFrameIter.h"
     20 #include "jit/Linker.h"
     21 #include "jit/SharedICHelpers.h"
     22 #include "jit/VMFunctions.h"
     23 #include "proxy/DeadObjectProxy.h"
     24 #include "proxy/Proxy.h"
     25 #include "util/Memory.h"
     26 #include "vm/StaticStrings.h"
     27 
     28 #include "jit/JSJitFrameIter-inl.h"
     29 #include "jit/MacroAssembler-inl.h"
     30 #include "jit/VMFunctionList-inl.h"
     31 
     32 using namespace js;
     33 using namespace js::jit;
     34 
     35 using mozilla::Maybe;
     36 
// Forward-declare JS::ExpandoAndGeneration instead of including its header;
// this file only needs the name, not the definition.
namespace JS {
struct ExpandoAndGeneration;
}

using JS::ExpandoAndGeneration;
     42 
     43 namespace js {
     44 namespace jit {
     45 
// IonCacheIRCompiler compiles CacheIR to IonIC native code.
IonCacheIRCompiler::IonCacheIRCompiler(JSContext* cx, TempAllocator& alloc,
                                       const CacheIRWriter& writer, IonIC* ic,
                                       IonScript* ionScript,
                                       uint32_t stubDataOffset)
    : CacheIRCompiler(cx, alloc, writer, stubDataOffset, Mode::Ion,
                      StubFieldPolicy::Constant),
      writer_(writer),
      ic_(ic),
      ionScript_(ionScript),
      savedLiveRegs_(false),      // set to true by AutoSaveLiveRegisters
      localTracingSlots_(0),      // bumped by storeTracedValue
      perfSpewer_(ic->script(), ic->pc()) {
  MOZ_ASSERT(ic_);
  MOZ_ASSERT(ionScript_);
}
     62 
// Reads a raw-pointer stub field at `offset` and converts it to T. The
// static_assert restricts T to pointer-sized types so the conversion is
// lossless; the C-style cast covers both pointer and integral T.
template <typename T>
T IonCacheIRCompiler::rawPointerStubField(uint32_t offset) {
  static_assert(sizeof(T) == sizeof(uintptr_t), "T must have pointer size");
  return (T)readStubWord(offset, StubField::Type::RawPointer);
}
     68 
     69 template <typename T>
     70 T IonCacheIRCompiler::rawInt64StubField(uint32_t offset) {
     71  static_assert(sizeof(T) == sizeof(int64_t), "T musthave int64 size");
     72  return (T)readStubInt64(offset, StubField::Type::RawInt64);
     73 }
     74 
// Resolves the VM function `fn` to its VMFunctionId at compile time and
// emits the call through callVMInternal.
template <typename Fn, Fn fn>
void IonCacheIRCompiler::callVM(MacroAssembler& masm) {
  VMFunctionId id = VMFunctionToId<Fn, fn>::id;
  callVMInternal(masm, id);
}
     80 
// Pushes a placeholder (-1) where the stub's JitCode* belongs and records
// the patch offset; compile() patches in the real pointer after linking.
void IonCacheIRCompiler::pushStubCodePointer() {
  stubJitCodeOffset_.emplace(masm.PushWithPatch(ImmPtr((void*)-1)));
}
     84 
// AutoSaveLiveRegisters must be used when we make a call that can GC. The
// constructor ensures all live registers are stored on the stack (where the GC
// expects them) and the destructor restores these registers.
AutoSaveLiveRegisters::AutoSaveLiveRegisters(IonCacheIRCompiler& compiler)
    : compiler_(compiler) {
  MOZ_ASSERT(compiler_.liveRegs_.isSome());
  MOZ_ASSERT(compiler_.ic_);
  // Spill the live registers, using the IC's entry-jump scratch register as
  // the scratch for the spill code.
  compiler_.allocator.saveIonLiveRegisters(
      compiler_.masm, compiler_.liveRegs_.ref(),
      compiler_.ic_->scratchRegisterForEntryJump(), compiler_.ionScript_);
  compiler_.savedLiveRegs_ = true;
}
AutoSaveLiveRegisters::~AutoSaveLiveRegisters() {
  MOZ_ASSERT(compiler_.stubJitCodeOffset_.isSome(),
             "Must have pushed JitCode* pointer");
  compiler_.allocator.restoreIonLiveRegisters(compiler_.masm,
                                              compiler_.liveRegs_.ref());
  // After popping the saved registers the frame must be back to exactly the
  // Ion frame size (unless the assembler already hit OOM).
  MOZ_ASSERT_IF(!compiler_.masm.oom(), compiler_.masm.framePushed() ==
                                           compiler_.ionScript_->frameSize());
}
    105 
    106 }  // namespace jit
    107 }  // namespace js
    108 
// Spills the IC's live registers — and any CacheIR operands still held in
// registers — to the stack so a call that can GC may be made. `scratch` is
// used by storeRegsInMask when writing registers out. Undone by
// restoreIonLiveRegisters.
void CacheRegisterAllocator::saveIonLiveRegisters(MacroAssembler& masm,
                                                  LiveRegisterSet liveRegs,
                                                  Register scratch,
                                                  IonScript* ionScript) {
  // We have to push all registers in liveRegs on the stack. It's possible we
  // stored other values in our live registers and stored operands on the
  // stack (where our live registers should go), so this requires some careful
  // work. Try to keep it simple by taking one small step at a time.

  // Step 1. Discard any dead operands so we can reuse their registers.
  freeDeadOperandLocations(masm);

  // Step 2. Figure out the size of our live regs.  This is consistent with
  // the fact that we're using storeRegsInMask to generate the save code and
  // PopRegsInMask to generate the restore code.
  size_t sizeOfLiveRegsInBytes =
      MacroAssembler::PushRegsInMaskSizeInBytes(liveRegs);

  MOZ_ASSERT(sizeOfLiveRegsInBytes > 0);

  // Step 3. Ensure all non-input operands are on the stack.
  size_t numInputs = writer_.numInputOperands();
  for (size_t i = numInputs; i < operandLocations_.length(); i++) {
    OperandLocation& loc = operandLocations_[i];
    if (loc.isInRegister()) {
      spillOperandToStack(masm, &loc);
    }
  }

  // Step 4. Restore the register state, but don't discard the stack as
  // non-input operands are stored there.
  restoreInputState(masm, /* shouldDiscardStack = */ false);

  // We just restored the input state, so no input operands should be stored
  // on the stack.
#ifdef DEBUG
  for (size_t i = 0; i < numInputs; i++) {
    const OperandLocation& loc = operandLocations_[i];
    MOZ_ASSERT(!loc.isOnStack());
  }
#endif

  // Step 5. At this point our register state is correct. Stack values,
  // however, may cover the space where we have to store the live registers.
  // Move them out of the way.

  bool hasOperandOnStack = false;
  for (size_t i = numInputs; i < operandLocations_.length(); i++) {
    OperandLocation& loc = operandLocations_[i];
    if (!loc.isOnStack()) {
      continue;
    }

    hasOperandOnStack = true;

    size_t operandSize = loc.stackSizeInBytes();
    size_t operandStackPushed = loc.stackPushed();
    MOZ_ASSERT(operandSize > 0);
    MOZ_ASSERT(stackPushed_ >= operandStackPushed);
    MOZ_ASSERT(operandStackPushed >= operandSize);

    // If this operand doesn't cover the live register space, there's
    // nothing to do.
    if (operandStackPushed - operandSize >= sizeOfLiveRegsInBytes) {
      MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
      continue;
    }

    // Reserve stack space for the live registers if needed.
    if (sizeOfLiveRegsInBytes > stackPushed_) {
      size_t extraBytes = sizeOfLiveRegsInBytes - stackPushed_;
      MOZ_ASSERT((extraBytes % sizeof(uintptr_t)) == 0);
      masm.subFromStackPtr(Imm32(extraBytes));
      stackPushed_ += extraBytes;
    }

    // Push the operand below the live register space.
    if (loc.kind() == OperandLocation::PayloadStack) {
      masm.push(
          Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
      stackPushed_ += operandSize;
      loc.setPayloadStack(stackPushed_, loc.payloadType());
      continue;
    }
    MOZ_ASSERT(loc.kind() == OperandLocation::ValueStack);
    masm.pushValue(
        Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
    stackPushed_ += operandSize;
    loc.setValueStack(stackPushed_);
  }

  // Step 6. If we have any operands on the stack, adjust their stackPushed
  // values to not include sizeOfLiveRegsInBytes (this simplifies code down
  // the line). Then push/store the live registers.
  if (hasOperandOnStack) {
    MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
    stackPushed_ -= sizeOfLiveRegsInBytes;

    for (size_t i = numInputs; i < operandLocations_.length(); i++) {
      OperandLocation& loc = operandLocations_[i];
      if (loc.isOnStack()) {
        loc.adjustStackPushed(-int32_t(sizeOfLiveRegsInBytes));
      }
    }

    size_t stackBottom = stackPushed_ + sizeOfLiveRegsInBytes;
    masm.storeRegsInMask(liveRegs, Address(masm.getStackPointer(), stackBottom),
                         scratch);
    masm.setFramePushed(masm.framePushed() + sizeOfLiveRegsInBytes);
  } else {
    // If no operands are on the stack, discard the unused stack space.
    if (stackPushed_ > 0) {
      masm.addToStackPtr(Imm32(stackPushed_));
      stackPushed_ = 0;
    }
    masm.PushRegsInMask(liveRegs);
  }
  // The stack layout changed above, so drop any recorded free payload/value
  // slots.
  freePayloadSlots_.clear();
  freeValueSlots_.clear();

  MOZ_ASSERT_IF(!masm.oom(), masm.framePushed() == ionScript->frameSize() +
                                                       sizeOfLiveRegsInBytes);

  // Step 7. All live registers and non-input operands are stored on the stack
  // now, so at this point all registers except for the input registers are
  // available.
  availableRegs_.set() = GeneralRegisterSet::Not(inputRegisterSet());
  availableRegsAfterSpill_.set() = GeneralRegisterSet();

  // Step 8. We restored our input state, so we have to fix up aliased input
  // registers again.
  fixupAliasedInputs(masm);
}
    242 
    243 void CacheRegisterAllocator::restoreIonLiveRegisters(MacroAssembler& masm,
    244                                                     LiveRegisterSet liveRegs) {
    245  masm.PopRegsInMask(liveRegs);
    246 
    247  availableRegs_.set() = GeneralRegisterSet();
    248  availableRegsAfterSpill_.set() = GeneralRegisterSet::All();
    249 }
    250 
    251 static void* GetReturnAddressToIonCode(JSContext* cx) {
    252  JSJitFrameIter frame(cx->activation()->asJit());
    253  MOZ_ASSERT(frame.type() == FrameType::Exit,
    254             "An exit frame is expected as update functions are called with a "
    255             "VMFunction.");
    256 
    257  void* returnAddr = frame.returnAddress();
    258 #ifdef DEBUG
    259  ++frame;
    260  MOZ_ASSERT(frame.isIonJS());
    261 #endif
    262  return returnAddr;
    263 }
    264 
// The AutoSaveLiveRegisters parameter is used to ensure registers were saved
// before the stub frame is pushed.
void IonCacheIRCompiler::enterStubFrame(MacroAssembler& masm,
                                        const AutoSaveLiveRegisters&) {
  MOZ_ASSERT(!enteredStubFrame_);
  // Frame layout (push order matters): stub JitCode*, frame descriptor,
  // return address into Ion code, then the caller's frame pointer.
  pushStubCodePointer();
  masm.Push(FrameDescriptor(FrameType::IonJS));
  masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));

  masm.Push(FramePointer);
  masm.moveStackPtrTo(FramePointer);

  enteredStubFrame_ = true;
}
    278 
    279 void IonCacheIRCompiler::storeTracedValue(MacroAssembler& masm,
    280                                          ValueOperand value) {
    281  MOZ_ASSERT(localTracingSlots_ < 255);
    282  masm.Push(value);
    283  localTracingSlots_++;
    284 }
    285 
    286 void IonCacheIRCompiler::loadTracedValue(MacroAssembler& masm,
    287                                         uint8_t slotIndex,
    288                                         ValueOperand value) {
    289  MOZ_ASSERT(slotIndex <= localTracingSlots_);
    290  int32_t offset = IonICCallFrameLayout::LocallyTracedValueOffset +
    291                   slotIndex * sizeof(Value);
    292  masm.loadValue(Address(FramePointer, -offset), value);
    293 }
    294 
// Prepares the register allocator for this IC: records the IC's live
// registers, makes the IC's output/temp registers available to the
// allocator, and tells the allocator where each input operand lives.
// Each switch case handles one CacheKind family. Returns false on OOM.
bool IonCacheIRCompiler::init() {
  if (!allocator.init()) {
    return false;
  }

  size_t numInputs = writer_.numInputOperands();
  MOZ_ASSERT(numInputs == NumInputsForCacheKind(ic_->kind()));

  // Registers the allocator may hand out; filled in per IC kind below.
  AllocatableGeneralRegisterSet available;

  switch (ic_->kind()) {
    case CacheKind::GetProp:
    case CacheKind::GetElem: {
      IonGetPropertyIC* ic = ic_->asGetPropertyIC();
      ValueOperand output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(output);

      // GetProp has one input (value); GetElem adds the id.
      MOZ_ASSERT(numInputs == 1 || numInputs == 2);

      allocator.initInputLocation(0, ic->value());
      if (numInputs > 1) {
        allocator.initInputLocation(1, ic->id());
      }
      break;
    }
    case CacheKind::GetPropSuper:
    case CacheKind::GetElemSuper: {
      IonGetPropSuperIC* ic = ic_->asGetPropSuperIC();
      ValueOperand output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(output);

      MOZ_ASSERT(numInputs == 2 || numInputs == 3);

      allocator.initInputLocation(0, ic->object(), JSVAL_TYPE_OBJECT);

      if (ic->kind() == CacheKind::GetPropSuper) {
        MOZ_ASSERT(numInputs == 2);
        allocator.initInputLocation(1, ic->receiver());
      } else {
        MOZ_ASSERT(numInputs == 3);
        allocator.initInputLocation(1, ic->id());
        allocator.initInputLocation(2, ic->receiver());
      }
      break;
    }
    case CacheKind::SetProp:
    case CacheKind::SetElem: {
      IonSetPropertyIC* ic = ic_->asSetPropertyIC();

      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());

      allocator.initInputLocation(0, ic->object(), JSVAL_TYPE_OBJECT);

      if (ic->kind() == CacheKind::SetProp) {
        MOZ_ASSERT(numInputs == 2);
        allocator.initInputLocation(1, ic->rhs());
      } else {
        MOZ_ASSERT(numInputs == 3);
        allocator.initInputLocation(1, ic->id());
        allocator.initInputLocation(2, ic->rhs());
      }
      break;
    }
    case CacheKind::GetName: {
      IonGetNameIC* ic = ic_->asGetNameIC();
      ValueOperand output = ic->output();

      available.add(output);
      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(output);

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
      break;
    }
    case CacheKind::BindName: {
      IonBindNameIC* ic = ic_->asBindNameIC();
      Register output = ic->output();

      available.add(output);
      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Object, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
      break;
    }
    case CacheKind::GetIterator: {
      IonGetIteratorIC* ic = ic_->asGetIteratorIC();
      Register output = ic->output();

      available.add(output);
      available.add(ic->temp1());
      available.add(ic->temp2());

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Object, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->value());
      break;
    }
    case CacheKind::OptimizeSpreadCall: {
      auto* ic = ic_->asOptimizeSpreadCallIC();
      ValueOperand output = ic->output();

      available.add(output);
      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(output);

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->value());
      break;
    }
    case CacheKind::In: {
      IonInIC* ic = ic_->asInIC();
      Register output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->key());
      allocator.initInputLocation(
          1, TypedOrValueRegister(MIRType::Object, AnyRegister(ic->object())));
      break;
    }
    case CacheKind::HasOwn: {
      IonHasOwnIC* ic = ic_->asHasOwnIC();
      Register output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->id());
      allocator.initInputLocation(1, ic->value());
      break;
    }
    case CacheKind::CheckPrivateField: {
      IonCheckPrivateFieldIC* ic = ic_->asCheckPrivateFieldIC();
      Register output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->value());
      allocator.initInputLocation(1, ic->id());
      break;
    }
    case CacheKind::InstanceOf: {
      IonInstanceOfIC* ic = ic_->asInstanceOfIC();
      Register output = ic->output();
      available.add(output);
      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->lhs());
      allocator.initInputLocation(
          1, TypedOrValueRegister(MIRType::Object, AnyRegister(ic->rhs())));
      break;
    }
    case CacheKind::ToPropertyKey: {
      IonToPropertyKeyIC* ic = ic_->asToPropertyKeyIC();
      ValueOperand output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(TypedOrValueRegister(output));

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->input());
      break;
    }
    case CacheKind::UnaryArith: {
      IonUnaryArithIC* ic = ic_->asUnaryArithIC();
      ValueOperand output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(TypedOrValueRegister(output));

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->input());
      break;
    }
    case CacheKind::BinaryArith: {
      IonBinaryArithIC* ic = ic_->asBinaryArithIC();
      ValueOperand output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(TypedOrValueRegister(output));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->lhs());
      allocator.initInputLocation(1, ic->rhs());
      break;
    }
    case CacheKind::Compare: {
      IonCompareIC* ic = ic_->asCompareIC();
      Register output = ic->output();

      available.add(output);

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 2);
      allocator.initInputLocation(0, ic->lhs());
      allocator.initInputLocation(1, ic->rhs());
      break;
    }
    case CacheKind::CloseIter: {
      // CloseIter produces no output value, so outputUnchecked_ stays empty.
      IonCloseIterIC* ic = ic_->asCloseIterIC();

      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());
      allocator.initInputLocation(0, ic->iter(), JSVAL_TYPE_OBJECT);
      break;
    }
    case CacheKind::OptimizeGetIterator: {
      auto* ic = ic_->asOptimizeGetIteratorIC();
      Register output = ic->output();

      available.add(output);
      available.add(ic->temp());

      liveRegs_.emplace(ic->liveRegs());
      outputUnchecked_.emplace(
          TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

      MOZ_ASSERT(numInputs == 1);
      allocator.initInputLocation(0, ic->value());
      break;
    }
    // These kinds never reach the Ion IC compiler.
    case CacheKind::Call:
    case CacheKind::TypeOf:
    case CacheKind::TypeOfEq:
    case CacheKind::ToBool:
    case CacheKind::LazyConstant:
    case CacheKind::NewArray:
    case CacheKind::NewObject:
    case CacheKind::Lambda:
    case CacheKind::GetImport:
      MOZ_CRASH("Unsupported IC");
  }

  liveFloatRegs_ = LiveFloatRegisterSet(liveRegs_->fpus());

  allocator.initAvailableRegs(available);
  allocator.initAvailableRegsAfterSpill();
  return true;
}
    584 
// Compiles the stub's CacheIR to native code: emits each CacheIR op via the
// generated emit* methods, emits the failure paths (each jumping to the next
// stub via a patched pointer), links the code, and patches the -1
// placeholders (next-stub jump targets and the stub JitCode* pushed by
// pushStubCodePointer). Returns nullptr on failure.
JitCode* IonCacheIRCompiler::compile(IonICStub* stub) {
  AutoCreatedBy acb(masm, "IonCacheIRCompiler::compile");

  masm.setFramePushed(ionScript_->frameSize());
  if (cx_->runtime()->geckoProfiler().enabled()) {
    masm.enableProfilingInstrumentation();
  }

  allocator.fixupAliasedInputs(masm);

  perfSpewer_.startRecording();

  // Dispatch each CacheIR op to its emit##op method; any emitter failure
  // aborts compilation.
  CacheIRReader reader(writer_);
  do {
    CacheOp op = reader.readOp();
    perfSpewer_.recordInstruction(masm, op);
    switch (op) {
#define DEFINE_OP(op, ...)                 \
  case CacheOp::op:                        \
    if (!emit##op(reader)) return nullptr; \
    break;
      CACHE_IR_OPS(DEFINE_OP)
#undef DEFINE_OP

      default:
        MOZ_CRASH("Invalid op");
    }
    allocator.nextOp();
  } while (reader.more());

  masm.assumeUnreachable("Should have returned from IC");

  // Done emitting the main IC code. Now emit the failure paths.
  perfSpewer_.recordOffset(masm, "FailurePath");

  for (size_t i = 0; i < failurePaths.length(); i++) {
    if (!emitFailurePath(i)) {
      return nullptr;
    }
    // Jump to the next stub; the -1 target is patched below once the code
    // is linked.
    Register scratch = ic_->scratchRegisterForEntryJump();
    CodeOffset offset = masm.movWithPatch(ImmWord(-1), scratch);
    masm.jump(Address(scratch, 0));
    if (!nextCodeOffsets_.append(offset)) {
      return nullptr;
    }
  }

  perfSpewer_.endRecording();

  Linker linker(masm);
  Rooted<JitCode*> newStubCode(cx_, linker.newCode(cx_, CodeKind::Ion));
  if (!newStubCode) {
    cx_->recoverFromOutOfMemory();
    return nullptr;
  }

  newStubCode->setLocalTracingSlots(localTracingSlots_);

  // Patch the next-stub jump targets and the pushed stub JitCode* now that
  // their final values are known.
  for (CodeOffset offset : nextCodeOffsets_) {
    Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, offset),
                                       ImmPtr(stub->nextCodeRawPtr()),
                                       ImmPtr((void*)-1));
  }
  if (stubJitCodeOffset_) {
    Assembler::PatchDataWithValueCheck(
        CodeLocationLabel(newStubCode, *stubJitCodeOffset_),
        ImmPtr(newStubCode.get()), ImmPtr((void*)-1));
  }

  return newStubCode;
}
    656 
#ifdef DEBUG
// Debug-only check that `reg` is one of the float registers reserved for
// this IC kind: most kinds reserve none, SetProp/SetElem get FloatReg0, and
// BinaryArith/Compare get FloatReg0 and FloatReg1.
void IonCacheIRCompiler::assertFloatRegisterAvailable(FloatRegister reg) {
  switch (ic_->kind()) {
    case CacheKind::GetProp:
    case CacheKind::GetElem:
    case CacheKind::GetPropSuper:
    case CacheKind::GetElemSuper:
    case CacheKind::GetName:
    case CacheKind::BindName:
    case CacheKind::GetIterator:
    case CacheKind::In:
    case CacheKind::HasOwn:
    case CacheKind::CheckPrivateField:
    case CacheKind::InstanceOf:
    case CacheKind::UnaryArith:
    case CacheKind::ToPropertyKey:
    case CacheKind::OptimizeSpreadCall:
    case CacheKind::CloseIter:
    case CacheKind::OptimizeGetIterator:
      MOZ_CRASH("No float registers available");
    case CacheKind::SetProp:
    case CacheKind::SetElem:
      // FloatReg0 is available per LIRGenerator::visitSetPropertyCache.
      MOZ_ASSERT(reg == FloatReg0);
      break;
    case CacheKind::BinaryArith:
    case CacheKind::Compare:
      // FloatReg0 and FloatReg1 are available per
      // LIRGenerator::visitBinaryCache.
      MOZ_ASSERT(reg == FloatReg0 || reg == FloatReg1);
      break;
    case CacheKind::Call:
    case CacheKind::TypeOf:
    case CacheKind::TypeOfEq:
    case CacheKind::ToBool:
    case CacheKind::LazyConstant:
    case CacheKind::NewArray:
    case CacheKind::NewObject:
    case CacheKind::Lambda:
    case CacheKind::GetImport:
      MOZ_CRASH("Unsupported IC");
  }
}
#endif
    701 
    702 bool IonCacheIRCompiler::emitGuardShape(ObjOperandId objId,
    703                                        uint32_t shapeOffset) {
    704  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    705  Register obj = allocator.useRegister(masm, objId);
    706  Shape* shape = weakShapeStubField(shapeOffset);
    707 
    708  bool needSpectreMitigations = objectGuardNeedsSpectreMitigations(objId);
    709 
    710  Maybe<AutoScratchRegister> maybeScratch;
    711  if (needSpectreMitigations) {
    712    maybeScratch.emplace(allocator, masm);
    713  }
    714 
    715  FailurePath* failure;
    716  if (!addFailurePath(&failure)) {
    717    return false;
    718  }
    719 
    720  if (needSpectreMitigations) {
    721    masm.branchTestObjShape(Assembler::NotEqual, obj, shape, *maybeScratch, obj,
    722                            failure->label());
    723  } else {
    724    masm.branchTestObjShapeNoSpectreMitigations(Assembler::NotEqual, obj, shape,
    725                                                failure->label());
    726  }
    727 
    728  return true;
    729 }
    730 
    731 bool IonCacheIRCompiler::emitGuardProto(ObjOperandId objId,
    732                                        uint32_t protoOffset) {
    733  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    734  Register obj = allocator.useRegister(masm, objId);
    735  JSObject* proto = weakObjectStubField(protoOffset);
    736 
    737  AutoScratchRegister scratch(allocator, masm);
    738 
    739  FailurePath* failure;
    740  if (!addFailurePath(&failure)) {
    741    return false;
    742  }
    743 
    744  masm.loadObjProto(obj, scratch);
    745  masm.branchPtr(Assembler::NotEqual, scratch, ImmGCPtr(proto),
    746                 failure->label());
    747  return true;
    748 }
    749 
    750 bool IonCacheIRCompiler::emitGuardCompartment(ObjOperandId objId,
    751                                              uint32_t globalOffset,
    752                                              uint32_t compartmentOffset) {
    753  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    754  Register obj = allocator.useRegister(masm, objId);
    755  JSObject* globalWrapper = objectStubField(globalOffset);
    756  JS::Compartment* compartment = compartmentStubField(compartmentOffset);
    757  AutoScratchRegister scratch(allocator, masm);
    758 
    759  FailurePath* failure;
    760  if (!addFailurePath(&failure)) {
    761    return false;
    762  }
    763 
    764  // Verify that the global wrapper is still valid, as
    765  // it is pre-requisite for doing the compartment check.
    766  masm.movePtr(ImmGCPtr(globalWrapper), scratch);
    767  Address handlerAddr(scratch, ProxyObject::offsetOfHandler());
    768  masm.branchPtr(Assembler::Equal, handlerAddr,
    769                 ImmPtr(&DeadObjectProxy::singleton), failure->label());
    770 
    771  masm.branchTestObjCompartment(Assembler::NotEqual, obj, compartment, scratch,
    772                                failure->label());
    773  return true;
    774 }
    775 
    776 bool IonCacheIRCompiler::emitGuardAnyClass(ObjOperandId objId,
    777                                           uint32_t claspOffset) {
    778  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    779  Register obj = allocator.useRegister(masm, objId);
    780  AutoScratchRegister scratch(allocator, masm);
    781 
    782  const JSClass* clasp = classStubField(claspOffset);
    783 
    784  FailurePath* failure;
    785  if (!addFailurePath(&failure)) {
    786    return false;
    787  }
    788 
    789  if (objectGuardNeedsSpectreMitigations(objId)) {
    790    masm.branchTestObjClass(Assembler::NotEqual, obj, clasp, scratch, obj,
    791                            failure->label());
    792  } else {
    793    masm.branchTestObjClassNoSpectreMitigations(Assembler::NotEqual, obj, clasp,
    794                                                scratch, failure->label());
    795  }
    796 
    797  return true;
    798 }
    799 
    800 bool IonCacheIRCompiler::emitGuardHasProxyHandler(ObjOperandId objId,
    801                                                  uint32_t handlerOffset) {
    802  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    803  Register obj = allocator.useRegister(masm, objId);
    804  const void* handler = proxyHandlerStubField(handlerOffset);
    805 
    806  FailurePath* failure;
    807  if (!addFailurePath(&failure)) {
    808    return false;
    809  }
    810 
    811  Address handlerAddr(obj, ProxyObject::offsetOfHandler());
    812  masm.branchPtr(Assembler::NotEqual, handlerAddr, ImmPtr(handler),
    813                 failure->label());
    814  return true;
    815 }
    816 
    817 bool IonCacheIRCompiler::emitGuardSpecificObject(ObjOperandId objId,
    818                                                 uint32_t expectedOffset) {
    819  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    820  Register obj = allocator.useRegister(masm, objId);
    821  JSObject* expected = weakObjectStubField(expectedOffset);
    822 
    823  FailurePath* failure;
    824  if (!addFailurePath(&failure)) {
    825    return false;
    826  }
    827 
    828  masm.branchPtr(Assembler::NotEqual, obj, ImmGCPtr(expected),
    829                 failure->label());
    830  return true;
    831 }
    832 
bool IonCacheIRCompiler::emitGuardSpecificFunction(
    ObjOperandId objId, uint32_t expectedOffset, uint32_t nargsAndFlagsOffset) {
  // A specific-function guard is just an object identity guard here; the
  // nargs-and-flags stub field is not consulted by the Ion IC compiler.
  return emitGuardSpecificObject(objId, expectedOffset);
}
    837 
    838 bool IonCacheIRCompiler::emitGuardSpecificAtom(StringOperandId strId,
    839                                               uint32_t expectedOffset) {
    840  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    841  Register str = allocator.useRegister(masm, strId);
    842  AutoScratchRegister scratch(allocator, masm);
    843 
    844  JSOffThreadAtom* atom = &stringStubField(expectedOffset)->asOffThreadAtom();
    845 
    846  FailurePath* failure;
    847  if (!addFailurePath(&failure)) {
    848    return false;
    849  }
    850 
    851  LiveRegisterSet volatileRegs = liveVolatileRegs();
    852  volatileRegs.takeUnchecked(scratch);
    853 
    854  masm.guardSpecificAtom(str, atom, scratch, volatileRegs, failure->label());
    855  return true;
    856 }
    857 
    858 bool IonCacheIRCompiler::emitGuardSpecificSymbol(SymbolOperandId symId,
    859                                                 uint32_t expectedOffset) {
    860  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    861  Register sym = allocator.useRegister(masm, symId);
    862  JS::Symbol* expected = symbolStubField(expectedOffset);
    863 
    864  FailurePath* failure;
    865  if (!addFailurePath(&failure)) {
    866    return false;
    867  }
    868 
    869  masm.branchPtr(Assembler::NotEqual, sym, ImmGCPtr(expected),
    870                 failure->label());
    871  return true;
    872 }
    873 
    874 bool IonCacheIRCompiler::emitGuardSpecificValue(ValOperandId valId,
    875                                                uint32_t expectedOffset) {
    876  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    877  ValueOperand val = allocator.useValueRegister(masm, valId);
    878  Value expected = valueStubField(expectedOffset);
    879 
    880  Maybe<AutoScratchRegister> maybeScratch;
    881  if (expected.isNaN()) {
    882    maybeScratch.emplace(allocator, masm);
    883  }
    884 
    885  FailurePath* failure;
    886  if (!addFailurePath(&failure)) {
    887    return false;
    888  }
    889 
    890  if (expected.isNaN()) {
    891    masm.branchTestNaNValue(Assembler::NotEqual, val, *maybeScratch,
    892                            failure->label());
    893  } else {
    894    masm.branchTestValue(Assembler::NotEqual, val, expected, failure->label());
    895  }
    896  return true;
    897 }
    898 
    899 bool IonCacheIRCompiler::emitLoadValueResult(uint32_t valOffset) {
    900  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    901  AutoOutputRegister output(*this);
    902  masm.moveValue(valueStubField(valOffset), output.valueReg());
    903  return true;
    904 }
    905 
bool IonCacheIRCompiler::emitUncheckedLoadWeakValueResult(uint32_t valOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  // Move the Value from the stub's weak-value field straight into the output
  // register(s). "Unchecked" because no validity check of the referent is
  // emitted here.
  AutoOutputRegister output(*this);

  // Note: if we change this code to not use a strong reference for the Value,
  // we should remove the isIon check in
  // emitCheckWeakValueResultFor{Fixed,Dynamic}Slot.
  masm.moveValue(weakValueStubField(valOffset), output.valueReg());
  return true;
}
    916 
bool IonCacheIRCompiler::emitUncheckedLoadWeakObjectResult(uint32_t objOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  // Box the object from the stub's weak-object field and move it into the
  // output register(s), without emitting any validity check ("unchecked").
  AutoOutputRegister output(*this);
  // NOTE(review): |scratch| is allocated but not referenced below —
  // presumably kept for register-allocation parity with checked variants;
  // confirm before removing.
  AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

  // Note: if we change this code to not use a strong reference for the Value,
  // we should remove the isIon check in
  // emitCheckWeakValueResultFor{Fixed,Dynamic}Slot.
  Value result = ObjectValue(*weakObjectStubField(objOffset));
  masm.moveValue(result, output.valueReg());
  return true;
}
    929 
    930 bool IonCacheIRCompiler::emitLoadFixedSlotResult(ObjOperandId objId,
    931                                                 uint32_t offsetOffset) {
    932  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    933  AutoOutputRegister output(*this);
    934  Register obj = allocator.useRegister(masm, objId);
    935  int32_t offset = int32StubField(offsetOffset);
    936  masm.loadTypedOrValue(Address(obj, offset), output);
    937  return true;
    938 }
    939 
bool IonCacheIRCompiler::emitLoadFixedSlotTypedResult(ObjOperandId objId,
                                                     uint32_t offsetOffset,
                                                     ValueType) {
  // The type is only used by Warp. Ion ICs perform the same untyped slot
  // load, so delegate to the untyped variant.
  return emitLoadFixedSlotResult(objId, offsetOffset);
}
    946 
    947 bool IonCacheIRCompiler::emitLoadDynamicSlotResult(ObjOperandId objId,
    948                                                   uint32_t offsetOffset) {
    949  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
    950  AutoOutputRegister output(*this);
    951  Register obj = allocator.useRegister(masm, objId);
    952  int32_t offset = int32StubField(offsetOffset);
    953 
    954  AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
    955  masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
    956  masm.loadTypedOrValue(Address(scratch, offset), output);
    957  return true;
    958 }
    959 
// Emit a JIT-to-JIT call to a scripted getter: build an IonICCall frame with
// |this| plus |nargs| undefined arguments, call the getter's JIT code, and
// store the returned Value in the IC output.
bool IonCacheIRCompiler::emitCallScriptedGetterResult(
    ValOperandId receiverId, ObjOperandId calleeId, bool sameRealm,
    uint32_t nargsAndFlagsOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  ValueOperand receiver = allocator.useValueRegister(masm, receiverId);
  Register callee = allocator.useRegister(masm, calleeId);
  AutoScratchRegister scratch(allocator, masm);

  // Decode the getter's formal argument count from the stub data.
  int32_t nargsAndFlags = int32StubField(nargsAndFlagsOffset);
  size_t nargs = nargsAndFlags >> JSFunction::ArgCountShift;

  allocator.discardStack(masm);

  // Remember the depth so the whole frame can be torn down after the call.
  uint32_t framePushedBefore = masm.framePushed();

  enterStubFrame(masm, save);

  // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
  // so we just have to make sure the stack is aligned after we push the
  // |this| + argument Values.
  uint32_t argSize = (nargs + 1) * sizeof(Value);
  uint32_t padding =
      ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
  MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
  MOZ_ASSERT(padding < JitStackAlignment);
  masm.reserveStack(padding);

  // Getters are called with no actual arguments: pad the formals with
  // |undefined|, then push |this|.
  for (size_t i = 0; i < nargs; i++) {
    masm.Push(UndefinedValue());
  }
  masm.Push(receiver);

  if (!sameRealm) {
    masm.switchToObjectRealm(callee, scratch);
  }

  masm.Push(callee);
  masm.Push(FrameDescriptor(FrameType::IonICCall, /* argc = */ 0));

  // Check stack alignment. Add 2 * sizeof(uintptr_t) for the return address and
  // frame pointer pushed by the call/callee.
  MOZ_ASSERT(
      ((masm.framePushed() + 2 * sizeof(uintptr_t)) % JitStackAlignment) == 0);

  masm.loadJitCodeRaw(callee, scratch);
  masm.callJit(scratch);

  // Switch back to the caller's realm if the getter ran in a different one.
  if (!sameRealm) {
    static_assert(!JSReturnOperand.aliases(ReturnReg),
                  "ReturnReg available as scratch after scripted calls");
    masm.switchToRealm(cx_->realm(), ReturnReg);
  }

  masm.storeCallResultValue(output);

  // Restore the frame pointer and stack pointer.
  masm.loadPtr(Address(FramePointer, 0), FramePointer);
  masm.freeStack(masm.framePushed() - framePushedBefore);
  return true;
}
   1023 
   1024 #ifdef JS_PUNBOX64
// Shared emitter for calling a scripted proxy |get| trap. IdType is either
// ValOperandId (dynamic property key in a register) or uint32_t (key baked
// into the stub data). Calls trap(target, id, receiver) via JIT code and, if
// the target needs proxy-result validation, re-checks the result through the
// CheckProxyGetByValueResult VM function.
template <typename IdType>
bool IonCacheIRCompiler::emitCallScriptedProxyGetShared(
    ValOperandId targetId, ObjOperandId receiverId, ObjOperandId handlerId,
    ObjOperandId trapId, IdType id, uint32_t nargsAndFlags) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  ValueOperand target = allocator.useValueRegister(masm, targetId);
  Register receiver = allocator.useRegister(masm, receiverId);
  Register handler = allocator.useRegister(masm, handlerId);
  Register callee = allocator.useRegister(masm, trapId);
  ValueOperand idVal;
  if constexpr (std::is_same_v<IdType, ValOperandId>) {
    idVal = allocator.useValueRegister(masm, id);
  }
  // Formal argument count of the trap function, from the stub data.
  size_t nargs = nargsAndFlags >> JSFunction::ArgCountShift;

  AutoScratchRegister scratch(allocator, masm);
  AutoScratchRegister scratch2(allocator, masm);
  ValueOperand scratchVal(scratch);
  ValueOperand scratchVal2(scratch2);

  allocator.discardStack(masm);

  uint32_t framePushedBefore = masm.framePushed();

  enterStubFrame(masm, save);

  // We need to keep the target around to potentially validate the proxy result
  storeTracedValue(masm, target);
  if constexpr (std::is_same_v<IdType, ValOperandId>) {
    // Same for the id, assuming it's not baked in
    storeTracedValue(masm, idVal);
#  ifdef DEBUG
    // Private-name symbols must never flow into a scripted proxy trap.
    Label notPrivateSymbol;
    masm.branchTestSymbol(Assembler::NotEqual, idVal, &notPrivateSymbol);
    masm.unboxSymbol(idVal, scratch);
    masm.branch32(
        Assembler::NotEqual, Address(scratch, JS::Symbol::offsetOfCode()),
        Imm32(uint32_t(JS::SymbolCode::PrivateNameSymbol)), &notPrivateSymbol);
    masm.assumeUnreachable("Unexpected private field in callScriptedProxy");
    masm.bind(&notPrivateSymbol);
#  endif
  }
  uint32_t framePushedBeforeArgs = masm.framePushed();

  // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
  // so we just have to make sure the stack is aligned after we push the
  // |this| + argument Values.
  uint32_t argSize = (std::max(nargs, (size_t)3) + 1) * sizeof(Value);
  uint32_t padding =
      ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
  MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
  MOZ_ASSERT(padding < JitStackAlignment);
  masm.reserveStack(padding);

  // The trap is always passed 3 actual args (target, id, receiver); pad any
  // extra formals with |undefined|.
  for (size_t i = 3; i < nargs; i++) {
    masm.Push(UndefinedValue());
  }

  // Push the 3 trap arguments in reverse order, then |this| (the handler).
  masm.tagValue(JSVAL_TYPE_OBJECT, receiver, scratchVal);
  masm.Push(scratchVal);

  if constexpr (std::is_same_v<IdType, ValOperandId>) {
    masm.Push(idVal);
  } else {
    masm.movePropertyKey(idStubField(id), scratch);
    masm.tagValue(JSVAL_TYPE_STRING, scratch, scratchVal);
    masm.Push(scratchVal);
  }

  masm.Push(target);

  masm.tagValue(JSVAL_TYPE_OBJECT, handler, scratchVal);
  masm.Push(scratchVal);

  masm.Push(callee);
  masm.Push(FrameDescriptor(FrameType::IonICCall, /* argc = */ 3));

  // Check stack alignment. Add 2 * sizeof(uintptr_t) for the return address and
  // frame pointer pushed by the call/callee.
  MOZ_ASSERT(
      ((masm.framePushed() + 2 * sizeof(uintptr_t)) % JitStackAlignment) == 0);

  masm.loadJitCodeRaw(callee, scratch);
  masm.callJit(scratch);

  masm.storeCallResultValue(output);

  // Reload the saved target; skip result validation if it isn't needed.
  Label success, end;
  loadTracedValue(masm, 0, scratchVal);
  masm.unboxObject(scratchVal, scratch);
  masm.branchTestObjectNeedsProxyResultValidation(Assembler::Zero, scratch,
                                                  scratch2, &success);

  // Recover the id: either the traced copy or the baked-in stub field.
  if constexpr (std::is_same_v<IdType, ValOperandId>) {
    loadTracedValue(masm, 1, scratchVal2);
  } else {
    masm.moveValue(StringValue(idStubField(id).toString()), scratchVal2);
  }

  uint32_t framePushedAfterCall = masm.framePushed();
  masm.freeStack(masm.framePushed() - framePushedBeforeArgs);

  // Validate the trap's result against the target via the VM.
  masm.Push(output.valueReg());
  masm.Push(scratchVal2);
  masm.Push(scratch);

  using Fn = bool (*)(JSContext*, HandleObject, HandleValue, HandleValue,
                      MutableHandleValue);
  callVM<Fn, CheckProxyGetByValueResult>(masm);

  masm.storeCallResultValue(output);

  masm.jump(&end);
  masm.bind(&success);
  // Both paths must agree on the frame depth before the common teardown.
  masm.setFramePushed(framePushedAfterCall);

  // Restore the frame pointer and stack pointer.
  masm.loadPtr(Address(FramePointer, 0), FramePointer);
  masm.freeStack(masm.framePushed() - framePushedBefore);
  masm.bind(&end);

  return true;
}
   1151 
bool IonCacheIRCompiler::emitCallScriptedProxyGetResult(
    ValOperandId targetId, ObjOperandId receiverId, ObjOperandId handlerId,
    ObjOperandId trapId, uint32_t id, uint32_t nargsAndFlags) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  // |id| is a stub-field offset, so this instantiates the shared emitter for
  // the baked-in property-key case.
  return emitCallScriptedProxyGetShared(targetId, receiverId, handlerId, trapId,
                                        id, nargsAndFlags);
}
   1159 
bool IonCacheIRCompiler::emitCallScriptedProxyGetByValueResult(
    ValOperandId targetId, ObjOperandId receiverId, ObjOperandId handlerId,
    ValOperandId idId, ObjOperandId trapId, uint32_t nargsAndFlags) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  // |idId| is a value operand, so this instantiates the shared emitter for
  // the dynamic property-key case.
  return emitCallScriptedProxyGetShared(targetId, receiverId, handlerId, trapId,
                                        idId, nargsAndFlags);
}
   1167 #endif
   1168 
bool IonCacheIRCompiler::emitCallInlinedGetterResult(
    ValOperandId receiverId, ObjOperandId calleeId, uint32_t icScriptOffset,
    bool sameRealm, uint32_t nargsAndFlagsOffset) {
  // Inlined-getter ops come from trial inlining, which this compiler does
  // not support; reaching here is a CacheIR emission bug.
  MOZ_CRASH("Trial inlining not supported in Ion");
}
   1174 
// Call a native (C++) getter: build a vp array on the stack, enter a fake
// IonOOLNative exit frame, invoke the native through the ABI, and load the
// outparam into the IC output.
bool IonCacheIRCompiler::emitCallNativeGetterResult(
    ValOperandId receiverId, uint32_t getterOffset, bool sameRealm,
    uint32_t nargsAndFlagsOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  ValueOperand receiver = allocator.useValueRegister(masm, receiverId);

  // The getter function is stored in the stub data and must be native.
  JSFunction* target = &objectStubField(getterOffset)->as<JSFunction>();
  MOZ_ASSERT(target->isNativeFun());

  AutoScratchRegisterMaybeOutput argJSContext(allocator, masm, output);
  AutoScratchRegisterMaybeOutputType argUintN(allocator, masm, output);
  AutoScratchRegister argVp(allocator, masm);
  AutoScratchRegister scratch(allocator, masm);

  allocator.discardStack(masm);

  // Native functions have the signature:
  //  bool (*)(JSContext*, unsigned, Value* vp)
  // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
  // are the function arguments.

  // Construct vp array:
  // Push receiver value for |this|
  masm.Push(receiver);
  // Push callee/outparam.
  masm.Push(ObjectValue(*target));

  // Preload arguments into registers.
  masm.loadJSContext(argJSContext);
  masm.move32(Imm32(0), argUintN);
  masm.moveStackPtrTo(argVp.get());

  // Push marking data for later use.
  masm.Push(argUintN);
  pushStubCodePointer();

  if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save)) {
    return false;
  }
  masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLNative);

  // Enter the getter's realm before the call if it differs from ours.
  if (!sameRealm) {
    masm.switchToRealm(target->realm(), scratch);
  }

  // Construct and execute call.
  masm.setupUnalignedABICall(scratch);
  masm.passABIArg(argJSContext);
  masm.passABIArg(argUintN);
  masm.passABIArg(argVp);
  masm.callWithABI(DynamicFunction<JSNative>(target->native()),
                   ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

  // Switch back to the caller's realm after the call.
  if (!sameRealm) {
    masm.switchToRealm(cx_->realm(), ReturnReg);
  }

  // Load the outparam vp[0] into output register(s).
  Address outparam(masm.getStackPointer(),
                   IonOOLNativeExitFrameLayout::offsetOfResult());
  masm.loadValue(outparam, output.valueReg());

  // Spectre mitigation for speculation across the JIT-to-C++ boundary.
  if (JitOptions.spectreJitToCxxCalls) {
    masm.speculationBarrier();
  }

  // Pop the fake exit frame and the vp array.
  masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
  return true;
}
   1251 
// Call a DOM getter through the jit::CallDOMGetter VM function, storing the
// returned Value in the IC output. The JSJitInfo describing the getter comes
// from the stub data.
bool IonCacheIRCompiler::emitCallDOMGetterResult(ObjOperandId objId,
                                                uint32_t jitInfoOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  Register obj = allocator.useRegister(masm, objId);

  const JSJitInfo* info = rawPointerStubField<const JSJitInfo*>(jitInfoOffset);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed last-to-first.
  masm.Push(obj);
  masm.Push(ImmPtr(info));

  using Fn =
      bool (*)(JSContext*, const JSJitInfo*, HandleObject, MutableHandleValue);
  callVM<Fn, jit::CallDOMGetter>(masm);

  masm.storeCallResultValue(output);
  return true;
}
   1275 
// Call a DOM setter through the jit::CallDOMSetter VM function. Produces no
// IC output value.
bool IonCacheIRCompiler::emitCallDOMSetter(ObjOperandId objId,
                                          uint32_t jitInfoOffset,
                                          ValOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  ValueOperand val = allocator.useValueRegister(masm, rhsId);

  const JSJitInfo* info = rawPointerStubField<const JSJitInfo*>(jitInfoOffset);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed last-to-first.
  masm.Push(val);
  masm.Push(obj);
  masm.Push(ImmPtr(info));

  using Fn = bool (*)(JSContext*, const JSJitInfo*, HandleObject, HandleValue);
  callVM<Fn, jit::CallDOMSetter>(masm);
  return true;
}
   1298 
// Call ProxyGetProperty via the C++ ABI: materialize handles by pushing the
// arguments on the stack inside a fake IonOOLProxy exit frame, then load the
// outparam into the IC output.
bool IonCacheIRCompiler::emitProxyGetResult(ObjOperandId objId,
                                           uint32_t idOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  Register obj = allocator.useRegister(masm, objId);
  jsid id = idStubField(idOffset);

  // ProxyGetProperty(JSContext* cx, HandleObject proxy, HandleId id,
  //                  MutableHandleValue vp)
  AutoScratchRegisterMaybeOutput argJSContext(allocator, masm, output);
  AutoScratchRegisterMaybeOutputType argProxy(allocator, masm, output);
  AutoScratchRegister argId(allocator, masm);
  AutoScratchRegister argVp(allocator, masm);
  AutoScratchRegister scratch(allocator, masm);

  allocator.discardStack(masm);

  // Push stubCode for marking.
  pushStubCodePointer();

  // Push args on stack first so we can take pointers to make handles.
  masm.Push(UndefinedValue());
  masm.moveStackPtrTo(argVp.get());

  masm.Push(id, scratch);
  masm.moveStackPtrTo(argId.get());

  // Push the proxy. Also used as receiver.
  masm.Push(obj);
  masm.moveStackPtrTo(argProxy.get());

  masm.loadJSContext(argJSContext);

  if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save)) {
    return false;
  }
  masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLProxy);

  // Make the call.
  using Fn = bool (*)(JSContext* cx, HandleObject proxy, HandleId id,
                      MutableHandleValue vp);
  masm.setupUnalignedABICall(scratch);
  masm.passABIArg(argJSContext);
  masm.passABIArg(argProxy);
  masm.passABIArg(argId);
  masm.passABIArg(argVp);
  masm.callWithABI<Fn, ProxyGetProperty>(
      ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure.
  masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

  // Load the outparam vp[0] into output register(s).
  Address outparam(masm.getStackPointer(),
                   IonOOLProxyExitFrameLayout::offsetOfResult());
  masm.loadValue(outparam, output.valueReg());

  // Spectre mitigation in case of speculative execution within C++ code.
  if (JitOptions.spectreJitToCxxCalls) {
    masm.speculationBarrier();
  }

  // masm.leaveExitFrame & pop locals
  masm.adjustStack(IonOOLProxyExitFrameLayout::Size());
  return true;
}
   1367 
bool IonCacheIRCompiler::emitFrameIsConstructingResult() {
  // This op inspects the caller's frame, which only Baseline IC stubs can
  // do; it must never be emitted for Ion ICs.
  MOZ_CRASH("Baseline-specific op");
}
   1371 
bool IonCacheIRCompiler::emitLoadConstantStringResult(uint32_t strOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  // No CacheIR producing this op targets Ion; reaching here is a bug.
  MOZ_CRASH("not used in ion");
}
   1376 
// Compare two strings with |op|, producing a boolean in the typed output
// register. A fast inline path handles the common cases; anything else falls
// through to a VM call selected by the comparison operator.
bool IonCacheIRCompiler::emitCompareStringResult(JSOp op, StringOperandId lhsId,
                                                StringOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  Register left = allocator.useRegister(masm, lhsId);
  Register right = allocator.useRegister(masm, rhsId);

  allocator.discardStack(masm);

  Label slow, done;
  // String comparisons always produce a typed boolean, never a boxed Value.
  MOZ_ASSERT(!output.hasValue());
  masm.compareStrings(op, left, right, output.typedReg().gpr(), &slow);

  masm.jump(&done);
  masm.bind(&slow);

  enterStubFrame(masm, save);

  // Push the operands in reverse order for JSOp::Le and JSOp::Gt:
  // - |left <= right| is implemented as |right >= left|.
  // - |left > right| is implemented as |right < left|.
  if (op == JSOp::Le || op == JSOp::Gt) {
    masm.Push(left);
    masm.Push(right);
  } else {
    masm.Push(right);
    masm.Push(left);
  }

  // Dispatch to the VM function matching the (possibly swapped) operator.
  using Fn = bool (*)(JSContext*, HandleString, HandleString, bool*);
  if (op == JSOp::Eq || op == JSOp::StrictEq) {
    callVM<Fn, jit::StringsEqual<EqualityKind::Equal>>(masm);
  } else if (op == JSOp::Ne || op == JSOp::StrictNe) {
    callVM<Fn, jit::StringsEqual<EqualityKind::NotEqual>>(masm);
  } else if (op == JSOp::Lt || op == JSOp::Gt) {
    callVM<Fn, jit::StringsCompare<ComparisonKind::LessThan>>(masm);
  } else {
    MOZ_ASSERT(op == JSOp::Le || op == JSOp::Ge);
    callVM<Fn, jit::StringsCompare<ComparisonKind::GreaterThanOrEqual>>(masm);
  }

  masm.storeCallBoolResult(output.typedReg().gpr());
  masm.bind(&done);
  return true;
}
   1424 
// Store |rhs| into a fixed slot of |obj|, with GC barriers: a pre-barrier on
// the old slot value before the write and a post-barrier after it.
bool IonCacheIRCompiler::emitStoreFixedSlot(ObjOperandId objId,
                                           uint32_t offsetOffset,
                                           ValOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  Register obj = allocator.useRegister(masm, objId);
  // Byte offset of the fixed slot from the object base (stub data).
  int32_t offset = int32StubField(offsetOffset);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);
  AutoScratchRegister scratch(allocator, masm);

  Address slot(obj, offset);
  EmitPreBarrier(masm, slot, MIRType::Value);
  masm.storeConstantOrRegister(val, slot);
  emitPostBarrierSlot(obj, val, scratch);
  return true;
}
   1440 
// Store |rhs| into a dynamic (out-of-line) slot of |obj|: load the slots
// pointer first, then write with pre/post GC barriers as in the fixed-slot
// case.
bool IonCacheIRCompiler::emitStoreDynamicSlot(ObjOperandId objId,
                                             uint32_t offsetOffset,
                                             ValOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  Register obj = allocator.useRegister(masm, objId);
  // Byte offset of the slot within the dynamic slots array (stub data).
  int32_t offset = int32StubField(offsetOffset);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);
  AutoScratchRegister scratch(allocator, masm);

  masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
  Address slot(scratch, offset);
  EmitPreBarrier(masm, slot, MIRType::Value);
  masm.storeConstantOrRegister(val, slot);
  // Note: scratch is reused here; the slots pointer is no longer needed.
  emitPostBarrierSlot(obj, val, scratch);
  return true;
}
   1457 
// Shared emitter for the three add-property ops: optionally preserve a DOM
// wrapper, optionally grow the dynamic slots array, update the object's shape
// to |newShape|, then initialize the new slot. |numNewSlotsOffset| is present
// only for AllocateAndStoreDynamicSlot.
bool IonCacheIRCompiler::emitAddAndStoreSlotShared(
    CacheOp op, ObjOperandId objId, uint32_t offsetOffset, ValOperandId rhsId,
    uint32_t newShapeOffset, Maybe<uint32_t> numNewSlotsOffset,
    bool preserveWrapper) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  Register obj = allocator.useRegister(masm, objId);
  int32_t offset = int32StubField(offsetOffset);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  AutoScratchRegister scratch1(allocator, masm);

  // A second scratch is only needed by the slot-growing call and by
  // preserveWrapper below.
  Maybe<AutoScratchRegister> scratch2;
  if (op == CacheOp::AllocateAndStoreDynamicSlot || preserveWrapper) {
    scratch2.emplace(allocator, masm);
  }

  FailurePath* failure = nullptr;
  if (preserveWrapper) {
    if (!addFailurePath(&failure)) {
      return false;
    }
    // preserveWrapper may clobber volatile registers; keep ours excluded
    // since they carry its inputs/outputs.
    LiveRegisterSet save = liveVolatileRegs();
    save.takeUnchecked(scratch1);
    save.takeUnchecked(scratch2.ref());
    masm.preserveWrapper(obj, scratch1, scratch2.ref(), save);
    masm.branchIfFalseBool(scratch1, failure->label());
  }

  Shape* newShape = shapeStubField(newShapeOffset);

  if (op == CacheOp::AllocateAndStoreDynamicSlot) {
    // We have to (re)allocate dynamic slots. Do this first, as it's the
    // only fallible operation here. Note that growSlotsPure is
    // fallible but does not GC.

    // Reuse the failure path from preserveWrapper if one was created.
    if (!failure && !addFailurePath(&failure)) {
      return false;
    }

    int32_t numNewSlots = int32StubField(*numNewSlotsOffset);
    MOZ_ASSERT(numNewSlots > 0);

    // Save all volatile registers around the ABI call.
    LiveRegisterSet save = liveVolatileRegs();
    masm.PushRegsInMask(save);

    using Fn = bool (*)(JSContext* cx, NativeObject* obj, uint32_t newCount);
    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    masm.move32(Imm32(numNewSlots), scratch2.ref());
    masm.passABIArg(scratch2.ref());
    masm.callWithABI<Fn, NativeObject::growSlotsPure>();
    masm.storeCallPointerResult(scratch1);

    // Restore everything except scratch1, which holds the call result.
    LiveRegisterSet ignore;
    ignore.add(scratch1);
    masm.PopRegsInMaskIgnore(save, ignore);

    masm.branchIfFalseBool(scratch1, failure->label());
  }

  // Update the object's shape.
  masm.storeObjShape(newShape, obj,
                     [](MacroAssembler& masm, const Address& addr) {
                       EmitPreBarrier(masm, addr, MIRType::Shape);
                     });

  // Perform the store. No pre-barrier required since this is a new
  // initialization.
  if (op == CacheOp::AddAndStoreFixedSlot) {
    Address slot(obj, offset);
    masm.storeConstantOrRegister(val, slot);
  } else {
    MOZ_ASSERT(op == CacheOp::AddAndStoreDynamicSlot ||
               op == CacheOp::AllocateAndStoreDynamicSlot);
    masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch1);
    Address slot(scratch1, offset);
    masm.storeConstantOrRegister(val, slot);
  }

  emitPostBarrierSlot(obj, val, scratch1);

  return true;
}
   1543 
   1544 bool IonCacheIRCompiler::emitAddAndStoreFixedSlot(ObjOperandId objId,
   1545                                                  uint32_t offsetOffset,
   1546                                                  ValOperandId rhsId,
   1547                                                  uint32_t newShapeOffset,
   1548                                                  bool preserveWrapper) {
   1549  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
   1550  Maybe<uint32_t> numNewSlotsOffset = mozilla::Nothing();
   1551  return emitAddAndStoreSlotShared(CacheOp::AddAndStoreFixedSlot, objId,
   1552                                   offsetOffset, rhsId, newShapeOffset,
   1553                                   numNewSlotsOffset, preserveWrapper);
   1554 }
   1555 
   1556 bool IonCacheIRCompiler::emitAddAndStoreDynamicSlot(ObjOperandId objId,
   1557                                                    uint32_t offsetOffset,
   1558                                                    ValOperandId rhsId,
   1559                                                    uint32_t newShapeOffset,
   1560                                                    bool preserveWrapper) {
   1561  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
   1562  Maybe<uint32_t> numNewSlotsOffset = mozilla::Nothing();
   1563  return emitAddAndStoreSlotShared(CacheOp::AddAndStoreDynamicSlot, objId,
   1564                                   offsetOffset, rhsId, newShapeOffset,
   1565                                   numNewSlotsOffset, preserveWrapper);
   1566 }
   1567 
   1568 bool IonCacheIRCompiler::emitAllocateAndStoreDynamicSlot(
   1569    ObjOperandId objId, uint32_t offsetOffset, ValOperandId rhsId,
   1570    uint32_t newShapeOffset, uint32_t numNewSlotsOffset, bool preserveWrapper) {
   1571  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
   1572  return emitAddAndStoreSlotShared(
   1573      CacheOp::AllocateAndStoreDynamicSlot, objId, offsetOffset, rhsId,
   1574      newShapeOffset, mozilla::Some(numNewSlotsOffset), preserveWrapper);
   1575 }
   1576 
// Load the character at |index| in |str| as a string result. Out-of-bounds
// behavior is selected by |outOfBounds|: fail the stub, return the empty
// string, or return |undefined|.
bool IonCacheIRCompiler::emitLoadStringCharResult(
    StringOperandId strId, Int32OperandId indexId,
    StringCharOutOfBounds outOfBounds) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoOutputRegister output(*this);
  Register str = allocator.useRegister(masm, strId);
  Register index = allocator.useRegister(masm, indexId);
  AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
  AutoScratchRegisterMaybeOutputType scratch2(allocator, masm, output);
  AutoScratchRegister scratch3(allocator, masm);

  FailurePath* failure;
  if (!addFailurePath(&failure)) {
    return false;
  }

  // Bounds check, load string char.
  Label done;
  Label tagResult;
  Label loadFailed;
  if (outOfBounds == StringCharOutOfBounds::Failure) {
    // Out-of-bounds (or a failed char load) fails the whole stub.
    masm.spectreBoundsCheck32(index, Address(str, JSString::offsetOfLength()),
                              scratch1, failure->label());
    masm.loadStringChar(str, index, scratch1, scratch2, scratch3,
                        failure->label());
  } else {
    if (outOfBounds == StringCharOutOfBounds::EmptyString) {
      // Return the empty string for out-of-bounds access. Preload it into
      // scratch2 so the out-of-bounds jump to |done| tags it as the result.
      masm.movePtr(ImmGCPtr(cx_->runtime()->emptyString), scratch2);
    } else {
      // Return |undefined| for out-of-bounds access. It's written directly
      // into the output, so the jump to |done| must skip the string tagging
      // (see the bind order at the bottom of this function).
      masm.moveValue(UndefinedValue(), output.valueReg());
    }

    // This CacheIR op is always preceded by |LinearizeForCharAccess|, so we're
    // guaranteed to see no nested ropes.
    masm.spectreBoundsCheck32(index, Address(str, JSString::offsetOfLength()),
                              scratch1, &done);
    masm.loadStringChar(str, index, scratch1, scratch2, scratch3, &loadFailed);
  }

  // Load StaticString for this char. For larger code units perform a VM call.
  Label vmCall;
  masm.lookupStaticString(scratch1, scratch2, cx_->staticStrings(), &vmCall);
  masm.jump(&tagResult);

  if (outOfBounds != StringCharOutOfBounds::Failure) {
    // In the non-Failure cases the string was linearized beforehand, so the
    // char load can never take the |loadFailed| path.
    masm.bind(&loadFailed);
    masm.assumeUnreachable("loadStringChar can't fail for linear strings");
  }

  {
    masm.bind(&vmCall);

    // FailurePath and AutoSaveLiveRegisters don't get along very well. Both are
    // modifying the stack and expect that no other stack manipulations are
    // made. Therefore we need to use an ABI call instead of a VM call here.

    LiveRegisterSet volatileRegs = liveVolatileRegs();
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    volatileRegs.takeUnchecked(scratch3);
    volatileRegs.takeUnchecked(output);
    masm.PushRegsInMask(volatileRegs);

    using Fn = JSLinearString* (*)(JSContext * cx, int32_t code);
    masm.setupUnalignedABICall(scratch2);
    masm.loadJSContext(scratch2);
    masm.passABIArg(scratch2);
    masm.passABIArg(scratch1);
    masm.callWithABI<Fn, jit::StringFromCharCodeNoGC>();
    masm.storeCallPointerResult(scratch2);

    masm.PopRegsInMask(volatileRegs);

    // The NoGC variant signals allocation failure with nullptr; fail the stub.
    masm.branchPtr(Assembler::Equal, scratch2, ImmWord(0), failure->label());
  }

  if (outOfBounds != StringCharOutOfBounds::UndefinedValue) {
    // Both |tagResult| (in-bounds) and |done| (out-of-bounds) arrive with a
    // string pointer in scratch2, so one tagValue covers both paths.
    masm.bind(&tagResult);
    masm.bind(&done);
    masm.tagValue(JSVAL_TYPE_STRING, scratch2, output.valueReg());
  } else {
    // |done| (out-of-bounds) already wrote |undefined| to the output and must
    // bypass the string tagging, so it is bound after the tagValue.
    masm.bind(&tagResult);
    masm.tagValue(JSVAL_TYPE_STRING, scratch2, output.valueReg());
    masm.bind(&done);
  }
  return true;
}
   1666 
   1667 bool IonCacheIRCompiler::emitLoadStringCharResult(StringOperandId strId,
   1668                                                  Int32OperandId indexId,
   1669                                                  bool handleOOB) {
   1670  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
   1671  auto outOfBounds = handleOOB ? StringCharOutOfBounds::EmptyString
   1672                               : StringCharOutOfBounds::Failure;
   1673  return emitLoadStringCharResult(strId, indexId, outOfBounds);
   1674 }
   1675 
   1676 bool IonCacheIRCompiler::emitLoadStringAtResult(StringOperandId strId,
   1677                                                Int32OperandId indexId,
   1678                                                bool handleOOB) {
   1679  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
   1680  auto outOfBounds = handleOOB ? StringCharOutOfBounds::UndefinedValue
   1681                               : StringCharOutOfBounds::Failure;
   1682  return emitLoadStringCharResult(strId, indexId, outOfBounds);
   1683 }
   1684 
// Call a native (C++) setter with |receiver| as |this| and the rhs value as
// the single argument, through an IonOOLNative fake exit frame.
bool IonCacheIRCompiler::emitCallNativeSetter(ObjOperandId receiverId,
                                              uint32_t setterOffset,
                                              ValOperandId rhsId,
                                              bool sameRealm,
                                              uint32_t nargsAndFlagsOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register receiver = allocator.useRegister(masm, receiverId);
  // The setter function is baked into the stub data.
  JSFunction* target = &objectStubField(setterOffset)->as<JSFunction>();
  MOZ_ASSERT(target->isNativeFun());
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  AutoScratchRegister argJSContext(allocator, masm);
  AutoScratchRegister argVp(allocator, masm);
  AutoScratchRegister argUintN(allocator, masm);
#ifndef JS_CODEGEN_X86
  AutoScratchRegister scratch(allocator, masm);
#else
  // Not enough registers on x86. Aliasing argUintN is fine; it is reloaded
  // below after the last use of scratch.
  Register scratch = argUintN;
#endif

  allocator.discardStack(masm);

  // Set up the call:
  //  bool (*)(JSContext*, unsigned, Value* vp)
  // vp[0] is callee/outparam
  // vp[1] is |this|
  // vp[2] is the value

  // Build vp and move the base into argVpReg. Pushes happen in reverse, so
  // vp[0] (the callee) ends up at the stack pointer.
  masm.Push(val);
  masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(receiver)));
  masm.Push(ObjectValue(*target));
  masm.moveStackPtrTo(argVp.get());

  // Preload other regs.
  masm.loadJSContext(argJSContext);
  masm.move32(Imm32(1), argUintN);

  // Push marking data for later use.
  masm.Push(argUintN);
  pushStubCodePointer();

  if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save)) {
    return false;
  }
  masm.enterFakeExitFrame(argJSContext, scratch, ExitFrameType::IonOOLNative);

  // Cross-realm setters need a realm switch around the call.
  if (!sameRealm) {
    masm.switchToRealm(target->realm(), scratch);
  }

  // Make the call.
  masm.setupUnalignedABICall(scratch);
#ifdef JS_CODEGEN_X86
  // Reload argUintN because it was clobbered (scratch aliases it on x86).
  masm.move32(Imm32(1), argUintN);
#endif
  masm.passABIArg(argJSContext);
  masm.passABIArg(argUintN);
  masm.passABIArg(argVp);
  masm.callWithABI(DynamicFunction<JSNative>(target->native()),
                   ABIType::General,
                   CheckUnsafeCallWithABI::DontCheckHasExitFrame);

  // Test for failure: natives return false to signal an exception.
  masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

  if (!sameRealm) {
    masm.switchToRealm(cx_->realm(), ReturnReg);
  }

  // Pop the exit frame (sized for the one argument pushed above).
  masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
  return true;
}
   1762 
// Call a scripted setter with |receiver| as |this| and the rhs value as the
// single actual argument, via an IonICCall frame. Missing formal arguments
// are padded with |undefined|.
bool IonCacheIRCompiler::emitCallScriptedSetter(ObjOperandId receiverId,
                                                ObjOperandId calleeId,
                                                ValOperandId rhsId,
                                                bool sameRealm,
                                                uint32_t nargsAndFlagsOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register receiver = allocator.useRegister(masm, receiverId);
  Register callee = allocator.useRegister(masm, calleeId);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  // The callee's formal-argument count is packed into the stub field.
  int32_t nargsAndFlags = int32StubField(nargsAndFlagsOffset);
  size_t nargs = nargsAndFlags >> JSFunction::ArgCountShift;

  AutoScratchRegister scratch(allocator, masm);

  allocator.discardStack(masm);

  // Remember the depth so everything pushed below can be popped at the end.
  uint32_t framePushedBefore = masm.framePushed();

  enterStubFrame(masm, save);

  // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
  // so we just have to make sure the stack is aligned after we push the
  // |this| + argument Values.
  size_t numPushedArgs = std::max<size_t>(1, nargs);
  uint32_t argSize = (numPushedArgs + 1) * sizeof(Value);
  uint32_t padding =
      ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
  MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
  MOZ_ASSERT(padding < JitStackAlignment);
  masm.reserveStack(padding);

  // Pad out unfilled formals with |undefined|, then push the rhs and |this|.
  for (size_t i = 1; i < nargs; i++) {
    masm.Push(UndefinedValue());
  }
  masm.Push(val);
  masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(receiver)));

  if (!sameRealm) {
    masm.switchToObjectRealm(callee, scratch);
  }

  masm.Push(callee);
  masm.Push(FrameDescriptor(FrameType::IonICCall, /* argc = */ 1));

  // Check stack alignment. Add 2 * sizeof(uintptr_t) for the return address and
  // frame pointer pushed by the call/callee.
  MOZ_ASSERT(
      ((masm.framePushed() + 2 * sizeof(uintptr_t)) % JitStackAlignment) == 0);

  masm.loadJitCodeRaw(callee, scratch);
  masm.callJit(scratch);

  if (!sameRealm) {
    masm.switchToRealm(cx_->realm(), ReturnReg);
  }

  // Restore the frame pointer and stack pointer.
  masm.loadPtr(Address(FramePointer, 0), FramePointer);
  masm.freeStack(masm.framePushed() - framePushedBefore);
  return true;
}
   1827 
// Trial-inlined setters are never emitted for Ion ICs, so this op is
// unreachable here.
bool IonCacheIRCompiler::emitCallInlinedSetter(
    ObjOperandId receiverId, ObjOperandId calleeId, ValOperandId rhsId,
    uint32_t icScriptOffset, bool sameRealm, uint32_t nargsAndFlagsOffset) {
  MOZ_CRASH("Trial inlining not supported in Ion");
}
   1833 
// Set an array's |length| property by calling the SetArrayLength VM function.
bool IonCacheIRCompiler::emitCallSetArrayLength(ObjOperandId objId, bool strict,
                                                ValOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(Imm32(strict));
  masm.Push(val);
  masm.Push(obj);

  using Fn = bool (*)(JSContext*, HandleObject, HandleValue, bool);
  callVM<Fn, jit::SetArrayLength>(masm);
  return true;
}
   1853 
// Set a property (with a constant jsid baked into the stub) on a proxy by
// calling the ProxySetProperty VM function.
bool IonCacheIRCompiler::emitProxySet(ObjOperandId objId, uint32_t idOffset,
                                      ValOperandId rhsId, bool strict) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);
  jsid id = idStubField(idOffset);

  AutoScratchRegister scratch(allocator, masm);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(Imm32(strict));
  masm.Push(val);
  // Pushing a jsid needs a scratch register for the encoding.
  masm.Push(id, scratch);
  masm.Push(obj);

  using Fn = bool (*)(JSContext*, HandleObject, HandleId, HandleValue, bool);
  callVM<Fn, ProxySetProperty>(masm);
  return true;
}
   1877 
// Set a property with a dynamic (Value) key on a proxy by calling the
// ProxySetPropertyByValue VM function.
bool IonCacheIRCompiler::emitProxySetByValue(ObjOperandId objId,
                                             ValOperandId idId,
                                             ValOperandId rhsId, bool strict) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  ConstantOrRegister idVal = allocator.useConstantOrRegister(masm, idId);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(Imm32(strict));
  masm.Push(val);
  masm.Push(idVal);
  masm.Push(obj);

  using Fn = bool (*)(JSContext*, HandleObject, HandleValue, HandleValue, bool);
  callVM<Fn, ProxySetPropertyByValue>(masm);
  return true;
}
   1900 
// Add or update a sparse element (int32 index) on a native object via the
// AddOrUpdateSparseElementHelper VM function.
bool IonCacheIRCompiler::emitCallAddOrUpdateSparseElementHelper(
    ObjOperandId objId, Int32OperandId idId, ValOperandId rhsId, bool strict) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  Register id = allocator.useRegister(masm, idId);
  ValueOperand val = allocator.useValueRegister(masm, rhsId);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(Imm32(strict));
  masm.Push(val);
  masm.Push(id);
  masm.Push(obj);

  using Fn = bool (*)(JSContext* cx, Handle<NativeObject*> obj, int32_t int_id,
                      HandleValue v, bool strict);
  callVM<Fn, AddOrUpdateSparseElementHelper>(masm);
  return true;
}
   1923 
// Megamorphic element store: fall back to the generic SetElementMegamorphic
// VM function instead of specializing on the receiver.
bool IonCacheIRCompiler::emitMegamorphicSetElement(ObjOperandId objId,
                                                   ValOperandId idId,
                                                   ValOperandId rhsId,
                                                   bool strict) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register obj = allocator.useRegister(masm, objId);
  ConstantOrRegister idVal = allocator.useConstantOrRegister(masm, idId);
  ConstantOrRegister val = allocator.useConstantOrRegister(masm, rhsId);

  allocator.discardStack(masm);
  enterStubFrame(masm, save);

  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(Imm32(strict));
  masm.Push(val);
  masm.Push(idVal);
  masm.Push(obj);

  using Fn = bool (*)(JSContext*, HandleObject, HandleValue, HandleValue, bool);
  callVM<Fn, SetElementMegamorphic<false>>(masm);
  return true;
}
   1947 
// Jump back to the Ion code that invoked this IC. If no live registers were
// saved (no VM/ABI call happened in this stub), restore the allocator's input
// state first so operands are back in their original locations.
bool IonCacheIRCompiler::emitReturnFromIC() {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  if (!savedLiveRegs_) {
    allocator.restoreInputState(masm);
  }

  uint8_t* rejoinAddr = ic_->rejoinAddr(ionScript_);
  masm.jump(ImmPtr(rejoinAddr));
  return true;
}
   1958 
// Guard that the DOM expando value is either |undefined| (no expando object)
// or an object whose shape matches the shape baked into the stub.
bool IonCacheIRCompiler::emitGuardDOMExpandoMissingOrGuardShape(
    ValOperandId expandoId, uint32_t shapeOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  ValueOperand val = allocator.useValueRegister(masm, expandoId);
  Shape* shape = shapeStubField(shapeOffset);

  AutoScratchRegister objScratch(allocator, masm);

  FailurePath* failure;
  if (!addFailurePath(&failure)) {
    return false;
  }

  // No expando: nothing to guard against.
  Label done;
  masm.branchTestUndefined(Assembler::Equal, val, &done);

  masm.debugAssertIsObject(val);
  masm.unboxObject(val, objScratch);
  // The expando object is not used in this case, so we don't need Spectre
  // mitigations.
  masm.branchTestObjShapeNoSpectreMitigations(Assembler::NotEqual, objScratch,
                                              shape, failure->label());

  masm.bind(&done);
  return true;
}
   1985 
// Load a DOM proxy's expando value into |resultId|, guarding that the
// ExpandoAndGeneration pointer and its generation counter still match the
// values recorded in the stub data (the stub fails otherwise).
bool IonCacheIRCompiler::emitLoadDOMExpandoValueGuardGeneration(
    ObjOperandId objId, uint32_t expandoAndGenerationOffset,
    uint32_t generationOffset, ValOperandId resultId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  Register obj = allocator.useRegister(masm, objId);
  ExpandoAndGeneration* expandoAndGeneration =
      rawPointerStubField<ExpandoAndGeneration*>(expandoAndGenerationOffset);
  uint64_t generation = rawInt64StubField<uint64_t>(generationOffset);

  ValueOperand output = allocator.defineValueRegister(masm, resultId);

  FailurePath* failure;
  if (!addFailurePath(&failure)) {
    return false;
  }

  masm.loadDOMExpandoValueGuardGeneration(obj, output, expandoAndGeneration,
                                          generation, failure->label());
  return true;
}
   2006 
   2007 void IonIC::attachCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
   2008                              CacheKind kind, IonScript* ionScript,
   2009                              bool* attached) {
   2010  // We shouldn't GC or report OOM (or any other exception) here.
   2011  AutoAssertNoPendingException aanpe(cx);
   2012  JS::AutoCheckCannotGC nogc;
   2013 
   2014  MOZ_ASSERT(!*attached);
   2015 
   2016  // Do nothing if the IR generator failed or triggered a GC that invalidated
   2017  // the script.
   2018  if (writer.tooLarge()) {
   2019    cx->runtime()->setUseCounter(cx->global(), JSUseCounter::IC_STUB_TOO_LARGE);
   2020    return;
   2021  }
   2022  if (writer.oom()) {
   2023    cx->runtime()->setUseCounter(cx->global(), JSUseCounter::IC_STUB_OOM);
   2024    return;
   2025  }
   2026  MOZ_ASSERT(!writer.failed());
   2027 
   2028  if (ionScript->invalidated()) {
   2029    return;
   2030  }
   2031 
   2032  JitZone* jitZone = cx->zone()->jitZone();
   2033 
   2034  constexpr uint32_t stubDataOffset = sizeof(IonICStub);
   2035  static_assert(stubDataOffset % sizeof(uint64_t) == 0,
   2036                "Stub fields must be aligned");
   2037 
   2038  // Try to reuse a previously-allocated CacheIRStubInfo.
   2039  CacheIRStubKey::Lookup lookup(kind, ICStubEngine::IonIC, writer.codeStart(),
   2040                                writer.codeLength());
   2041  CacheIRStubInfo* stubInfo = jitZone->getIonCacheIRStubInfo(lookup);
   2042  if (!stubInfo) {
   2043    // Allocate the shared CacheIRStubInfo. Note that the
   2044    // putIonCacheIRStubInfo call below will transfer ownership to
   2045    // the stub info HashSet, so we don't have to worry about freeing
   2046    // it below.
   2047 
   2048    // For Ion ICs, we don't track/use the makesGCCalls flag, so just pass true.
   2049    bool makesGCCalls = true;
   2050    stubInfo = CacheIRStubInfo::New(kind, ICStubEngine::IonIC, makesGCCalls,
   2051                                    stubDataOffset, writer);
   2052    if (!stubInfo) {
   2053      return;
   2054    }
   2055 
   2056    CacheIRStubKey key(stubInfo);
   2057    if (!jitZone->putIonCacheIRStubInfo(lookup, key)) {
   2058      return;
   2059    }
   2060  }
   2061 
   2062  MOZ_ASSERT(stubInfo);
   2063 
   2064  // Ensure we don't attach duplicate stubs. This can happen if a stub failed
   2065  // for some reason and the IR generator doesn't check for exactly the same
   2066  // conditions.
   2067  for (IonICStub* stub = firstStub_; stub; stub = stub->next()) {
   2068    if (stub->stubInfo() != stubInfo) {
   2069      continue;
   2070    }
   2071    if (!writer.stubDataEquals(stub->stubDataStart())) {
   2072      continue;
   2073    }
   2074    return;
   2075  }
   2076 
   2077  size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();
   2078 
   2079  // Allocate the IonICStub in the JitZone's stub space. Ion stubs and
   2080  // CacheIRStubInfo instances for Ion stubs can be purged on GC. That's okay
   2081  // because the stub code is rooted separately when we make a VM call, and
   2082  // stub code should never access the IonICStub after making a VM call. The
   2083  // IonICStub::poison method poisons the stub to catch bugs in this area.
   2084  ICStubSpace* stubSpace = cx->zone()->jitZone()->stubSpace();
   2085  void* newStubMem = stubSpace->alloc(bytesNeeded);
   2086  if (!newStubMem) {
   2087    return;
   2088  }
   2089 
   2090  IonICStub* newStub =
   2091      new (newStubMem) IonICStub(fallbackAddr(ionScript), stubInfo);
   2092  writer.copyStubData(newStub->stubDataStart());
   2093 
   2094  TempAllocator temp(&cx->tempLifoAlloc());
   2095  JitContext jctx(cx);
   2096  IonCacheIRCompiler compiler(cx, temp, writer, this, ionScript,
   2097                              stubDataOffset);
   2098  if (!compiler.init()) {
   2099    return;
   2100  }
   2101 
   2102  JitCode* code = compiler.compile(newStub);
   2103  if (!code) {
   2104    return;
   2105  }
   2106 
   2107  // Record the stub code if perf spewer is enabled.
   2108  CacheKind stubKind = newStub->stubInfo()->kind();
   2109  compiler.perfSpewer().saveProfile(cx, script(), code,
   2110                                    CacheKindNames[uint8_t(stubKind)]);
   2111 
   2112  // Add an entry to the profiler's code table, so that the profiler can
   2113  // identify this as Ion code.
   2114  if (ionScript->hasProfilingInstrumentation()) {
   2115    uint8_t* addr = rejoinAddr(ionScript);
   2116    auto entry = MakeJitcodeGlobalEntry<IonICEntry>(cx, code, code->raw(),
   2117                                                    code->rawEnd(), addr);
   2118    if (!entry) {
   2119      cx->recoverFromOutOfMemory();
   2120      return;
   2121    }
   2122 
   2123    auto* globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
   2124    if (!globalTable->addEntry(std::move(entry))) {
   2125      return;
   2126    }
   2127  }
   2128 
   2129  attachStub(newStub, code);
   2130  *attached = true;
   2131 }
   2132 
// Concatenate |lhs| and |rhs| via the DoConcatStringObject VM function and
// store the resulting value in the IC's output register.
bool IonCacheIRCompiler::emitCallStringObjectConcatResult(ValOperandId lhsId,
                                                          ValOperandId rhsId) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);
  AutoOutputRegister output(*this);

  ValueOperand lhs = allocator.useValueRegister(masm, lhsId);
  ValueOperand rhs = allocator.useValueRegister(masm, rhsId);

  allocator.discardStack(masm);

  enterStubFrame(masm, save);
  // VM-call arguments are pushed in reverse order of the Fn signature.
  masm.Push(rhs);
  masm.Push(lhs);

  using Fn = bool (*)(JSContext*, HandleValue, HandleValue, MutableHandleValue);
  callVM<Fn, DoConcatStringObject>(masm);

  masm.storeCallResultValue(output);
  return true;
}
   2154 
// Close an iterator by calling its scripted |return| method (|callee|) with
// |iter| as |this| and |undefined| for all formals, then throw a TypeError
// via ThrowCheckIsObject if the method's result is not an object.
bool IonCacheIRCompiler::emitCloseIterScriptedResult(ObjOperandId iterId,
                                                     ObjOperandId calleeId,
                                                     uint32_t calleeNargs) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
  AutoSaveLiveRegisters save(*this);

  Register iter = allocator.useRegister(masm, iterId);
  Register callee = allocator.useRegister(masm, calleeId);

  allocator.discardStack(masm);

  uint32_t framePushedBefore = masm.framePushed();

  // Construct IonICCallFrameLayout.
  enterStubFrame(masm, save);

  // Remember the depth of the stub frame so it can be reused for the
  // ThrowCheckIsObject VM call below.
  uint32_t stubFramePushed = masm.framePushed();

  // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
  // so we just have to make sure the stack is aligned after we push |this|
  // and |calleeNargs| undefined arguments.
  uint32_t argSize = (calleeNargs + 1) * sizeof(Value);
  uint32_t padding =
      ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
  MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
  MOZ_ASSERT(padding < JitStackAlignment);
  masm.reserveStack(padding);

  for (uint32_t i = 0; i < calleeNargs; i++) {
    masm.Push(UndefinedValue());
  }
  masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(iter)));

  masm.Push(callee);
  masm.Push(FrameDescriptor(FrameType::IonICCall, /* argc = */ 0));

  // |callee| is not needed after the call, so it doubles as the code register.
  masm.loadJitCodeRaw(callee, callee);
  masm.callJit(callee);

  // Verify that the return value is an object.
  Label success;
  masm.branchTestObject(Assembler::Equal, JSReturnOperand, &success);

  // We can reuse the same stub frame, but we first have to pop the arguments
  // from the previous call.
  uint32_t framePushedAfterCall = masm.framePushed();
  masm.freeStack(masm.framePushed() - stubFramePushed);

  masm.push(Imm32(int32_t(CheckIsObjectKind::IteratorReturn)));
  using Fn = bool (*)(JSContext*, CheckIsObjectKind);
  callVM<Fn, ThrowCheckIsObject>(masm);

  masm.bind(&success);
  // Resynchronize the assembler's notion of the stack depth with the
  // object-return path.
  masm.setFramePushed(framePushedAfterCall);

  // Restore the frame pointer and stack pointer.
  masm.loadPtr(Address(FramePointer, 0), FramePointer);
  masm.freeStack(masm.framePushed() - framePushedBefore);
  return true;
}
   2215 
// Guard that |fun|'s BaseScript pointer equals the (weakly held) script
// recorded in the stub data; the stub fails otherwise.
bool IonCacheIRCompiler::emitGuardFunctionScript(ObjOperandId funId,
                                                 uint32_t expectedOffset,
                                                 uint32_t nargsAndFlagsOffset) {
  JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);

  Register fun = allocator.useRegister(masm, funId);
  AutoScratchRegister scratch(allocator, masm);
  BaseScript* expected = weakBaseScriptStubField(expectedOffset);

  FailurePath* failure;
  if (!addFailurePath(&failure)) {
    return false;
  }

  // The script is stored as a private value in the function's
  // jitInfo-or-script slot.
  masm.loadPrivate(Address(fun, JSFunction::offsetOfJitInfoOrScript()),
                   scratch);
  masm.branchPtr(Assembler::NotEqual, scratch, ImmGCPtr(expected),
                 failure->label());
  return true;
}
   2236 
// The following Call* ops are never part of CacheIR compiled for Ion ICs
// ("Call ICs not used in ion"), so these overrides are unreachable.
bool IonCacheIRCompiler::emitCallScriptedFunction(ObjOperandId calleeId,
                                                  Int32OperandId argcId,
                                                  CallFlags flags,
                                                  uint32_t argcFixed) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallBoundScriptedFunction(ObjOperandId calleeId,
                                                       ObjOperandId targetId,
                                                       Int32OperandId argcId,
                                                       CallFlags flags,
                                                       uint32_t numBoundArgs) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallWasmFunction(
    ObjOperandId calleeId, Int32OperandId argcId, CallFlags flags,
    uint32_t argcFixed, uint32_t funcExportOffset, uint32_t instanceOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2257 
// These native/DOM call ops have different signatures under JS_SIMULATOR
// (a target offset instead of flag/alloc-site parameters), but in both
// configurations they are unreachable in Ion ICs.
#ifdef JS_SIMULATOR
bool IonCacheIRCompiler::emitCallNativeFunction(ObjOperandId calleeId,
                                                Int32OperandId argcId,
                                                CallFlags flags,
                                                uint32_t argcFixed,
                                                uint32_t targetOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallDOMFunction(
    ObjOperandId calleeId, Int32OperandId argcId, ObjOperandId thisObjId,
    CallFlags flags, uint32_t argcFixed, uint32_t targetOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallDOMFunctionWithAllocSite(
    ObjOperandId calleeId, Int32OperandId argcId, ObjOperandId thisObjId,
    CallFlags flags, uint32_t argcFixed, uint32_t siteOffset,
    uint32_t targetOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
#else
bool IonCacheIRCompiler::emitCallNativeFunction(ObjOperandId calleeId,
                                                Int32OperandId argcId,
                                                CallFlags flags,
                                                uint32_t argcFixed,
                                                bool ignoresReturnValue) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallDOMFunction(ObjOperandId calleeId,
                                             Int32OperandId argcId,
                                             ObjOperandId thisObjId,
                                             CallFlags flags,
                                             uint32_t argcFixed) {
  MOZ_CRASH("Call ICs not used in ion");
}

bool IonCacheIRCompiler::emitCallDOMFunctionWithAllocSite(
    ObjOperandId calleeId, Int32OperandId argcId, ObjOperandId thisObjId,
    CallFlags flags, uint32_t argcFixed, uint32_t siteOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
#endif
   2302 
   2303 bool IonCacheIRCompiler::emitCallClassHook(ObjOperandId calleeId,
   2304                                           Int32OperandId argcId,
   2305                                           CallFlags flags, uint32_t argcFixed,
   2306                                           uint32_t targetOffset) {
   2307  MOZ_CRASH("Call ICs not used in ion");
   2308 }
   2309 
// Unreachable stub: call ICs are not used in Ion, so compiling this op here
// indicates a bug in the CacheIR pipeline.
bool IonCacheIRCompiler::emitCallInlinedFunction(ObjOperandId calleeId,
                                                 Int32OperandId argcId,
                                                 uint32_t icScriptOffset,
                                                 CallFlags flags,
                                                 uint32_t argcFixed) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2317 
// Unreachable stub: per the crash message this op belongs to call ICs, which
// are not used in Ion.
bool IonCacheIRCompiler::emitLoadArgumentFixedSlot(ValOperandId resultId,
                                                   uint8_t slotIndex) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2322 
// Unreachable stub: per the crash message this op belongs to call ICs, which
// are not used in Ion.
bool IonCacheIRCompiler::emitLoadArgumentDynamicSlot(ValOperandId resultId,
                                                     Int32OperandId argcId,
                                                     uint8_t slotIndex) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2328 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitIsArrayResult(ValOperandId inputId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2332 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitIsTypedArrayResult(ObjOperandId objId,
                                                bool isPossiblyWrapped) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2337 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitStringFromCharCodeResult(Int32OperandId codeId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2341 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitStringFromCodePointResult(Int32OperandId codeId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2345 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitReflectGetPrototypeOfResult(ObjOperandId objId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2349 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitHasClassResult(ObjOperandId objId,
                                            uint32_t claspOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2354 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitHasShapeResult(ObjOperandId objId,
                                            uint32_t shapeOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2359 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitSameValueResult(ValOperandId lhs,
                                             ValOperandId rhs) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2364 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitSetHasStringResult(ObjOperandId setId,
                                                StringOperandId strId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2369 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitMapHasStringResult(ObjOperandId mapId,
                                                StringOperandId strId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2374 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitMapGetStringResult(ObjOperandId mapId,
                                                StringOperandId strId) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2379 
// Unreachable stub: per the crash message, NewArray ICs are not used in Ion,
// so this op must never reach the Ion IC compiler.
bool IonCacheIRCompiler::emitNewArrayObjectResult(uint32_t arrayLength,
                                                  uint32_t shapeOffset,
                                                  uint32_t siteOffset) {
  MOZ_CRASH("NewArray ICs not used in ion");
}
   2385 
// Unreachable stub: per the crash message, NewObject ICs are not used in Ion,
// so this op must never reach the Ion IC compiler.
bool IonCacheIRCompiler::emitNewPlainObjectResult(uint32_t numFixedSlots,
                                                  uint32_t numDynamicSlots,
                                                  gc::AllocKind allocKind,
                                                  uint32_t shapeOffset,
                                                  uint32_t siteOffset) {
  MOZ_CRASH("NewObject ICs not used in ion");
}
   2393 
// Unreachable stub: per the crash message, Lambda ICs are not used in Ion,
// so this op must never reach the Ion IC compiler.
bool IonCacheIRCompiler::emitNewFunctionCloneResult(uint32_t canonicalOffset,
                                                    gc::AllocKind allocKind,
                                                    uint32_t siteOffset) {
  MOZ_CRASH("Lambda ICs not used in ion");
}
   2399 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitCallRegExpMatcherResult(ObjOperandId regexpId,
                                                     StringOperandId inputId,
                                                     Int32OperandId lastIndexId,
                                                     uint32_t stubOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2406 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitCallRegExpSearcherResult(
    ObjOperandId regexpId, StringOperandId inputId, Int32OperandId lastIndexId,
    uint32_t stubOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2412 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitRegExpBuiltinExecMatchResult(
    ObjOperandId regexpId, StringOperandId inputId, uint32_t stubOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2417 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitRegExpBuiltinExecTestResult(
    ObjOperandId regexpId, StringOperandId inputId, uint32_t stubOffset) {
  MOZ_CRASH("Call ICs not used in ion");
}
   2422 
// Unreachable stub: per the crash message this op is only emitted for call
// ICs, which are not used in Ion.
bool IonCacheIRCompiler::emitRegExpHasCaptureGroupsResult(
    ObjOperandId regexpId, StringOperandId inputId) {
  MOZ_CRASH("Call ICs not used in ion");
}