tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WasmInstance.cpp (160930B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 *
      4 * Copyright 2016 Mozilla Foundation
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *     http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 #include "wasm/WasmInstance-inl.h"
     20 
     21 #include "mozilla/CheckedInt.h"
     22 #include "mozilla/DebugOnly.h"
     23 
     24 #include <algorithm>
     25 #include <utility>
     26 
     27 #include "jsmath.h"
     28 
     29 #include "builtin/String.h"
     30 #include "gc/Barrier.h"
     31 #include "gc/Marking.h"
     32 #include "jit/AtomicOperations.h"
     33 #include "jit/Disassemble.h"
     34 #include "jit/JitCommon.h"
     35 #include "jit/JitRuntime.h"
     36 #include "jit/Registers.h"
     37 #include "js/ForOfIterator.h"
     38 #include "js/friend/ErrorMessages.h"  // js::GetErrorMessage, JSMSG_*
     39 #include "js/Stack.h"                 // JS::NativeStackLimitMin
     40 #include "util/StringBuilder.h"
     41 #include "util/Text.h"
     42 #include "util/Unicode.h"
     43 #include "vm/ArrayBufferObject.h"
     44 #include "vm/BigIntType.h"
     45 #include "vm/BoundFunctionObject.h"
     46 #include "vm/Compartment.h"
     47 #include "vm/ErrorObject.h"
     48 #include "vm/Interpreter.h"
     49 #include "vm/Iteration.h"
     50 #include "vm/JitActivation.h"
     51 #include "vm/JSFunction.h"
     52 #include "vm/JSObject.h"
     53 #include "vm/PlainObject.h"  // js::PlainObject
     54 #include "wasm/WasmBuiltins.h"
     55 #include "wasm/WasmCode.h"
     56 #include "wasm/WasmDebug.h"
     57 #include "wasm/WasmDebugFrame.h"
     58 #include "wasm/WasmFeatures.h"
     59 #include "wasm/WasmHeuristics.h"
     60 #include "wasm/WasmInitExpr.h"
     61 #include "wasm/WasmJS.h"
     62 #include "wasm/WasmMemory.h"
     63 #include "wasm/WasmModule.h"
     64 #include "wasm/WasmModuleTypes.h"
     65 #include "wasm/WasmPI.h"
     66 #include "wasm/WasmStubs.h"
     67 #include "wasm/WasmTypeDef.h"
     68 #include "wasm/WasmValType.h"
     69 #include "wasm/WasmValue.h"
     70 
     71 #include "gc/Marking-inl.h"
     72 #include "gc/StoreBuffer-inl.h"
     73 #include "vm/ArrayBufferObject-inl.h"
     74 #include "vm/JSObject-inl.h"
     75 #include "wasm/WasmGcObject-inl.h"
     76 
     77 using namespace js;
     78 using namespace js::jit;
     79 using namespace js::wasm;
     80 
     81 using mozilla::CheckedUint32;
     82 using mozilla::DebugOnly;
     83 using mozilla::Maybe;
     84 using mozilla::Nothing;
     85 using mozilla::Some;
     86 
// Compile-time layout invariants for Instance.

// Instance must be aligned at least as much as any of the integer, float,
// or SIMD values that we'd like to store in it. (Note: sizeof is used as a
// stand-in for the required alignment of the register-content blobs.)
static_assert(alignof(Instance) >=
             std::max(sizeof(Registers::RegisterContent),
                      sizeof(FloatRegisters::RegisterContent)));

// The globalArea must be aligned at least as much as an instance. This is
// guaranteed to be sufficient for all data types we care about, including
// SIMD values. See the above assertion.
static_assert(Instance::offsetOfData() % alignof(Instance) == 0);

// We want the memory base to be the first field, and accessible with no
// offset. This incidentally is also an assertion that there is no superclass
// with fields.
static_assert(Instance::offsetOfMemory0Base() == 0);

// We want instance fields that are commonly accessed by the JIT to have
// compact encodings. A limit of less than 128 bytes is chosen to fit within
// the signed 8-bit mod r/m x86 encoding.
static_assert(Instance::offsetOfLastCommonJitField() < 128);
    108 //////////////////////////////////////////////////////////////////////////////
    109 //
    110 // Functions and invocation.
    111 
    112 FuncDefInstanceData* Instance::funcDefInstanceData(uint32_t funcIndex) const {
    113  MOZ_ASSERT(funcIndex >= codeMeta().numFuncImports);
    114  uint32_t funcDefIndex = funcIndex - codeMeta().numFuncImports;
    115  FuncDefInstanceData* instanceData =
    116      (FuncDefInstanceData*)(data() + codeMeta().funcDefsOffsetStart);
    117  return &instanceData[funcDefIndex];
    118 }
    119 
    120 TypeDefInstanceData* Instance::typeDefInstanceData(uint32_t typeIndex) const {
    121  TypeDefInstanceData* instanceData =
    122      (TypeDefInstanceData*)(data() + codeMeta().typeDefsOffsetStart);
    123  return &instanceData[typeIndex];
    124 }
    125 
    126 const void* Instance::addressOfGlobalCell(const GlobalDesc& global) const {
    127  const void* cell = data() + global.offset();
    128  // Indirect globals store a pointer to their cell in the instance global
    129  // data. Dereference it to find the real cell.
    130  if (global.isIndirect()) {
    131    cell = *(const void**)cell;
    132  }
    133  return cell;
    134 }
    135 
    136 FuncImportInstanceData& Instance::funcImportInstanceData(uint32_t funcIndex) {
    137  MOZ_ASSERT(funcIndex < codeMeta().numFuncImports);
    138  FuncImportInstanceData* instanceData =
    139      (FuncImportInstanceData*)(data() + codeMeta().funcImportsOffsetStart);
    140  return instanceData[funcIndex];
    141 }
    142 
    143 FuncExportInstanceData& Instance::funcExportInstanceData(
    144    uint32_t funcExportIndex) {
    145  FuncExportInstanceData* instanceData =
    146      (FuncExportInstanceData*)(data() + codeMeta().funcExportsOffsetStart);
    147  return instanceData[funcExportIndex];
    148 }
    149 
    150 MemoryInstanceData& Instance::memoryInstanceData(uint32_t memoryIndex) const {
    151  MemoryInstanceData* instanceData =
    152      (MemoryInstanceData*)(data() + codeMeta().memoriesOffsetStart);
    153  return instanceData[memoryIndex];
    154 }
    155 
    156 TableInstanceData& Instance::tableInstanceData(uint32_t tableIndex) const {
    157  TableInstanceData* instanceData =
    158      (TableInstanceData*)(data() + codeMeta().tablesOffsetStart);
    159  return instanceData[tableIndex];
    160 }
    161 
    162 TagInstanceData& Instance::tagInstanceData(uint32_t tagIndex) const {
    163  TagInstanceData* instanceData =
    164      (TagInstanceData*)(data() + codeMeta().tagsOffsetStart);
    165  return instanceData[tagIndex];
    166 }
    167 
// Convert the JS return value `rval` of a called import back into the wasm
// representation expected by the caller.  With no stack results area, a
// single scalar result (if any) is written to argv[0].  Otherwise the JS
// value is treated as an iterable of results: stack results are written into
// `stackResultsArea` and the single register result into argv[0].  Returns
// false with a pending exception on conversion failure, OOM, or a
// result-count mismatch.
static bool UnpackResults(JSContext* cx, const ValTypeVector& resultTypes,
                         const Maybe<char*> stackResultsArea, uint64_t* argv,
                         MutableHandleValue rval) {
 if (!stackResultsArea) {
   MOZ_ASSERT(resultTypes.length() <= 1);
   // Result is either one scalar value to unpack to a wasm value, or
   // an ignored value for a zero-valued function.
   if (resultTypes.length() == 1) {
     return ToWebAssemblyValue(cx, rval, resultTypes[0], argv, true);
   }
   return true;
 }

 MOZ_ASSERT(stackResultsArea.isSome());
 Rooted<ArrayObject*> array(cx);
 if (!IterableToArray(cx, rval, &array)) {
   return false;
 }

 // The iterable must produce exactly as many values as the function type
 // declares results.
 if (resultTypes.length() != array->length()) {
   UniqueChars expected(JS_smprintf("%zu", resultTypes.length()));
   UniqueChars got(JS_smprintf("%u", array->length()));
   if (!expected || !got) {
     ReportOutOfMemory(cx);
     return false;
   }

   JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                            JSMSG_WASM_WRONG_NUMBER_OF_VALUES, expected.get(),
                            got.get());
   return false;
 }

 // Debug-only: tracks the previous stack offset to assert that stack results
 // are laid out contiguously in the order we visit them.
 DebugOnly<uint64_t> previousOffset = ~(uint64_t)0;

 ABIResultIter iter(ResultType::Vector(resultTypes));
 // The values are converted in the order they are pushed on the
 // abstract WebAssembly stack; switch to iterate in push order.
 while (!iter.done()) {
   iter.next();
 }
 DebugOnly<bool> seenRegisterResult = false;
 for (iter.switchToPrev(); !iter.done(); iter.prev()) {
   const ABIResult& result = iter.cur();
   MOZ_ASSERT(!seenRegisterResult);
   // Use rval as a scratch area to hold the extracted result.
   rval.set(array->getDenseElement(iter.index()));
   if (result.inRegister()) {
     // Currently, if a function type has results, there can be only
     // one register result.  If there is only one result, it is
     // returned as a scalar and not an iterable, so we don't get here.
     // If there are multiple results, we extract the register result
     // and set `argv[0]` set to the extracted result, to be returned by
     // register in the stub.  The register result follows any stack
     // results, so this preserves conversion order.
     if (!ToWebAssemblyValue(cx, rval, result.type(), argv, true)) {
       return false;
     }
     seenRegisterResult = true;
     continue;
   }
   uint32_t result_size = result.size();
   MOZ_ASSERT(result_size == 4 || result_size == 8);
#ifdef DEBUG
   if (previousOffset == ~(uint64_t)0) {
     previousOffset = (uint64_t)result.stackOffset();
   } else {
     MOZ_ASSERT(previousOffset - (uint64_t)result_size ==
                (uint64_t)result.stackOffset());
     previousOffset -= (uint64_t)result_size;
   }
#endif
   // Write this stack result at its ABI-assigned offset.
   char* loc = stackResultsArea.value() + result.stackOffset();
   if (!ToWebAssemblyValue(cx, rval, result.type(), loc, result_size == 8)) {
     return false;
   }
 }

 return true;
}
    248 
// Slow-path call from wasm into an imported, JS-callable function: convert
// the wasm arguments in `argv` to JS values, invoke the callable, convert the
// result(s) back into `argv` via UnpackResults, and — when eligible — patch
// the import's code pointer to the faster JIT exit stub for future calls.
// Returns false on error/trap with a pending exception on `cx`.
bool Instance::callImport(JSContext* cx, uint32_t funcImportIndex,
                         unsigned argc, uint64_t* argv) {
 AssertRealmUnchanged aru(cx);

#ifdef ENABLE_WASM_JSPI
 // We should not be on a suspendable stack.
 MOZ_ASSERT(!cx->wasm().onSuspendableStack());
#endif

 FuncImportInstanceData& instanceFuncImport =
     funcImportInstanceData(funcImportIndex);
 const FuncType& funcType = codeMeta().getFuncType(funcImportIndex);

 // Some wasm types (e.g. v128) cannot be represented as JS values at all.
 if (funcType.hasUnexposableArgOrRet()) {
   JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                            JSMSG_WASM_BAD_VAL_TYPE);
   return false;
 }

 ArgTypeVector argTypes(funcType);
 size_t invokeArgsLength = argTypes.lengthWithoutStackResults();

 // If we're applying the Function.prototype.call.bind optimization, the
 // number of arguments to the target function is decreased by one to account
 // for the 'this' parameter we're passing
 bool isFunctionCallBind = instanceFuncImport.isFunctionCallBind;
 if (isFunctionCallBind) {
   // Guarded against in MaybeOptimizeFunctionCallBind.
   MOZ_ASSERT(invokeArgsLength != 0);
   invokeArgsLength -= 1;
 }

 RootedValue thisv(cx, UndefinedValue());
 InvokeArgs invokeArgs(cx);
 if (!invokeArgs.init(cx, invokeArgsLength)) {
   return false;
 }

 MOZ_ASSERT(argTypes.lengthWithStackResults() == argc);
 // First pass: convert all arguments whose conversion cannot GC.  `argv` is
 // not rooted, so no GC may happen while raw values remain unconverted.
 Maybe<char*> stackResultPointer;
 size_t lastBoxIndexPlusOne = 0;
 {
   JS::AutoAssertNoGC nogc;
   for (size_t i = 0; i < argc; i++) {
     const void* rawArgLoc = &argv[i];

     // The synthetic stack-result pointer argument is not a real argument;
     // remember it for UnpackResults below.
     if (argTypes.isSyntheticStackResultPointerArg(i)) {
       stackResultPointer = Some(*(char**)rawArgLoc);
       continue;
     }

     size_t naturalIndex = argTypes.naturalIndex(i);
     ValType type = funcType.args()[naturalIndex];

     // Skip JS value conversion that may GC (as the argument array is not
     // rooted), and do that in a follow up loop.
     if (ToJSValueMayGC(type)) {
       lastBoxIndexPlusOne = i + 1;
       continue;
     }

     // Under the call.bind optimization, natural argument 0 becomes the
     // `this` value and the rest shift down by one.
     MutableHandleValue argValue =
         isFunctionCallBind
             ? ((naturalIndex == 0) ? &thisv : invokeArgs[naturalIndex - 1])
             : invokeArgs[naturalIndex];
     if (!ToJSValue(cx, rawArgLoc, type, argValue)) {
       return false;
     }
   }
 }

 // Visit arguments that need to perform allocation in a second loop
 // after the rest of arguments are converted.
 for (size_t i = 0; i < lastBoxIndexPlusOne; i++) {
   if (argTypes.isSyntheticStackResultPointerArg(i)) {
     continue;
   }

   size_t naturalIndex = argTypes.naturalIndex(i);
   ValType type = funcType.args()[naturalIndex];

   // Visit the arguments that could trigger a GC now.
   if (!ToJSValueMayGC(type)) {
     continue;
   }
   // All value types that require boxing when converted to a JS value are not
   // references.
   MOZ_ASSERT(!type.isRefRepr());

   // The conversions are safe here because source values are not references
   // and will not be moved. This may move the unrooted arguments in the array
   // but that's okay because those were handled in the above loop.
   const void* rawArgLoc = &argv[i];
   MutableHandleValue argValue =
       isFunctionCallBind
           ? ((naturalIndex == 0) ? &thisv : invokeArgs[naturalIndex - 1])
           : invokeArgs[naturalIndex];
   if (!ToJSValue(cx, rawArgLoc, type, argValue)) {
     return false;
   }
 }

 Rooted<JSObject*> importCallable(cx, instanceFuncImport.callable);
 MOZ_ASSERT(cx->realm() == importCallable->nonCCWRealm());

 // Invoke the import and convert its result(s) back into `argv`.
 RootedValue fval(cx, ObjectValue(*importCallable));
 RootedValue rval(cx);
 if (!Call(cx, fval, thisv, invokeArgs, &rval)) {
   return false;
 }

 if (!UnpackResults(cx, funcType.results(), stackResultPointer, argv, &rval)) {
   return false;
 }

 // The remainder of this function decides whether to upgrade this import to
 // use the faster JIT exit stub; each early `return true` below declines.
 if (!JitOptions.enableWasmJitExit) {
   return true;
 }

 // JIT exits have not been updated to support the Function.prototype.call.bind
 // optimization.
 if (instanceFuncImport.isFunctionCallBind) {
   return true;
 }

 // The import may already have become optimized.
 const FuncImport& funcImport = code().funcImport(funcImportIndex);
 void* jitExitCode =
     code().sharedStubs().base() + funcImport.jitExitCodeOffset();
 if (instanceFuncImport.code == jitExitCode) {
   return true;
 }

 if (!importCallable->is<JSFunction>()) {
   return true;
 }

 // Test if the function is JIT compiled.
 if (!importCallable->as<JSFunction>().hasBytecode()) {
   return true;
 }

 JSScript* script = importCallable->as<JSFunction>().nonLazyScript();
 if (!script->hasJitScript()) {
   return true;
 }

 // Skip if pushing arguments would require stack probes.
 if (importCallable->as<JSFunction>().nargs() > JIT_ARGS_LENGTH_MAX) {
   return true;
 }

 // Skip if the function does not have a signature that allows for a JIT exit.
 if (!funcType.canHaveJitExit()) {
   return true;
 }

 // Let's optimize it!

 instanceFuncImport.code = jitExitCode;
 return true;
}
    411 
    412 /* static */ int32_t /* 0 to signal trap; 1 to signal OK */
    413 Instance::callImport_general(Instance* instance, int32_t funcImportIndex,
    414                             int32_t argc, uint64_t* argv) {
    415  JSContext* cx = instance->cx();
    416  return instance->callImport(cx, funcImportIndex, argc, argv);
    417 }
    418 
    419 //////////////////////////////////////////////////////////////////////////////
    420 //
    421 // Atomic operations and shared memory.
    422 
// Shared implementation of the memory.atomic.wait{32,64} instructions for
// both 32- and 64-bit address types (PtrT) and value sizes (ValT).  Returns
// the instruction's result encoding: 0 = "ok", 1 = "not-equal",
// 2 = "timed-out"; -1 signals a trap with a pending exception on cx.
template <typename ValT, typename PtrT>
static int32_t PerformWait(Instance* instance, uint32_t memoryIndex,
                          PtrT byteOffset, ValT value, int64_t timeout_ns) {
 JSContext* cx = instance->cx();

 // Waiting is only permitted on shared memories.
 if (!instance->memory(memoryIndex)->isShared()) {
   ReportTrapError(cx, JSMSG_WASM_NONSHARED_WAIT);
   return -1;
 }

 // The access must be naturally aligned for the value size.
 if (byteOffset & (sizeof(ValT) - 1)) {
   ReportTrapError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
   return -1;
 }

 if (byteOffset + sizeof(ValT) >
     instance->memory(memoryIndex)->volatileMemoryLength()) {
   ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }

 // A negative timeout leaves `timeout` as Nothing, i.e. no timeout.
 mozilla::Maybe<mozilla::TimeDuration> timeout;
 if (timeout_ns >= 0) {
   timeout = mozilla::Some(
       mozilla::TimeDuration::FromMicroseconds(double(timeout_ns) / 1000));
 }

 MOZ_ASSERT(byteOffset <= SIZE_MAX, "Bounds check is broken");
 switch (atomics_wait_impl(cx, instance->sharedMemoryBuffer(memoryIndex),
                           size_t(byteOffset), value, timeout)) {
   case FutexThread::WaitResult::OK:
     return 0;
   case FutexThread::WaitResult::NotEqual:
     return 1;
   case FutexThread::WaitResult::TimedOut:
     return 2;
   case FutexThread::WaitResult::Error:
     return -1;
   default:
     MOZ_CRASH();
 }
}
    465 
    466 /* static */ int32_t Instance::wait_i32_m32(Instance* instance,
    467                                            uint32_t byteOffset, int32_t value,
    468                                            int64_t timeout_ns,
    469                                            uint32_t memoryIndex) {
    470  MOZ_ASSERT(SASigWaitI32M32.failureMode == FailureMode::FailOnNegI32);
    471  return PerformWait(instance, memoryIndex, byteOffset, value, timeout_ns);
    472 }
    473 
    474 /* static */ int32_t Instance::wait_i32_m64(Instance* instance,
    475                                            uint64_t byteOffset, int32_t value,
    476                                            int64_t timeout_ns,
    477                                            uint32_t memoryIndex) {
    478  MOZ_ASSERT(SASigWaitI32M64.failureMode == FailureMode::FailOnNegI32);
    479  return PerformWait(instance, memoryIndex, byteOffset, value, timeout_ns);
    480 }
    481 
    482 /* static */ int32_t Instance::wait_i64_m32(Instance* instance,
    483                                            uint32_t byteOffset, int64_t value,
    484                                            int64_t timeout_ns,
    485                                            uint32_t memoryIndex) {
    486  MOZ_ASSERT(SASigWaitI64M32.failureMode == FailureMode::FailOnNegI32);
    487  return PerformWait(instance, memoryIndex, byteOffset, value, timeout_ns);
    488 }
    489 
    490 /* static */ int32_t Instance::wait_i64_m64(Instance* instance,
    491                                            uint64_t byteOffset, int64_t value,
    492                                            int64_t timeout_ns,
    493                                            uint32_t memoryIndex) {
    494  MOZ_ASSERT(SASigWaitI64M64.failureMode == FailureMode::FailOnNegI32);
    495  return PerformWait(instance, memoryIndex, byteOffset, value, timeout_ns);
    496 }
    497 
// Shared implementation of memory.atomic.notify for 32- and 64-bit address
// types. Returns the number of waiters woken, or -1 to signal a trap with a
// pending exception on cx.
template <typename PtrT>
static int32_t PerformWake(Instance* instance, PtrT byteOffset, int32_t count,
                          uint32_t memoryIndex) {
 JSContext* cx = instance->cx();

 // The alignment guard is not in the wasm spec as of 2017-11-02, but is
 // considered likely to appear, as 4-byte alignment is required for WAKE by
 // the spec's validation algorithm.

 if (byteOffset & 3) {
   ReportTrapError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
   return -1;
 }

 if (byteOffset >= instance->memory(memoryIndex)->volatileMemoryLength()) {
   ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }

 // An unshared memory can have no waiters: notify succeeds, waking zero.
 if (!instance->memory(memoryIndex)->isShared()) {
   return 0;
 }

 MOZ_ASSERT(byteOffset <= SIZE_MAX, "Bounds check is broken");
 int64_t woken;
 if (!atomics_notify_impl(cx, instance->sharedMemoryBuffer(memoryIndex),
                          size_t(byteOffset), int64_t(count), &woken)) {
   return -1;
 }

 // The woken count must fit in the instruction's i32 result.
 if (woken > INT32_MAX) {
   ReportTrapError(cx, JSMSG_WASM_WAKE_OVERFLOW);
   return -1;
 }

 return int32_t(woken);
}
    535 
    536 /* static */ int32_t Instance::wake_m32(Instance* instance, uint32_t byteOffset,
    537                                        int32_t count, uint32_t memoryIndex) {
    538  MOZ_ASSERT(SASigWakeM32.failureMode == FailureMode::FailOnNegI32);
    539  return PerformWake(instance, byteOffset, count, memoryIndex);
    540 }
    541 
    542 /* static */ int32_t Instance::wake_m64(Instance* instance, uint64_t byteOffset,
    543                                        int32_t count, uint32_t memoryIndex) {
    544  MOZ_ASSERT(SASigWakeM32.failureMode == FailureMode::FailOnNegI32);
    545  return PerformWake(instance, byteOffset, count, memoryIndex);
    546 }
    547 
    548 //////////////////////////////////////////////////////////////////////////////
    549 //
    550 // Bulk memory operations.
    551 
// Implements memory.grow for a 32-bit (i32-addressed) memory.  Returns
// WasmMemoryObject::grow's result truncated to 32 bits.
/* static */ uint32_t Instance::memoryGrow_m32(Instance* instance,
                                              uint32_t delta,
                                              uint32_t memoryIndex) {
 MOZ_ASSERT(SASigMemoryGrowM32.failureMode == FailureMode::Infallible);
 MOZ_ASSERT(!instance->isAsmJS());

 JSContext* cx = instance->cx();
 // Root the memory object across the grow() call.
 Rooted<WasmMemoryObject*> memory(cx, instance->memory(memoryIndex));

 // It is safe to cast to uint32_t, as all limits have been checked inside
 // grow() and will not have been exceeded for a 32-bit memory.
 uint32_t ret = uint32_t(WasmMemoryObject::grow(memory, uint64_t(delta), cx));

 // If there has been a moving grow, this Instance should have been notified.
 MOZ_RELEASE_ASSERT(
     instance->memoryBase(memoryIndex) ==
     instance->memory(memoryIndex)->buffer().dataPointerEither());

 return ret;
}
    572 
// Implements memory.grow for a 64-bit (i64-addressed) memory.
/* static */ uint64_t Instance::memoryGrow_m64(Instance* instance,
                                              uint64_t delta,
                                              uint32_t memoryIndex) {
 MOZ_ASSERT(SASigMemoryGrowM64.failureMode == FailureMode::Infallible);
 MOZ_ASSERT(!instance->isAsmJS());

 JSContext* cx = instance->cx();
 // Root the memory object across the grow() call.
 Rooted<WasmMemoryObject*> memory(cx, instance->memory(memoryIndex));

 uint64_t ret = WasmMemoryObject::grow(memory, delta, cx);

 // If there has been a moving grow, this Instance should have been notified.
 MOZ_RELEASE_ASSERT(
     instance->memoryBase(memoryIndex) ==
     instance->memory(memoryIndex)->buffer().dataPointerEither());

 return ret;
}
    591 
// Implements memory.size for a 32-bit memory: the current size in wasm pages.
/* static */ uint32_t Instance::memorySize_m32(Instance* instance,
                                              uint32_t memoryIndex) {
 MOZ_ASSERT(SASigMemorySizeM32.failureMode == FailureMode::Infallible);

 // This invariant must hold when running Wasm code. Assert it here so we can
 // write tests for cross-realm calls.
 DebugOnly<JSContext*> cx = instance->cx();
 MOZ_ASSERT(cx->realm() == instance->realm());

 Pages pages = instance->memory(memoryIndex)->volatilePages();
#ifdef JS_64BIT
 // Ensure that the memory size is no more than 4GiB.
 MOZ_ASSERT(pages <=
            Pages::fromPageCount(
                MaxMemoryPagesValidation(AddressType::I32, pages.pageSize()),
                pages.pageSize()));
#endif
 // Per the assertion above, the page count fits in 32 bits.
 return uint32_t(pages.pageCount());
}
    611 
// Implements memory.size for a 64-bit memory: the current size in wasm pages.
/* static */ uint64_t Instance::memorySize_m64(Instance* instance,
                                              uint32_t memoryIndex) {
 MOZ_ASSERT(SASigMemorySizeM64.failureMode == FailureMode::Infallible);

 // This invariant must hold when running Wasm code. Assert it here so we can
 // write tests for cross-realm calls.
 DebugOnly<JSContext*> cx = instance->cx();
 MOZ_ASSERT(cx->realm() == instance->realm());

 Pages pages = instance->memory(memoryIndex)->volatilePages();
#ifdef JS_64BIT
 // The page count must not exceed the memory64 validation limit.
 MOZ_ASSERT(pages <= Pages::fromPageCount(MaxMemory64StandardPagesValidation,
                                          pages.pageSize()));
#endif
 return pages.pageCount();
}
    628 
// Generic memory-to-memory copy used by all memory.copy variants.  Bounds
// checks both the destination and source ranges against their (possibly
// distinct) memory lengths, then copies with the supplied memmove-like
// callback.  Returns 0 on success, -1 on trap (pending exception on cx).
template <typename PointerT, typename CopyFuncT, typename IndexT>
inline int32_t WasmMemoryCopy(JSContext* cx, PointerT dstMemBase,
                             PointerT srcMemBase, size_t dstMemLen,
                             size_t srcMemLen, IndexT dstByteOffset,
                             IndexT srcByteOffset, IndexT len,
                             CopyFuncT memMove) {
 if (!MemoryBoundsCheck(dstByteOffset, len, dstMemLen) ||
     !MemoryBoundsCheck(srcByteOffset, len, srcMemLen)) {
   ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   return -1;
 }

 // A memmove-style callback is required since source and destination ranges
 // may overlap when copying within one memory.
 memMove(dstMemBase + uintptr_t(dstByteOffset),
         srcMemBase + uintptr_t(srcByteOffset), size_t(len));
 return 0;
}
    645 
    646 template <typename I>
    647 inline int32_t MemoryCopy(JSContext* cx, I dstByteOffset, I srcByteOffset,
    648                          I len, uint8_t* memBase) {
    649  const WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
    650  size_t memLen = rawBuf->byteLength();
    651  return WasmMemoryCopy(cx, memBase, memBase, memLen, memLen, dstByteOffset,
    652                        srcByteOffset, len, memmove);
    653 }
    654 
// memory.copy within a single shared memory.  Uses the racy-safe memmove so
// that concurrent access from other threads is tolerated.
template <typename I>
inline int32_t MemoryCopyShared(JSContext* cx, I dstByteOffset, I srcByteOffset,
                               I len, uint8_t* memBase) {
 using RacyMemMove =
     void (*)(SharedMem<uint8_t*>, SharedMem<uint8_t*>, size_t);

 const WasmSharedArrayRawBuffer* rawBuf =
     WasmSharedArrayRawBuffer::fromDataPtr(memBase);
 // Snapshot the current length of the shared buffer for the bounds check.
 size_t memLen = rawBuf->volatileByteLength();

 SharedMem<uint8_t*> sharedMemBase = SharedMem<uint8_t*>::shared(memBase);
 return WasmMemoryCopy<SharedMem<uint8_t*>, RacyMemMove>(
     cx, sharedMemBase, sharedMemBase, memLen, memLen, dstByteOffset,
     srcByteOffset, len, AtomicOperations::memmoveSafeWhenRacy);
}
    670 
    671 /* static */ int32_t Instance::memCopy_m32(Instance* instance,
    672                                           uint32_t dstByteOffset,
    673                                           uint32_t srcByteOffset, uint32_t len,
    674                                           uint8_t* memBase) {
    675  MOZ_ASSERT(SASigMemCopyM32.failureMode == FailureMode::FailOnNegI32);
    676  JSContext* cx = instance->cx();
    677  return MemoryCopy(cx, dstByteOffset, srcByteOffset, len, memBase);
    678 }
    679 
    680 /* static */ int32_t Instance::memCopyShared_m32(Instance* instance,
    681                                                 uint32_t dstByteOffset,
    682                                                 uint32_t srcByteOffset,
    683                                                 uint32_t len,
    684                                                 uint8_t* memBase) {
    685  MOZ_ASSERT(SASigMemCopySharedM32.failureMode == FailureMode::FailOnNegI32);
    686  JSContext* cx = instance->cx();
    687  return MemoryCopyShared(cx, dstByteOffset, srcByteOffset, len, memBase);
    688 }
    689 
    690 /* static */ int32_t Instance::memCopy_m64(Instance* instance,
    691                                           uint64_t dstByteOffset,
    692                                           uint64_t srcByteOffset, uint64_t len,
    693                                           uint8_t* memBase) {
    694  MOZ_ASSERT(SASigMemCopyM64.failureMode == FailureMode::FailOnNegI32);
    695  JSContext* cx = instance->cx();
    696  return MemoryCopy(cx, dstByteOffset, srcByteOffset, len, memBase);
    697 }
    698 
    699 /* static */ int32_t Instance::memCopyShared_m64(Instance* instance,
    700                                                 uint64_t dstByteOffset,
    701                                                 uint64_t srcByteOffset,
    702                                                 uint64_t len,
    703                                                 uint8_t* memBase) {
    704  MOZ_ASSERT(SASigMemCopySharedM64.failureMode == FailureMode::FailOnNegI32);
    705  JSContext* cx = instance->cx();
    706  return MemoryCopyShared(cx, dstByteOffset, srcByteOffset, len, memBase);
    707 }
    708 
    709 // Dynamic dispatch to get the length of a memory given just the base and
    710 // whether it is shared or not. This is only used for memCopy_any, where being
    711 // slower is okay.
    712 static inline size_t GetVolatileByteLength(uint8_t* memBase, bool isShared) {
    713  if (isShared) {
    714    return WasmSharedArrayRawBuffer::fromDataPtr(memBase)->volatileByteLength();
    715  }
    716  return WasmArrayRawBuffer::fromDataPtr(memBase)->byteLength();
    717 }
    718 
// memory.copy between two (possibly distinct, possibly shared) memories.
// Always uses the racy-safe memmove, which is correct for unshared memories
// too; this path trades speed for generality.
/* static */ int32_t Instance::memCopy_any(Instance* instance,
                                          uint64_t dstByteOffset,
                                          uint64_t srcByteOffset, uint64_t len,
                                          uint32_t dstMemIndex,
                                          uint32_t srcMemIndex) {
 MOZ_ASSERT(SASigMemCopyAny.failureMode == FailureMode::FailOnNegI32);
 JSContext* cx = instance->cx();

 using RacyMemMove =
     void (*)(SharedMem<uint8_t*>, SharedMem<uint8_t*>, size_t);

 const MemoryInstanceData& dstMemory =
     instance->memoryInstanceData(dstMemIndex);
 const MemoryInstanceData& srcMemory =
     instance->memoryInstanceData(srcMemIndex);

 uint8_t* dstMemBase = dstMemory.base;
 uint8_t* srcMemBase = srcMemory.base;

 // Lengths are looked up dynamically since either memory may be shared.
 size_t dstMemLen = GetVolatileByteLength(dstMemBase, dstMemory.isShared);
 size_t srcMemLen = GetVolatileByteLength(srcMemBase, srcMemory.isShared);

 return WasmMemoryCopy<SharedMem<uint8_t*>, RacyMemMove>(
     cx, SharedMem<uint8_t*>::shared(dstMemBase),
     SharedMem<uint8_t*>::shared(srcMemBase), dstMemLen, srcMemLen,
     dstByteOffset, srcByteOffset, len, AtomicOperations::memmoveSafeWhenRacy);
}
    746 
    747 template <typename T, typename F, typename I>
    748 inline int32_t WasmMemoryFill(JSContext* cx, T memBase, size_t memLen,
    749                              I byteOffset, uint32_t value, I len, F memSet) {
    750  if (!MemoryBoundsCheck(byteOffset, len, memLen)) {
    751    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    752    return -1;
    753  }
    754 
    755  // The required write direction is upward, but that is not currently
    756  // observable as there are no fences nor any read/write protect operation.
    757  memSet(memBase + uintptr_t(byteOffset), int(value), size_t(len));
    758  return 0;
    759 }
    760 
    761 template <typename I>
    762 inline int32_t MemoryFill(JSContext* cx, I byteOffset, uint32_t value, I len,
    763                          uint8_t* memBase) {
    764  const WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
    765  size_t memLen = rawBuf->byteLength();
    766  return WasmMemoryFill(cx, memBase, memLen, byteOffset, value, len, memset);
    767 }
    768 
    769 template <typename I>
    770 inline int32_t MemoryFillShared(JSContext* cx, I byteOffset, uint32_t value,
    771                                I len, uint8_t* memBase) {
    772  const WasmSharedArrayRawBuffer* rawBuf =
    773      WasmSharedArrayRawBuffer::fromDataPtr(memBase);
    774  size_t memLen = rawBuf->volatileByteLength();
    775  return WasmMemoryFill(cx, SharedMem<uint8_t*>::shared(memBase), memLen,
    776                        byteOffset, value, len,
    777                        AtomicOperations::memsetSafeWhenRacy);
    778 }
    779 
    780 /* static */ int32_t Instance::memFill_m32(Instance* instance,
    781                                           uint32_t byteOffset, uint32_t value,
    782                                           uint32_t len, uint8_t* memBase) {
    783  MOZ_ASSERT(SASigMemFillM32.failureMode == FailureMode::FailOnNegI32);
    784  JSContext* cx = instance->cx();
    785  return MemoryFill(cx, byteOffset, value, len, memBase);
    786 }
    787 
    788 /* static */ int32_t Instance::memFillShared_m32(Instance* instance,
    789                                                 uint32_t byteOffset,
    790                                                 uint32_t value, uint32_t len,
    791                                                 uint8_t* memBase) {
    792  MOZ_ASSERT(SASigMemFillSharedM32.failureMode == FailureMode::FailOnNegI32);
    793  JSContext* cx = instance->cx();
    794  return MemoryFillShared(cx, byteOffset, value, len, memBase);
    795 }
    796 
    797 /* static */ int32_t Instance::memFill_m64(Instance* instance,
    798                                           uint64_t byteOffset, uint32_t value,
    799                                           uint64_t len, uint8_t* memBase) {
    800  MOZ_ASSERT(SASigMemFillM64.failureMode == FailureMode::FailOnNegI32);
    801  JSContext* cx = instance->cx();
    802  return MemoryFill(cx, byteOffset, value, len, memBase);
    803 }
    804 
    805 /* static */ int32_t Instance::memFillShared_m64(Instance* instance,
    806                                                 uint64_t byteOffset,
    807                                                 uint32_t value, uint64_t len,
    808                                                 uint8_t* memBase) {
    809  MOZ_ASSERT(SASigMemFillSharedM64.failureMode == FailureMode::FailOnNegI32);
    810  JSContext* cx = instance->cx();
    811  return MemoryFillShared(cx, byteOffset, value, len, memBase);
    812 }
    813 
    814 static bool BoundsCheckInit(uint32_t dstOffset, uint32_t srcOffset,
    815                            uint32_t len, size_t memLen, uint32_t segLen) {
    816  uint64_t dstOffsetLimit = uint64_t(dstOffset) + uint64_t(len);
    817  uint64_t srcOffsetLimit = uint64_t(srcOffset) + uint64_t(len);
    818 
    819  return dstOffsetLimit > memLen || srcOffsetLimit > segLen;
    820 }
    821 
    822 static bool BoundsCheckInit(uint64_t dstOffset, uint32_t srcOffset,
    823                            uint32_t len, size_t memLen, uint32_t segLen) {
    824  uint64_t dstOffsetLimit = dstOffset + uint64_t(len);
    825  uint64_t srcOffsetLimit = uint64_t(srcOffset) + uint64_t(len);
    826 
    827  return dstOffsetLimit < dstOffset || dstOffsetLimit > memLen ||
    828         srcOffsetLimit > segLen;
    829 }
    830 
// memory.init: copy `len` bytes from passive data segment `maybeSeg`
// (starting at `srcOffset`) into memory `memoryIndex` at `dstOffset`.
// Returns 0 on success; reports a trap and returns -1 on out-of-bounds.
// `maybeSeg` is null when the segment has been dropped (data.drop), in which
// case only a zero-length init from offset 0 succeeds.
template <typename I>
static int32_t MemoryInit(JSContext* cx, Instance* instance,
                          uint32_t memoryIndex, I dstOffset, uint32_t srcOffset,
                          uint32_t len, const DataSegment* maybeSeg) {
  if (!maybeSeg) {
    // A dropped segment behaves like a zero-length segment.
    if (len == 0 && srcOffset == 0) {
      return 0;
    }

    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  const DataSegment& seg = *maybeSeg;
  // Active segments are consumed at instantiation and never reach here.
  MOZ_RELEASE_ASSERT(!seg.active());

  const uint32_t segLen = seg.bytes.length();
  WasmMemoryObject* mem = instance->memory(memoryIndex);
  // Volatile read: a shared memory may be grown concurrently.
  const size_t memLen = mem->volatileMemoryLength();

  // We are proposing to copy
  //
  //   seg.bytes.begin()[ srcOffset .. srcOffset + len - 1 ]
  // to
  //   memoryBase[ dstOffset .. dstOffset + len - 1 ]

  if (BoundsCheckInit(dstOffset, srcOffset, len, memLen, segLen)) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  // The required read/write direction is upward, but that is not currently
  // observable as there are no fences nor any read/write protect operation.
  SharedMem<uint8_t*> dataPtr = mem->buffer().dataPointerEither();
  if (mem->isShared()) {
    // Shared memory: use the race-tolerant copy primitive.
    AtomicOperations::memcpySafeWhenRacy(
        dataPtr + uintptr_t(dstOffset), (uint8_t*)seg.bytes.begin() + srcOffset,
        len);
  } else {
    // Unshared memory: a plain memcpy is fine.
    uint8_t* rawBuf = dataPtr.unwrap(/*Unshared*/);
    memcpy(rawBuf + uintptr_t(dstOffset),
           (const char*)seg.bytes.begin() + srcOffset, len);
  }
  return 0;
}
    876 
    877 /* static */ int32_t Instance::memInit_m32(Instance* instance,
    878                                           uint32_t dstOffset,
    879                                           uint32_t srcOffset, uint32_t len,
    880                                           uint32_t segIndex,
    881                                           uint32_t memIndex) {
    882  MOZ_ASSERT(SASigMemInitM32.failureMode == FailureMode::FailOnNegI32);
    883  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
    884                     "ensured by validation");
    885 
    886  JSContext* cx = instance->cx();
    887  return MemoryInit(cx, instance, memIndex, dstOffset, srcOffset, len,
    888                    instance->passiveDataSegments_[segIndex]);
    889 }
    890 
    891 /* static */ int32_t Instance::memInit_m64(Instance* instance,
    892                                           uint64_t dstOffset,
    893                                           uint32_t srcOffset, uint32_t len,
    894                                           uint32_t segIndex,
    895                                           uint32_t memIndex) {
    896  MOZ_ASSERT(SASigMemInitM64.failureMode == FailureMode::FailOnNegI32);
    897  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
    898                     "ensured by validation");
    899 
    900  JSContext* cx = instance->cx();
    901  return MemoryInit(cx, instance, memIndex, dstOffset, srcOffset, len,
    902                    instance->passiveDataSegments_[segIndex]);
    903 }
    904 
    905 //////////////////////////////////////////////////////////////////////////////
    906 //
    907 // Bulk table operations.
    908 
    909 /* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset,
    910                                         uint32_t srcOffset, uint32_t len,
    911                                         uint32_t dstTableIndex,
    912                                         uint32_t srcTableIndex) {
    913  MOZ_ASSERT(SASigTableCopy.failureMode == FailureMode::FailOnNegI32);
    914 
    915  JSContext* cx = instance->cx();
    916  const SharedTable& srcTable = instance->tables()[srcTableIndex];
    917  uint32_t srcTableLen = srcTable->length();
    918 
    919  const SharedTable& dstTable = instance->tables()[dstTableIndex];
    920  uint32_t dstTableLen = dstTable->length();
    921 
    922  // Bounds check and deal with arithmetic overflow.
    923  uint64_t dstOffsetLimit = uint64_t(dstOffset) + len;
    924  uint64_t srcOffsetLimit = uint64_t(srcOffset) + len;
    925 
    926  if (dstOffsetLimit > dstTableLen || srcOffsetLimit > srcTableLen) {
    927    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    928    return -1;
    929  }
    930 
    931  bool isOOM = false;
    932 
    933  if (&srcTable == &dstTable && dstOffset > srcOffset) {
    934    for (uint32_t i = len; i > 0; i--) {
    935      if (!dstTable->copy(cx, *srcTable, dstOffset + (i - 1),
    936                          srcOffset + (i - 1))) {
    937        isOOM = true;
    938        break;
    939      }
    940    }
    941  } else if (&srcTable == &dstTable && dstOffset == srcOffset) {
    942    // No-op
    943  } else {
    944    for (uint32_t i = 0; i < len; i++) {
    945      if (!dstTable->copy(cx, *srcTable, dstOffset + i, srcOffset + i)) {
    946        isOOM = true;
    947        break;
    948      }
    949    }
    950  }
    951 
    952  if (isOOM) {
    953    return -1;
    954  }
    955  return 0;
    956 }
    957 
    958 #ifdef DEBUG
    959 static bool AllSegmentsArePassive(const DataSegmentVector& vec) {
    960  for (const DataSegment* seg : vec) {
    961    if (seg->active()) {
    962      return false;
    963    }
    964  }
    965  return true;
    966 }
    967 #endif
    968 
// Run all active data and element segments against this instance's memories
// and tables, evaluating each segment's offset init-expression first. Called
// during instantiation. Returns false on OOM or if any segment falls outside
// its target memory/table.
bool Instance::initSegments(JSContext* cx,
                            const DataSegmentVector& dataSegments,
                            const ModuleElemSegmentVector& elemSegments) {
  // Without any memories, no data segment may be active.
  MOZ_ASSERT_IF(codeMeta().memories.length() == 0,
                AllSegmentsArePassive(dataSegments));

  Rooted<WasmInstanceObject*> instanceObj(cx, object());

  // Write data/elem segments into memories/tables.

  for (const ModuleElemSegment& seg : elemSegments) {
    if (seg.active()) {
      RootedVal offsetVal(cx);
      if (!seg.offset().evaluate(cx, instanceObj, &offsetVal)) {
        return false;  // OOM
      }

      const wasm::Table* table = tables()[seg.tableIndex];
      // The offset expression yields i32 or i64 depending on the table's
      // address type.
      uint64_t offset = table->addressType() == AddressType::I32
                            ? offsetVal.get().i32()
                            : offsetVal.get().i64();

      // Overflow-safe check that [offset, offset + numElements) fits.
      uint64_t tableLength = table->length();
      if (offset > tableLength || tableLength - offset < seg.numElements()) {
        JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                 JSMSG_WASM_OUT_OF_BOUNDS);
        return false;
      }

      // NOTE(review): `offset` narrows to uint32_t at this call; confirm
      // that is fine for 64-bit-addressed tables.
      if (!initElems(cx, seg.tableIndex, seg, offset)) {
        return false;  // OOM
      }
    }
  }

  for (const DataSegment* seg : dataSegments) {
    if (!seg->active()) {
      continue;
    }

    Rooted<const WasmMemoryObject*> memoryObj(cx, memory(seg->memoryIndex));
    size_t memoryLength = memoryObj->volatileMemoryLength();
    uint8_t* memoryBase =
        memoryObj->buffer().dataPointerEither().unwrap(/* memcpy */);

    RootedVal offsetVal(cx);
    if (!seg->offset().evaluate(cx, instanceObj, &offsetVal)) {
      return false;  // OOM
    }
    // As above, the offset's width follows the memory's address type.
    uint64_t offset = memoryObj->addressType() == AddressType::I32
                          ? offsetVal.get().i32()
                          : offsetVal.get().i64();
    uint32_t count = seg->bytes.length();

    // Overflow-safe check that [offset, offset + count) fits.
    if (offset > memoryLength || memoryLength - offset < count) {
      JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                               JSMSG_WASM_OUT_OF_BOUNDS);
      return false;
    }
    memcpy(memoryBase + uintptr_t(offset), seg->bytes.begin(), count);
  }

  return true;
}
   1033 
   1034 bool Instance::initElems(JSContext* cx, uint32_t tableIndex,
   1035                         const ModuleElemSegment& seg, uint32_t dstOffset) {
   1036  Table& table = *tables_[tableIndex];
   1037  MOZ_ASSERT(dstOffset <= table.length());
   1038  MOZ_ASSERT(seg.numElements() <= table.length() - dstOffset);
   1039 
   1040  if (seg.numElements() == 0) {
   1041    return true;
   1042  }
   1043 
   1044  if (table.isFunction() &&
   1045      seg.encoding == ModuleElemSegment::Encoding::Indices) {
   1046    // Initialize this table of functions without creating any intermediate
   1047    // JSFunctions.
   1048    bool ok = iterElemsFunctions(
   1049        seg, [&](uint32_t i, void* code, Instance* instance) -> bool {
   1050          table.setFuncRef(dstOffset + i, code, instance);
   1051          return true;
   1052        });
   1053    if (!ok) {
   1054      return false;
   1055    }
   1056  } else {
   1057    bool ok = iterElemsAnyrefs(cx, seg, [&](uint32_t i, AnyRef ref) -> bool {
   1058      table.setRef(dstOffset + i, ref);
   1059      return true;
   1060    });
   1061    if (!ok) {
   1062      return false;
   1063    }
   1064  }
   1065 
   1066  return true;
   1067 }
   1068 
// Iterate the function indices of an index-encoded element segment, invoking
// `onFunc(i, checkedCallEntry, instance)` for each element. Stops and returns
// false as soon as a callback returns false.
template <typename F>
bool Instance::iterElemsFunctions(const ModuleElemSegment& seg,
                                  const F& onFunc) {
  // In the future, we could theoretically get function data (instance + code
  // pointer) from segments with the expression encoding without creating
  // JSFunctions. But that is not how it works today. We can only bypass the
  // creation of JSFunctions for the index encoding.
  MOZ_ASSERT(seg.encoding == ModuleElemSegment::Encoding::Indices);

  if (seg.numElements() == 0) {
    return true;
  }

  const FuncImportVector& funcImports = code().funcImports();

  for (uint32_t i = 0; i < seg.numElements(); i++) {
    uint32_t elemFuncIndex = seg.elemIndices[i];

    // Indices below the import count refer to imported functions, which may
    // be wasm functions from another instance.
    if (elemFuncIndex < funcImports.length()) {
      FuncImportInstanceData& import = funcImportInstanceData(elemFuncIndex);
      MOZ_ASSERT(import.callable->isCallable());

      if (import.callable->is<JSFunction>()) {
        JSFunction* fun = &import.callable->as<JSFunction>();
        if (!codeMeta().funcImportsAreJS && fun->isWasm()) {
          // This element is a wasm function imported from another
          // instance. To preserve the === function identity required by
          // the JS embedding spec, we must get the imported function's
          // underlying CodeRange.funcCheckedCallEntry and Instance so that
          // future Table.get()s produce the same function object as was
          // imported.
          if (!onFunc(i, fun->wasmCheckedCallEntry(), &fun->wasmInstance())) {
            return false;
          }
          continue;
        }
      }
    }

    // Otherwise the function belongs to this instance: hand out its
    // checked-call entry from our own code ranges.
    const CodeRange* codeRange;
    uint8_t* codeBase;
    code().funcCodeRange(elemFuncIndex, &codeRange, &codeBase);
    if (!onFunc(i, codeBase + codeRange->funcCheckedCallEntry(), this)) {
      return false;
    }
  }

  return true;
}
   1118 
// Iterate the elements of `seg` as AnyRefs, invoking `onAnyRef(i, ref)` for
// each. Handles both index-encoded segments (function indices, materialized
// as exported JSFunctions) and expression-encoded segments (each element is a
// pre-validated init expression). Returns false on OOM.
template <typename F>
bool Instance::iterElemsAnyrefs(JSContext* cx, const ModuleElemSegment& seg,
                                const F& onAnyRef) {
  if (seg.numElements() == 0) {
    return true;
  }

  switch (seg.encoding) {
    case ModuleElemSegment::Encoding::Indices: {
      // The only types of indices that exist right now are function indices, so
      // this code is specialized to functions.

      RootedFunction fun(cx);
      for (uint32_t i = 0; i < seg.numElements(); i++) {
        uint32_t funcIndex = seg.elemIndices[i];
        // getExportedFunction fails only on OOM.
        if (!getExportedFunction(cx, funcIndex, &fun) ||
            !onAnyRef(i, AnyRef::fromJSObject(*fun.get()))) {
          return false;
        }
      }
      break;
    }
    case ModuleElemSegment::Encoding::Expressions: {
      Rooted<WasmInstanceObject*> instanceObj(cx, object());
      const ModuleElemSegment::Expressions& exprs = seg.elemExpressions;

      UniqueChars error;
      // The offset is a dummy because the expression has already been
      // validated.
      Decoder d(exprs.exprBytes.begin(), exprs.exprBytes.end(), 0, &error);
      // The decoder advances through the concatenated expressions; one
      // evaluation per element.
      for (uint32_t i = 0; i < seg.numElements(); i++) {
        RootedVal result(cx);
        if (!InitExpr::decodeAndEvaluate(cx, instanceObj, d, seg.elemType,
                                         &result)) {
          MOZ_ASSERT(!error);  // The only possible failure should be OOM.
          return false;
        }
        // We would need to root this AnyRef if we were doing anything other
        // than storing it.
        AnyRef ref = result.get().ref();
        if (!onAnyRef(i, ref)) {
          return false;
        }
      }
      break;
    }
    default:
      MOZ_CRASH("unknown encoding type for element segment");
  }
  return true;
}
   1170 
   1171 /* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset,
   1172                                         uint32_t srcOffset, uint32_t len,
   1173                                         uint32_t segIndex,
   1174                                         uint32_t tableIndex) {
   1175  MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32);
   1176 
   1177  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
   1178                     "ensured by validation");
   1179 
   1180  JSContext* cx = instance->cx();
   1181 
   1182  const InstanceElemSegment& seg = instance->passiveElemSegments_[segIndex];
   1183  const uint32_t segLen = seg.length();
   1184 
   1185  Table& table = *instance->tables()[tableIndex];
   1186  const uint32_t tableLen = table.length();
   1187 
   1188  // We are proposing to copy
   1189  //
   1190  //   seg[ srcOffset .. srcOffset + len - 1 ]
   1191  // to
   1192  //   tableBase[ dstOffset .. dstOffset + len - 1 ]
   1193 
   1194  // Bounds check and deal with arithmetic overflow.
   1195  uint64_t dstOffsetLimit = uint64_t(dstOffset) + uint64_t(len);
   1196  uint64_t srcOffsetLimit = uint64_t(srcOffset) + uint64_t(len);
   1197 
   1198  if (dstOffsetLimit > tableLen || srcOffsetLimit > segLen) {
   1199    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   1200    return -1;
   1201  }
   1202 
   1203  for (size_t i = 0; i < len; i++) {
   1204    table.setRef(dstOffset + i, seg[srcOffset + i]);
   1205  }
   1206 
   1207  return 0;
   1208 }
   1209 
   1210 /* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start,
   1211                                         void* value, uint32_t len,
   1212                                         uint32_t tableIndex) {
   1213  MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32);
   1214 
   1215  JSContext* cx = instance->cx();
   1216  Table& table = *instance->tables()[tableIndex];
   1217 
   1218  // Bounds check and deal with arithmetic overflow.
   1219  uint64_t offsetLimit = uint64_t(start) + uint64_t(len);
   1220 
   1221  if (offsetLimit > table.length()) {
   1222    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   1223    return -1;
   1224  }
   1225 
   1226  switch (table.repr()) {
   1227    case TableRepr::Ref:
   1228      table.fillAnyRef(start, len, AnyRef::fromCompiledCode(value));
   1229      break;
   1230    case TableRepr::Func:
   1231      MOZ_RELEASE_ASSERT(!table.isAsmJS());
   1232      table.fillFuncRef(start, len, FuncRef::fromCompiledCode(value), cx);
   1233      break;
   1234  }
   1235 
   1236  return 0;
   1237 }
   1238 
   1239 template <typename I>
   1240 static bool WasmDiscardCheck(Instance* instance, I byteOffset, I byteLen,
   1241                             size_t memLen, bool shared) {
   1242  JSContext* cx = instance->cx();
   1243 
   1244  if (byteOffset % wasm::StandardPageSizeBytes != 0 ||
   1245      byteLen % wasm::StandardPageSizeBytes != 0) {
   1246    ReportTrapError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
   1247    return false;
   1248  }
   1249 
   1250  if (!MemoryBoundsCheck(byteOffset, byteLen, memLen)) {
   1251    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   1252    return false;
   1253  }
   1254 
   1255  return true;
   1256 }
   1257 
   1258 template <typename I>
   1259 static int32_t MemDiscardNotShared(Instance* instance, I byteOffset, I byteLen,
   1260                                   uint8_t* memBase) {
   1261  WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
   1262  size_t memLen = rawBuf->byteLength();
   1263 
   1264  if (!WasmDiscardCheck(instance, byteOffset, byteLen, memLen, false)) {
   1265    return -1;
   1266  }
   1267  rawBuf->discard(byteOffset, byteLen);
   1268 
   1269  return 0;
   1270 }
   1271 
   1272 template <typename I>
   1273 static int32_t MemDiscardShared(Instance* instance, I byteOffset, I byteLen,
   1274                                uint8_t* memBase) {
   1275  WasmSharedArrayRawBuffer* rawBuf =
   1276      WasmSharedArrayRawBuffer::fromDataPtr(memBase);
   1277  size_t memLen = rawBuf->volatileByteLength();
   1278 
   1279  if (!WasmDiscardCheck(instance, byteOffset, byteLen, memLen, true)) {
   1280    return -1;
   1281  }
   1282  rawBuf->discard(byteOffset, byteLen);
   1283 
   1284  return 0;
   1285 }
   1286 
   1287 /* static */ int32_t Instance::memDiscard_m32(Instance* instance,
   1288                                              uint32_t byteOffset,
   1289                                              uint32_t byteLen,
   1290                                              uint8_t* memBase) {
   1291  return MemDiscardNotShared(instance, byteOffset, byteLen, memBase);
   1292 }
   1293 
   1294 /* static */ int32_t Instance::memDiscard_m64(Instance* instance,
   1295                                              uint64_t byteOffset,
   1296                                              uint64_t byteLen,
   1297                                              uint8_t* memBase) {
   1298  return MemDiscardNotShared(instance, byteOffset, byteLen, memBase);
   1299 }
   1300 
   1301 /* static */ int32_t Instance::memDiscardShared_m32(Instance* instance,
   1302                                                    uint32_t byteOffset,
   1303                                                    uint32_t byteLen,
   1304                                                    uint8_t* memBase) {
   1305  return MemDiscardShared(instance, byteOffset, byteLen, memBase);
   1306 }
   1307 
   1308 /* static */ int32_t Instance::memDiscardShared_m64(Instance* instance,
   1309                                                    uint64_t byteOffset,
   1310                                                    uint64_t byteLen,
   1311                                                    uint8_t* memBase) {
   1312  return MemDiscardShared(instance, byteOffset, byteLen, memBase);
   1313 }
   1314 
   1315 /* static */ void* Instance::tableGet(Instance* instance, uint32_t address,
   1316                                      uint32_t tableIndex) {
   1317  MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef);
   1318 
   1319  JSContext* cx = instance->cx();
   1320  const Table& table = *instance->tables()[tableIndex];
   1321  if (address >= table.length()) {
   1322    ReportTrapError(cx, JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
   1323    return AnyRef::invalid().forCompiledCode();
   1324  }
   1325 
   1326  switch (table.repr()) {
   1327    case TableRepr::Ref:
   1328      return table.getAnyRef(address).forCompiledCode();
   1329    case TableRepr::Func: {
   1330      MOZ_RELEASE_ASSERT(!table.isAsmJS());
   1331      RootedFunction fun(cx);
   1332      if (!table.getFuncRef(cx, address, &fun)) {
   1333        return AnyRef::invalid().forCompiledCode();
   1334      }
   1335      return FuncRef::fromJSFunction(fun).forCompiledCode();
   1336    }
   1337  }
   1338  MOZ_CRASH("Should not happen");
   1339 }
   1340 
   1341 /* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue,
   1342                                          uint32_t delta, uint32_t tableIndex) {
   1343  MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible);
   1344 
   1345  JSContext* cx = instance->cx();
   1346  RootedAnyRef ref(cx, AnyRef::fromCompiledCode(initValue));
   1347  Table& table = *instance->tables()[tableIndex];
   1348 
   1349  uint32_t oldSize = table.grow(delta);
   1350 
   1351  if (oldSize != uint32_t(-1) && initValue != nullptr) {
   1352    table.fillUninitialized(oldSize, delta, ref, cx);
   1353  }
   1354 
   1355 #ifdef DEBUG
   1356  if (!table.elemType().isNullable()) {
   1357    table.assertRangeNotNull(oldSize, delta);
   1358  }
   1359 #endif  // DEBUG
   1360  return oldSize;
   1361 }
   1362 
   1363 /* static */ int32_t Instance::tableSet(Instance* instance, uint32_t address,
   1364                                        void* value, uint32_t tableIndex) {
   1365  MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32);
   1366 
   1367  JSContext* cx = instance->cx();
   1368  Table& table = *instance->tables()[tableIndex];
   1369 
   1370  if (address >= table.length()) {
   1371    ReportTrapError(cx, JSMSG_WASM_TABLE_OUT_OF_BOUNDS);
   1372    return -1;
   1373  }
   1374 
   1375  switch (table.repr()) {
   1376    case TableRepr::Ref:
   1377      table.setAnyRef(address, AnyRef::fromCompiledCode(value));
   1378      break;
   1379    case TableRepr::Func:
   1380      MOZ_RELEASE_ASSERT(!table.isAsmJS());
   1381      table.fillFuncRef(address, 1, FuncRef::fromCompiledCode(value), cx);
   1382      break;
   1383  }
   1384 
   1385  return 0;
   1386 }
   1387 
   1388 /* static */ uint32_t Instance::tableSize(Instance* instance,
   1389                                          uint32_t tableIndex) {
   1390  MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible);
   1391  Table& table = *instance->tables()[tableIndex];
   1392  return table.length();
   1393 }
   1394 
   1395 /* static */ void* Instance::refFunc(Instance* instance, uint32_t funcIndex) {
   1396  MOZ_ASSERT(SASigRefFunc.failureMode == FailureMode::FailOnInvalidRef);
   1397  JSContext* cx = instance->cx();
   1398 
   1399  RootedFunction exportedFunc(cx);
   1400  if (!instance->getExportedFunction(cx, funcIndex, &exportedFunc)) {
   1401    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   1402    return AnyRef::invalid().forCompiledCode();
   1403  }
   1404  return FuncRef::fromJSFunction(exportedFunc.get()).forCompiledCode();
   1405 }
   1406 
   1407 //////////////////////////////////////////////////////////////////////////////
   1408 //
   1409 // Segment management.
   1410 
   1411 /* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) {
   1412  MOZ_ASSERT(SASigElemDrop.failureMode == FailureMode::FailOnNegI32);
   1413 
   1414  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
   1415                     "ensured by validation");
   1416 
   1417  instance->passiveElemSegments_[segIndex].clearAndFree();
   1418  return 0;
   1419 }
   1420 
   1421 /* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) {
   1422  MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32);
   1423 
   1424  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
   1425                     "ensured by validation");
   1426 
   1427  if (!instance->passiveDataSegments_[segIndex]) {
   1428    return 0;
   1429  }
   1430 
   1431  SharedDataSegment& segRefPtr = instance->passiveDataSegments_[segIndex];
   1432  MOZ_RELEASE_ASSERT(!segRefPtr->active());
   1433 
   1434  // Drop this instance's reference to the DataSegment so it can be released.
   1435  segRefPtr = nullptr;
   1436  return 0;
   1437 }
   1438 
   1439 //////////////////////////////////////////////////////////////////////////////
   1440 //
   1441 // AnyRef support.
   1442 
   1443 /* static */ void Instance::postBarrierEdge(Instance* instance,
   1444                                            AnyRef* location) {
   1445  MOZ_ASSERT(SASigPostBarrierEdge.failureMode == FailureMode::Infallible);
   1446  MOZ_ASSERT(location);
   1447  instance->storeBuffer_->putWasmAnyRef(location);
   1448 }
   1449 
   1450 /* static */ void Instance::postBarrierEdgePrecise(Instance* instance,
   1451                                                   AnyRef* location,
   1452                                                   void* prev) {
   1453  MOZ_ASSERT(SASigPostBarrierEdgePrecise.failureMode ==
   1454             FailureMode::Infallible);
   1455  MOZ_ASSERT(location);
   1456  AnyRef next = *location;
   1457  InternalBarrierMethods<AnyRef>::postBarrier(
   1458      location, wasm::AnyRef::fromCompiledCode(prev), next);
   1459 }
   1460 
   1461 /* static */ void Instance::postBarrierWholeCell(Instance* instance,
   1462                                                 gc::Cell* object) {
   1463  MOZ_ASSERT(SASigPostBarrierWholeCell.failureMode == FailureMode::Infallible);
   1464  MOZ_ASSERT(object);
   1465  instance->storeBuffer_->putWholeCell(object);
   1466 }
   1467 
   1468 //////////////////////////////////////////////////////////////////////////////
   1469 //
   1470 // GC and exception handling support.
   1471 
/* static */
// Allocate a new WasmStructObject with inline (IL) field storage for the
// type at `typeDefIndex`. `ZeroFields` selects whether fields are
// zero-initialized (true) or left for the caller to fill in (false).
// Returns null on allocation failure.
template <bool ZeroFields>
void* Instance::structNewIL(Instance* instance, uint32_t typeDefIndex,
                            gc::AllocSite* allocSite) {
  MOZ_ASSERT((ZeroFields ? SASigStructNewIL_true : SASigStructNewIL_false)
                 .failureMode == FailureMode::FailOnNullPtr);
  JSContext* cx = instance->cx();
  TypeDefInstanceData* typeDefData =
      instance->typeDefInstanceData(typeDefIndex);
  // The new struct will be allocated in an initial heap as determined by
  // pretenuring logic as set up in `Instance::init`.
  return WasmStructObject::createStructIL<ZeroFields>(
      cx, typeDefData, allocSite, allocSite->initialHeap());
}

// Explicit instantiations for the two builtin entry points.
template void* Instance::structNewIL<true>(Instance* instance,
                                           uint32_t typeDefIndex,
                                           gc::AllocSite* allocSite);
template void* Instance::structNewIL<false>(Instance* instance,
                                            uint32_t typeDefIndex,
                                            gc::AllocSite* allocSite);
   1493 
   1494 /* static */
   1495 template <bool ZeroFields>
   1496 void* Instance::structNewOOL(Instance* instance, uint32_t typeDefIndex,
   1497                             gc::AllocSite* allocSite) {
   1498  MOZ_ASSERT((ZeroFields ? SASigStructNewOOL_true : SASigStructNewOOL_false)
   1499                 .failureMode == FailureMode::FailOnNullPtr);
   1500  JSContext* cx = instance->cx();
   1501  TypeDefInstanceData* typeDefData =
   1502      instance->typeDefInstanceData(typeDefIndex);
   1503  // The new struct will be allocated in an initial heap as determined by
   1504  // pretenuring logic as set up in `Instance::init`.
   1505  return WasmStructObject::createStructOOL<ZeroFields>(
   1506      cx, typeDefData, allocSite, allocSite->initialHeap());
   1507 }
   1508 
   1509 template void* Instance::structNewOOL<true>(Instance* instance,
   1510                                            uint32_t typeDefIndex,
   1511                                            gc::AllocSite* allocSite);
   1512 template void* Instance::structNewOOL<false>(Instance* instance,
   1513                                             uint32_t typeDefIndex,
   1514                                             gc::AllocSite* allocSite);
   1515 
   1516 /* static */
   1517 template <bool ZeroFields>
   1518 void* Instance::arrayNew(Instance* instance, uint32_t numElements,
   1519                         uint32_t typeDefIndex, gc::AllocSite* allocSite) {
   1520  MOZ_ASSERT(
   1521      (ZeroFields ? SASigArrayNew_true : SASigArrayNew_false).failureMode ==
   1522      FailureMode::FailOnNullPtr);
   1523  JSContext* cx = instance->cx();
   1524  TypeDefInstanceData* typeDefData =
   1525      instance->typeDefInstanceData(typeDefIndex);
   1526  // The new array will be allocated in an initial heap as determined by
   1527  // pretenuring logic as set up in `Instance::init`.
   1528  return WasmArrayObject::createArray<ZeroFields>(
   1529      cx, typeDefData, allocSite, allocSite->initialHeap(), numElements);
   1530 }
   1531 
   1532 template void* Instance::arrayNew<true>(Instance* instance,
   1533                                        uint32_t numElements,
   1534                                        uint32_t typeDefIndex,
   1535                                        gc::AllocSite* allocSite);
   1536 template void* Instance::arrayNew<false>(Instance* instance,
   1537                                         uint32_t numElements,
   1538                                         uint32_t typeDefIndex,
   1539                                         gc::AllocSite* allocSite);
   1540 
// Copies from a data segment into a wasm GC array. Performs the necessary
// bounds checks, accounting for the array's element size. If this function
// returns false, it has already reported a trap error. Null arrays should
// be handled in the caller.
//
// `arrayIndex` is the destination index in array *elements*; `segByteOffset`
// is a *byte* offset into the segment; `numElements` counts array elements,
// not bytes.
static bool ArrayCopyFromData(JSContext* cx, Handle<WasmArrayObject*> arrayObj,
                              uint32_t arrayIndex, const DataSegment* seg,
                              uint32_t segByteOffset, uint32_t numElements) {
  uint32_t elemSize = arrayObj->typeDef().arrayType().elementType().size();

  // Compute the number of bytes to copy, ensuring it's below 2^32.
  CheckedUint32 numBytesToCopy =
      CheckedUint32(numElements) * CheckedUint32(elemSize);
  if (!numBytesToCopy.isValid()) {
    // Because the request implies that 2^32 or more bytes are to be copied.
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return false;
  }

  // Range-check the copy.  The obvious thing to do is to compute the offset
  // of the last byte to copy, but that would cause underflow in the
  // zero-length-and-zero-offset case.  Instead, compute that value plus one;
  // in other words the offset of the first byte *not* to copy.
  CheckedUint32 lastByteOffsetPlus1 =
      CheckedUint32(segByteOffset) + numBytesToCopy;

  CheckedUint32 numBytesAvailable(seg->bytes.length());
  if (!lastByteOffsetPlus1.isValid() || !numBytesAvailable.isValid() ||
      lastByteOffsetPlus1.value() > numBytesAvailable.value()) {
    // Because the last byte to copy doesn't exist inside `seg->bytes`.
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return false;
  }

  // Range check the destination array.
  uint64_t dstNumElements = uint64_t(arrayObj->numElements_);
  if (uint64_t(arrayIndex) + uint64_t(numElements) > dstNumElements) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return false;
  }

  // This value is safe due to the previous range check on number of elements.
  // (We know the full result fits in the array, and we can't overflow uint64_t
  // since elemSize caps out at 16.)
  uint64_t dstByteOffset = uint64_t(arrayIndex) * uint64_t(elemSize);

  // Because `numBytesToCopy` is an in-range `CheckedUint32`, the cast to
  // `size_t` is safe even on a 32-bit target.
  //
  // Skip the memcpy entirely for a zero-length copy, so we never form
  // element addresses into a possibly-empty `seg->bytes`.
  if (numElements != 0) {
    memcpy(&arrayObj->data_[dstByteOffset], &seg->bytes[segByteOffset],
           size_t(numBytesToCopy.value()));
  }

  return true;
}
   1595 
// Copies from an element segment into a wasm GC array. Performs the necessary
// bounds checks, accounting for the array's element size. If this function
// returns false, it has already reported a trap error.
//
// `arrayIndex` is the destination index in the array; `segOffset` and
// `numElements` select the source range, in elements, within `seg`.
static bool ArrayCopyFromElem(JSContext* cx, Handle<WasmArrayObject*> arrayObj,
                              uint32_t arrayIndex,
                              const InstanceElemSegment& seg,
                              uint32_t segOffset, uint32_t numElements) {
  // Range-check the copy. As in ArrayCopyFromData, compute the index of the
  // last element to copy, plus one.
  CheckedUint32 lastIndexPlus1 =
      CheckedUint32(segOffset) + CheckedUint32(numElements);
  CheckedUint32 numElemsAvailable(seg.length());
  if (!lastIndexPlus1.isValid() || !numElemsAvailable.isValid() ||
      lastIndexPlus1.value() > numElemsAvailable.value()) {
    // Because the last element to copy doesn't exist inside the segment.
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return false;
  }

  // Range check the destination array.
  uint64_t dstNumElements = uint64_t(arrayObj->numElements_);
  if (uint64_t(arrayIndex) + uint64_t(numElements) > dstNumElements) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return false;
  }

  // Element-wise assignment through the barrier wrapper so the appropriate
  // GC write barriers fire for each stored AnyRef.
  auto copyElements = [&](auto* dst) {
    for (uint32_t i = 0; i < numElements; i++) {
      dst[arrayIndex + i] = seg[segOffset + i];
    }
  };

  // Tenured arrays store their elements as GCPtr<AnyRef>; nursery-allocated
  // ones as PreBarriered<AnyRef>.  Pick the wrapper matching where the array
  // currently lives.
  if (arrayObj->isTenured()) {
    copyElements(reinterpret_cast<GCPtr<AnyRef>*>(arrayObj->data_));
  } else {
    copyElements(reinterpret_cast<PreBarriered<AnyRef>*>(arrayObj->data_));
  }

  return true;
}
   1636 
// Creates an array (WasmArrayObject) containing `numElements` of type
// described by `typeDef`.  Initialises it with data copied from the data
// segment whose index is `segIndex`, starting at byte offset `segByteOffset`
// in the segment.  Traps if the segment doesn't hold enough bytes to fill the
// array.  Returns null (FailOnNullPtr) after reporting a trap or OOM.
/* static */ void* Instance::arrayNewData(
    Instance* instance, uint32_t segByteOffset, uint32_t numElements,
    uint32_t typeDefIndex, gc::AllocSite* allocSite, uint32_t segIndex) {
  MOZ_ASSERT(SASigArrayNewData.failureMode == FailureMode::FailOnNullPtr);
  JSContext* cx = instance->cx();
  TypeDefInstanceData* typeDefData =
      instance->typeDefInstanceData(typeDefIndex);

  // Check that the data segment is valid for use.
  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                     "ensured by validation");
  const DataSegment* seg = instance->passiveDataSegments_[segIndex];

  // `seg` will be nullptr if the segment has already been 'data.drop'ed
  // (either implicitly in the case of 'active' segments during instantiation,
  // or explicitly by the data.drop instruction.)  In that case we can
  // continue only if there's no need to copy any data out of it.
  if (!seg && (numElements != 0 || segByteOffset != 0)) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return nullptr;
  }
  // At this point, if `seg` is null then `numElements` and `segByteOffset`
  // are both zero.

  // Zero-initialize (<true>) so the array contents are well-defined even
  // before the copy below runs.
  Rooted<WasmArrayObject*> arrayObj(
      cx,
      WasmArrayObject::createArray<true>(
          cx, typeDefData, allocSite, allocSite->initialHeap(), numElements));
  if (!arrayObj) {
    // WasmArrayObject::createArray will have reported OOM.
    return nullptr;
  }
  MOZ_RELEASE_ASSERT(arrayObj->is<WasmArrayObject>());

  if (!seg) {
    // A zero-length array was requested and has been created, so we're done.
    return arrayObj;
  }

  if (!ArrayCopyFromData(cx, arrayObj, 0, seg, segByteOffset, numElements)) {
    // Trap errors will be reported by ArrayCopyFromData.
    return nullptr;
  }

  return arrayObj;
}
   1688 
// This is almost identical to ::arrayNewData, apart from the final part that
// actually copies the data.  It creates an array (WasmArrayObject)
// containing `numElements` of type described by `typeDef`.  Initialises it
// with data copied from the element segment whose index is `segIndex`,
// starting at element number `srcOffset` in the segment.  Traps if the
// segment doesn't hold enough elements to fill the array.  Returns null
// (FailOnNullPtr) after reporting a trap or OOM.
/* static */ void* Instance::arrayNewElem(
    Instance* instance, uint32_t srcOffset, uint32_t numElements,
    uint32_t typeDefIndex, gc::AllocSite* allocSite, uint32_t segIndex) {
  MOZ_ASSERT(SASigArrayNewElem.failureMode == FailureMode::FailOnNullPtr);
  JSContext* cx = instance->cx();
  TypeDefInstanceData* typeDefData =
      instance->typeDefInstanceData(typeDefIndex);

  // Check that the element segment is valid for use.
  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
                     "ensured by validation");
  const InstanceElemSegment& seg = instance->passiveElemSegments_[segIndex];

  const TypeDef* typeDef = typeDefData->typeDef;

  // Any data coming from an element segment will be an AnyRef. Writes into
  // array memory are done with raw pointers, so we must ensure here that the
  // destination size is correct.
  MOZ_RELEASE_ASSERT(typeDef->arrayType().elementType().size() ==
                     sizeof(AnyRef));

  // Zero-initialize (<true>) so the array contents are well-defined even
  // before the copy below runs.
  Rooted<WasmArrayObject*> arrayObj(
      cx,
      WasmArrayObject::createArray<true>(
          cx, typeDefData, allocSite, allocSite->initialHeap(), numElements));
  if (!arrayObj) {
    // WasmArrayObject::createArray will have reported OOM.
    return nullptr;
  }
  MOZ_RELEASE_ASSERT(arrayObj->is<WasmArrayObject>());

  if (!ArrayCopyFromElem(cx, arrayObj, 0, seg, srcOffset, numElements)) {
    // Trap errors will be reported by ArrayCopyFromElem.
    return nullptr;
  }

  return arrayObj;
}
   1733 
// Copies a range of the data segment `segIndex` into an array
// (WasmArrayObject), starting at offset `segByteOffset` in the data segment and
// index `index` in the array. `numElements` is the length of the copy in array
// elements, NOT bytes - the number of bytes will be computed based on the type
// of the array.
//
// Traps if accesses are out of bounds for either the data segment or the array,
// or if the array object is null.  Returns 0 on success, -1 (FailOnNegI32)
// after reporting a trap.
/* static */ int32_t Instance::arrayInitData(Instance* instance, void* array,
                                             uint32_t index,
                                             uint32_t segByteOffset,
                                             uint32_t numElements,
                                             uint32_t segIndex) {
  MOZ_ASSERT(SASigArrayInitData.failureMode == FailureMode::FailOnNegI32);
  JSContext* cx = instance->cx();

  // Check that the data segment is valid for use.
  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(),
                     "ensured by validation");
  const DataSegment* seg = instance->passiveDataSegments_[segIndex];

  // `seg` will be nullptr if the segment has already been 'data.drop'ed
  // (either implicitly in the case of 'active' segments during instantiation,
  // or explicitly by the data.drop instruction.)  In that case we can
  // continue only if there's no need to copy any data out of it.
  if (!seg && (numElements != 0 || segByteOffset != 0)) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }
  // At this point, if `seg` is null then `numElements` and `segByteOffset`
  // are both zero.

  // Trap if the array is null.
  // NOTE: the null-array check deliberately comes after the dropped-segment
  // check, preserving the order in which the two traps can be observed.
  if (!array) {
    ReportTrapError(cx, JSMSG_WASM_DEREF_NULL);
    return -1;
  }

  if (!seg) {
    // The segment was dropped, therefore a zero-length init was requested, so
    // we're done.
    return 0;
  }

  // Get hold of the array.
  Rooted<WasmArrayObject*> arrayObj(cx, static_cast<WasmArrayObject*>(array));
  MOZ_RELEASE_ASSERT(arrayObj->is<WasmArrayObject>());

  if (!ArrayCopyFromData(cx, arrayObj, index, seg, segByteOffset,
                         numElements)) {
    // Trap errors will be reported by ArrayCopyFromData.
    return -1;
  }

  return 0;
}
   1790 
// Copies a range of the element segment `segIndex` into an array
// (WasmArrayObject), starting at offset `segOffset` in the elem segment and
// index `index` in the array. `numElements` is the length of the copy.
//
// Traps if accesses are out of bounds for either the elem segment or the array,
// or if the array object is null.  Returns 0 on success, -1 (FailOnNegI32)
// after reporting a trap.  `typeDefIndex` is used only for a debug-build
// sanity check on the destination element size.
/* static */ int32_t Instance::arrayInitElem(Instance* instance, void* array,
                                             uint32_t index, uint32_t segOffset,
                                             uint32_t numElements,
                                             uint32_t typeDefIndex,
                                             uint32_t segIndex) {
  MOZ_ASSERT(SASigArrayInitElem.failureMode == FailureMode::FailOnNegI32);
  JSContext* cx = instance->cx();

  // Check that the element segment is valid for use.
  MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(),
                     "ensured by validation");
  const InstanceElemSegment& seg = instance->passiveElemSegments_[segIndex];

  // Trap if the array is null.
  if (!array) {
    ReportTrapError(cx, JSMSG_WASM_DEREF_NULL);
    return -1;
  }

  // Any data coming from an element segment will be an AnyRef. Writes into
  // array memory are done with raw pointers, so we must ensure here that the
  // destination size is correct.
  DebugOnly<const TypeDef*> typeDef =
      &instance->codeMeta().types->type(typeDefIndex);
  MOZ_ASSERT(typeDef->arrayType().elementType().size() == sizeof(AnyRef));

  // Get hold of the array.
  Rooted<WasmArrayObject*> arrayObj(cx, static_cast<WasmArrayObject*>(array));
  MOZ_RELEASE_ASSERT(arrayObj->is<WasmArrayObject>());

  if (!ArrayCopyFromElem(cx, arrayObj, index, seg, segOffset, numElements)) {
    // Trap errors will be reported by ArrayCopyFromElem.
    return -1;
  }

  return 0;
}
   1834 
// Copies range of elements between two arrays.
//
// Traps if accesses are out of bounds for the arrays, or either array
// object is null.  Returns 0 on success, -1 (FailOnNegI32) after reporting
// a trap.
//
// This function is only used by baseline, Ion emits inline code using
// WasmArrayMemMove and WasmArrayRefsMove builtins instead.
/* static */ int32_t Instance::arrayCopy(Instance* instance, void* dstArray,
                                         uint32_t dstIndex, void* srcArray,
                                         uint32_t srcIndex,
                                         uint32_t numElements,
                                         uint32_t elementSize) {
  MOZ_ASSERT(SASigArrayCopy.failureMode == FailureMode::FailOnNegI32);

  // At the entry point, `elementSize` may be negative to indicate
  // reftyped-ness of array elements.  That is done in order to avoid having
  // to pass yet another (boolean) parameter here.

  // "traps if either array is null"
  if (!srcArray || !dstArray) {
    ReportTrapError(instance->cx(), JSMSG_WASM_DEREF_NULL);
    return -1;
  }

  // Decode the sign trick: a negative `elementSize` means "elements are
  // reftyped"; recover the true (positive) element size.
  bool elemsAreRefTyped = false;
  if (int32_t(elementSize) < 0) {
    elemsAreRefTyped = true;
    elementSize = uint32_t(-int32_t(elementSize));
  }
  MOZ_ASSERT(elementSize >= 1 && elementSize <= 16);

  // Get hold of the two arrays.  Note: unrooted pointers; see the GC-hazard
  // comments below.
  WasmArrayObject* dstArrayObj = static_cast<WasmArrayObject*>(dstArray);
  WasmArrayObject* srcArrayObj = static_cast<WasmArrayObject*>(srcArray);
  MOZ_ASSERT(dstArrayObj->is<WasmArrayObject>() &&
             srcArrayObj->is<WasmArrayObject>());

  // If WasmArrayObject::numElements() is changed to return 64 bits, the
  // following checking logic will be incorrect.
  STATIC_ASSERT_WASMARRAYELEMENTS_NUMELEMENTS_IS_U32;

  // "traps if destination + length > len(array1)"
  // The additions are performed in 64 bits, so they cannot wrap around.
  uint64_t dstNumElements = uint64_t(dstArrayObj->numElements_);
  if (uint64_t(dstIndex) + uint64_t(numElements) > dstNumElements) {
    // Potential GC hazard: srcArrayObj and dstArrayObj are invalidated by
    // reporting an error, do not use them after this point.
    ReportTrapError(instance->cx(), JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  // "traps if source + length > len(array2)"
  uint64_t srcNumElements = uint64_t(srcArrayObj->numElements_);
  if (uint64_t(srcIndex) + uint64_t(numElements) > srcNumElements) {
    // Potential GC hazard: srcArrayObj and dstArrayObj are invalidated by
    // reporting an error, do not use them after this point.
    ReportTrapError(instance->cx(), JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  if (numElements == 0) {
    // Early exit if there's no work to do.
    return 0;
  }

  // Actually do the copy, taking care to handle cases where the src and dst
  // areas overlap.
  uint8_t* srcBase = srcArrayObj->data_;
  uint8_t* dstBase = dstArrayObj->data_;
  srcBase += size_t(srcIndex) * size_t(elementSize);
  dstBase += size_t(dstIndex) * size_t(elementSize);
  if (srcBase == dstBase) {
    // Early exit if there's no work to do.
    return 0;
  }

  if (!elemsAreRefTyped) {
    // Hand off to memmove, which is presumably highly optimized.
    memmove(dstBase, srcBase, size_t(numElements) * size_t(elementSize));
    return 0;
  }

  AnyRef* src = (AnyRef*)srcBase;
  // Using std::copy will call set() on the barrier wrapper under the hood.
  // Copy direction is chosen so that overlapping ranges are handled
  // correctly: forward when dst precedes src, backward otherwise.
  auto copyElements = [&](auto* dst) {
    if (uintptr_t(dst) < uintptr_t(src)) {
      std::copy(src, src + numElements, dst);
    } else {
      std::copy_backward(src, src + numElements, dst + numElements);
    }
  };

  // Tenured arrays store GCPtr<AnyRef> elements; nursery-allocated ones
  // PreBarriered<AnyRef>.  Pick the wrapper matching the destination.
  if (dstArrayObj->isTenured()) {
    copyElements((GCPtr<AnyRef>*)dstBase);
  } else {
    copyElements((PreBarriered<AnyRef>*)dstBase);
  }

  return 0;
}
   1934 
   1935 /* static */ void* Instance::exceptionNew(Instance* instance, void* tagArg) {
   1936  MOZ_ASSERT(SASigExceptionNew.failureMode == FailureMode::FailOnNullPtr);
   1937  JSContext* cx = instance->cx();
   1938  AnyRef tag = AnyRef::fromCompiledCode(tagArg);
   1939  Rooted<WasmTagObject*> tagObj(cx, &tag.toJSObject().as<WasmTagObject>());
   1940  RootedObject proto(cx, &cx->global()->getPrototype(JSProto_WasmException));
   1941  RootedObject stack(cx, nullptr);
   1942 
   1943  // We don't create the .stack property by default, unless the pref is set for
   1944  // debugging.
   1945  if (JS::Prefs::wasm_exception_force_stack_trace() &&
   1946      !CaptureStack(cx, &stack)) {
   1947    ReportOutOfMemory(cx);
   1948    return nullptr;
   1949  }
   1950 
   1951  // An OOM will result in null which will be caught on the wasm side.
   1952  return AnyRef::fromJSObjectOrNull(
   1953             WasmExceptionObject::create(cx, tagObj, stack, proto))
   1954      .forCompiledCode();
   1955 }
   1956 
   1957 /* static */ int32_t Instance::throwException(Instance* instance,
   1958                                              void* exceptionArg) {
   1959  MOZ_ASSERT(SASigThrowException.failureMode == FailureMode::FailOnNegI32);
   1960 
   1961  JSContext* cx = instance->cx();
   1962  AnyRef exception = AnyRef::fromCompiledCode(exceptionArg);
   1963  RootedValue exnVal(cx, exception.toJSValue());
   1964  cx->setPendingException(exnVal, nullptr);
   1965 
   1966  // By always returning -1, we trigger a wasmTrap(Trap::ThrowReported),
   1967  // and use that to trigger the stack walking for this exception.
   1968  return -1;
   1969 }
   1970 
   1971 /* static */ int32_t Instance::intrI8VecMul(Instance* instance, uint32_t dest,
   1972                                            uint32_t src1, uint32_t src2,
   1973                                            uint32_t len, uint8_t* memBase) {
   1974  MOZ_ASSERT(SASigIntrI8VecMul.failureMode == FailureMode::FailOnNegI32);
   1975  MOZ_ASSERT(SASigIntrI8VecMul.failureTrap == Trap::OutOfBounds);
   1976  AutoUnsafeCallWithABI unsafe;
   1977 
   1978  const WasmArrayRawBuffer* rawBuf = WasmArrayRawBuffer::fromDataPtr(memBase);
   1979  size_t memLen = rawBuf->byteLength();
   1980 
   1981  // Bounds check and deal with arithmetic overflow.
   1982  uint64_t destLimit = uint64_t(dest) + uint64_t(len);
   1983  uint64_t src1Limit = uint64_t(src1) + uint64_t(len);
   1984  uint64_t src2Limit = uint64_t(src2) + uint64_t(len);
   1985  if (destLimit > memLen || src1Limit > memLen || src2Limit > memLen) {
   1986    return -1;
   1987  }
   1988 
   1989  // Basic dot product
   1990  uint8_t* destPtr = &memBase[dest];
   1991  uint8_t* src1Ptr = &memBase[src1];
   1992  uint8_t* src2Ptr = &memBase[src2];
   1993  while (len > 0) {
   1994    *destPtr = (*src1Ptr) * (*src2Ptr);
   1995 
   1996    destPtr++;
   1997    src1Ptr++;
   1998    src2Ptr++;
   1999    len--;
   2000  }
   2001  return 0;
   2002 }
   2003 
   2004 template <bool isMutable>
   2005 static WasmArrayObject* UncheckedCastToArrayI16(HandleAnyRef ref) {
   2006  JSObject& object = ref.toJSObject();
   2007  WasmArrayObject& array = object.as<WasmArrayObject>();
   2008  DebugOnly<const ArrayType*> type(&array.typeDef().arrayType());
   2009  MOZ_ASSERT(type->elementType() == StorageType::I16);
   2010  MOZ_ASSERT(type->isMutable() == isMutable);
   2011  return &array;
   2012 }
   2013 
   2014 /* static */
   2015 int32_t Instance::stringTest(Instance* instance, void* stringArg) {
   2016  MOZ_ASSERT(SASigStringTest.failureMode == FailureMode::Infallible);
   2017  AnyRef string = AnyRef::fromCompiledCode(stringArg);
   2018  if (string.isNull() || !string.isJSString()) {
   2019    return 0;
   2020  }
   2021  return 1;
   2022 }
   2023 
   2024 /* static */
   2025 void* Instance::stringCast(Instance* instance, void* stringArg) {
   2026  MOZ_ASSERT(SASigStringCast.failureMode == FailureMode::FailOnNullPtr);
   2027  AnyRef string = AnyRef::fromCompiledCode(stringArg);
   2028  if (string.isNull() || !string.isJSString()) {
   2029    ReportTrapError(instance->cx(), JSMSG_WASM_BAD_CAST);
   2030    return nullptr;
   2031  }
   2032  return string.forCompiledCode();
   2033 }
   2034 
// Builds a JS string from the mutable i16 array `arrayArg`, interpreting
// elements [arrayStart, arrayEnd) as UTF-16 code units.  Traps (and returns
// null, per FailOnNullPtr) on a null array or an out-of-bounds range, and
// returns null on OOM.
/* static */
void* Instance::stringFromCharCodeArray(Instance* instance, void* arrayArg,
                                        uint32_t arrayStart,
                                        uint32_t arrayEnd) {
  MOZ_ASSERT(SASigStringFromCharCodeArray.failureMode ==
             FailureMode::FailOnNullPtr);
  JSContext* cx = instance->cx();
  RootedAnyRef arrayRef(cx, AnyRef::fromCompiledCode(arrayArg));
  if (arrayRef.isNull()) {
    ReportTrapError(instance->cx(), JSMSG_WASM_BAD_CAST);
    return nullptr;
  }
  Rooted<WasmArrayObject*> array(cx, UncheckedCastToArrayI16<true>(arrayRef));

  // Reject reversed or out-of-bounds ranges before touching the data.
  if (arrayStart > arrayEnd || arrayEnd > array->numElements_) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return nullptr;
  }
  uint32_t arrayCount = arrayEnd - arrayStart;

  // GC is disabled on this call since it can cause the array to move,
  // invalidating the data pointer we pass as a parameter
  JSLinearString* string = NewStringCopyN<NoGC, char16_t>(
      cx, (char16_t*)array->data_ + arrayStart, arrayCount);
  if (!string) {
    // If the first attempt failed, we need to try again with a potential GC.
    // Acquire a stable version of the array that we can use. This may copy
    // inline data to the stack, so we avoid doing it unless we must.
    StableWasmArrayObjectElements<uint16_t> stableElements(cx, array);
    string = NewStringCopyN<CanGC, char16_t>(
        cx, (char16_t*)stableElements.elements() + arrayStart, arrayCount);
    if (!string) {
      MOZ_ASSERT(cx->isThrowingOutOfMemory());
      return nullptr;
    }
  }
  return AnyRef::fromJSString(string).forCompiledCode();
}
   2073 
// Writes every UTF-16 code unit of the string `stringArg` into the mutable
// i16 array `arrayArg`, starting at element `arrayStart`.  Returns the number
// of code units written, or -1 (FailOnNegI32) after a trap (bad cast, null
// array, destination too small) or a failed linearization.
/* static */
int32_t Instance::stringIntoCharCodeArray(Instance* instance, void* stringArg,
                                          void* arrayArg, uint32_t arrayStart) {
  MOZ_ASSERT(SASigStringIntoCharCodeArray.failureMode ==
             FailureMode::FailOnNegI32);
  JSContext* cx = instance->cx();
  AnyRef stringRef = AnyRef::fromCompiledCode(stringArg);
  if (!stringRef.isJSString()) {
    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
    return -1;
  }
  Rooted<JSString*> string(cx, stringRef.toJSString());
  size_t stringLength = string->length();

  RootedAnyRef arrayRef(cx, AnyRef::fromCompiledCode(arrayArg));
  if (arrayRef.isNull()) {
    ReportTrapError(instance->cx(), JSMSG_WASM_BAD_CAST);
    return -1;
  }
  Rooted<WasmArrayObject*> array(cx, UncheckedCastToArrayI16<true>(arrayRef));

  // Checked addition guards against arrayStart + stringLength wrapping.
  CheckedUint32 lastIndexPlus1 = CheckedUint32(arrayStart) + stringLength;
  if (!lastIndexPlus1.isValid() ||
      lastIndexPlus1.value() > array->numElements_) {
    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    return -1;
  }

  JSLinearString* linearStr = string->ensureLinear(cx);
  if (!linearStr) {
    return -1;
  }
  char16_t* arrayData = reinterpret_cast<char16_t*>(array->data_);
  CopyChars(arrayData + arrayStart, *linearStr);
  // Narrowing size_t -> int32_t is lossless: JS::MaxStringLength <= INT32_MAX
  // (see the static_assert in stringLength).
  return stringLength;
}
   2110 
   2111 void* Instance::stringFromCharCode(Instance* instance, uint32_t charCode) {
   2112  MOZ_ASSERT(SASigStringFromCharCode.failureMode == FailureMode::FailOnNullPtr);
   2113  JSContext* cx = instance->cx();
   2114 
   2115  JSString* str = StringFromCharCode(cx, int32_t(charCode));
   2116  if (!str) {
   2117    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2118    return nullptr;
   2119  }
   2120 
   2121  return AnyRef::fromJSString(str).forCompiledCode();
   2122 }
   2123 
   2124 void* Instance::stringFromCodePoint(Instance* instance, uint32_t codePoint) {
   2125  MOZ_ASSERT(SASigStringFromCodePoint.failureMode ==
   2126             FailureMode::FailOnNullPtr);
   2127  JSContext* cx = instance->cx();
   2128 
   2129  // Check for any error conditions before calling fromCodePoint so we report
   2130  // the correct error
   2131  if (codePoint > unicode::NonBMPMax) {
   2132    ReportTrapError(cx, JSMSG_WASM_BAD_CODEPOINT);
   2133    return nullptr;
   2134  }
   2135 
   2136  JSString* str = StringFromCodePoint(cx, char32_t(codePoint));
   2137  if (!str) {
   2138    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2139    return nullptr;
   2140  }
   2141 
   2142  return AnyRef::fromJSString(str).forCompiledCode();
   2143 }
   2144 
   2145 int32_t Instance::stringCharCodeAt(Instance* instance, void* stringArg,
   2146                                   uint32_t index) {
   2147  MOZ_ASSERT(SASigStringCharCodeAt.failureMode == FailureMode::FailOnNegI32);
   2148  JSContext* cx = instance->cx();
   2149  AnyRef stringRef = AnyRef::fromCompiledCode(stringArg);
   2150  if (!stringRef.isJSString()) {
   2151    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2152    return -1;
   2153  }
   2154 
   2155  Rooted<JSString*> string(cx, stringRef.toJSString());
   2156  if (index >= string->length()) {
   2157    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   2158    return -1;
   2159  }
   2160 
   2161  char16_t c;
   2162  if (!string->getChar(cx, index, &c)) {
   2163    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2164    return false;
   2165  }
   2166  return c;
   2167 }
   2168 
   2169 int32_t Instance::stringCodePointAt(Instance* instance, void* stringArg,
   2170                                    uint32_t index) {
   2171  MOZ_ASSERT(SASigStringCodePointAt.failureMode == FailureMode::FailOnNegI32);
   2172  JSContext* cx = instance->cx();
   2173  AnyRef stringRef = AnyRef::fromCompiledCode(stringArg);
   2174  if (!stringRef.isJSString()) {
   2175    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2176    return -1;
   2177  }
   2178 
   2179  Rooted<JSString*> string(cx, stringRef.toJSString());
   2180  if (index >= string->length()) {
   2181    ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
   2182    return -1;
   2183  }
   2184 
   2185  char32_t c;
   2186  if (!string->getCodePoint(cx, index, &c)) {
   2187    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2188    return false;
   2189  }
   2190  return c;
   2191 }
   2192 
   2193 int32_t Instance::stringLength(Instance* instance, void* stringArg) {
   2194  MOZ_ASSERT(SASigStringLength.failureMode == FailureMode::FailOnNegI32);
   2195  JSContext* cx = instance->cx();
   2196  AnyRef stringRef = AnyRef::fromCompiledCode(stringArg);
   2197  if (!stringRef.isJSString()) {
   2198    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2199    return -1;
   2200  }
   2201 
   2202  static_assert(JS::MaxStringLength <= INT32_MAX);
   2203  return (int32_t)stringRef.toJSString()->length();
   2204 }
   2205 
   2206 void* Instance::stringConcat(Instance* instance, void* firstStringArg,
   2207                             void* secondStringArg) {
   2208  MOZ_ASSERT(SASigStringConcat.failureMode == FailureMode::FailOnNullPtr);
   2209  JSContext* cx = instance->cx();
   2210 
   2211  AnyRef firstStringRef = AnyRef::fromCompiledCode(firstStringArg);
   2212  AnyRef secondStringRef = AnyRef::fromCompiledCode(secondStringArg);
   2213  if (!firstStringRef.isJSString() || !secondStringRef.isJSString()) {
   2214    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2215    return nullptr;
   2216  }
   2217 
   2218  Rooted<JSString*> firstString(cx, firstStringRef.toJSString());
   2219  Rooted<JSString*> secondString(cx, secondStringRef.toJSString());
   2220  JSString* result = ConcatStrings<CanGC>(cx, firstString, secondString);
   2221  if (!result) {
   2222    MOZ_ASSERT(cx->isExceptionPending());
   2223    return nullptr;
   2224  }
   2225  return AnyRef::fromJSString(result).forCompiledCode();
   2226 }
   2227 
   2228 void* Instance::stringSubstring(Instance* instance, void* stringArg,
   2229                                uint32_t startIndex, uint32_t endIndex) {
   2230  MOZ_ASSERT(SASigStringSubstring.failureMode == FailureMode::FailOnNullPtr);
   2231  JSContext* cx = instance->cx();
   2232 
   2233  AnyRef stringRef = AnyRef::fromCompiledCode(stringArg);
   2234  if (!stringRef.isJSString()) {
   2235    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2236    return nullptr;
   2237  }
   2238 
   2239  static_assert(JS::MaxStringLength <= INT32_MAX);
   2240  RootedString string(cx, stringRef.toJSString());
   2241  uint32_t stringLength = string->length();
   2242  if (startIndex > stringLength || startIndex > endIndex) {
   2243    return AnyRef::fromJSString(cx->names().empty_).forCompiledCode();
   2244  }
   2245 
   2246  if (endIndex > stringLength) {
   2247    endIndex = stringLength;
   2248  }
   2249 
   2250  JSString* result =
   2251      SubstringKernel(cx, string, startIndex, endIndex - startIndex);
   2252  if (!result) {
   2253    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2254    return nullptr;
   2255  }
   2256  return AnyRef::fromJSString(result).forCompiledCode();
   2257 }
   2258 
   2259 int32_t Instance::stringEquals(Instance* instance, void* firstStringArg,
   2260                               void* secondStringArg) {
   2261  MOZ_ASSERT(SASigStringEquals.failureMode == FailureMode::FailOnNegI32);
   2262  JSContext* cx = instance->cx();
   2263 
   2264  AnyRef firstStringRef = AnyRef::fromCompiledCode(firstStringArg);
   2265  AnyRef secondStringRef = AnyRef::fromCompiledCode(secondStringArg);
   2266 
   2267  // Null strings are considered equals
   2268  if (firstStringRef.isNull() || secondStringRef.isNull()) {
   2269    return firstStringRef.isNull() == secondStringRef.isNull();
   2270  }
   2271 
   2272  // Otherwise, rule out any other kind of reference value
   2273  if (!firstStringRef.isJSString() || !secondStringRef.isJSString()) {
   2274    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2275    return -1;
   2276  }
   2277 
   2278  bool equals;
   2279  if (!EqualStrings(cx, firstStringRef.toJSString(),
   2280                    secondStringRef.toJSString(), &equals)) {
   2281    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2282    return -1;
   2283  }
   2284  return equals ? 1 : 0;
   2285 }
   2286 
   2287 int32_t Instance::stringCompare(Instance* instance, void* firstStringArg,
   2288                                void* secondStringArg) {
   2289  MOZ_ASSERT(SASigStringCompare.failureMode == FailureMode::FailOnMaxI32);
   2290  JSContext* cx = instance->cx();
   2291 
   2292  AnyRef firstStringRef = AnyRef::fromCompiledCode(firstStringArg);
   2293  AnyRef secondStringRef = AnyRef::fromCompiledCode(secondStringArg);
   2294  if (!firstStringRef.isJSString() || !secondStringRef.isJSString()) {
   2295    ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
   2296    return INT32_MAX;
   2297  }
   2298 
   2299  int32_t result;
   2300  if (!CompareStrings(cx, firstStringRef.toJSString(),
   2301                      secondStringRef.toJSString(), &result)) {
   2302    MOZ_ASSERT(cx->isThrowingOutOfMemory());
   2303    return INT32_MAX;
   2304  }
   2305 
   2306  if (result < 0) {
   2307    return -1;
   2308  }
   2309  if (result > 0) {
   2310    return 1;
   2311  }
   2312  return result;
   2313 }
   2314 
   2315 // [SMDOC] Wasm Function.prototype.call.bind optimization
   2316 //
   2317 // Check if our import is of the form `Function.prototype.call.bind(targetFunc)`
   2318 // and optimize it so that we call `targetFunc` directly and pass the
   2319 // first wasm function parameter as the 'this' value.
   2320 //
   2321 // Breaking it down:
   2322 //   1. `Function.prototype.call` invokes the function given by `this`
   2323 //       and passes the first argument as the `this` value, then the
   2324 //       remaining arguments as the natural arguments.
   2325 //   2. `Function.prototype.bind` creates a new bound function that will
   2326 //      always pass a chosen value as the `this` value.
   2327 //   3. Binding 'targetFunc' to `Function.prototype.call` is equivalent to
   2328 //      `(thisValue, ...args) => targetFunc.call(thisValue, ...args)`;
   2329 //      but in a form the VM can pattern match on easily.
   2330 //
   2331 // When all of these conditions match, we set the `isFunctionCallBind` flag on
   2332 // FuncImportInstanceData and set callable to `targetFunc`. Then
   2333 // Instance::callImport reads the flag to figure out if the first parameter
   2334 // should be stored in invokeArgs.thisv() or in normal arguments.
   2335 //
   2336 // JIT exits do not support this flag yet, and so we don't use them on the
   2337 // targetFunc. This is okay because we couldn't use them on BoundFunctionObject
   2338 // anyways, and so this is strictly faster. Eventually we can add JIT exit
   2339 // support here.
   2340 JSObject* MaybeOptimizeFunctionCallBind(const wasm::FuncType& funcType,
   2341                                        JSObject* f) {
   2342  // Skip this for functions with no args. This is useless as it would result
   2343  // in `this` always being undefined. Skipping this simplifies the logic in
   2344  // Instance::callImport.
   2345  if (funcType.args().length() == 0) {
   2346    return nullptr;
   2347  }
   2348 
   2349  if (!f->is<BoundFunctionObject>()) {
   2350    return nullptr;
   2351  }
   2352 
   2353  BoundFunctionObject* boundFun = &f->as<BoundFunctionObject>();
   2354  JSObject* boundTarget = boundFun->getTarget();
   2355  Value boundThis = boundFun->getBoundThis();
   2356 
   2357  // There cannot be any extra bound args in addition to the 'this'.
   2358  if (boundFun->numBoundArgs() != 0) {
   2359    return nullptr;
   2360  }
   2361 
   2362  // The bound `target` must be the Function.prototype.call builtin
   2363  if (!IsNativeFunction(boundTarget, fun_call)) {
   2364    return nullptr;
   2365  }
   2366 
   2367  // The bound `this` must be a callable object
   2368  if (!boundThis.isObject() || !boundThis.toObject().isCallable() ||
   2369      IsCrossCompartmentWrapper(boundThis.toObjectOrNull())) {
   2370    return nullptr;
   2371  }
   2372 
   2373  return boundThis.toObjectOrNull();
   2374 }
   2375 
   2376 //////////////////////////////////////////////////////////////////////////////
   2377 //
   2378 // Instance creation and related.
   2379 
// Instance constructor.  Only caches runtime/GC addresses and takes
// ownership of the passed-in code, tables, and debug state; the bulk of the
// (fallible) setup happens later in Instance::init.  Invoked via placement
// new from Instance::create.
Instance::Instance(JSContext* cx, Handle<WasmInstanceObject*> object,
                   const SharedCode& code, SharedTableVector&& tables,
                   UniqueDebugState maybeDebug)
    : realm_(cx->realm()),
      allocSites_(nullptr),
      // Cached JIT runtime entry points, read by generated wasm code.
      jsJitExceptionHandler_(
          cx->runtime()->jitRuntime()->getExceptionTail().value),
      preBarrierCode_(
          cx->runtime()->jitRuntime()->preBarrier(MIRType::WasmAnyRef).value),
      storeBuffer_(&cx->runtime()->gc.storeBuffer()),
      object_(object),
      // NOTE(review): `code` is a const&, so std::move here degrades to a
      // copy (refcount bump) — harmless, but the move is a no-op.
      code_(std::move(code)),
      tables_(std::move(tables)),
      maybeDebug_(std::move(maybeDebug)),
      debugFilter_(nullptr),
      callRefMetrics_(nullptr),
      maxInitializedGlobalsIndexPlus1_(0),
      allocationMetadataBuilder_(nullptr),
      addressOfLastBufferedWholeCell_(
          cx->runtime()->gc.addressOfLastBufferedWholeCell()) {
  // Zero the baseline scratch area so generated code never reads garbage.
  for (size_t i = 0; i < N_BASELINE_SCRATCH_WORDS; i++) {
    baselineScratchWords_[i] = 0;
  }
}
   2404 
// Allocate and placement-construct an Instance with `instanceDataLength`
// bytes of trailing instance data (the flexible `data_` area).  Returns
// nullptr and reports OOM on allocation failure.  The raw allocation is
// remembered in allocatedBase_ so Instance::destroy can free it.
Instance* Instance::create(JSContext* cx, Handle<WasmInstanceObject*> object,
                           const SharedCode& code, uint32_t instanceDataLength,
                           SharedTableVector&& tables,
                           UniqueDebugState maybeDebug) {
  // Over-allocate by alignof(Instance) so the object can be aligned up
  // inside the block; js_calloc also zero-fills the instance data area.
  void* base = js_calloc(alignof(Instance) + offsetof(Instance, data_) +
                         instanceDataLength);
  if (!base) {
    ReportOutOfMemory(cx);
    return nullptr;
  }
  void* aligned = (void*)AlignBytes(uintptr_t(base), alignof(Instance));

  auto* instance = new (aligned)
      Instance(cx, object, code, std::move(tables), std::move(maybeDebug));
  // Keep the unaligned base around: it, not `aligned`, must be passed to
  // js_free.
  instance->allocatedBase_ = base;
  return instance;
}
   2422 
   2423 void Instance::destroy(Instance* instance) {
   2424  instance->~Instance();
   2425  js_free(instance->allocatedBase_);
   2426 }
   2427 
// Fallible second phase of instance construction.  Populates the instance
// data area (type definitions, function imports, globals, memories, tables,
// tags), allocates side tables (alloc sites, debug filter, call_ref
// metrics), and takes references to passive data/element segments.  Returns
// false with an exception pending on failure; on failure the destructor is
// still responsible for releasing whatever was allocated.
bool Instance::init(JSContext* cx, const JSObjectVector& funcImports,
                    const ValVector& globalImportValues,
                    Handle<WasmMemoryObjectVector> memories,
                    const WasmGlobalObjectVector& globalObjs,
                    const WasmTagObjectVector& tagObjs,
                    const DataSegmentVector& dataSegments,
                    const ModuleElemSegmentVector& elemSegments) {
  MOZ_ASSERT(!!maybeDebug_ == code().debugEnabled());

  MOZ_ASSERT(funcImports.length() == code().funcImports().length());
  MOZ_ASSERT(tables_.length() == codeMeta().tables.length());

  // Cache context/runtime addresses that generated code and the rest of the
  // runtime read directly out of the instance.
  cx_ = cx;
  valueBoxClass_ = AnyRef::valueBoxClass();
  interrupt_ = false;
  jumpTable_ = code_->tieringJumpTable();
  debugFilter_ = nullptr;
  callRefMetrics_ = nullptr;
  addressOfNeedsIncrementalBarrier_ =
      cx->compartment()->zone()->addressOfNeedsIncrementalBarrier();
  addressOfNurseryPosition_ = cx->nursery().addressOfPosition();
#ifdef JS_GC_ZEAL
  addressOfGCZealModeBits_ = cx->runtime()->gc.addressOfZealModeBits();
#endif

  // Initialize the request-tier-up stub pointer, if relevant
  if (code().mode() == CompileMode::LazyTiering) {
    setRequestTierUpStub(code().sharedStubs().base() +
                         code().requestTierUpStubOffset());
    setUpdateCallRefMetricsStub(code().sharedStubs().base() +
                                code().updateCallRefMetricsStubOffset());
  } else {
    setRequestTierUpStub(nullptr);
    setUpdateCallRefMetricsStub(nullptr);
  }

  // Initialize the hotness counters, if relevant.
  if (code().mode() == CompileMode::LazyTiering) {
    // Computing the initial hotness counters requires the code section size.
    const size_t codeSectionSize = codeMeta().codeSectionSize();
    for (uint32_t funcIndex = codeMeta().numFuncImports;
         funcIndex < codeMeta().numFuncs(); funcIndex++) {
      funcDefInstanceData(funcIndex)->hotnessCounter =
          computeInitialHotnessCounter(funcIndex, codeSectionSize);
    }
  }

  // Initialize type definitions in the instance data.
  const SharedTypeContext& types = codeMeta().types;
  Zone* zone = realm()->zone();
  for (uint32_t typeIndex = 0; typeIndex < types->length(); typeIndex++) {
    const TypeDef& typeDef = types->type(typeIndex);
    TypeDefInstanceData* typeDefData = typeDefInstanceData(typeIndex);

    // Set default field values.
    new (typeDefData) TypeDefInstanceData();

    // Store the runtime type for this type index
    typeDefData->typeDef = &typeDef;
    typeDefData->superTypeVector = typeDef.superTypeVector();

    if (typeDef.kind() == TypeDefKind::Struct ||
        typeDef.kind() == TypeDefKind::Array) {
      // Compute the parameters that allocation will use.  First, the class for
      // the type definition.
      if (typeDef.kind() == TypeDefKind::Struct) {
        const StructType& structType = typeDef.structType();
        bool needsOOLstorage = structType.hasOOL();
        typeDefData->clasp =
            WasmStructObject::classFromOOLness(needsOOLstorage);
      } else {
        typeDefData->clasp = &WasmArrayObject::class_;
      }

      // Find the shape using the class and recursion group
      const ObjectFlags objectFlags = {ObjectFlag::NotExtensible};
      typeDefData->shape = WasmGCShape::getShape(
          cx, typeDefData->clasp, cx->realm(), TaggedProto(),
          &typeDef.recGroup(), objectFlags);
      if (!typeDefData->shape) {
        return false;
      }

      // If `typeDef` is a struct, cache some layout info here, so that
      // allocators don't have to chase back through `typeDef` to determine
      // that.  Similarly, if `typeDef` is an array, cache its array element
      // size here.
      if (typeDef.kind() == TypeDefKind::Struct) {
        const StructType& structType = typeDef.structType();
        typeDefData->cached.strukt.payloadOffsetIL =
            structType.payloadOffsetIL_;
        typeDefData->cached.strukt.totalSizeIL = structType.totalSizeIL_;
        typeDefData->cached.strukt.totalSizeOOL = structType.totalSizeOOL_;
        typeDefData->cached.strukt.oolPointerOffset =
            structType.oolPointerOffset_;
        typeDefData->cached.strukt.allocKind =
            gc::GetFinalizedAllocKindForClass(structType.allocKind_,
                                              typeDefData->clasp);
        MOZ_ASSERT(!IsFinalizedKind(typeDefData->cached.strukt.allocKind));
        // StructLayout::totalSizeIL/OOL() ensures these are an integral number
        // of words.
        MOZ_ASSERT(
            (typeDefData->cached.strukt.totalSizeIL % sizeof(uintptr_t)) == 0);
        MOZ_ASSERT(
            (typeDefData->cached.strukt.totalSizeOOL % sizeof(uintptr_t)) == 0);
      } else {
        uint32_t arrayElemSize = typeDef.arrayType().elementType().size();
        typeDefData->cached.array.elemSize = arrayElemSize;
        MOZ_ASSERT(arrayElemSize == 16 || arrayElemSize == 8 ||
                   arrayElemSize == 4 || arrayElemSize == 2 ||
                   arrayElemSize == 1);
      }
    } else if (typeDef.kind() == TypeDefKind::Func) {
      // Nothing to do; the default values are OK.
    } else {
      MOZ_ASSERT(typeDef.kind() == TypeDefKind::None);
      MOZ_CRASH();
    }
  }

  // Create and initialize alloc sites, they are all the same for Wasm.
  uint32_t allocSitesCount = codeTailMeta().numAllocSites;
  if (allocSitesCount > 0) {
    allocSites_ =
        (gc::AllocSite*)js_malloc(sizeof(gc::AllocSite) * allocSitesCount);
    if (!allocSites_) {
      ReportOutOfMemory(cx);
      return false;
    }
    for (uint32_t i = 0; i < allocSitesCount; ++i) {
      new (&allocSites_[i]) gc::AllocSite();
      allocSites_[i].initWasm(zone);
    }
  }

  // Initialize function imports in the instance data
  for (size_t i = 0; i < code().funcImports().length(); i++) {
    JSObject* f = funcImports[i];

#ifdef ENABLE_WASM_JSPI
    if (JSObject* suspendingObject = MaybeUnwrapSuspendingObject(f)) {
      // Compile suspending function Wasm wrapper.
      const FuncType& funcType = codeMeta().getFuncType(i);
      RootedObject wrapped(cx, suspendingObject);
      RootedFunction wrapper(
          cx, WasmSuspendingFunctionCreate(cx, wrapped, funcType));
      if (!wrapper) {
        return false;
      }
      MOZ_ASSERT(wrapper->isWasm());
      f = wrapper;
    }
#endif

    MOZ_ASSERT(f->isCallable());
    const FuncImport& fi = code().funcImport(i);
    const FuncType& funcType = codeMeta().getFuncType(i);
    FuncImportInstanceData& import = funcImportInstanceData(i);
    import.callable = f;
    import.isFunctionCallBind = false;
    if (f->is<JSFunction>()) {
      JSFunction* fun = &f->as<JSFunction>();
      // Pick the fastest available exit path: direct wasm-to-wasm call,
      // a typed-native thunk, or the generic interpreter exit stub.
      if (!isAsmJS() && !codeMeta().funcImportsAreJS && fun->isWasm()) {
        import.instance = &fun->wasmInstance();
        import.realm = fun->realm();
        import.code = fun->wasmUncheckedCallEntry();
      } else if (void* thunk = MaybeGetTypedNative(fun, funcType)) {
        import.instance = this;
        import.realm = fun->realm();
        import.code = thunk;
      } else {
        import.instance = this;
        import.realm = fun->realm();
        import.code = code().sharedStubs().base() + fi.interpExitCodeOffset();
      }
    } else if (JSObject* callable =
                   MaybeOptimizeFunctionCallBind(funcType, f)) {
      // See [SMDOC] Wasm Function.prototype.call.bind optimization.
      import.instance = this;
      import.callable = callable;
      import.realm = import.callable->nonCCWRealm();
      import.code = code().sharedStubs().base() + fi.interpExitCodeOffset();
      import.isFunctionCallBind = true;
    } else {
      import.instance = this;
      import.realm = import.callable->nonCCWRealm();
      import.code = code().sharedStubs().base() + fi.interpExitCodeOffset();
    }
  }

#ifdef DEBUG
  for (size_t i = 0; i < codeMeta().numExportedFuncs(); i++) {
    MOZ_ASSERT(!funcExportInstanceData(i).func);
  }
#endif

  // Initialize globals in the instance data.
  //
  // This must be performed after we have initialized runtime types as a global
  // initializer may reference them.
  //
  // We increment `maxInitializedGlobalsIndexPlus1_` every iteration of the
  // loop, as we call out to `InitExpr::evaluate` which may call
  // `constantGlobalGet` which uses this value to assert we're never accessing
  // uninitialized globals.
  maxInitializedGlobalsIndexPlus1_ = 0;
  for (size_t i = 0; i < codeMeta().globals.length();
       i++, maxInitializedGlobalsIndexPlus1_ = i) {
    const GlobalDesc& global = codeMeta().globals[i];

    // Constants are baked into the code, never stored in the global area.
    if (global.isConstant()) {
      continue;
    }

    uint8_t* globalAddr = data() + global.offset();
    switch (global.kind()) {
      case GlobalKind::Import: {
        size_t imported = global.importIndex();
        if (global.isIndirect()) {
          // Indirect globals point at the cell owned by the global object.
          *(void**)globalAddr =
              (void*)&globalObjs[imported]->val().get().cell();
        } else {
          globalImportValues[imported].writeToHeapLocation(globalAddr);
        }
        break;
      }
      case GlobalKind::Variable: {
        RootedVal val(cx);
        const InitExpr& init = global.initExpr();
        Rooted<WasmInstanceObject*> instanceObj(cx, object());
        if (!init.evaluate(cx, instanceObj, &val)) {
          return false;
        }

        if (global.isIndirect()) {
          // Initialize the cell
          globalObjs[i]->setVal(val);

          // Link to the cell
          *(void**)globalAddr = globalObjs[i]->addressOfCell();
        } else {
          val.get().writeToHeapLocation(globalAddr);
        }
        break;
      }
      case GlobalKind::Constant: {
        MOZ_CRASH("skipped at the top");
      }
    }
  }

  // All globals were initialized
  MOZ_ASSERT(maxInitializedGlobalsIndexPlus1_ == codeMeta().globals.length());

  // Initialize memories in the instance data
  for (size_t i = 0; i < memories.length(); i++) {
    const MemoryDesc& md = codeMeta().memories[i];
    MemoryInstanceData& data = memoryInstanceData(i);
    WasmMemoryObject* memory = memories.get()[i];

    data.memory = memory;
    data.base = memory->buffer().dataPointerEither().unwrap();
    size_t limit = memory->boundsCheckLimit();
#if !defined(JS_64BIT)
    // We assume that the limit is a 32-bit quantity
    MOZ_ASSERT(limit <= UINT32_MAX);
#endif
    data.boundsCheckLimit = limit;
#ifdef ENABLE_WASM_CUSTOM_PAGE_SIZES
    // Precomputed limits for 2/4/8/16-byte accesses, clamped at zero so the
    // subtraction cannot wrap.
    data.boundsCheckLimit16 = limit > 1 ? limit - 1 : 0;
    data.boundsCheckLimit32 = limit > 3 ? limit - 3 : 0;
    data.boundsCheckLimit64 = limit > 7 ? limit - 7 : 0;
    data.boundsCheckLimit128 = limit > 15 ? limit - 15 : 0;
#endif
    data.isShared = md.isShared();

    // Add observer if our memory base may grow
    if (memory && memory->movingGrowable() &&
        !memory->addMovingGrowObserver(cx, object_)) {
      return false;
    }
  }

  // Cache the default memory's values
  if (memories.length() > 0) {
    MemoryInstanceData& data = memoryInstanceData(0);
    memory0Base_ = data.base;
    memory0BoundsCheckLimit_ = data.boundsCheckLimit;
  } else {
    memory0Base_ = nullptr;
    memory0BoundsCheckLimit_ = 0;
  }

  // Initialize tables in the instance data
  for (size_t i = 0; i < tables_.length(); i++) {
    const TableDesc& td = codeMeta().tables[i];
    TableInstanceData& table = tableInstanceData(i);
    table.length = tables_[i]->length();
    table.elements = tables_[i]->instanceElements();
    // Non-imported tables, with init_expr, has to be initialized with
    // the evaluated value.
    if (!td.isImported && td.initExpr) {
      Rooted<WasmInstanceObject*> instanceObj(cx, object());
      RootedVal val(cx);
      if (!td.initExpr->evaluate(cx, instanceObj, &val)) {
        return false;
      }
      RootedAnyRef ref(cx, val.get().ref());
      tables_[i]->fillUninitialized(0, tables_[i]->length(), ref, cx);
    }
  }

#ifdef DEBUG
  // All (linked) tables with non-nullable types must be initialized.
  for (size_t i = 0; i < tables_.length(); i++) {
    const TableDesc& td = codeMeta().tables[i];
    if (!td.elemType.isNullable()) {
      tables_[i]->assertRangeNotNull(0, tables_[i]->length());
    }
  }
#endif  // DEBUG

  // Initialize tags in the instance data
  for (size_t i = 0; i < codeMeta().tags.length(); i++) {
    MOZ_ASSERT(tagObjs[i] != nullptr);
    tagInstanceData(i).object = tagObjs[i];
  }
  pendingException_ = nullptr;
  pendingExceptionTag_ = nullptr;

  // Add debug filtering table.
  if (code().debugEnabled()) {
    size_t numFuncs = codeMeta().numFuncs();
    // One bit per function, rounded up to whole words, minimum one word.
    size_t numWords = std::max<size_t>((numFuncs + 31) / 32, 1);
    debugFilter_ = (uint32_t*)js_calloc(numWords, sizeof(uint32_t));
    if (!debugFilter_) {
      ReportOutOfMemory(cx);
      return false;
    }
  }

  if (code().mode() == CompileMode::LazyTiering) {
    callRefMetrics_ = (CallRefMetrics*)js_calloc(
        codeTailMeta().numCallRefMetrics, sizeof(CallRefMetrics));
    if (!callRefMetrics_) {
      ReportOutOfMemory(cx);
      return false;
    }
    // A zeroed-out CallRefMetrics should satisfy
    // CallRefMetrics::checkInvariants.
    MOZ_ASSERT_IF(codeTailMeta().numCallRefMetrics > 0,
                  callRefMetrics_[0].checkInvariants());
  } else {
    MOZ_ASSERT(codeTailMeta().numCallRefMetrics == 0);
  }

  // Add observers if our tables may grow
  for (const SharedTable& table : tables_) {
    if (table->movingGrowable() && !table->addMovingGrowObserver(cx, object_)) {
      return false;
    }
  }

  // Take references to the passive data segments
  if (!passiveDataSegments_.resize(dataSegments.length())) {
    ReportOutOfMemory(cx);
    return false;
  }
  for (size_t i = 0; i < dataSegments.length(); i++) {
    if (!dataSegments[i]->active()) {
      passiveDataSegments_[i] = dataSegments[i];
    }
  }

  // Create InstanceElemSegments for any passive element segments, since these
  // are the ones available at runtime.
  if (!passiveElemSegments_.resize(elemSegments.length())) {
    ReportOutOfMemory(cx);
    return false;
  }
  for (size_t i = 0; i < elemSegments.length(); i++) {
    const ModuleElemSegment& seg = elemSegments[i];
    if (seg.kind == ModuleElemSegment::Kind::Passive) {
      passiveElemSegments_[i] = InstanceElemSegment();
      InstanceElemSegment& instanceSeg = passiveElemSegments_[i];
      if (!instanceSeg.reserve(seg.numElements())) {
        ReportOutOfMemory(cx);
        return false;
      }

      bool ok = iterElemsAnyrefs(cx, seg, [&](uint32_t _, AnyRef ref) -> bool {
        instanceSeg.infallibleAppend(ref);
        return true;
      });
      if (!ok) {
        return false;
      }
    }
  }

  return true;
}
   2830 
// Instance destructor: unregister from the realm and release the side
// tables allocated in Instance::init.  The Instance's own storage is freed
// separately by Instance::destroy.
Instance::~Instance() {
  realm_->wasm.unregisterInstance(*this);

  // These are plain js_malloc/js_calloc allocations made in init(); each may
  // be null if init() bailed out early or the feature was not enabled.
  if (debugFilter_) {
    js_free(debugFilter_);
  }
  if (callRefMetrics_) {
    js_free(callRefMetrics_);
  }
  if (allocSites_) {
    js_free(allocSites_);
  }

  // Any pending exceptions should have been consumed.
  MOZ_ASSERT(pendingException_.isNull());
}
   2847 
// Raise this instance's interrupt flag.
void Instance::setInterrupt() { interrupt_ = true; }
   2849 
// Report whether the interrupt flag is currently set.
bool Instance::isInterrupted() const { return interrupt_; }
   2851 
// Clear this instance's interrupt flag.
void Instance::resetInterrupt() { interrupt_ = false; }
   2853 
// Compute the initial hotness counter for a defined function under lazy
// tiering, based on the function body size relative to the whole code
// section (see LazyTieringHeuristics::estimateIonCompilationCost).
int32_t Instance::computeInitialHotnessCounter(uint32_t funcIndex,
                                               size_t codeSectionSize) {
  MOZ_ASSERT(code().mode() == CompileMode::LazyTiering);
  MOZ_ASSERT(codeSectionSize > 0);
  uint32_t bodyLength = codeTailMeta().funcDefRange(funcIndex).size();
  return LazyTieringHeuristics::estimateIonCompilationCost(bodyLength,
                                                           codeSectionSize);
}
   2862 
// Reset a function's hotness counter to the maximum, i.e. as cold as
// possible, so it will not (re-)trigger tier-up for a long time.
void Instance::resetHotnessCounter(uint32_t funcIndex) {
  funcDefInstanceData(funcIndex)->hotnessCounter = INT32_MAX;
}
   2866 
// Read the current hotness counter for a defined function.
int32_t Instance::readHotnessCounter(uint32_t funcIndex) const {
  return funcDefInstanceData(funcIndex)->hotnessCounter;
}
   2870 
// For each call_ref site belonging to `funcIndex`, distill the gathered
// CallRefMetrics (per-target call counts) into a CallRefHint -- a possibly
// empty list of function indices recommended for inlining at that site --
// and store it via codeTailMeta().setCallRefHint().  See the block comment
// inside the loop for the heuristics applied.
void Instance::submitCallRefHints(uint32_t funcIndex) {
#ifdef JS_JITSPEW
  bool headerShown = false;
#endif

  // Fraction of a site's total calls that the chosen targets must jointly
  // account for; configured as a percentage by InliningHeuristics.
  float requiredHotnessFraction =
      float(InliningHeuristics::rawCallRefPercent()) / 100.0;

  // Limits as set by InliningHeuristics::InliningHeuristics().
  const DebugOnly<float> epsilon = 0.000001;
  MOZ_ASSERT(requiredHotnessFraction >= 0.1 - epsilon);
  MOZ_ASSERT(requiredHotnessFraction <= 1.0 + epsilon);

  CallRefMetricsRange range = codeTailMeta().getFuncDefCallRefs(funcIndex);
  for (uint32_t callRefIndex = range.begin;
       callRefIndex < range.begin + range.length; callRefIndex++) {
    MOZ_RELEASE_ASSERT(callRefIndex < codeTailMeta().numCallRefMetrics);

    // In this loop, for each CallRefMetrics, we create a corresponding
    // CallRefHint.  The CallRefHint is a recommendation of which function(s)
    // to inline into the associated call site.  It is based on call target
    // counts at the call site and incorporates other heuristics as implemented
    // by the code below.
    //
    // Later, when compiling the call site with Ion, the CallRefHint created
    // here is consulted.  That may or may not result in inlining actually
    // taking place, since it depends also on context known only at
    // Ion-compilation time -- inlining depth, inlining budgets, etc.  In
    // particular, if the call site is itself within a function that got
    // inlined multiple times, the call site may be compiled multiple times,
    // with inlining happening in some cases and not in others.
    //
    // The logic below tries to find reasons not to inline into this call site,
    // and if none are found, creates and stores a CallRefHint specifying the
    // recommended targets.
    //
    // The core criterion is that the set of targets that eventually get chosen
    // must together make up at least `requiredHotnessFraction` of all calls
    // made by this call site.

    CallRefMetrics& metrics = callRefMetrics_[callRefIndex];
    MOZ_RELEASE_ASSERT(metrics.checkInvariants());

    // For convenience, work with a copy of the candidates, not directly with
    // `metrics`.
    struct Candidate {
      uint32_t funcIndex = 0;
      uint32_t count = 0;
      Candidate() = default;
      Candidate(const Candidate&) = default;
      Candidate(uint32_t funcIndex, uint32_t count)
          : funcIndex(funcIndex), count(count) {}
    };
    Candidate candidates[CallRefMetrics::NUM_SLOTS];
    size_t numCandidates = 0;

    // If we're going to recommend no inlining here, specify a reason.
    const char* skipReason = nullptr;

    // The total count for targets that are individually tracked.
    uint64_t totalTrackedCount = 0;
    bool allCandidatesAreImports = true;

    // Make a first pass over the candidates, skipping imports.
    for (size_t i = 0; i < CallRefMetrics::NUM_SLOTS; i++) {
      if (!metrics.targets[i]) {
        // Targets are packed at the front; a null entry ends the list.
        break;
      }
      uint32_t targetCount = metrics.counts[i];
      if (targetCount == 0) {
        continue;
      }
      totalTrackedCount += uint64_t(targetCount);

      // We can't inline a call to a function which is in this module but has a
      // different Instance, since the potential callees of any function depend
      // on the instance it is associated with.  Cross-instance calls should
      // have already been excluded from consideration by the code generated by
      // BaseCompiler::updateCallRefMetrics, but given that this is critical,
      // assert it here.
      const DebugOnly<Instance*> targetFuncInstance =
          static_cast<wasm::Instance*>(
              metrics.targets[i]
                  ->getExtendedSlot(FunctionExtended::WASM_INSTANCE_SLOT)
                  .toPrivate());
      MOZ_ASSERT(targetFuncInstance == this);

      uint32_t targetFuncIndex = metrics.targets[i]->wasmFuncIndex();
      if (codeMeta().funcIsImport(targetFuncIndex)) {
        // Imports still contribute to totalTrackedCount above, but are not
        // inlining candidates.
        continue;
      }
      allCandidatesAreImports = false;
      candidates[numCandidates] = Candidate(targetFuncIndex, targetCount);
      numCandidates++;
    }
    MOZ_RELEASE_ASSERT(numCandidates <= CallRefMetrics::NUM_SLOTS);

    // The total count of all calls made by this call site.
    uint64_t totalCount = totalTrackedCount + uint64_t(metrics.countOther);

    // Throw out some obvious cases.
    if (totalCount == 0) {
      // See comments on definition of CallRefMetrics regarding overflow.
      skipReason = "(callsite unused)";
    } else if (metrics.targets[0] == nullptr) {
      // None of the calls made by this call site could be attributed to
      // specific callees; they all got lumped into CallRefMetrics::countOther.
      // See GenerateUpdateCallRefMetricsStub for possible reasons why.
      skipReason = "(no individually tracked targets)";
    } else if (numCandidates > 0 && allCandidatesAreImports) {
      // Imported functions can't be inlined.
      skipReason = "(all targets are imports)";
    }

    // We want to avoid inlining large functions into cold(ish) call sites.
    if (!skipReason) {
      uint32_t totalTargetBodySize = 0;
      for (size_t i = 0; i < numCandidates; i++) {
        totalTargetBodySize +=
            codeTailMeta().funcDefRange(candidates[i].funcIndex).size();
      }
      // Heuristic: require the call count to be at least twice the combined
      // bytecode size of the candidate bodies.
      if (totalCount < 2 * totalTargetBodySize) {
        skipReason = "(callsite too cold)";
      }
    }

    // The final check is the most important.  We need to choose some subset of
    // the candidates which together make up at least `requiredHotnessFraction`
    // of the calls made by this call site.  However, to avoid generated code
    // wasting time on checking guards for relatively unlikely targets, we
    // ignore any candidate that does not achieve at least 10% of
    // `requiredHotnessFraction`.  Also make up a CallRefHints in anticipation
    // of finding a usable set of candidates.
    CallRefHint hints;
    if (!skipReason) {
      MOZ_RELEASE_ASSERT(totalCount > 0);  // Be sure to avoid NaN/Inf problems
      float usableFraction = 0.0;
      uint32_t numUsableCandidates = 0;
      for (size_t i = 0; i < numCandidates; i++) {
        float candidateFraction =
            float(candidates[i].count) / float(totalCount);
        if (candidateFraction >= 0.1 * requiredHotnessFraction) {
          usableFraction += candidateFraction;
          numUsableCandidates++;
          if (!hints.full()) {
            // Add this candidate to `hints`.  This assumes that we
            // (more-or-less) encounter candidates in declining order of
            // hotness.  See block comment on `struct CallRefMetrics`.
            hints.append(candidates[i].funcIndex);
          }
        }
      }
      if (numUsableCandidates == 0) {
        skipReason = "(no target is hot enough)";
      } else if (usableFraction < requiredHotnessFraction) {
        skipReason = "(collectively not hot enough)";
      }
    }

    if (!skipReason) {
      // Success!
      MOZ_ASSERT(hints.length() > 0);
      codeTailMeta().setCallRefHint(callRefIndex, hints);
    } else {
      // Record an empty hint so the site is marked as processed with no
      // inlining recommendation.
      CallRefHint empty;
      codeTailMeta().setCallRefHint(callRefIndex, empty);
    }

#ifdef JS_JITSPEW
    // Everything below is logging only; no effect on the stored hints.
    if (!headerShown) {
      JS_LOG(wasmPerf, Info, "CM=..%06lx  CallRefMetrics for I=..%06lx fI=%-4u",
             (unsigned long)(uintptr_t(&codeMeta()) & 0xFFFFFFL),
             (unsigned long)(uintptr_t(this) & 0xFFFFFFL), funcIndex);
      headerShown = true;
    }

    JS::UniqueChars countsStr;
    for (size_t i = 0; i < CallRefMetrics::NUM_SLOTS; i++) {
      countsStr =
          JS_sprintf_append(std::move(countsStr), "%u ", metrics.counts[i]);
    }
    JS::UniqueChars targetStr;
    if (skipReason) {
      targetStr = JS_smprintf("%s", skipReason);
    } else {
      targetStr = JS_smprintf("%s", "fI ");
      for (size_t i = 0; i < hints.length(); i++) {
        targetStr =
            JS_sprintf_append(std::move(targetStr), "%u%s", hints.get(i),
                              i + 1 < hints.length() ? ", " : "");
      }
    }
    JS_LOG(wasmPerf, Info, "CM=..%06lx    %sother:%u --> %s",
           (unsigned long)(uintptr_t(&codeMeta()) & 0xFFFFFFL), countsStr.get(),
           metrics.countOther, targetStr.get());
#endif
  }
}
   3069 
   3070 bool Instance::debugFilter(uint32_t funcIndex) const {
   3071  return (debugFilter_[funcIndex / 32] >> funcIndex % 32) & 1;
   3072 }
   3073 
   3074 void Instance::setDebugFilter(uint32_t funcIndex, bool value) {
   3075  if (value) {
   3076    debugFilter_[funcIndex / 32] |= (1 << funcIndex % 32);
   3077  } else {
   3078    debugFilter_[funcIndex / 32] &= ~(1 << funcIndex % 32);
   3079  }
   3080 }
   3081 
// Return true iff the access [addr, addr + numBytes) ends in the guard
// region of one of this instance's memories: at or past the memory's current
// accessible length, but still within the mapped size of its buffer.
bool Instance::memoryAccessInGuardRegion(const uint8_t* addr,
                                         unsigned numBytes) const {
  MOZ_ASSERT(numBytes > 0);

  for (uint32_t memoryIndex = 0; memoryIndex < codeMeta().memories.length();
       memoryIndex++) {
    uint8_t* base = memoryBase(memoryIndex).unwrap(/* comparison */);
    if (addr < base) {
      // Address is below this memory's base; cannot be in its guard region.
      continue;
    }

    WasmMemoryObject* mem = memory(memoryIndex);
    size_t lastByteOffset = addr - base + (numBytes - 1);
    // In the guard region iff the last byte of the access is beyond the
    // currently-accessible length yet inside the reserved mapping.
    if (lastByteOffset >= mem->volatileMemoryLength() &&
        lastByteOffset < mem->buffer().wasmMappedSize()) {
      return true;
    }
  }
  return false;
}
   3102 
// Trace all GC edges owned by this Instance: the owning instance object,
// imported/exported function callables, memory objects, tables, non-indirect
// reference-typed globals, tag objects, per-type shapes, call_ref metrics
// targets, the pending exception, passive element segments, and debug state.
void Instance::tracePrivate(JSTracer* trc) {
  // This method is only called from WasmInstanceObject so the only reason why
  // TraceEdge is called is so that the pointer can be updated during a moving
  // GC.
  MOZ_ASSERT_IF(trc->isMarkingTracer(), gc::IsMarked(trc->runtime(), object_));
  TraceEdge(trc, &object_, "wasm instance object");

  // OK to just do one tier here; though the tiers have different funcImports
  // tables, they share the instance object.
  for (uint32_t funcIndex = 0; funcIndex < codeMeta().numFuncImports;
       funcIndex++) {
    TraceNullableEdge(trc, &funcImportInstanceData(funcIndex).callable,
                      "wasm import");
  }

  for (uint32_t funcExportIndex = 0;
       funcExportIndex < codeMeta().numExportedFuncs(); funcExportIndex++) {
    TraceNullableEdge(trc, &funcExportInstanceData(funcExportIndex).func,
                      "wasm func export");
  }

  for (uint32_t memoryIndex = 0;
       memoryIndex < code().codeMeta().memories.length(); memoryIndex++) {
    MemoryInstanceData& memoryData = memoryInstanceData(memoryIndex);
    TraceNullableEdge(trc, &memoryData.memory, "wasm memory object");
  }

  for (const SharedTable& table : tables_) {
    table->trace(trc);
  }

  for (const GlobalDesc& global : code().codeMeta().globals) {
    // Indirect reference globals get traced by the owning WebAssembly.Global.
    if (!global.type().isRefRepr() || global.isConstant() ||
        global.isIndirect()) {
      continue;
    }
    // The global's cell lives inline in the instance data area.
    GCPtr<AnyRef>* obj = (GCPtr<AnyRef>*)(data() + global.offset());
    TraceNullableEdge(trc, obj, "wasm reference-typed global");
  }

  for (uint32_t tagIndex = 0; tagIndex < code().codeMeta().tags.length();
       tagIndex++) {
    TraceNullableEdge(trc, &tagInstanceData(tagIndex).object, "wasm tag");
  }

  const SharedTypeContext& types = codeMeta().types;
  for (uint32_t typeIndex = 0; typeIndex < types->length(); typeIndex++) {
    TypeDefInstanceData* typeDefData = typeDefInstanceData(typeIndex);
    TraceNullableEdge(trc, &typeDefData->shape, "wasm shape");
  }

  // callRefMetrics_ may be null; each metrics entry holds up to NUM_SLOTS
  // JSFunction targets that must be kept alive / updated on a moving GC.
  if (callRefMetrics_) {
    for (uint32_t i = 0; i < codeTailMeta().numCallRefMetrics; i++) {
      CallRefMetrics* metrics = &callRefMetrics_[i];
      MOZ_ASSERT(metrics->checkInvariants());
      for (size_t j = 0; j < CallRefMetrics::NUM_SLOTS; j++) {
        TraceNullableEdge(trc, &metrics->targets[j], "indirect call target");
      }
    }
  }

  TraceNullableEdge(trc, &pendingException_, "wasm pending exception value");
  TraceNullableEdge(trc, &pendingExceptionTag_, "wasm pending exception tag");

  passiveElemSegments_.trace(trc);

  if (maybeDebug_) {
    maybeDebug_->trace(trc);
  }
}
   3174 
// Trace the edge from an Instance to its WasmInstanceObject.  The instance's
// own fields are traced via the object's trace hook (tracePrivate), so
// tracing only the object here guarantees they are traced exactly once.
void js::wasm::TraceInstanceEdge(JSTracer* trc, Instance* instance,
                                 const char* name) {
  if (IsTracerKind(trc, JS::TracerKind::Moving)) {
    // Compacting GC: The Instance does not move so there is nothing to do here.
    // Reading the object from the instance below would be a data race during
    // multi-threaded updates. Compacting GC does not rely on graph traversal
    // to find all edges that need to be updated.
    return;
  }

  // Instance fields are traced by the owning WasmInstanceObject's trace
  // hook. Tracing this ensures they are traced once.
  JSObject* object = instance->objectUnbarriered();
  TraceManuallyBarrieredEdge(trc, &object, name);
}
   3190 
// Compute the lowest stack address covered by |map| for |frame|, i.e. where a
// stackmap-driven scan of this frame must begin.  If
// |highestByteVisitedInPrevFrame| is non-null it is used to assert that
// consecutive frames' stackmaps abut exactly, and is updated to the highest
// byte this map covers so the next frame can make the same check.
static uintptr_t* GetFrameScanStartForStackMap(
    const Frame* frame, const StackMap* map,
    uintptr_t* highestByteVisitedInPrevFrame) {
  // |frame| points somewhere in the middle of the area described by |map|.
  // We have to calculate |scanStart|, the lowest address that is described by
  // |map|, by consulting |map->frameOffsetFromTop|.

  const size_t numMappedBytes = map->header.numMappedWords * sizeof(void*);
  const uintptr_t scanStart = uintptr_t(frame) +
                              (map->header.frameOffsetFromTop * sizeof(void*)) -
                              numMappedBytes;
  // The scan start must be word-aligned.
  MOZ_ASSERT(0 == scanStart % sizeof(void*));

  // Do what we can to assert that, for consecutive wasm frames, their stack
  // maps also abut exactly.  This is a useful sanity check on the sizing of
  // stackmaps.
  //
  // In debug builds, the stackmap construction machinery goes to considerable
  // efforts to ensure that the stackmaps for consecutive frames abut exactly.
  // This is so as to ensure there are no areas of stack inadvertently ignored
  // by a stackmap, nor covered by two stackmaps.  Hence any failure of this
  // assertion is serious and should be investigated.
#ifndef JS_CODEGEN_ARM64
  MOZ_ASSERT_IF(
      highestByteVisitedInPrevFrame && *highestByteVisitedInPrevFrame != 0,
      *highestByteVisitedInPrevFrame + 1 == scanStart);
#endif

  if (highestByteVisitedInPrevFrame) {
    *highestByteVisitedInPrevFrame = scanStart + numMappedBytes - 1;
  }

  // If we have some exit stub words, this means the map also covers an area
  // created by an exit stub, and so the highest word of that should be a
  // constant created by (code created by) GenerateTrapExit.
  MOZ_ASSERT_IF(map->header.numExitStubWords > 0,
                ((uintptr_t*)scanStart)[map->header.numExitStubWords - 1 -
                                        TrapExitDummyValueOffsetFromTop] ==
                    TrapExitDummyValue);

  return (uintptr_t*)scanStart;
}
   3233 
// Trace GC references in one wasm frame using the stackmap recorded for
// |nextPC|.  Returns the highest byte visited in this frame (for the abutment
// check performed by GetFrameScanStartForStackMap on the next frame), or 0 if
// no stackmap is found for |nextPC|.
uintptr_t Instance::traceFrame(JSTracer* trc, const wasm::WasmFrameIter& wfi,
                               uint8_t* nextPC,
                               uintptr_t highestByteVisitedInPrevFrame) {
  const StackMap* map = code().lookupStackMap(nextPC);
  if (!map) {
    return 0;
  }
  Frame* frame = wfi.frame();
  uintptr_t* stackWords =
      GetFrameScanStartForStackMap(frame, map, &highestByteVisitedInPrevFrame);

  // Hand refs off to the GC.
  for (uint32_t i = 0; i < map->header.numMappedWords; i++) {
    if (map->get(i) != StackMap::Kind::AnyRef) {
      // Only AnyRef-kind words are GC references; other kinds are handled by
      // updateFrameForMovingGC or need no tracing.
      continue;
    }

    TraceManuallyBarrieredNullableEdge(trc, (AnyRef*)&stackWords[i],
                                       "Instance::traceWasmFrame: normal word");
  }

  // Deal with any GC-managed fields in the DebugFrame, if it is
  // present and those fields may be live.
  if (map->header.hasDebugFrameWithLiveRefs) {
    DebugFrame* debugFrame = DebugFrame::from(frame);
    char* debugFrameP = (char*)debugFrame;

    // Spilled register results of reference type are rooted here.
    for (size_t i = 0; i < MaxRegisterResults; i++) {
      if (debugFrame->hasSpilledRegisterRefResult(i)) {
        char* resultRefP = debugFrameP + DebugFrame::offsetOfRegisterResult(i);
        TraceManuallyBarrieredNullableEdge(
            trc, (AnyRef*)resultRefP,
            "Instance::traceWasmFrame: DebugFrame::resultResults_");
      }
    }

    // The cached JS return value, if present, may also hold a GC thing.
    if (debugFrame->hasCachedReturnJSValue()) {
      char* cachedReturnJSValueP =
          debugFrameP + DebugFrame::offsetOfCachedReturnJSValue();
      TraceManuallyBarrieredEdge(
          trc, (js::Value*)cachedReturnJSValueP,
          "Instance::traceWasmFrame: DebugFrame::cachedReturnJSValue_");
    }
  }

  return highestByteVisitedInPrevFrame;
}
   3281 
// After a minor (nursery) GC, fix up interior data pointers stored in one
// wasm frame -- array-data and struct-data pointers recorded in the frame's
// stackmap -- to point at the moved storage.  AnyRef words are handled by
// traceFrame, not here.
void Instance::updateFrameForMovingGC(const wasm::WasmFrameIter& wfi,
                                      uint8_t* nextPC, Nursery& nursery) {
  const StackMap* map = code().lookupStackMap(nextPC);
  if (!map) {
    return;
  }
  Frame* frame = wfi.frame();
  uintptr_t* stackWords = GetFrameScanStartForStackMap(frame, map, nullptr);

  // Update array data pointers, both IL and OOL, and struct data pointers,
  // which are only OOL, for any such data areas that moved.  Note, the
  // remapping info consulted by the calls to Nursery::forwardBufferPointer is
  // what previous calls to Nursery::setForwardingPointerWhileTenuring in
  // Wasm{Struct,Array}Object::obj_moved set up.

  for (uint32_t i = 0; i < map->header.numMappedWords; i++) {
    StackMap::Kind kind = map->get(i);

    switch (kind) {
      case StackMap::Kind::ArrayDataPointer: {
        // Make oldDataPointer point at the storage array in the old object.
        uint8_t* oldDataPointer = (uint8_t*)stackWords[i];
        if (WasmArrayObject::isDataInline(oldDataPointer)) {
          // It's a pointer into the object itself.  Figure out where the old
          // object is, ask where it got moved to, and fish out the updated
          // value from the new object.
          WasmArrayObject* oldArray =
              WasmArrayObject::fromInlineDataPointer(oldDataPointer);
          WasmArrayObject* newArray =
              (WasmArrayObject*)gc::MaybeForwarded(oldArray);
          if (newArray != oldArray) {
            stackWords[i] =
                uintptr_t(WasmArrayObject::addressOfInlineData(newArray));
            MOZ_ASSERT(WasmArrayObject::isDataInline((uint8_t*)stackWords[i]));
          }
        } else {
          // Out-of-line data: forward the data header pointer and rebuild the
          // data pointer from the (possibly moved) header.
          WasmArrayObject::DataHeader* oldHeader =
              WasmArrayObject::dataHeaderFromDataPointer(oldDataPointer);
          WasmArrayObject::DataHeader* newHeader = oldHeader;
          nursery.forwardBufferPointer((uintptr_t*)&newHeader);
          if (newHeader != oldHeader) {
            stackWords[i] =
                uintptr_t(WasmArrayObject::dataHeaderToDataPointer(newHeader));
            MOZ_ASSERT(!WasmArrayObject::isDataInline((uint8_t*)stackWords[i]));
          }
        }
        break;
      }

      case StackMap::Kind::StructDataPointer: {
        // It's an unmodified pointer from BufferAllocator, so this is simple.
        nursery.forwardBufferPointer(&stackWords[i]);
        break;
      }

      default: {
        // Other word kinds need no forwarding here.
        break;
      }
    }
  }
}
   3343 
// Return the WasmMemoryObject for the given memory index.
WasmMemoryObject* Instance::memory(uint32_t memoryIndex) const {
  return memoryInstanceData(memoryIndex).memory;
}
   3347 
// Return the base data pointer of the given memory's buffer.  For memory 0
// this must agree with the cached memory0Base_ field.
SharedMem<uint8_t*> Instance::memoryBase(uint32_t memoryIndex) const {
  MOZ_ASSERT_IF(
      memoryIndex == 0,
      memory0Base_ == memory(memoryIndex)->buffer().dataPointerEither());
  return memory(memoryIndex)->buffer().dataPointerEither();
}
   3354 
// Return the raw shared buffer of the given memory; only valid for shared
// memories, as asserted.
SharedArrayRawBuffer* Instance::sharedMemoryBuffer(uint32_t memoryIndex) const {
  MOZ_ASSERT(memory(memoryIndex)->isShared());
  return memory(memoryIndex)->sharedArrayRawBuffer();
}
   3359 
// Return the owning WasmInstanceObject without triggering read barriers;
// for use from contexts (e.g. tracing) where barriers must be avoided.
WasmInstanceObject* Instance::objectUnbarriered() const {
  return object_.unbarrieredGet();
}
   3363 
// Return the owning WasmInstanceObject (barriered read).
WasmInstanceObject* Instance::object() const { return object_; }
   3365 
// Look up (creating on demand) the interpreter entry stub for `funcIndex`,
// returning it in *interpEntry and the function's type in *funcType.
// Reports OOM on failure.  In debug builds, also checks that a real (not
// provisional) jit-entry stub is installed where one is expected.
static bool GetInterpEntryAndEnsureStubs(JSContext* cx, Instance& instance,
                                         uint32_t funcIndex,
                                         const CallArgs& args,
                                         void** interpEntry,
                                         const FuncType** funcType) {
  const FuncExport* funcExport;
  if (!instance.code().getOrCreateInterpEntry(funcIndex, &funcExport,
                                              interpEntry)) {
    ReportOutOfMemory(cx);
    return false;
  }

  *funcType = &instance.codeMeta().getFuncType(funcIndex);

#ifdef DEBUG
  // EnsureEntryStubs() has ensured proper jit-entry stubs have been created and
  // installed in funcIndex's JumpTable entry, so check against the presence of
  // the provisional lazy stub.  See also
  // WasmInstanceObject::getExportedFunction().
  if (!funcExport->hasEagerStubs() && (*funcType)->canHaveJitEntry()) {
    if (!EnsureBuiltinThunksInitialized()) {
      ReportOutOfMemory(cx);
      return false;
    }
    JSFunction& callee = args.callee().as<JSFunction>();
    void* provisionalLazyJitEntryStub = ProvisionalLazyJitEntryStub();
    MOZ_ASSERT(provisionalLazyJitEntryStub);
    MOZ_ASSERT(callee.isWasmWithJitEntry());
    MOZ_ASSERT(*callee.wasmJitEntry() != provisionalLazyJitEntryStub);
  }
#endif
  return true;
}
   3399 
// Convert wasm call results to a JS value in *rval: undefined for no results,
// the single converted value for one result, or an Array (in push order) for
// multiple results.  The register result is read from registerResultLoc and
// stack results (if any) from stackResultsLoc.
bool wasm::ResultsToJSValue(JSContext* cx, ResultType type,
                            void* registerResultLoc,
                            Maybe<char*> stackResultsLoc,
                            MutableHandleValue rval, CoercionLevel level) {
  if (type.empty()) {
    // No results: set to undefined, and we're done.
    rval.setUndefined();
    return true;
  }

  // If we added support for multiple register results, we'd need to establish a
  // convention for how to store them to memory in registerResultLoc.  For now
  // we can punt.
  static_assert(MaxRegisterResults == 1);

  // Stack results written to stackResultsLoc; register result written
  // to registerResultLoc.

  // First, convert the register return value, and prepare to iterate in
  // push order.  Note that if the register result is a reference type,
  // it may be unrooted, so ToJSValue_anyref must not GC in that case.
  ABIResultIter iter(type);
  DebugOnly<bool> usedRegisterResult = false;
  for (; !iter.done(); iter.next()) {
    if (iter.cur().inRegister()) {
      MOZ_ASSERT(!usedRegisterResult);
      if (!ToJSValue<DebugCodegenVal>(cx, registerResultLoc, iter.cur().type(),
                                      rval, level)) {
        return false;
      }
      usedRegisterResult = true;
    }
  }
  MOZ_ASSERT(usedRegisterResult);

  // stackResultsLoc is present exactly when there is more than one result.
  MOZ_ASSERT((stackResultsLoc.isSome()) == (iter.count() > 1));
  if (!stackResultsLoc) {
    // A single result: we're done.
    return true;
  }

  // Otherwise, collect results in an array, in push order.
  Rooted<ArrayObject*> array(cx, NewDenseEmptyArray(cx));
  if (!array) {
    return false;
  }
  RootedValue tmp(cx);
  // Iterate in reverse (push) order; the register result (already in rval)
  // is pushed at its position among the stack results.
  for (iter.switchToPrev(); !iter.done(); iter.prev()) {
    const ABIResult& result = iter.cur();
    if (result.onStack()) {
      char* loc = stackResultsLoc.value() + result.stackOffset();
      if (!ToJSValue<DebugCodegenVal>(cx, loc, result.type(), &tmp, level)) {
        return false;
      }
      if (!NewbornArrayPush(cx, array, tmp)) {
        return false;
      }
    } else {
      if (!NewbornArrayPush(cx, array, rval)) {
        return false;
      }
    }
  }
  rval.set(ObjectValue(*array));
  return true;
}
   3466 
// RAII helper for calls into wasm that produce multiple results: owns a
// zeroed stack-results area sized for `type`, keeps any reference-typed
// results in that area rooted for GC while it is live, and finally converts
// the results to a JS value via ResultsToJSValue().
class MOZ_RAII ReturnToJSResultCollector {
  // Roots the reference-typed slots of the stack-results area for the
  // lifetime of the collector.
  class MOZ_RAII StackResultsRooter : public JS::CustomAutoRooter {
    ReturnToJSResultCollector& collector_;

   public:
    StackResultsRooter(JSContext* cx, ReturnToJSResultCollector& collector)
        : JS::CustomAutoRooter(cx), collector_(collector) {}

    void trace(JSTracer* trc) final {
      // Trace only on-stack results whose representation is a GC reference.
      for (ABIResultIter iter(collector_.type_); !iter.done(); iter.next()) {
        const ABIResult& result = iter.cur();
        if (result.onStack() && result.type().isRefRepr()) {
          char* loc = collector_.stackResultsArea_.get() + result.stackOffset();
          AnyRef* refLoc = reinterpret_cast<AnyRef*>(loc);
          TraceNullableRoot(trc, refLoc, "StackResultsRooter::trace");
        }
      }
    }
  };
  friend class StackResultsRooter;

  ResultType type_;
  UniquePtr<char[], JS::FreePolicy> stackResultsArea_;
  Maybe<StackResultsRooter> rooter_;

 public:
  explicit ReturnToJSResultCollector(const ResultType& type) : type_(type) {};
  // Allocate the (zeroed) stack-results area if any results go on the stack,
  // and install the rooter if any of them are reference-typed.  Returns false
  // on OOM.
  bool init(JSContext* cx) {
    bool needRooter = false;
    ABIResultIter iter(type_);
    for (; !iter.done(); iter.next()) {
      const ABIResult& result = iter.cur();
      if (result.onStack() && result.type().isRefRepr()) {
        needRooter = true;
      }
    }
    uint32_t areaBytes = iter.stackBytesConsumedSoFar();
    MOZ_ASSERT_IF(needRooter, areaBytes > 0);
    if (areaBytes > 0) {
      // It is necessary to zero storage for ref results, and it doesn't
      // hurt to do so for other POD results.
      stackResultsArea_ = cx->make_zeroed_pod_array<char>(areaBytes);
      if (!stackResultsArea_) {
        return false;
      }
      if (needRooter) {
        rooter_.emplace(cx, *this);
      }
    }
    return true;
  }

  // The buffer the wasm call writes its stack results into; only valid after
  // a successful init() that allocated an area.
  void* stackResultsArea() {
    MOZ_ASSERT(stackResultsArea_);
    return stackResultsArea_.get();
  }

  // Convert the collected results (register + stack) to a JS value in rval.
  bool collect(JSContext* cx, void* registerResultLoc, MutableHandleValue rval,
               CoercionLevel level) {
    Maybe<char*> stackResultsLoc =
        stackResultsArea_ ? Some(stackResultsArea_.get()) : Nothing();
    return ResultsToJSValue(cx, type_, registerResultLoc, stackResultsLoc, rval,
                            level);
  }
};
   3532 
   3533 /*
   3534 * [SMDOC] Exported wasm functions and the jit-entry stubs
   3535 *
   3536 * ## The kinds of exported functions
   3537 *
   3538 * There are several kinds of exported wasm functions.  /Explicitly/ exported
   3539 * functions are:
   3540 *
   3541 *  - any wasm function exported via the export section
   3542 *  - any asm.js export
   3543 *  - the module start function
   3544 *
   3545 * There are also /implicitly/ exported functions, these are the functions whose
   3546 * indices in the module are referenced outside the code segment, eg, in element
   3547 * segments and in global initializers.
   3548 *
   3549 * ## Wasm functions as JSFunctions
   3550 *
 * Any exported function can be manipulated by JS and wasm code, and to both,
 * the exported function is represented as a JSFunction.  To JS, that means the
   3553 * function can be called in the same way as any other JSFunction.  To Wasm, it
   3554 * means that the function is a reference with the same representation as
   3555 * externref.
   3556 *
   3557 * However, the JSFunction object is created only when the function value is
   3558 * actually exposed to JS the first time.  The creation is performed by
   3559 * getExportedFunction(), below, as follows:
   3560 *
   3561 *  - A function exported via the export section (or from asm.js) is created
   3562 *    when the export object is created, which happens at instantiation time.
   3563 *
   3564 *  - A function implicitly exported via a table is created when the table
   3565 *    element is read (by JS or wasm) and a function value is needed to
   3566 *    represent that value.  Functions stored in tables by initializers have a
   3567 *    special representation that does not require the function object to be
   3568 *    created, as long as the initializing element segment uses the more
   3569 *    efficient index encoding instead of the more general expression encoding.
   3570 *
   3571 *  - A function implicitly exported via a global initializer is created when
   3572 *    the global is initialized.
   3573 *
   3574 *  - A function referenced from a ref.func instruction in code is created when
   3575 *    that instruction is executed the first time.
   3576 *
   3577 * The JSFunction representing a wasm function never changes: every reference to
   3578 * the wasm function that exposes the JSFunction gets the same JSFunction.  In
   3579 * particular, imported functions already have a JSFunction representation (from
   3580 * JS or from their home module), and will be exposed using that representation.
   3581 *
   3582 * The mapping from a wasm function to its JSFunction is instance-specific, and
   3583 * held in a hashmap in the instance.  If a module is shared across multiple
   3584 * instances, possibly in multiple threads, each instance will have its own
   3585 * JSFunction representing the wasm function.
   3586 *
   3587 * ## Stubs -- interpreter, eager, lazy, provisional, and absent
   3588 *
   3589 * While a Wasm exported function is just a JSFunction, the internal wasm ABI is
   3590 * neither the C++ ABI nor the JS JIT ABI, so there needs to be an extra step
   3591 * when C++ or JS JIT code calls wasm code.  For this, execution passes through
   3592 * a stub that is adapted to both the JS caller and the wasm callee.
   3593 *
   3594 * ### Interpreter stubs and jit-entry stubs
   3595 *
   3596 * When JS interpreted code calls a wasm function, we end up in
   3597 * Instance::callExport() to execute the call.  This function must enter wasm,
   3598 * and to do this it uses a stub that is specific to the wasm function (see
   3599 * GenerateInterpEntry) that is callable with the C++ interpreter ABI and which
   3600 * will convert arguments as necessary and enter compiled wasm code.
   3601 *
   3602 * The interpreter stub is created eagerly, when the module is compiled.
   3603 *
   3604 * However, the interpreter call path is slow, and when JS jitted code calls
   3605 * wasm we want to do better.  In this case, there is a different, optimized
   3606 * stub that is to be invoked, and it uses the JIT ABI.  This is the jit-entry
   3607 * stub for the function.  Jitted code will call a wasm function's jit-entry
   3608 * stub to invoke the function with the JIT ABI.  The stub will adapt the call
   3609 * to the wasm ABI.
   3610 *
   3611 * Some jit-entry stubs are created eagerly and some are created lazily.
   3612 *
   3613 * ### Eager jit-entry stubs
   3614 *
   3615 * The explicitly exported functions have stubs created for them eagerly.  Eager
   3616 * stubs are created with their tier when the module is compiled, see
   3617 * ModuleGenerator::finishCodeBlock(), which calls wasm::GenerateStubs(), which
   3618 * generates stubs for functions with eager stubs.
   3619 *
   3620 * An eager stub for tier-1 is upgraded to tier-2 if the module tiers up, see
   3621 * below.
   3622 *
   3623 * ### Lazy jit-entry stubs
   3624 *
   3625 * Stubs are created lazily for all implicitly exported functions.  These
   3626 * functions may flow out to JS, but will only need a stub if they are ever
   3627 * called from jitted code.  (That's true for explicitly exported functions too,
   3628 * but for them the presumption is that they will be called.)
   3629 *
   3630 * Lazy stubs are created only when they are needed, and they are /doubly/ lazy,
   3631 * see getExportedFunction(), below: A function implicitly exported via a table
   3632 * or global may be manipulated eagerly by host code without actually being
   3633 * called (maybe ever), so we do not generate a lazy stub when the function
   3634 * object escapes to JS, but instead delay stub generation until the function is
   3635 * actually called.
   3636 *
   3637 * ### The provisional lazy jit-entry stub
   3638 *
   3639 * However, JS baseline compilation needs to have a stub to start with in order
   3640 * to allow it to attach CacheIR data to the call (or it deoptimizes the call as
   3641 * a C++ call).  Thus when the JSFunction for the wasm export is retrieved by JS
   3642 * code, a /provisional/ lazy jit-entry stub is associated with the function.
   3643 * The stub will invoke the wasm function on the slow interpreter path via
   3644 * callExport - if the function is ever called - and will cause a fast jit-entry
   3645 * stub to be created at the time of the call.  The provisional lazy stub is
   3646 * shared globally, it contains no function-specific or context-specific data.
   3647 *
   3648 * Thus, the final lazy jit-entry stubs are eventually created by
   3649 * Instance::callExport, when a call is routed through it on the slow path for
   3650 * any of the reasons given above.
   3651 *
   3652 * ### Absent jit-entry stubs
   3653 *
   3654 * Some functions never get jit-entry stubs.  The predicate canHaveJitEntry()
   3655 * determines if a wasm function gets a stub, and it will deny this if the
   3656 * function's signature exposes non-JS-compatible types (such as v128) or if
   3657 * stub optimization has been disabled by a jit option.  Calls to these
   3658 * functions will continue to go via callExport and use the slow interpreter
   3659 * stub.
   3660 *
   3661 * ## The jit-entry jump table
   3662 *
   3663 * The mapping from the exported function to its jit-entry stub is implemented
   3664 * by the jit-entry jump table in the JumpTables object (see WasmCode.h).  The
   3665 * jit-entry jump table entry for a function holds a stub that the jit can call
   3666 * to perform fast calls.
   3667 *
   3668 * While there is a single contiguous jump table, it has two logical sections:
   3669 * one for eager stubs, and one for lazy stubs.  These sections are initialized
   3670 * and updated separately, using logic that is specific to each section.
   3671 *
   3672 * The value of the table element for an eager stub is a pointer to the stub
   3673 * code in the current tier.  The pointer is installed just after the creation
   3674 * of the stub, before any code in the module is executed.  If the module later
   3675 * tiers up, the eager jit-entry stub for tier-1 code is replaced by one for
   3676 * tier-2 code, see the next section.
   3677 *
   3678 * Initially the value of the jump table element for a lazy stub is null.
   3679 *
   3680 * If the function is retrieved by JS (by getExportedFunction()) and is not
   3681 * barred from having a jit-entry, then the stub is upgraded to the shared
   3682 * provisional lazy jit-entry stub.  This upgrade happens to be racy if the
   3683 * module is shared, and so the update is atomic and only happens if the entry
   3684 * is already null.  Since the provisional lazy stub is shared, this is fine; if
   3685 * several threads try to upgrade at the same time, it is to the same shared
   3686 * value.
   3687 *
   3688 * If the retrieved function is later invoked (via callExport()), the stub is
   3689 * upgraded to an actual jit-entry stub for the current code tier, again if the
   3690 * function is allowed to have a jit-entry.  This is not racy -- though multiple
   3691 * threads can be trying to create a jit-entry stub at the same time, they do so
   3692 * under a lock and only the first to take the lock will be allowed to create a
   3693 * stub, the others will reuse the first-installed stub.
   3694 *
   3695 * If the module later tiers up, the lazy jit-entry stub for tier-1 code (if it
   3696 * exists) is replaced by one for tier-2 code, see the next section.
   3697 *
   3698 * (Note, the InterpEntry stub is never stored in the jit-entry table, as it
   3699 * uses the C++ ABI, not the JIT ABI.  It is accessible through the
   3700 * FunctionEntry.)
   3701 *
   3702 * ### Interaction of the jit-entry jump table and tiering
   3703 *
   3704 * (For general info about tiering, see the comment in WasmCompile.cpp.)
   3705 *
   3706 * The jit-entry stub, whether eager or lazy, is specific to a code tier - a
   3707 * stub will invoke the code for its function for the tier.  When we tier up,
   3708 * new jit-entry stubs must be created that reference tier-2 code, and must then
   3709 * be patched into the jit-entry table.  The complication here is that, since
   3710 * the jump table is shared with its code between instances on multiple threads,
   3711 * tier-1 code is running on other threads and new tier-1 specific jit-entry
   3712 * stubs may be created concurrently with trying to create the tier-2 stubs on
   3713 * the thread that performs the tiering-up.  Indeed, there may also be
   3714 * concurrent attempts to upgrade null jit-entries to the provisional lazy stub.
   3715 *
   3716 * Eager stubs:
   3717 *
   3718 *  - Eager stubs for tier-2 code are patched in racily by Module::finishTier2()
   3719 *    along with code pointers for tiering; nothing conflicts with these writes.
   3720 *
   3721 * Lazy stubs:
   3722 *
   3723 *  - An upgrade from a null entry to a lazy provisional stub is atomic and can
   3724 *    only happen if the entry is null, and it only happens in
   3725 *    getExportedFunction().  No lazy provisional stub will be installed if
   3726 *    there's another stub present.
   3727 *
   3728 *  - The lazy tier-appropriate stub is installed by callExport() (really by
   3729 *    EnsureEntryStubs()) during the first invocation of the exported function
   3730 *    that reaches callExport().  That invocation must be from within JS, and so
   3731 *    the jit-entry element can't be null, because a prior getExportedFunction()
   3732 *    will have ensured that it is not: the lazy provisional stub will have been
   3733 *    installed.  Hence the installing of the lazy tier-appropriate stub does
   3734 *    not race with the installing of the lazy provisional stub.
   3735 *
   3736 *  - A lazy tier-1 stub is upgraded to a lazy tier-2 stub by
   3737 *    Module::finishTier2().  The upgrade needs to ensure that all tier-1 stubs
   3738 *    are upgraded, and that once the upgrade is finished, callExport() will
   3739 *    only create tier-2 lazy stubs.  (This upgrading does not upgrade lazy
   3740 *    provisional stubs or absent stubs.)
   3741 *
   3742 *    The locking protocol ensuring that all stubs are upgraded properly and
   3743 *    that the system switches to creating tier-2 stubs is implemented in
   3744 *    Module::finishTier2() and EnsureEntryStubs().
   3745 *
   3746 * ## Stub lifetimes and serialization
   3747 *
   3748 * Eager jit-entry stub code, along with stub code for import functions, is
   3749 * serialized along with the tier-2 code for the module.
   3750 *
   3751 * Lazy stub code and thunks for builtin functions (including the provisional
   3752 * lazy jit-entry stub) are never serialized.
   3753 */
   3754 
   3755 static bool WasmCall(JSContext* cx, unsigned argc, Value* vp) {
   3756  CallArgs args = CallArgsFromVp(argc, vp);
   3757  RootedFunction callee(cx, &args.callee().as<JSFunction>());
   3758 
   3759  Instance& instance = callee->wasmInstance();
   3760  uint32_t funcIndex = callee->wasmFuncIndex();
   3761  return instance.callExport(cx, funcIndex, args);
   3762 }
   3763 
// Returns (via 'result') the JSFunction object representing wasm function
// 'funcIndex' in this instance, creating and caching it on first request.
// See the long comment above for background on eager/lazy/provisional stubs
// and the jit-entry jump table.  Returns false only on OOM-style failure.
bool Instance::getExportedFunction(JSContext* cx, uint32_t funcIndex,
                                  MutableHandleFunction result) {
 uint32_t funcExportIndex = codeMeta().findFuncExportIndex(funcIndex);
 FuncExportInstanceData& instanceData =
     funcExportInstanceData(funcExportIndex);

 // Early exit if we've already found or created this exported function
 if (instanceData.func) {
   result.set(instanceData.func);
   return true;
 }

 // If this is an import, we need to recover the original function to maintain
 // reference equality between a re-exported function and 'ref.func'. The
 // identity of the imported function object is stable across tiers, which is
 // what we want.
 //
 // Use the imported function only if it is an exported function, otherwise
 // fall through to get a (possibly new) exported function.
 if (funcIndex < codeMeta().numFuncImports) {
   FuncImportInstanceData& import = funcImportInstanceData(funcIndex);
   if (import.callable->is<JSFunction>()) {
     JSFunction* fun = &import.callable->as<JSFunction>();
     if (!codeMeta().funcImportsAreJS && fun->isWasm()) {
       // Cache the imported wasm function so later lookups take the early
       // exit above.
       instanceData.func = fun;
       result.set(fun);
       return true;
     }
   }
 }

 // Otherwise this is a locally defined function which we've never created a
 // function object for yet.
 const CodeBlock& codeBlock = code().funcCodeBlock(funcIndex);
 const CodeRange& codeRange = codeBlock.codeRange(funcIndex);
 const TypeDef& funcTypeDef = codeMeta().getFuncTypeDef(funcIndex);
 unsigned numArgs = funcTypeDef.funcType().args().length();
 Instance* instance = const_cast<Instance*>(this);
 const SuperTypeVector* superTypeVector = funcTypeDef.superTypeVector();
 void* uncheckedCallEntry =
     codeBlock.base() + codeRange.funcUncheckedCallEntry();

 if (isAsmJS()) {
   // asm.js needs to act like a normal JS function which means having the
   // name from the original source and being callable as a constructor.
   Rooted<JSAtom*> name(cx, getFuncDisplayAtom(cx, funcIndex));
   if (!name) {
     return false;
   }
   result.set(NewNativeConstructor(cx, WasmCall, numArgs, name,
                                   gc::AllocKind::FUNCTION_EXTENDED,
                                   TenuredObject, FunctionFlags::ASMJS_CTOR));
   if (!result) {
     return false;
   }
   MOZ_ASSERT(result->isTenured());
   STATIC_ASSERT_WASM_FUNCTIONS_TENURED;

   // asm.js does not support jit entries.
   result->initWasm(funcIndex, instance, superTypeVector, uncheckedCallEntry);
 } else {
   // Plain wasm functions are named by their index.
   Rooted<JSAtom*> name(cx, NumberToAtom(cx, funcIndex));
   if (!name) {
     return false;
   }
   RootedObject proto(cx);
#ifdef ENABLE_WASM_TYPE_REFLECTIONS
   proto = GlobalObject::getOrCreatePrototype(cx, JSProto_WasmFunction);
   if (!proto) {
     return false;
   }
#endif
   result.set(NewFunctionWithProto(
       cx, WasmCall, numArgs, FunctionFlags::WASM, nullptr, name, proto,
       gc::AllocKind::FUNCTION_EXTENDED, TenuredObject));
   if (!result) {
     return false;
   }
   MOZ_ASSERT(result->isTenured());
   STATIC_ASSERT_WASM_FUNCTIONS_TENURED;

   // Some applications eagerly access all table elements which currently
   // triggers worst-case behavior for lazy stubs, since each will allocate a
   // separate 4kb code page. Most eagerly-accessed functions are not called,
   // so use a shared, provisional (and slow) lazy stub as JitEntry and wait
   // until Instance::callExport() to create the fast entry stubs.
   if (funcTypeDef.funcType().canHaveJitEntry()) {
     const FuncExport& funcExport = codeBlock.lookupFuncExport(funcIndex);
     if (!funcExport.hasEagerStubs()) {
       if (!EnsureBuiltinThunksInitialized()) {
         return false;
       }
       void* provisionalLazyJitEntryStub = ProvisionalLazyJitEntryStub();
       MOZ_ASSERT(provisionalLazyJitEntryStub);
       // Atomic install; only upgrades a null entry (see comment above on
       // racing upgrades when the module is shared).
       code().setJitEntryIfNull(funcIndex, provisionalLazyJitEntryStub);
     }
     result->initWasmWithJitEntry(code().getAddressOfJitEntry(funcIndex),
                                  instance, superTypeVector,
                                  uncheckedCallEntry);
   } else {
     // No jit entry possible (e.g. signature exposes v128); calls go via
     // the interpreter stub.
     result->initWasm(funcIndex, instance, superTypeVector,
                      uncheckedCallEntry);
   }
 }

 // Cache the newly created function for subsequent lookups.
 instanceData.func = result;
 return true;
}
   3872 
// Slow-path invocation of exported wasm function 'funcIndex' from C++ (and
// hence from the JS interpreter and deoptimized JIT calls): ensures entry
// stubs exist, coerces the JS arguments in 'args' to wasm values, calls the
// function's interpreter entry stub, and converts the results back into
// args.rval().  'level' selects the JS<->wasm coercion strictness.
bool Instance::callExport(JSContext* cx, uint32_t funcIndex,
                         const CallArgs& args, CoercionLevel level) {
 if (memory0Base_) {
   // If there has been a moving grow, this Instance should have been notified.
   MOZ_RELEASE_ASSERT(memoryBase(0).unwrap() == memory0Base_);
 }

 void* interpEntry;
 const FuncType* funcType;
 if (!GetInterpEntryAndEnsureStubs(cx, *this, funcIndex, args, &interpEntry,
                                   &funcType)) {
   return false;
 }

 // Lossless coercions can handle unexposable arguments or returns. This is
 // only available in testing code.
 if (level != CoercionLevel::Lossless && funcType->hasUnexposableArgOrRet()) {
   JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                            JSMSG_WASM_BAD_VAL_TYPE);
   return false;
 }

 ArgTypeVector argTypes(*funcType);
 ResultType resultType(ResultType::Vector(funcType->results()));
 ReturnToJSResultCollector results(resultType);
 if (!results.init(cx)) {
   return false;
 }

 // The calling convention for an external call into wasm is to pass an
 // array of 16-byte values where each value contains either a coerced int32
 // (in the low word), or a double value (in the low dword) value, with the
 // coercions specified by the wasm signature. The external entry point
 // unpacks this array into the system-ABI-specified registers and stack
 // memory and then calls into the internal entry point. The return value is
 // stored in the first element of the array (which, therefore, must have
 // length >= 1).
 Vector<ExportArg, 8> exportArgs(cx);
 if (!exportArgs.resize(
         std::max<size_t>(1, argTypes.lengthWithStackResults()))) {
   return false;
 }

 // Rooted storage for reference-typed arguments while coercion below may GC.
 Rooted<GCVector<AnyRef, 8, SystemAllocPolicy>> refs(cx);

 DebugCodegen(DebugChannel::Function, "wasm-function[%d] arguments [",
              funcIndex);
 RootedValue v(cx);
 for (size_t i = 0; i < argTypes.lengthWithStackResults(); ++i) {
   void* rawArgLoc = &exportArgs[i];
   if (argTypes.isSyntheticStackResultPointerArg(i)) {
     // This slot carries the stack-results area pointer, not a user arg.
     *reinterpret_cast<void**>(rawArgLoc) = results.stackResultsArea();
     continue;
   }
   size_t naturalIdx = argTypes.naturalIndex(i);
   // Missing JS arguments are treated as undefined, per JS convention.
   v = naturalIdx < args.length() ? args[naturalIdx] : UndefinedValue();
   ValType type = funcType->arg(naturalIdx);
   if (!ToWebAssemblyValue<DebugCodegenVal>(cx, v, type, rawArgLoc, true,
                                            level)) {
     return false;
   }
   if (type.isRefRepr()) {
     void* ptr = *reinterpret_cast<void**>(rawArgLoc);
     // Store in rooted array until no more GC is possible.
     RootedAnyRef ref(cx, AnyRef::fromCompiledCode(ptr));
     if (!refs.emplaceBack(ref.get())) {
       return false;
     }
     DebugCodegen(DebugChannel::Function, "/(#%d)", int(refs.length() - 1));
   }
 }

 // Copy over reference values from the rooted array, if any.
 if (refs.length() > 0) {
   DebugCodegen(DebugChannel::Function, "; ");
   size_t nextRef = 0;
   for (size_t i = 0; i < argTypes.lengthWithStackResults(); ++i) {
     if (argTypes.isSyntheticStackResultPointerArg(i)) {
       continue;
     }
     size_t naturalIdx = argTypes.naturalIndex(i);
     ValType type = funcType->arg(naturalIdx);
     if (type.isRefRepr()) {
       AnyRef* rawArgLoc = (AnyRef*)&exportArgs[i];
       *rawArgLoc = refs[nextRef++];
       DebugCodegen(DebugChannel::Function, " ref(#%d) := %p ",
                    int(nextRef - 1), *(void**)rawArgLoc);
     }
   }
   refs.clear();
 }

 DebugCodegen(DebugChannel::Function, "]\n");

 // Ensure pending exception is cleared before and after (below) call.
 MOZ_ASSERT(pendingException_.isNull());

 {
   JitActivation activation(cx);

   // Call the per-exported-function trampoline created by GenerateEntry.
   auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, interpEntry);
   if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), this)) {
     return false;
   }
 }

 MOZ_ASSERT(pendingException_.isNull());

 if (isAsmJS() && args.isConstructing()) {
   // By spec, when a JS function is called as a constructor and this
   // function returns a primary type, which is the case for all asm.js
   // exported functions, the returned value is discarded and an empty
   // object is returned instead.
   PlainObject* obj = NewPlainObject(cx);
   if (!obj) {
     return false;
   }
   args.rval().set(ObjectValue(*obj));
   return true;
 }

 // Note that we're not rooting the register result, if any; we depend
 // on ResultsCollector::collect to root the value on our behalf,
 // before causing any GC.
 void* registerResultLoc = &exportArgs[0];
 DebugCodegen(DebugChannel::Function, "wasm-function[%d]; results [",
              funcIndex);
 if (!results.collect(cx, registerResultLoc, args.rval(), level)) {
   return false;
 }
 DebugCodegen(DebugChannel::Function, "]\n");

 return true;
}
   4008 
   4009 void Instance::setPendingException(Handle<WasmExceptionObject*> exn) {
   4010  pendingException_ = AnyRef::fromJSObject(*exn.get());
   4011  pendingExceptionTag_ =
   4012      AnyRef::fromJSObject(exn->as<WasmExceptionObject>().tag());
   4013 }
   4014 
   4015 void Instance::constantGlobalGet(uint32_t globalIndex,
   4016                                 MutableHandleVal result) {
   4017  MOZ_RELEASE_ASSERT(globalIndex < maxInitializedGlobalsIndexPlus1_);
   4018  const GlobalDesc& global = codeMeta().globals[globalIndex];
   4019 
   4020  // Constant globals are baked into the code and never stored in global data.
   4021  if (global.isConstant()) {
   4022    // We can just re-evaluate the global initializer to get the value.
   4023    result.set(Val(global.constantValue()));
   4024    return;
   4025  }
   4026 
   4027  // Otherwise, we need to load the initialized value from its cell.
   4028  const void* cell = addressOfGlobalCell(global);
   4029  result.address()->initFromHeapLocation(global.type(), cell);
   4030 }
   4031 
   4032 WasmStructObject* Instance::constantStructNewDefault(JSContext* cx,
   4033                                                     uint32_t typeIndex) {
   4034  // We assume that constant structs will have a long lifetime and hence
   4035  // allocate them directly in the tenured heap.  Also, we have to dynamically
   4036  // decide whether an OOL storage area is required.  This is slow(er); do not
   4037  // call here from generated code.
   4038  TypeDefInstanceData* typeDefData = typeDefInstanceData(typeIndex);
   4039  const wasm::TypeDef* typeDef = typeDefData->typeDef;
   4040  MOZ_ASSERT(typeDef->kind() == wasm::TypeDefKind::Struct);
   4041 
   4042  bool needsOOL = typeDef->structType().hasOOL();
   4043  return needsOOL ? WasmStructObject::createStructOOL<true>(
   4044                        cx, typeDefData, nullptr, gc::Heap::Tenured)
   4045                  : WasmStructObject::createStructIL<true>(
   4046                        cx, typeDefData, nullptr, gc::Heap::Tenured);
   4047 }
   4048 
   4049 WasmArrayObject* Instance::constantArrayNewDefault(JSContext* cx,
   4050                                                   uint32_t typeIndex,
   4051                                                   uint32_t numElements) {
   4052  TypeDefInstanceData* typeDefData = typeDefInstanceData(typeIndex);
   4053  // We assume that constant arrays will have a long lifetime and hence
   4054  // allocate them directly in the tenured heap.
   4055  return WasmArrayObject::createArray<true>(cx, typeDefData, nullptr,
   4056                                            gc::Heap::Tenured, numElements);
   4057 }
   4058 
   4059 JSAtom* Instance::getFuncDisplayAtom(JSContext* cx, uint32_t funcIndex) const {
   4060  // The "display name" of a function is primarily shown in Error.stack which
   4061  // also includes location, so use getFuncNameBeforeLocation.
   4062  UTF8Bytes name;
   4063  bool ok;
   4064  if (codeMetaForAsmJS()) {
   4065    ok = codeMetaForAsmJS()->getFuncNameForAsmJS(funcIndex, &name);
   4066  } else {
   4067    ok = codeMeta().getFuncNameForWasm(NameContext::BeforeLocation, funcIndex,
   4068                                       codeTailMeta().nameSectionPayload.get(),
   4069                                       &name);
   4070  }
   4071  if (!ok) {
   4072    return nullptr;
   4073  }
   4074 
   4075  return AtomizeUTF8Chars(cx, name.begin(), name.length());
   4076 }
   4077 
   4078 void Instance::ensureProfilingLabels(bool profilingEnabled) const {
   4079  return code_->ensureProfilingLabels(profilingEnabled);
   4080 }
   4081 
   4082 void Instance::onMovingGrowMemory(const WasmMemoryObject* memory) {
   4083  MOZ_ASSERT(!isAsmJS());
   4084  MOZ_ASSERT(!memory->isShared());
   4085 
   4086  for (uint32_t i = 0; i < codeMeta().memories.length(); i++) {
   4087    MemoryInstanceData& md = memoryInstanceData(i);
   4088    if (memory != md.memory) {
   4089      continue;
   4090    }
   4091    ArrayBufferObject& buffer = md.memory->buffer().as<ArrayBufferObject>();
   4092 
   4093    md.base = buffer.dataPointer();
   4094    size_t limit = md.memory->boundsCheckLimit();
   4095 #if !defined(JS_64BIT)
   4096    // We assume that the limit is a 32-bit quantity
   4097    MOZ_ASSERT(limit <= UINT32_MAX);
   4098 #endif
   4099    md.boundsCheckLimit = limit;
   4100 #ifdef ENABLE_WASM_CUSTOM_PAGE_SIZES
   4101    md.boundsCheckLimit16 = limit > 1 ? limit - 1 : 0;
   4102    md.boundsCheckLimit32 = limit > 3 ? limit - 3 : 0;
   4103    md.boundsCheckLimit64 = limit > 7 ? limit - 7 : 0;
   4104    md.boundsCheckLimit128 = limit > 15 ? limit - 15 : 0;
   4105 #endif
   4106 
   4107    if (i == 0) {
   4108      memory0Base_ = md.base;
   4109      memory0BoundsCheckLimit_ = md.boundsCheckLimit;
   4110    }
   4111  }
   4112 }
   4113 
   4114 void Instance::onMovingGrowTable(const Table* table) {
   4115  MOZ_ASSERT(!isAsmJS());
   4116 
   4117  // `table` has grown and we must update cached data for it.  Importantly,
   4118  // we can have cached those data in more than one location: we'll have
   4119  // cached them once for each time the table was imported into this instance.
   4120  //
   4121  // When an instance is registered as an observer of a table it is only
   4122  // registered once, regardless of how many times the table was imported.
   4123  // Thus when a table is grown, onMovingGrowTable() is only invoked once for
   4124  // the table.
   4125  //
   4126  // Ergo we must go through the entire list of tables in the instance here
   4127  // and check for the table in all the cached-data slots; we can't exit after
   4128  // the first hit.
   4129 
   4130  for (uint32_t i = 0; i < tables_.length(); i++) {
   4131    if (tables_[i] != table) {
   4132      continue;
   4133    }
   4134    TableInstanceData& table = tableInstanceData(i);
   4135    table.length = tables_[i]->length();
   4136    table.elements = tables_[i]->instanceElements();
   4137  }
   4138 }
   4139 
   4140 JSString* Instance::createDisplayURL(JSContext* cx) {
   4141  // In the best case, we simply have a URL, from a streaming compilation of a
   4142  // fetched Response.
   4143 
   4144  if (codeMeta().scriptedCaller().filenameIsURL) {
   4145    const char* filename = codeMeta().scriptedCaller().filename.get();
   4146    return NewStringCopyUTF8N(cx, JS::UTF8Chars(filename, strlen(filename)));
   4147  }
   4148 
   4149  // Otherwise, build wasm module URL from following parts:
   4150  // - "wasm:" as protocol;
   4151  // - URI encoded filename from metadata (if can be encoded), plus ":";
   4152  // - 64-bit hash of the module bytes (as hex dump).
   4153 
   4154  JSStringBuilder result(cx);
   4155  if (!result.append("wasm:")) {
   4156    return nullptr;
   4157  }
   4158 
   4159  if (const char* filename = codeMeta().scriptedCaller().filename.get()) {
   4160    // EncodeURI returns false due to invalid chars or OOM -- fail only
   4161    // during OOM.
   4162    JSString* filenamePrefix = EncodeURI(cx, filename, strlen(filename));
   4163    if (!filenamePrefix) {
   4164      if (cx->isThrowingOutOfMemory()) {
   4165        return nullptr;
   4166      }
   4167 
   4168      MOZ_ASSERT(!cx->isThrowingOverRecursed());
   4169      cx->clearPendingException();
   4170      return nullptr;
   4171    }
   4172 
   4173    if (!result.append(filenamePrefix)) {
   4174      return nullptr;
   4175    }
   4176  }
   4177 
   4178  if (code().debugEnabled()) {
   4179    if (!result.append(":")) {
   4180      return nullptr;
   4181    }
   4182 
   4183    const ModuleHash& hash = codeTailMeta().debugHash;
   4184    for (unsigned char byte : hash) {
   4185      unsigned char digit1 = byte / 16, digit2 = byte % 16;
   4186      if (!result.append(
   4187              (char)(digit1 < 10 ? digit1 + '0' : digit1 + 'a' - 10))) {
   4188        return nullptr;
   4189      }
   4190      if (!result.append(
   4191              (char)(digit2 < 10 ? digit2 + '0' : digit2 + 'a' - 10))) {
   4192        return nullptr;
   4193      }
   4194    }
   4195  }
   4196 
   4197  return result.finishString();
   4198 }
   4199 
// Returns the breakpoint site for bytecode 'offset', creating it if needed.
// Only valid when debugging is enabled for this instance.
WasmBreakpointSite* Instance::getOrCreateBreakpointSite(JSContext* cx,
                                                       uint32_t offset) {
 MOZ_ASSERT(debugEnabled());
 return debug().getOrCreateBreakpointSite(cx, this, offset);
}
   4205 
// Destroys the breakpoint site at bytecode 'offset'.  Only valid when
// debugging is enabled for this instance.
void Instance::destroyBreakpointSite(JS::GCContext* gcx, uint32_t offset) {
 MOZ_ASSERT(debugEnabled());
 return debug().destroyBreakpointSite(gcx, this, offset);
}
   4210 
   4211 void Instance::disassembleExport(JSContext* cx, uint32_t funcIndex, Tier tier,
   4212                                 PrintCallback printString) const {
   4213  const CodeBlock& codeBlock = code().funcCodeBlock(funcIndex);
   4214  const FuncExport& funcExport = codeBlock.lookupFuncExport(funcIndex);
   4215  const CodeRange& range = codeBlock.codeRange(funcExport);
   4216 
   4217  MOZ_ASSERT(range.begin() < codeBlock.length());
   4218  MOZ_ASSERT(range.end() < codeBlock.length());
   4219 
   4220  uint8_t* functionCode = codeBlock.base() + range.begin();
   4221  jit::Disassemble(functionCode, range.end() - range.begin(), printString);
   4222 }
   4223 
// Memory-reporting hook: accumulates the malloc-reported size of this
// instance and its tables into *data, and delegates to the debug state (if
// any) and the Code object for their own contributions to *code/*data.  The
// Seen-sets deduplicate structures shared between instances.
void Instance::addSizeOfMisc(
   mozilla::MallocSizeOf mallocSizeOf, CodeMetadata::SeenSet* seenCodeMeta,
   CodeMetadataForAsmJS::SeenSet* seenCodeMetaForAsmJS,
   Code::SeenSet* seenCode, Table::SeenSet* seenTables, size_t* code,
   size_t* data) const {
 *data += mallocSizeOf(this);
 for (const SharedTable& table : tables_) {
   *data += table->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenTables);
 }

 if (maybeDebug_) {
   maybeDebug_->addSizeOfMisc(mallocSizeOf, seenCodeMeta, seenCodeMetaForAsmJS,
                              seenCode, code, data);
 }

 code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenCodeMeta,
                               seenCodeMetaForAsmJS, seenCode, code, data);
}
   4242 
   4243 //////////////////////////////////////////////////////////////////////////////
   4244 //
   4245 // Reporting of errors that are traps.
   4246 
   4247 void wasm::MarkPendingExceptionAsTrap(JSContext* cx) {
   4248  RootedValue exn(cx);
   4249  if (!cx->getPendingException(&exn)) {
   4250    return;
   4251  }
   4252 
   4253  if (cx->isThrowingOutOfMemory()) {
   4254    return;
   4255  }
   4256 
   4257  MOZ_RELEASE_ASSERT(exn.isObject() && exn.toObject().is<ErrorObject>());
   4258  exn.toObject().as<ErrorObject>().setFromWasmTrap();
   4259 }
   4260 
// Reports a wasm trap: throws the error identified by 'errorNumber' and
// marks it as a trap (unless it degenerated into an OOM).
void wasm::ReportTrapError(JSContext* cx, unsigned errorNumber) {
 JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, errorNumber);

 // OOM exceptions are not traps and must not be marked as such.
 if (cx->isThrowingOutOfMemory()) {
   return;
 }

 // Mark the exception as thrown from a trap to prevent it from being handled
 // by wasm exception handlers.
 MarkPendingExceptionAsTrap(cx);
}
   4271 }