tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WasmBuiltins.cpp (88278B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 *
      4 * Copyright 2017 Mozilla Foundation
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *     http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 #include "wasm/WasmBuiltins.h"
     20 
     21 #include "mozilla/Atomics.h"
     22 #include "mozilla/ScopeExit.h"
     23 
     24 #include <cmath>
     25 
     26 #include "fdlibm.h"
     27 #include "jslibmath.h"
     28 #include "jsmath.h"
     29 
     30 #include "jit/AtomicOperations.h"
     31 #include "jit/InlinableNatives.h"
     32 #include "jit/JitRuntime.h"
     33 #include "jit/MacroAssembler.h"
     34 #include "jit/ProcessExecutableMemory.h"
     35 #include "jit/Simulator.h"
     36 #include "js/experimental/JitInfo.h"  // JSJitInfo
     37 #include "js/friend/ErrorMessages.h"  // js::GetErrorMessage, JSMSG_*
     38 #include "js/friend/StackLimits.h"    // js::AutoCheckRecursionLimit
     39 #include "threading/Mutex.h"
     40 #include "util/Memory.h"
     41 #include "util/Poison.h"
     42 #include "vm/BigIntType.h"
     43 #include "vm/ErrorObject.h"
     44 #include "wasm/WasmCodegenTypes.h"
     45 #include "wasm/WasmDebug.h"
     46 #include "wasm/WasmDebugFrame.h"
     47 #include "wasm/WasmGcObject.h"
     48 #include "wasm/WasmInstance.h"
     49 #include "wasm/WasmPI.h"
     50 #include "wasm/WasmStubs.h"
     51 
     52 #include "debugger/DebugAPI-inl.h"
     53 #include "vm/ErrorObject-inl.h"
     54 #include "vm/JSContext-inl.h"
     55 #include "vm/Stack-inl.h"
     56 #include "wasm/WasmInstance-inl.h"
     57 
     58 using namespace js;
     59 using namespace jit;
     60 using namespace wasm;
     61 
     62 using mozilla::EnumeratedArray;
     63 using mozilla::HashGeneric;
     64 using mozilla::MakeEnumeratedRange;
     65 using mozilla::Maybe;
     66 using mozilla::Nothing;
     67 using mozilla::Some;
     68 
// Chunk size for a LifoAlloc used when generating builtin thunk code
// (presumably backs the thunk compilation later in this file — confirm at the
// use site; not visible in this chunk).
static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;
     70 
     71 // ============================================================================
     72 // WebAssembly builtin C++ functions called from wasm code to implement internal
     73 // wasm operations: type descriptions.
     74 
     75 // Some abbreviations, for the sake of conciseness.
     76 #define _F64 MIRType::Double
     77 #define _F32 MIRType::Float32
     78 #define _I32 MIRType::Int32
     79 #define _I64 MIRType::Int64
     80 #define _PTR MIRType::Pointer
     81 #define _RoN MIRType::WasmAnyRef
     82 #define _WAD MIRType::WasmArrayData
     83 #define _VOID MIRType::None
     84 #define _END MIRType::None
     85 #define _Infallible FailureMode::Infallible
     86 #define _FailOnNegI32 FailureMode::FailOnNegI32
     87 #define _FailOnMaxI32 FailureMode::FailOnMaxI32
     88 #define _FailOnNullPtr FailureMode::FailOnNullPtr
     89 #define _FailOnInvalidRef FailureMode::FailOnInvalidRef
     90 
     91 #define _NoTrap Trap::Limit
     92 #define _ThrowReported Trap::ThrowReported
     93 
     94 namespace js {
     95 namespace wasm {
     96 
// Each SymbolicAddressSignature below lists, in order: the symbolic address
// of the builtin, its result MIRType, its failure mode, the trap used when a
// failure must be reported, the argument count, and the argument MIRTypes
// (terminated by _END).
//
// Scalar floating-point math builtins: infallible, cannot trap.  The
// *NativeD and *FdlibmD entries are distinct symbolic addresses for the same
// operation (names suggest system libm vs. fdlibm backends — the fdlibm.h
// include above supports this; confirm at the thunk definitions).
constexpr SymbolicAddressSignature SASigSinNativeD = {
    SymbolicAddress::SinNativeD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigSinFdlibmD = {
    SymbolicAddress::SinFdlibmD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigCosNativeD = {
    SymbolicAddress::CosNativeD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigCosFdlibmD = {
    SymbolicAddress::CosFdlibmD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigTanNativeD = {
    SymbolicAddress::TanNativeD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigTanFdlibmD = {
    SymbolicAddress::TanFdlibmD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigASinD = {
    SymbolicAddress::ASinD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigACosD = {
    SymbolicAddress::ACosD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigATanD = {
    SymbolicAddress::ATanD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigCeilD = {
    SymbolicAddress::CeilD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigCeilF = {
    SymbolicAddress::CeilF, _F32, _Infallible, _NoTrap, 1, {_F32, _END}};
constexpr SymbolicAddressSignature SASigFloorD = {
    SymbolicAddress::FloorD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigFloorF = {
    SymbolicAddress::FloorF, _F32, _Infallible, _NoTrap, 1, {_F32, _END}};
constexpr SymbolicAddressSignature SASigTruncD = {
    SymbolicAddress::TruncD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigTruncF = {
    SymbolicAddress::TruncF, _F32, _Infallible, _NoTrap, 1, {_F32, _END}};
constexpr SymbolicAddressSignature SASigNearbyIntD = {
    SymbolicAddress::NearbyIntD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigNearbyIntF = {
    SymbolicAddress::NearbyIntF, _F32, _Infallible, _NoTrap, 1, {_F32, _END}};
constexpr SymbolicAddressSignature SASigExpD = {
    SymbolicAddress::ExpD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigLogD = {
    SymbolicAddress::LogD, _F64, _Infallible, _NoTrap, 1, {_F64, _END}};
constexpr SymbolicAddressSignature SASigPowD = {
    SymbolicAddress::PowD, _F64, _Infallible, _NoTrap, 2, {_F64, _F64, _END}};
constexpr SymbolicAddressSignature SASigATan2D = {
    SymbolicAddress::ATan2D, _F64, _Infallible, _NoTrap, 2, {_F64, _F64, _END}};
// Wasm GC array element-move helpers; _WAD arguments are wasm array data
// pointers.
constexpr SymbolicAddressSignature SASigArrayMemMove = {
    SymbolicAddress::ArrayMemMove,
    _VOID,
    _Infallible,
    _NoTrap,
    6,
    {_WAD, _I32, _WAD, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigArrayRefsMove = {
    SymbolicAddress::ArrayRefsMove,
    _VOID,
    _Infallible,
    _NoTrap,
    6,
    {_RoN, _WAD, _I32, _WAD, _I32, _I32, _END}};
// Linear-memory management, atomics wait/wake, and bulk-memory builtins.
// The M32/M64 suffix pairs differ only in the index-argument type
// (_I32 vs _I64), consistent with 32-bit vs 64-bit (memory64) addressing;
// the *Shared* variants are separate entry points for shared memories.
constexpr SymbolicAddressSignature SASigMemoryGrowM32 = {
    SymbolicAddress::MemoryGrowM32, _I32, _Infallible, _NoTrap, 3,
    {_PTR, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemoryGrowM64 = {
    SymbolicAddress::MemoryGrowM64, _I64, _Infallible, _NoTrap, 3,
    {_PTR, _I64, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemorySizeM32 = {
    SymbolicAddress::MemorySizeM32,
    _I32,
    _Infallible,
    _NoTrap,
    2,
    {_PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemorySizeM64 = {
    SymbolicAddress::MemorySizeM64,
    _I64,
    _Infallible,
    _NoTrap,
    2,
    {_PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigWaitI32M32 = {
    SymbolicAddress::WaitI32M32,         _I32, _FailOnNegI32, _ThrowReported, 5,
    {_PTR, _I32, _I32, _I64, _I32, _END}};
constexpr SymbolicAddressSignature SASigWaitI32M64 = {
    SymbolicAddress::WaitI32M64,         _I32, _FailOnNegI32, _ThrowReported, 5,
    {_PTR, _I64, _I32, _I64, _I32, _END}};
constexpr SymbolicAddressSignature SASigWaitI64M32 = {
    SymbolicAddress::WaitI64M32,         _I32, _FailOnNegI32, _ThrowReported, 5,
    {_PTR, _I32, _I64, _I64, _I32, _END}};
constexpr SymbolicAddressSignature SASigWaitI64M64 = {
    SymbolicAddress::WaitI64M64,         _I32, _FailOnNegI32, _ThrowReported, 5,
    {_PTR, _I64, _I64, _I64, _I32, _END}};
constexpr SymbolicAddressSignature SASigWakeM32 = {
    SymbolicAddress::WakeM32, _I32, _FailOnNegI32,
    _ThrowReported,           4,    {_PTR, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigWakeM64 = {
    SymbolicAddress::WakeM64, _I32, _FailOnNegI32,
    _ThrowReported,           4,    {_PTR, _I64, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemCopyM32 = {
    SymbolicAddress::MemCopyM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemCopySharedM32 = {
    SymbolicAddress::MemCopySharedM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemCopyM64 = {
    SymbolicAddress::MemCopyM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemCopySharedM64 = {
    SymbolicAddress::MemCopySharedM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I64, _I64, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemCopyAny = {
    SymbolicAddress::MemCopyAny,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _I64, _I64, _I64, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigDataDrop = {
    SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, _ThrowReported, 2,
    {_PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemFillM32 = {
    SymbolicAddress::MemFillM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemFillSharedM32 = {
    SymbolicAddress::MemFillSharedM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemFillM64 = {
    SymbolicAddress::MemFillM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemFillSharedM64 = {
    SymbolicAddress::MemFillSharedM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I64, _I32, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemDiscardM32 = {
    SymbolicAddress::MemDiscardM32, _VOID, _FailOnNegI32, _ThrowReported, 4,
    {_PTR, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemDiscardSharedM32 = {
    SymbolicAddress::MemDiscardSharedM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    4,
    {_PTR, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemDiscardM64 = {
    SymbolicAddress::MemDiscardM64, _VOID, _FailOnNegI32, _ThrowReported, 4,
    {_PTR, _I64, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemDiscardSharedM64 = {
    SymbolicAddress::MemDiscardSharedM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    4,
    {_PTR, _I64, _I64, _PTR, _END}};
constexpr SymbolicAddressSignature SASigMemInitM32 = {
    SymbolicAddress::MemInitM32,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigMemInitM64 = {
    SymbolicAddress::MemInitM64,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _I64, _I32, _I32, _I32, _I32, _END}};
// Table-operation builtins; the leading _PTR argument is shared with all the
// other signatures in this file (passed alongside the table/element indices).
constexpr SymbolicAddressSignature SASigTableCopy = {
    SymbolicAddress::TableCopy,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigElemDrop = {
    SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, _ThrowReported, 2,
    {_PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableFill = {
    SymbolicAddress::TableFill,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    5,
    {_PTR, _I32, _RoN, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableGet = {
    SymbolicAddress::TableGet, _RoN, _FailOnInvalidRef, _ThrowReported, 3,
    {_PTR, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableGrow = {
    SymbolicAddress::TableGrow,    _I32, _Infallible, _NoTrap, 4,
    {_PTR, _RoN, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableInit = {
    SymbolicAddress::TableInit,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableSet = {
    SymbolicAddress::TableSet,     _VOID, _FailOnNegI32, _ThrowReported, 4,
    {_PTR, _I32, _RoN, _I32, _END}};
constexpr SymbolicAddressSignature SASigTableSize = {
    SymbolicAddress::TableSize, _I32, _Infallible, _NoTrap, 2,
    {_PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigRefFunc = {
    SymbolicAddress::RefFunc, _RoN, _FailOnInvalidRef,
    _ThrowReported,           2,    {_PTR, _I32, _END}};
// GC post-write-barrier helpers: infallible, no trap.
constexpr SymbolicAddressSignature SASigPostBarrierEdge = {
    SymbolicAddress::PostBarrierEdge,
    _VOID,
    _Infallible,
    _NoTrap,
    2,
    {_PTR, _PTR, _END}};
constexpr SymbolicAddressSignature SASigPostBarrierEdgePrecise = {
    SymbolicAddress::PostBarrierEdgePrecise,
    _VOID,
    _Infallible,
    _NoTrap,
    3,
    {_PTR, _PTR, _RoN, _END}};
constexpr SymbolicAddressSignature SASigPostBarrierWholeCell = {
    SymbolicAddress::PostBarrierWholeCell,
    _VOID,
    _Infallible,
    _NoTrap,
    2,
    {_PTR, _PTR, _END}};
// Exception-handling builtins.
constexpr SymbolicAddressSignature SASigExceptionNew = {
    SymbolicAddress::ExceptionNew,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    2,
    {_PTR, _RoN, _END}};
constexpr SymbolicAddressSignature SASigThrowException = {
    SymbolicAddress::ThrowException,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    2,
    {_PTR, _RoN, _END}};
// Wasm GC struct/array allocation and initialization builtins.  Allocators
// signal failure with a null pointer; the _true/_false and IL/OOL suffixes
// distinguish compile-time variants of the same operation (their precise
// meaning is defined at the implementations, not visible here).
constexpr SymbolicAddressSignature SASigStructNewIL_true = {
    SymbolicAddress::StructNewIL_true, _RoN, _FailOnNullPtr, _ThrowReported, 3,
    {_PTR, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigStructNewIL_false = {
    SymbolicAddress::StructNewIL_false,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    3,
    {_PTR, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigStructNewOOL_true = {
    SymbolicAddress::StructNewOOL_true,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    3,
    {_PTR, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigStructNewOOL_false = {
    SymbolicAddress::StructNewOOL_false,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    3,
    {_PTR, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigArrayNew_true = {
    SymbolicAddress::ArrayNew_true, _RoN, _FailOnNullPtr, _ThrowReported, 4,
    {_PTR, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigArrayNew_false = {
    SymbolicAddress::ArrayNew_false, _RoN, _FailOnNullPtr, _ThrowReported, 4,
    {_PTR, _I32, _I32, _PTR, _END}};
constexpr SymbolicAddressSignature SASigArrayNewData = {
    SymbolicAddress::ArrayNewData,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    6,
    {_PTR, _I32, _I32, _I32, _PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigArrayNewElem = {
    SymbolicAddress::ArrayNewElem,
    _RoN,
    _FailOnNullPtr,
    _ThrowReported,
    6,
    {_PTR, _I32, _I32, _I32, _PTR, _I32, _END}};
constexpr SymbolicAddressSignature SASigArrayInitData = {
    SymbolicAddress::ArrayInitData,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    6,
    {_PTR, _RoN, _I32, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigArrayInitElem = {
    SymbolicAddress::ArrayInitElem,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    7,
    {_PTR, _RoN, _I32, _I32, _I32, _I32, _I32, _END}};
constexpr SymbolicAddressSignature SASigArrayCopy = {
    SymbolicAddress::ArrayCopy,
    _VOID,
    _FailOnNegI32,
    _ThrowReported,
    7,
    {_PTR, _RoN, _I32, _RoN, _I32, _I32, _I32, _END}};
    430 
// Stamp out one SASig<name> signature per builtin-module function, pulling
// each field from the DECLARE_BUILTIN_MODULE_FUNC_* per-op metadata macros
// (declared alongside FOR_EACH_BUILTIN_MODULE_FUNC in WasmBuiltins.h).
#define VISIT_BUILTIN_FUNC(op, export, sa_name, ...)    \
  constexpr SymbolicAddressSignature SASig##sa_name = { \
      SymbolicAddress::sa_name,                         \
      DECLARE_BUILTIN_MODULE_FUNC_RESULT_MIRTYPE_##op,  \
      DECLARE_BUILTIN_MODULE_FUNC_FAILMODE_##op,        \
      DECLARE_BUILTIN_MODULE_FUNC_FAILTRAP_##op,        \
      DECLARE_BUILTIN_MODULE_FUNC_PARAM_MIRTYPES_##op};

FOR_EACH_BUILTIN_MODULE_FUNC(VISIT_BUILTIN_FUNC)
#undef VISIT_BUILTIN_FUNC

#ifdef ENABLE_WASM_JSPI
// JS Promise Integration: suspender bookkeeping helper.
constexpr SymbolicAddressSignature SASigUpdateSuspenderState = {
    SymbolicAddress::UpdateSuspenderState,
    _VOID,
    _Infallible,
    _NoTrap,
    3,
    {_PTR, _PTR, _I32, _END}};
#endif
    451 
    452 }  // namespace wasm
    453 }  // namespace js
    454 
    455 #undef _F64
    456 #undef _F32
    457 #undef _I32
    458 #undef _I64
    459 #undef _PTR
    460 #undef _RoN
    461 #undef _VOID
    462 #undef _END
    463 #undef _Infallible
    464 #undef _FailOnNegI32
    465 #undef _FailOnNullPtr
    466 
    467 #ifdef DEBUG
    468 ABIType ToABIType(FailureMode mode) {
    469  switch (mode) {
    470    case FailureMode::FailOnNegI32:
    471      return ABIType::Int32;
    472    case FailureMode::FailOnNullPtr:
    473    case FailureMode::FailOnInvalidRef:
    474      return ABIType::General;
    475    default:
    476      MOZ_CRASH("unexpected failure mode");
    477  }
    478 }
    479 
    480 ABIType ToABIType(MIRType type) {
    481  switch (type) {
    482    case MIRType::None:
    483    case MIRType::Int32:
    484      return ABIType::Int32;
    485    case MIRType::Int64:
    486      return ABIType::Int64;
    487    case MIRType::Pointer:
    488    case MIRType::WasmAnyRef:
    489      return ABIType::General;
    490    case MIRType::Float32:
    491      return ABIType::Float32;
    492    case MIRType::Double:
    493      return ABIType::Float64;
    494    default:
    495      MOZ_CRASH("unexpected type");
    496  }
    497 }
    498 
    499 ABIFunctionType ToABIType(const SymbolicAddressSignature& sig) {
    500  MOZ_ASSERT_IF(sig.failureMode != FailureMode::Infallible,
    501                ToABIType(sig.failureMode) == ToABIType(sig.retType));
    502  int abiType = 0;
    503  for (int i = 0; i < sig.numArgs; i++) {
    504    abiType <<= ABITypeArgShift;
    505    abiType |= uint32_t(ToABIType(sig.argTypes[i]));
    506  }
    507  abiType <<= ABITypeArgShift;
    508  abiType |= uint32_t(ToABIType(sig.retType));
    509  return ABIFunctionType(abiType);
    510 }
    511 #endif
    512 
    513 // ============================================================================
    514 // WebAssembly builtin C++ functions called from wasm code to implement internal
    515 // wasm operations: implementations.
    516 
#if defined(JS_CODEGEN_ARM)
extern "C" {

// ARM EABI compiler-rt helpers for 32-bit integer division; each returns
// quotient and remainder packed into one 64-bit value (signed and unsigned
// variants).
extern MOZ_EXPORT int64_t __aeabi_idivmod(int, int);

extern MOZ_EXPORT int64_t __aeabi_uidivmod(int, int);
}
#endif
    525 
    526 // This utility function can only be called for builtins that are called
    527 // directly from wasm code.
    528 static JitActivation* CallingActivation(JSContext* cx) {
    529  Activation* act = cx->activation();
    530  MOZ_ASSERT(act->asJit()->hasWasmExitFP());
    531  return act->asJit();
    532 }
    533 
// Out-of-line handler invoked from the debug trap stub in debug-enabled wasm
// code.  Depending on the kind of call site that trapped, notifies the
// DebugAPI of frame entry, frame exit, a single step, and/or a breakpoint.
// Returns false iff an exception is now pending on the context (including
// the "forced return" cases below, which are converted to errors).
static bool WasmHandleDebugTrap() {
  JSContext* cx = TlsContext.get();  // Cold code
  JitActivation* activation = CallingActivation(cx);
  Frame* fp = activation->wasmExitFP();
  Instance* instance = GetNearestEffectiveInstance(fp);
  const Code& code = instance->code();
  MOZ_ASSERT(code.debugEnabled());
#ifdef ENABLE_WASM_JSPI
  MOZ_ASSERT(!cx->wasm().onSuspendableStack());
#endif

  // The debug trap stub is the innermost frame. Its return address is the
  // actual trap site.
  CallSite site;
  MOZ_ALWAYS_TRUE(code.lookupCallSite(fp->returnAddress(), &site));

  // Advance to the actual trapping frame.
  fp = fp->wasmCaller();
  DebugFrame* debugFrame = DebugFrame::from(fp);

  if (site.kind() == CallSiteKind::EnterFrame) {
    if (!instance->debug().enterFrameTrapsEnabled()) {
      return true;
    }
    debugFrame->setIsDebuggee();
    debugFrame->observe(cx);
    if (!DebugAPI::onEnterFrame(cx, js::AbstractFramePtr(debugFrame))) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // Ignoring forced return because changing code execution order is
        // not yet implemented in the wasm baseline.
        // TODO properly handle forced return and resume wasm execution.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onEnterFrame");
      }
      return false;
    }
    return true;
  }
  if (site.kind() == CallSiteKind::LeaveFrame ||
      site.kind() == CallSiteKind::CollapseFrame) {
    // LeaveFrame sites expose the function's result to the debugger as a JS
    // value; CollapseFrame sites discard it instead.
    if (site.kind() == CallSiteKind::LeaveFrame &&
        !debugFrame->updateReturnJSValue(cx)) {
      return false;
    }
    if (site.kind() == CallSiteKind::CollapseFrame) {
      debugFrame->discardReturnJSValue();
    }
    bool ok = DebugAPI::onLeaveFrame(cx, js::AbstractFramePtr(debugFrame),
                                     (const jsbytecode*)nullptr, true);
    debugFrame->leave(cx);
    return ok;
  }

  // Otherwise this is a breakpoint trap site; fire single-step and/or
  // breakpoint handlers as enabled.
  DebugState& debug = instance->debug();
  MOZ_ASSERT(debug.hasBreakpointTrapAtOffset(site.lineOrBytecode()));
  if (debug.stepModeEnabled(debugFrame->funcIndex())) {
    if (!DebugAPI::onSingleStep(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(cx,
                            "Unexpected resumption value from onSingleStep");
      }
      return false;
    }
  }
  if (debug.hasBreakpointSite(site.lineOrBytecode())) {
    if (!DebugAPI::onTrap(cx)) {
      if (cx->isPropagatingForcedReturn()) {
        cx->clearPropagatingForcedReturn();
        // TODO properly handle forced return.
        JS_ReportErrorASCII(
            cx, "Unexpected resumption value from breakpoint handler");
      }
      return false;
    }
  }
  return true;
}
    614 
// Check if the pending exception, if any, is catchable by wasm, and if so
// return it as a WasmExceptionObject (wrapping a plain JS value in a new
// wasm exception when necessary).  Returns nullptr when there is no pending
// exception or it is uncatchable: a trap other than Trap::ThrowReported,
// over-recursion, or OOM.
static WasmExceptionObject* GetOrWrapWasmException(JitActivation* activation,
                                                   JSContext* cx) {
  if (!cx->isExceptionPending()) {
    return nullptr;
  }

  // Traps are generally not catchable as wasm exceptions. The only case in
  // which they are catchable is for Trap::ThrowReported, which the wasm
  // compiler uses to throw exceptions and is the source of exceptions from C++.
  if (activation->isWasmTrapping() &&
      activation->wasmTrapData().trap != Trap::ThrowReported) {
    return nullptr;
  }

  if (cx->isThrowingOverRecursed() || cx->isThrowingOutOfMemory()) {
    return nullptr;
  }

  // Write the exception out here to exn to avoid having to get the pending
  // exception and checking for OOM multiple times.
  RootedValue exn(cx);
  if (cx->getPendingException(&exn)) {
    // Check if a JS exception originated from a wasm trap.
    if (exn.isObject() && exn.toObject().is<ErrorObject>()) {
      ErrorObject& err = exn.toObject().as<ErrorObject>();
      if (err.fromWasmTrap()) {
        return nullptr;
      }
    }

    // Get or create a wasm exception to represent the pending exception
    Rooted<WasmExceptionObject*> wasmExn(cx);
    if (exn.isObject() && exn.toObject().is<WasmExceptionObject>()) {
      // We're already throwing a wasm exception
      wasmExn = &exn.toObject().as<WasmExceptionObject>();

      // If wasm is rethrowing a wrapped JS value, then set the pending
      // exception on cx to be the wrapped value. This will ensure that if we
      // unwind out of wasm the wrapper exception will not escape.
      //
      // We also do this here, and not at the end of wasm::HandleThrow so that
      // any DebugAPI calls see the wrapped JS value, not the wrapper
      // exception.
      if (wasmExn->isWrappedJSValue()) {
        // Re-use exn to avoid needing a new root
        exn = wasmExn->wrappedJSValue();
        cx->setPendingException(exn, nullptr);
      }
    } else {
      // Wrap all thrown JS values in a wasm exception. This is required so
      // that all exceptions have tags, and the 'null' JS value becomes a
      // non-null wasm exception.
      wasmExn = WasmExceptionObject::wrapJSValue(cx, exn);
    }

    // wrapJSValue may have failed (null); fall through to the OOM assert in
    // that case.
    if (wasmExn) {
      return wasmExn;
    }
  }

  // Either getPendingException or wrapJSValue failed; the assert documents
  // that OOM is the only way to get here.
  MOZ_ASSERT(cx->isThrowingOutOfMemory());
  return nullptr;
}
    679 
    680 static const wasm::TryNote* FindNonDelegateTryNote(
    681    const wasm::Code& code, const uint8_t* pc, const CodeBlock** codeBlock) {
    682  const wasm::TryNote* tryNote = code.lookupTryNote((void*)pc, codeBlock);
    683  while (tryNote && tryNote->isDelegate()) {
    684    pc = (*codeBlock)->base() + tryNote->delegateOffset();
    685    const wasm::TryNote* delegateTryNote =
    686        code.lookupTryNote((void*)pc, codeBlock);
    687    MOZ_RELEASE_ASSERT(delegateTryNote == nullptr ||
    688                       delegateTryNote->tryBodyBegin() <
    689                           tryNote->tryBodyBegin());
    690    tryNote = delegateTryNote;
    691  }
    692  return tryNote;
    693 }
    694 
    695 // Request tier-2 compilation for the calling wasm function.
    696 
// Called from the hotness-check slow path in baseline code when a function's
// hotness counter trips.  Identifies the requesting function from the return
// address, resets its counter, submits call_ref profiling hints, and either
// compiles tier-2 synchronously (pref-controlled) or queues an async
// tier-up request.  Compilation failure is logged but not recoverable.
static void WasmHandleRequestTierUp(Instance* instance) {
  JSContext* cx = instance->cx();

  // Don't turn this into a release assert - TlsContext.get() can be expensive.
  MOZ_ASSERT(cx == TlsContext.get());

  // Neither this routine nor the stub that calls it make any attempt to
  // communicate roots to the GC.  This is OK because we will only be
  // compiling code here, which shouldn't GC.  Nevertheless ..
  JS::AutoAssertNoGC nogc(cx);

  JitActivation* activation = CallingActivation(cx);
  Frame* fp = activation->wasmExitFP();

  // Similarly, don't turn this into a release assert.
  MOZ_ASSERT(instance == GetNearestEffectiveInstance(fp));

  // Figure out the requesting funcIndex.  We could add a field to the
  // Instance and, in the slow path of BaseCompiler::addHotnessCheck, write it
  // in there.  That would avoid having to call LookupCodeBlock here, but (1)
  // LookupCodeBlock is pretty cheap and (2) this would make hotness checks
  // larger.  It doesn't seem like a worthwhile tradeoff.
  void* resumePC = fp->returnAddress();
  const CodeRange* codeRange;
  const CodeBlock* codeBlock = LookupCodeBlock(resumePC, &codeRange);
  MOZ_RELEASE_ASSERT(codeBlock && codeRange);

  uint32_t funcIndex = codeRange->funcIndex();

  // See BaseCompiler::addHotnessCheck for rationale.  If this fails, and
  // `counter` is a very large negative number (close to -2^31), it may be that
  // a hotness check didn't have its step patched in.
  int32_t counter = instance->readHotnessCounter(funcIndex);
  MOZ_RELEASE_ASSERT(counter >= -127 && counter <= -1);

  // Function `funcIndex` is requesting tier-up.  This can go one of three ways:
  // - the request is a duplicate -- ignore
  // - tier-up compilation succeeds -- we hope
  // - tier-up compilation fails (eg, OOMs).
  //   We have no feasible way to recover.
  //
  // Regardless of the outcome, we want to defer duplicate requests as long as
  // possible.  So set the counter to "infinity" right now.
  instance->resetHotnessCounter(funcIndex);

  // Submit the collected profiling information for call_ref to be available
  // for compilation.
  instance->submitCallRefHints(funcIndex);

  // Synchronous path (testing/pref-controlled): compile tier-2 on this
  // thread and report the outcome immediately.
  if (JS::Prefs::wasm_lazy_tiering_synchronous()) {
    UniqueChars error;
    UniqueCharsVector warnings;
    mozilla::Atomic<bool> cancelled(false);
    bool ok = CompilePartialTier2(*codeBlock->code, funcIndex, &error,
                                  &warnings, &cancelled);
    ReportTier2ResultsOffThread(cancelled, ok, mozilla::Some(funcIndex),
                                codeBlock->code->codeMeta().scriptedCaller(),
                                error, warnings);
    return;
  }

  // Try to Ion-compile it.  Note that `ok == true` signifies either
  // "duplicate request" or "not a duplicate, and compilation succeeded".
  bool ok = codeBlock->code->requestTierUp(funcIndex);

  // If compilation failed, there's no feasible way to recover. We use the
  // 'off thread' logging mechanism to avoid possibly triggering a GC.
  if (!ok) {
    wasm::LogOffThread("Failed to tier-up function=%d in instance=%p.",
                       funcIndex, instance);
  }
}
    769 
// Unwind the activation in response to a thrown exception. This function is
// responsible for notifying the debugger of each unwound frame.
//
// This function will look for try-catch handlers and, if not trapping or
// throwing an uncatchable exception, will write the handler info in |*rfe|.
//
// If no try-catch handler is found, return to the caller to continue unwinding
// JS JIT frames.
void wasm::HandleExceptionWasm(JSContext* cx, JitFrameIter& iter,
                               jit::ResumeFromException* rfe) {
  MOZ_ASSERT(iter.isWasm());
  MOZ_ASSERT(CallingActivation(cx) == iter.activation());
  MOZ_ASSERT(cx->activation()->asJit()->hasWasmExitFP());
  MOZ_ASSERT(rfe->kind == ExceptionResumeKind::EntryFrame);

#ifdef ENABLE_WASM_JSPI
  // This should always run on the main stack. The throw stub should perform
  // a stack switch if that's not the case.
  MOZ_ASSERT(!cx->wasm().onSuspendableStack());
#endif

  // WasmFrameIter iterates down wasm frames in the activation starting at
  // JitActivation::wasmExitFP(). Calling WasmFrameIter::startUnwinding pops
  // JitActivation::wasmExitFP() once each time WasmFrameIter is incremented,
  // ultimately leaving no wasm exit FP when the WasmFrameIter is done(). This
  // is necessary to prevent a wasm::DebugFrame from being observed again after
  // we just called onLeaveFrame (which would lead to the frame being re-added
  // to the map of live frames, right as it becomes trash).

#ifdef DEBUG
  // Postconditions checked on every exit path from this function.
  auto onExit = mozilla::MakeScopeExit([cx] {
    MOZ_ASSERT(!cx->activation()->asJit()->isWasmTrapping(),
               "unwinding clears the trapping state");
    MOZ_ASSERT(!cx->activation()->asJit()->hasWasmExitFP(),
               "unwinding leaves no wasm exit fp");
  });
#endif

  MOZ_ASSERT(!iter.done());

  // Make the iterator adjust the JitActivation so that each popped frame
  // will not be visible to other FrameIters that are created while we're
  // unwinding (such as by debugging code).
  iter.asWasm().setIsLeavingFrames();

  JitActivation* activation = CallingActivation(cx);
  // A null |wasmExn| means the exception is uncatchable by wasm; in that case
  // we still walk the frames below for debugger notification, but never stop
  // at a try note.
  Rooted<WasmExceptionObject*> wasmExn(cx,
                                       GetOrWrapWasmException(activation, cx));

  for (; !iter.done() && iter.isWasm(); ++iter) {
    // Wasm code can enter same-compartment realms, so reset cx->realm to
    // this frame's realm.
    WasmFrameIter& wasmFrame = iter.asWasm();
    cx->setRealmForJitExceptionHandler(wasmFrame.instance()->realm());

    // Only look for an exception handler if there's a catchable exception.
    if (wasmExn) {
      const wasm::Code& code = wasmFrame.instance()->code();
      const uint8_t* pc = wasmFrame.resumePCinCurrentFrame();
      const wasm::CodeBlock* codeBlock = nullptr;
      const wasm::TryNote* tryNote =
          FindNonDelegateTryNote(code, pc, &codeBlock);

      if (tryNote) {
        // Skip tryNote if pc is at return stub generated by
        // wasmCollapseFrameSlow.
        CallSite site;
        if (code.lookupCallSite((void*)pc, &site) &&
            site.kind() == CallSiteKind::ReturnStub) {
          continue;
        }

        // Hand the exception to the catching frame and fill in |*rfe| so the
        // stub resumes at the landing pad.
        cx->clearPendingException();
        wasmFrame.instance()->setPendingException(wasmExn);

        rfe->kind = ExceptionResumeKind::WasmCatch;
        rfe->framePointer = (uint8_t*)wasmFrame.frame();
        rfe->instance = wasmFrame.instance();

        rfe->stackPointer =
            (uint8_t*)(rfe->framePointer - tryNote->landingPadFramePushed());
        rfe->target = codeBlock->base() + tryNote->landingPadEntryPoint();

#ifdef ENABLE_WASM_JSPI
        wasm::SuspenderObject* destSuspender = activation->wasmExitSuspender();
        if (destSuspender) {
          destSuspender->enter(cx);
        }
#endif

        // Maintain the invariant that trapping and exit frame state is always
        // clear when we return back into wasm JIT code.
        if (activation->isWasmTrapping()) {
          // This will clear the exit fp and suspender state.
          activation->finishWasmTrap(/*isResuming=*/false);
        } else {
          // We need to manually clear the exit fp and suspender state.
          activation->setWasmExitFP(nullptr, nullptr);
        }
        return;
      }
    }

    if (!wasmFrame.debugEnabled()) {
      continue;
    }

    // Debug-enabled frame: notify the debugger before the frame is torn down.
    DebugFrame* frame = wasmFrame.debugFrame();
    frame->clearReturnJSValue();

    // Assume ResumeMode::Terminate if no exception is pending --
    // no onExceptionUnwind handlers must be fired.
    if (cx->isExceptionPending()) {
      if (!DebugAPI::onExceptionUnwind(cx, AbstractFramePtr(frame))) {
        if (cx->isPropagatingForcedReturn()) {
          cx->clearPropagatingForcedReturn();
          // Unexpected trap return -- raising error since throw recovery
          // is not yet implemented in the wasm baseline.
          // TODO properly handle forced return and resume wasm execution.
          JS_ReportErrorASCII(
              cx, "Unexpected resumption value from onExceptionUnwind");
          wasmExn = nullptr;
        }
      }
    }

    bool ok = DebugAPI::onLeaveFrame(cx, AbstractFramePtr(frame),
                                     (const jsbytecode*)nullptr, false);
    if (ok) {
      // Unexpected success from the handler onLeaveFrame -- raising error
      // since throw recovery is not yet implemented in the wasm baseline.
      // TODO properly handle success and resume wasm execution.
      JS_ReportErrorASCII(cx, "Unexpected success from onLeaveFrame");
      wasmExn = nullptr;
    }
    frame->leave(cx);
  }

  // Assert that any pending exception escaping to non-wasm code is not a
  // wrapper exception object
#ifdef DEBUG
  if (cx->isExceptionPending()) {
    Rooted<Value> pendingException(cx, cx->getPendingExceptionUnwrapped());
    MOZ_ASSERT_IF(pendingException.isObject() &&
                      pendingException.toObject().is<WasmExceptionObject>(),
                  !pendingException.toObject()
                       .as<WasmExceptionObject>()
                       .isWrappedJSValue());
  }
#endif
}
    921 
    922 static void* WasmHandleThrow(jit::ResumeFromException* rfe) {
    923  // Return a pointer to the exception handler trampoline code to jump to from
    924  // the throw stub.
    925  JSContext* cx = TlsContext.get();
    926 #ifdef ENABLE_WASM_JSPI
    927  MOZ_ASSERT(!cx->wasm().onSuspendableStack());
    928 #endif
    929  jit::HandleException(rfe);
    930  return cx->runtime()->jitRuntime()->getExceptionTailReturnValueCheck().value;
    931 }
    932 
    933 // Has the same return-value convention as HandleTrap().
    934 static void* CheckInterrupt(JSContext* cx, JitActivation* activation) {
    935  ResetInterruptState(cx);
    936 
    937  if (!CheckForInterrupt(cx)) {
    938    return nullptr;
    939  }
    940 
    941  void* resumePC = activation->wasmTrapData().resumePC;
    942  activation->finishWasmTrap(/*isResuming=*/true);
    943  // Do not reset the exit frame pointer and suspender, or else we won't switch
    944  // back to the main stack.
    945  return resumePC;
    946 }
    947 
    948 // The calling convention between this function and its caller in the stub
    949 // generated by GenerateTrapExit() is:
    950 //   - return nullptr if the stub should jump to the throw stub to unwind
    951 //     the activation;
    952 //   - return the (non-null) resumePC that should be jumped if execution should
    953 //     resume after the trap.
    954 static void* WasmHandleTrap() {
    955  JSContext* cx = TlsContext.get();  // Cold code
    956  JitActivation* activation = CallingActivation(cx);
    957 #ifdef ENABLE_WASM_JSPI
    958  MOZ_ASSERT(!cx->wasm().onSuspendableStack());
    959 #endif
    960 
    961  switch (activation->wasmTrapData().trap) {
    962    case Trap::Unreachable: {
    963      ReportTrapError(cx, JSMSG_WASM_UNREACHABLE);
    964      return nullptr;
    965    }
    966    case Trap::IntegerOverflow: {
    967      ReportTrapError(cx, JSMSG_WASM_INTEGER_OVERFLOW);
    968      return nullptr;
    969    }
    970    case Trap::InvalidConversionToInteger: {
    971      ReportTrapError(cx, JSMSG_WASM_INVALID_CONVERSION);
    972      return nullptr;
    973    }
    974    case Trap::IntegerDivideByZero: {
    975      ReportTrapError(cx, JSMSG_WASM_INT_DIVIDE_BY_ZERO);
    976      return nullptr;
    977    }
    978    case Trap::IndirectCallToNull: {
    979      ReportTrapError(cx, JSMSG_WASM_IND_CALL_TO_NULL);
    980      return nullptr;
    981    }
    982    case Trap::IndirectCallBadSig: {
    983      ReportTrapError(cx, JSMSG_WASM_IND_CALL_BAD_SIG);
    984      return nullptr;
    985    }
    986    case Trap::NullPointerDereference: {
    987      ReportTrapError(cx, JSMSG_WASM_DEREF_NULL);
    988      return nullptr;
    989    }
    990    case Trap::BadCast: {
    991      ReportTrapError(cx, JSMSG_WASM_BAD_CAST);
    992      return nullptr;
    993    }
    994    case Trap::OutOfBounds: {
    995      ReportTrapError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    996      return nullptr;
    997    }
    998    case Trap::UnalignedAccess: {
    999      ReportTrapError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
   1000      return nullptr;
   1001    }
   1002    case Trap::CheckInterrupt:
   1003      return CheckInterrupt(cx, activation);
   1004    case Trap::StackOverflow: {
   1005      AutoCheckRecursionLimit recursion(cx);
   1006      if (!recursion.check(cx)) {
   1007        return nullptr;
   1008      }
   1009      ReportTrapError(cx, JSMSG_OVER_RECURSED);
   1010      return nullptr;
   1011    }
   1012    case Trap::ThrowReported:
   1013      // Error was already reported under another name.
   1014      return nullptr;
   1015    case Trap::Limit:
   1016      break;
   1017  }
   1018 
   1019  MOZ_CRASH("unexpected trap");
   1020 }
   1021 
   1022 static void WasmReportV128JSCall() {
   1023  JSContext* cx = TlsContext.get();  // Cold code
   1024  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
   1025                           JSMSG_WASM_BAD_VAL_TYPE);
   1026 }
   1027 
   1028 static int32_t CoerceInPlace_ToInt32(Value* rawVal) {
   1029  JSContext* cx = TlsContext.get();  // Cold code
   1030 
   1031  int32_t i32;
   1032  RootedValue val(cx, *rawVal);
   1033  if (!ToInt32(cx, val, &i32)) {
   1034    *rawVal = PoisonedObjectValue(0x42);
   1035    return false;
   1036  }
   1037 
   1038  *rawVal = Int32Value(i32);
   1039  return true;
   1040 }
   1041 
   1042 static int32_t CoerceInPlace_ToBigInt(Value* rawVal) {
   1043  JSContext* cx = TlsContext.get();  // Cold code
   1044 
   1045  RootedValue val(cx, *rawVal);
   1046  BigInt* bi = ToBigInt(cx, val);
   1047  if (!bi) {
   1048    *rawVal = PoisonedObjectValue(0x43);
   1049    return false;
   1050  }
   1051 
   1052  *rawVal = BigIntValue(bi);
   1053  return true;
   1054 }
   1055 
   1056 static int32_t CoerceInPlace_ToNumber(Value* rawVal) {
   1057  JSContext* cx = TlsContext.get();  // Cold code
   1058 
   1059  double dbl;
   1060  RootedValue val(cx, *rawVal);
   1061  if (!ToNumber(cx, val, &dbl)) {
   1062    *rawVal = PoisonedObjectValue(0x42);
   1063    return false;
   1064  }
   1065 
   1066  *rawVal = DoubleValue(dbl);
   1067  return true;
   1068 }
   1069 
   1070 static void* BoxValue_Anyref(Value* rawVal) {
   1071  JSContext* cx = TlsContext.get();  // Cold code
   1072  RootedValue val(cx, *rawVal);
   1073  RootedAnyRef result(cx, AnyRef::null());
   1074  if (!AnyRef::fromJSValue(cx, val, &result)) {
   1075    return nullptr;
   1076  }
   1077  return result.get().forCompiledCode();
   1078 }
   1079 
// Coerce, in place, the JS values in |argv| toward the machine representations
// that function |funcIndex|'s signature requires, so the JIT entry stub can
// finish the conversions infallibly.  Returns false (as int32_t) with a
// pending exception if any coercion fails.
static int32_t CoerceInPlace_JitEntry(int funcIndex, Instance* instance,
                                      Value* argv) {
  JSContext* cx = TlsContext.get();  // Cold code

  const Code& code = instance->code();
  const FuncType& funcType = code.codeMeta().getFuncType(funcIndex);

  for (size_t i = 0; i < funcType.args().length(); i++) {
    HandleValue arg = HandleValue::fromMarkedLocation(&argv[i]);
    switch (funcType.args()[i].kind()) {
      case ValType::I32: {
        int32_t i32;
        if (!ToInt32(cx, arg, &i32)) {
          return false;
        }
        argv[i] = Int32Value(i32);
        break;
      }
      case ValType::I64: {
        // In this case we store a BigInt value as there is no value type
        // corresponding directly to an I64. The conversion to I64 happens
        // in the JIT entry stub.
        BigInt* bigint = ToBigInt(cx, arg);
        if (!bigint) {
          return false;
        }
        argv[i] = BigIntValue(bigint);
        break;
      }
      case ValType::F32:
      case ValType::F64: {
        double dbl;
        if (!ToNumber(cx, arg, &dbl)) {
          return false;
        }
        // No need to convert double-to-float for f32, it's done inline
        // in the wasm stub later.
        argv[i] = DoubleValue(dbl);
        break;
      }
      case ValType::Ref: {
        // Guarded against by temporarilyUnsupportedReftypeForEntry()
        MOZ_RELEASE_ASSERT(funcType.args()[i].refType().isExtern());
        // Perform any fallible boxing that may need to happen so that the JIT
        // code does not need to.
        if (AnyRef::valueNeedsBoxing(arg)) {
          JSObject* boxedValue = AnyRef::boxValue(cx, arg);
          if (!boxedValue) {
            return false;
          }
          argv[i] = ObjectOrNullValue(boxedValue);
        }
        break;
      }
      case ValType::V128: {
        // Guarded against by hasV128ArgOrRet()
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
      default: {
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
    }
  }

  return true;
}
   1146 
   1147 // Allocate a BigInt without GC, corresponds to the similar VMFunction.
   1148 static BigInt* AllocateBigIntTenuredNoGC() {
   1149  JSContext* cx = TlsContext.get();  // Cold code (the caller is elaborate)
   1150 
   1151  BigInt* bi = cx->newCell<BigInt, NoGC>(gc::Heap::Tenured);
   1152  if (!bi) {
   1153    // The NoGC version doesn't report OOM so we have to do this ourselves.
   1154    ReportOutOfMemory(cx);
   1155    return nullptr;
   1156  }
   1157  return bi;
   1158 }
   1159 
   1160 static int64_t DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
   1161                      uint32_t y_lo) {
   1162  int64_t x = ((uint64_t)x_hi << 32) + x_lo;
   1163  int64_t y = ((uint64_t)y_hi << 32) + y_lo;
   1164  MOZ_ASSERT(x != INT64_MIN || y != -1);
   1165  MOZ_ASSERT(y != 0);
   1166  return x / y;
   1167 }
   1168 
   1169 static int64_t UDivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
   1170                       uint32_t y_lo) {
   1171  uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
   1172  uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
   1173  MOZ_ASSERT(y != 0);
   1174  return int64_t(x / y);
   1175 }
   1176 
   1177 static int64_t ModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
   1178                      uint32_t y_lo) {
   1179  int64_t x = ((uint64_t)x_hi << 32) + x_lo;
   1180  int64_t y = ((uint64_t)y_hi << 32) + y_lo;
   1181  MOZ_ASSERT(x != INT64_MIN || y != -1);
   1182  MOZ_ASSERT(y != 0);
   1183  return x % y;
   1184 }
   1185 
   1186 static int64_t UModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
   1187                       uint32_t y_lo) {
   1188  uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
   1189  uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
   1190  MOZ_ASSERT(y != 0);
   1191  return int64_t(x % y);
   1192 }
   1193 
   1194 static int64_t TruncateDoubleToInt64(double input) {
   1195  // Note: INT64_MAX is not representable in double. It is actually
   1196  // INT64_MAX + 1.  Therefore also sending the failure value.
   1197  if (input >= double(INT64_MAX) || input < double(INT64_MIN) ||
   1198      std::isnan(input)) {
   1199    return int64_t(0x8000000000000000);
   1200  }
   1201  return int64_t(input);
   1202 }
   1203 
   1204 static uint64_t TruncateDoubleToUint64(double input) {
   1205  // Note: UINT64_MAX is not representable in double. It is actually
   1206  // UINT64_MAX + 1.  Therefore also sending the failure value.
   1207  if (input >= double(UINT64_MAX) || input <= -1.0 || std::isnan(input)) {
   1208    return int64_t(0x8000000000000000);
   1209  }
   1210  return uint64_t(input);
   1211 }
   1212 
   1213 static int64_t SaturatingTruncateDoubleToInt64(double input) {
   1214  // Handle in-range values (except INT64_MIN).
   1215  if (fabs(input) < -double(INT64_MIN)) {
   1216    return int64_t(input);
   1217  }
   1218  // Handle NaN.
   1219  if (std::isnan(input)) {
   1220    return 0;
   1221  }
   1222  // Handle positive overflow.
   1223  if (input > 0) {
   1224    return INT64_MAX;
   1225  }
   1226  // Handle negative overflow.
   1227  return INT64_MIN;
   1228 }
   1229 
   1230 static uint64_t SaturatingTruncateDoubleToUint64(double input) {
   1231  // Handle positive overflow.
   1232  if (input >= -double(INT64_MIN) * 2.0) {
   1233    return UINT64_MAX;
   1234  }
   1235  // Handle in-range values.
   1236  if (input > -1.0) {
   1237    return uint64_t(input);
   1238  }
   1239  // Handle NaN and negative overflow.
   1240  return 0;
   1241 }
   1242 
   1243 static double Int64ToDouble(int32_t x_hi, uint32_t x_lo) {
   1244  int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
   1245  return double(x);
   1246 }
   1247 
   1248 static float Int64ToFloat32(int32_t x_hi, uint32_t x_lo) {
   1249  int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
   1250  return float(x);
   1251 }
   1252 
   1253 static double Uint64ToDouble(int32_t x_hi, uint32_t x_lo) {
   1254  uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
   1255  return double(x);
   1256 }
   1257 
   1258 static float Uint64ToFloat32(int32_t x_hi, uint32_t x_lo) {
   1259  uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
   1260  return float(x);
   1261 }
   1262 
   1263 template <typename T>
   1264 static T Ceil(T value) {
   1265  // Perform addition to ensure quiet NaNs are returned. Also try to keep the
   1266  // NaN payload intact, so don't directly return a specific quiet NaN value.
   1267  if (std::isnan(value)) {
   1268    return value + value;
   1269  }
   1270  return std::ceil(value);
   1271 }
   1272 
   1273 template <typename T>
   1274 static T Floor(T value) {
   1275  // Perform addition to ensure quiet NaNs are returned. Also try to keep the
   1276  // NaN payload intact, so don't directly return a specific quiet NaN value.
   1277  if (std::isnan(value)) {
   1278    return value + value;
   1279  }
   1280  return std::floor(value);
   1281 }
   1282 
   1283 template <typename T>
   1284 static T Trunc(T value) {
   1285  // Perform addition to ensure quiet NaNs are returned. Also try to keep the
   1286  // NaN payload intact, so don't directly return a specific quiet NaN value.
   1287  if (std::isnan(value)) {
   1288    return value + value;
   1289  }
   1290  return std::trunc(value);
   1291 }
   1292 
   1293 template <typename T>
   1294 static T NearbyInt(T value) {
   1295  // Perform addition to ensure quiet NaNs are returned. Also try to keep the
   1296  // NaN payload intact, so don't directly return a specific quiet NaN value.
   1297  if (std::isnan(value)) {
   1298    return value + value;
   1299  }
   1300  return std::nearbyint(value);
   1301 }
   1302 
// Stack alignment on x86 Windows is 4 byte. Align to 16 bytes when calling
// rounding functions with double parameters.
//
// See |ABIStackAlignment| in "js/src/jit/x86/Assembler-x86.h".
#if defined(JS_CODEGEN_X86) && (!defined(__GNUC__) || defined(__MINGW32__))
#  define ALIGN_STACK_FOR_ROUNDING_FUNCTION \
    __attribute__((force_align_arg_pointer))
#else
#  define ALIGN_STACK_FOR_ROUNDING_FUNCTION
#endif

// Explicit instantiations of the double flavors whose addresses are taken
// by AddressOf() below.
template ALIGN_STACK_FOR_ROUNDING_FUNCTION double Ceil(double);
template ALIGN_STACK_FOR_ROUNDING_FUNCTION double Floor(double);
template ALIGN_STACK_FOR_ROUNDING_FUNCTION double Trunc(double);
template ALIGN_STACK_FOR_ROUNDING_FUNCTION double NearbyInt(double);

#undef ALIGN_STACK_FOR_ROUNDING_FUNCTION
   1320 
   1321 static void WasmArrayMemMove(uint8_t* destArrayData, uint32_t destIndex,
   1322                             const uint8_t* srcArrayData, uint32_t srcIndex,
   1323                             uint32_t elementSize, uint32_t count) {
   1324  AutoUnsafeCallWithABI unsafe;
   1325  memmove(&destArrayData[size_t(elementSize) * destIndex],
   1326          &srcArrayData[size_t(elementSize) * srcIndex],
   1327          size_t(elementSize) * count);
   1328 }
   1329 
// Move |count| AnyRef elements between possibly-overlapping wasm array
// storage, routing each store through the write-barrier wrapper that matches
// the destination array's heap location.
static void WasmArrayRefsMove(WasmArrayObject* destArrayObject,
                              WriteBarriered<AnyRef>* destArrayData,
                              uint32_t destIndex, AnyRef* srcArrayData,
                              uint32_t srcIndex, uint32_t count) {
  AutoUnsafeCallWithABI unsafe;

  // Using std::copy will call set() on the barrier wrapper under the hood.
  // Choose copy direction by address so overlapping ranges are moved
  // correctly (memmove-style).
  auto copyElements = [count](auto* dstBegin, auto* srcBegin) {
    if (uintptr_t(dstBegin) < uintptr_t(srcBegin)) {
      std::copy(srcBegin, srcBegin + count, dstBegin);
    } else {
      std::copy_backward(srcBegin, srcBegin + count, dstBegin + count);
    }
  };

  WriteBarriered<AnyRef>* dstBegin = destArrayData + destIndex;
  AnyRef* srcBegin = srcArrayData + srcIndex;
  // Tenured destinations go through GCPtr, nursery ones through PreBarriered
  // (barrier flavor inferred from the wrapper type names -- confirm against
  // gc/Barrier.h if changing this).
  if (destArrayObject->isTenured()) {
    copyElements((GCPtr<AnyRef>*)dstBegin, srcBegin);
  } else {
    copyElements((PreBarriered<AnyRef>*)dstBegin, srcBegin);
  }
}
   1353 
// Convert a C++ function pointer into the raw data pointer handed to jitted
// code.  Under the simulator the pointer is redirected so that calls from
// simulated code dispatch into the native implementation with the given ABI
// signature (|abiType| is unused otherwise).
template <class F>
static inline void* FuncCast(F* funcPtr, ABIFunctionType abiType) {
  void* pf = JS_FUNC_TO_DATA_PTR(void*, funcPtr);
#ifdef JS_SIMULATOR
  pf = Simulator::RedirectNativeFunction(pf, abiType);
#endif
  return pf;
}
   1362 
#ifdef WASM_CODEGEN_DEBUG
// Debug-build tracing helpers: each prints a single tagged value to stderr
// with a trailing space and no newline, so several traces compose on a line.
void wasm::PrintI32(int32_t val) { fprintf(stderr, "i32(%d) ", val); }

void wasm::PrintPtr(uint8_t* val) { fprintf(stderr, "ptr(%p) ", val); }

void wasm::PrintF32(float val) { fprintf(stderr, "f32(%f) ", val); }

void wasm::PrintF64(double val) { fprintf(stderr, "f64(%lf) ", val); }

void wasm::PrintText(const char* out) { fprintf(stderr, "%s", out); }
#endif
   1374 
   1375 void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
   1376  // See NeedsBuiltinThunk for a classification of the different names here.
   1377  switch (imm) {
   1378    case SymbolicAddress::HandleDebugTrap:
   1379      *abiType = Args_General0;
   1380      return FuncCast(WasmHandleDebugTrap, *abiType);
   1381    case SymbolicAddress::HandleRequestTierUp:
   1382      *abiType = Args_General1;
   1383      return FuncCast(WasmHandleRequestTierUp, *abiType);
   1384    case SymbolicAddress::HandleThrow:
   1385      *abiType = Args_General1;
   1386      return FuncCast(WasmHandleThrow, *abiType);
   1387    case SymbolicAddress::HandleTrap:
   1388      *abiType = Args_General0;
   1389      return FuncCast(WasmHandleTrap, *abiType);
   1390    case SymbolicAddress::ReportV128JSCall:
   1391      *abiType = Args_General0;
   1392      return FuncCast(WasmReportV128JSCall, *abiType);
   1393    case SymbolicAddress::CallImport_General:
   1394      *abiType = Args_Int32_GeneralInt32Int32General;
   1395      return FuncCast(Instance::callImport_general, *abiType);
   1396    case SymbolicAddress::CoerceInPlace_ToInt32:
   1397      *abiType = Args_General1;
   1398      return FuncCast(CoerceInPlace_ToInt32, *abiType);
   1399    case SymbolicAddress::CoerceInPlace_ToBigInt:
   1400      *abiType = Args_General1;
   1401      return FuncCast(CoerceInPlace_ToBigInt, *abiType);
   1402    case SymbolicAddress::CoerceInPlace_ToNumber:
   1403      *abiType = Args_General1;
   1404      return FuncCast(CoerceInPlace_ToNumber, *abiType);
   1405    case SymbolicAddress::CoerceInPlace_JitEntry:
   1406      *abiType = Args_General3;
   1407      return FuncCast(CoerceInPlace_JitEntry, *abiType);
   1408    case SymbolicAddress::ToInt32:
   1409      *abiType = Args_Int_Double;
   1410      return FuncCast<int32_t(double)>(JS::ToInt32, *abiType);
   1411    case SymbolicAddress::BoxValue_Anyref:
   1412      *abiType = Args_General1;
   1413      return FuncCast(BoxValue_Anyref, *abiType);
   1414    case SymbolicAddress::AllocateBigInt:
   1415      *abiType = Args_General0;
   1416      return FuncCast(AllocateBigIntTenuredNoGC, *abiType);
   1417    case SymbolicAddress::DivI64:
   1418      *abiType = Args_Int64_Int32Int32Int32Int32;
   1419      return FuncCast(DivI64, *abiType);
   1420    case SymbolicAddress::UDivI64:
   1421      *abiType = Args_Int64_Int32Int32Int32Int32;
   1422      return FuncCast(UDivI64, *abiType);
   1423    case SymbolicAddress::ModI64:
   1424      *abiType = Args_Int64_Int32Int32Int32Int32;
   1425      return FuncCast(ModI64, *abiType);
   1426    case SymbolicAddress::UModI64:
   1427      *abiType = Args_Int64_Int32Int32Int32Int32;
   1428      return FuncCast(UModI64, *abiType);
   1429    case SymbolicAddress::TruncateDoubleToUint64:
   1430      *abiType = Args_Int64_Double;
   1431      return FuncCast(TruncateDoubleToUint64, *abiType);
   1432    case SymbolicAddress::TruncateDoubleToInt64:
   1433      *abiType = Args_Int64_Double;
   1434      return FuncCast(TruncateDoubleToInt64, *abiType);
   1435    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
   1436      *abiType = Args_Int64_Double;
   1437      return FuncCast(SaturatingTruncateDoubleToUint64, *abiType);
   1438    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
   1439      *abiType = Args_Int64_Double;
   1440      return FuncCast(SaturatingTruncateDoubleToInt64, *abiType);
   1441    case SymbolicAddress::Uint64ToDouble:
   1442      *abiType = Args_Double_IntInt;
   1443      return FuncCast(Uint64ToDouble, *abiType);
   1444    case SymbolicAddress::Uint64ToFloat32:
   1445      *abiType = Args_Float32_IntInt;
   1446      return FuncCast(Uint64ToFloat32, *abiType);
   1447    case SymbolicAddress::Int64ToDouble:
   1448      *abiType = Args_Double_IntInt;
   1449      return FuncCast(Int64ToDouble, *abiType);
   1450    case SymbolicAddress::Int64ToFloat32:
   1451      *abiType = Args_Float32_IntInt;
   1452      return FuncCast(Int64ToFloat32, *abiType);
   1453 #if defined(JS_CODEGEN_ARM)
   1454    case SymbolicAddress::aeabi_idivmod:
   1455      *abiType = Args_Int64_GeneralGeneral;
   1456      return FuncCast(__aeabi_idivmod, *abiType);
   1457    case SymbolicAddress::aeabi_uidivmod:
   1458      *abiType = Args_Int64_GeneralGeneral;
   1459      return FuncCast(__aeabi_uidivmod, *abiType);
   1460 #endif
   1461    case SymbolicAddress::ModD:
   1462      *abiType = Args_Double_DoubleDouble;
   1463      return FuncCast(NumberMod, *abiType);
   1464    case SymbolicAddress::SinNativeD:
   1465      *abiType = Args_Double_Double;
   1466      return FuncCast<double(double)>(sin, *abiType);
   1467    case SymbolicAddress::SinFdlibmD:
   1468      *abiType = Args_Double_Double;
   1469      return FuncCast<double(double)>(fdlibm_sin, *abiType);
   1470    case SymbolicAddress::CosNativeD:
   1471      *abiType = Args_Double_Double;
   1472      return FuncCast<double(double)>(cos, *abiType);
   1473    case SymbolicAddress::CosFdlibmD:
   1474      *abiType = Args_Double_Double;
   1475      return FuncCast<double(double)>(fdlibm_cos, *abiType);
   1476    case SymbolicAddress::TanNativeD:
   1477      *abiType = Args_Double_Double;
   1478      return FuncCast<double(double)>(tan, *abiType);
   1479    case SymbolicAddress::TanFdlibmD:
   1480      *abiType = Args_Double_Double;
   1481      return FuncCast<double(double)>(fdlibm_tan, *abiType);
   1482    case SymbolicAddress::ASinD:
   1483      *abiType = Args_Double_Double;
   1484      return FuncCast<double(double)>(fdlibm_asin, *abiType);
   1485    case SymbolicAddress::ACosD:
   1486      *abiType = Args_Double_Double;
   1487      return FuncCast<double(double)>(fdlibm_acos, *abiType);
   1488    case SymbolicAddress::ATanD:
   1489      *abiType = Args_Double_Double;
   1490      return FuncCast<double(double)>(fdlibm_atan, *abiType);
   1491    case SymbolicAddress::CeilD:
   1492      *abiType = Args_Double_Double;
   1493      return FuncCast<double(double)>(Ceil, *abiType);
   1494    case SymbolicAddress::CeilF:
   1495      *abiType = Args_Float32_Float32;
   1496      return FuncCast<float(float)>(Ceil, *abiType);
   1497    case SymbolicAddress::FloorD:
   1498      *abiType = Args_Double_Double;
   1499      return FuncCast<double(double)>(Floor, *abiType);
   1500    case SymbolicAddress::FloorF:
   1501      *abiType = Args_Float32_Float32;
   1502      return FuncCast<float(float)>(Floor, *abiType);
   1503    case SymbolicAddress::TruncD:
   1504      *abiType = Args_Double_Double;
   1505      return FuncCast<double(double)>(Trunc, *abiType);
   1506    case SymbolicAddress::TruncF:
   1507      *abiType = Args_Float32_Float32;
   1508      return FuncCast<float(float)>(Trunc, *abiType);
   1509    case SymbolicAddress::NearbyIntD:
   1510      *abiType = Args_Double_Double;
   1511      return FuncCast<double(double)>(NearbyInt, *abiType);
   1512    case SymbolicAddress::NearbyIntF:
   1513      *abiType = Args_Float32_Float32;
   1514      return FuncCast<float(float)>(NearbyInt, *abiType);
   1515    case SymbolicAddress::ExpD:
   1516      *abiType = Args_Double_Double;
   1517      return FuncCast<double(double)>(fdlibm_exp, *abiType);
   1518    case SymbolicAddress::LogD:
   1519      *abiType = Args_Double_Double;
   1520      return FuncCast<double(double)>(fdlibm_log, *abiType);
   1521    case SymbolicAddress::PowD:
   1522      *abiType = Args_Double_DoubleDouble;
   1523      return FuncCast(ecmaPow, *abiType);
   1524    case SymbolicAddress::ATan2D:
   1525      *abiType = Args_Double_DoubleDouble;
   1526      return FuncCast(ecmaAtan2, *abiType);
   1527    case SymbolicAddress::ArrayMemMove:
   1528      *abiType = Args_Void_GeneralInt32GeneralInt32Int32Int32;
   1529      return FuncCast(WasmArrayMemMove, *abiType);
   1530    case SymbolicAddress::ArrayRefsMove:
   1531      *abiType = Args_Void_GeneralGeneralInt32GeneralInt32Int32;
   1532      return FuncCast(WasmArrayRefsMove, *abiType);
   1533 
   1534    case SymbolicAddress::MemoryGrowM32:
   1535      *abiType = Args_Int32_GeneralInt32Int32;
   1536      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM32));
   1537      return FuncCast(Instance::memoryGrow_m32, *abiType);
   1538    case SymbolicAddress::MemoryGrowM64:
   1539      *abiType = Args_Int64_GeneralInt64Int32;
   1540      MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrowM64));
   1541      return FuncCast(Instance::memoryGrow_m64, *abiType);
   1542    case SymbolicAddress::MemorySizeM32:
   1543      *abiType = Args_Int32_GeneralInt32;
   1544      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM32));
   1545      return FuncCast(Instance::memorySize_m32, *abiType);
   1546    case SymbolicAddress::MemorySizeM64:
   1547      *abiType = Args_Int64_GeneralInt32;
   1548      MOZ_ASSERT(*abiType == ToABIType(SASigMemorySizeM64));
   1549      return FuncCast(Instance::memorySize_m64, *abiType);
   1550    case SymbolicAddress::WaitI32M32:
   1551      *abiType = Args_Int32_GeneralInt32Int32Int64Int32;
   1552      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M32));
   1553      return FuncCast(Instance::wait_i32_m32, *abiType);
   1554    case SymbolicAddress::WaitI32M64:
   1555      *abiType = Args_Int32_GeneralInt64Int32Int64Int32;
   1556      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32M64));
   1557      return FuncCast(Instance::wait_i32_m64, *abiType);
   1558    case SymbolicAddress::WaitI64M32:
   1559      *abiType = Args_Int32_GeneralInt32Int64Int64Int32;
   1560      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M32));
   1561      return FuncCast(Instance::wait_i64_m32, *abiType);
   1562    case SymbolicAddress::WaitI64M64:
   1563      *abiType = Args_Int32_GeneralInt64Int64Int64Int32;
   1564      MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64M64));
   1565      return FuncCast(Instance::wait_i64_m64, *abiType);
   1566    case SymbolicAddress::WakeM32:
   1567      *abiType = Args_Int32_GeneralInt32Int32Int32;
   1568      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM32));
   1569      return FuncCast(Instance::wake_m32, *abiType);
   1570    case SymbolicAddress::WakeM64:
   1571      *abiType = Args_Int32_GeneralInt64Int32Int32;
   1572      MOZ_ASSERT(*abiType == ToABIType(SASigWakeM64));
   1573      return FuncCast(Instance::wake_m64, *abiType);
   1574    case SymbolicAddress::MemCopyM32:
   1575      *abiType = Args_Int32_GeneralInt32Int32Int32General;
   1576      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM32));
   1577      return FuncCast(Instance::memCopy_m32, *abiType);
   1578    case SymbolicAddress::MemCopySharedM32:
   1579      *abiType = Args_Int32_GeneralInt32Int32Int32General;
   1580      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM32));
   1581      return FuncCast(Instance::memCopyShared_m32, *abiType);
   1582    case SymbolicAddress::MemCopyM64:
   1583      *abiType = Args_Int32_GeneralInt64Int64Int64General;
   1584      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyM64));
   1585      return FuncCast(Instance::memCopy_m64, *abiType);
   1586    case SymbolicAddress::MemCopySharedM64:
   1587      *abiType = Args_Int32_GeneralInt64Int64Int64General;
   1588      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopySharedM64));
   1589      return FuncCast(Instance::memCopyShared_m64, *abiType);
   1590    case SymbolicAddress::MemCopyAny:
   1591      *abiType = Args_Int32_GeneralInt64Int64Int64Int32Int32;
   1592      MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyAny));
   1593      return FuncCast(Instance::memCopy_any, *abiType);
   1594    case SymbolicAddress::DataDrop:
   1595      *abiType = Args_Int32_GeneralInt32;
   1596      MOZ_ASSERT(*abiType == ToABIType(SASigDataDrop));
   1597      return FuncCast(Instance::dataDrop, *abiType);
   1598    case SymbolicAddress::MemFillM32:
   1599      *abiType = Args_Int32_GeneralInt32Int32Int32General;
   1600      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM32));
   1601      return FuncCast(Instance::memFill_m32, *abiType);
   1602    case SymbolicAddress::MemFillSharedM32:
   1603      *abiType = Args_Int32_GeneralInt32Int32Int32General;
   1604      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM32));
   1605      return FuncCast(Instance::memFillShared_m32, *abiType);
   1606    case SymbolicAddress::MemFillM64:
   1607      *abiType = Args_Int32_GeneralInt64Int32Int64General;
   1608      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillM64));
   1609      return FuncCast(Instance::memFill_m64, *abiType);
   1610    case SymbolicAddress::MemFillSharedM64:
   1611      *abiType = Args_Int32_GeneralInt64Int32Int64General;
   1612      MOZ_ASSERT(*abiType == ToABIType(SASigMemFillSharedM64));
   1613      return FuncCast(Instance::memFillShared_m64, *abiType);
   1614    case SymbolicAddress::MemDiscardM32:
   1615      *abiType = Args_Int32_GeneralInt32Int32General;
   1616      MOZ_ASSERT(*abiType == ToABIType(SASigMemDiscardM32));
   1617      return FuncCast(Instance::memDiscard_m32, *abiType);
   1618    case SymbolicAddress::MemDiscardSharedM32:
   1619      *abiType = Args_Int32_GeneralInt32Int32General;
   1620      MOZ_ASSERT(*abiType == ToABIType(SASigMemDiscardSharedM32));
   1621      return FuncCast(Instance::memDiscardShared_m32, *abiType);
   1622    case SymbolicAddress::MemDiscardM64:
   1623      *abiType = Args_Int32_GeneralInt64Int64General;
   1624      MOZ_ASSERT(*abiType == ToABIType(SASigMemDiscardM64));
   1625      return FuncCast(Instance::memDiscard_m64, *abiType);
   1626    case SymbolicAddress::MemDiscardSharedM64:
   1627      *abiType = Args_Int32_GeneralInt64Int64General;
   1628      MOZ_ASSERT(*abiType == ToABIType(SASigMemDiscardSharedM64));
   1629      return FuncCast(Instance::memDiscardShared_m64, *abiType);
   1630    case SymbolicAddress::MemInitM32:
   1631      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
   1632      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM32));
   1633      return FuncCast(Instance::memInit_m32, *abiType);
   1634    case SymbolicAddress::MemInitM64:
   1635      *abiType = Args_Int32_GeneralInt64Int32Int32Int32Int32;
   1636      MOZ_ASSERT(*abiType == ToABIType(SASigMemInitM64));
   1637      return FuncCast(Instance::memInit_m64, *abiType);
   1638    case SymbolicAddress::TableCopy:
   1639      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
   1640      MOZ_ASSERT(*abiType == ToABIType(SASigTableCopy));
   1641      return FuncCast(Instance::tableCopy, *abiType);
   1642    case SymbolicAddress::ElemDrop:
   1643      *abiType = Args_Int32_GeneralInt32;
   1644      MOZ_ASSERT(*abiType == ToABIType(SASigElemDrop));
   1645      return FuncCast(Instance::elemDrop, *abiType);
   1646    case SymbolicAddress::TableFill:
   1647      *abiType = Args_Int32_GeneralInt32GeneralInt32Int32;
   1648      MOZ_ASSERT(*abiType == ToABIType(SASigTableFill));
   1649      return FuncCast(Instance::tableFill, *abiType);
   1650    case SymbolicAddress::TableInit:
   1651      *abiType = Args_Int32_GeneralInt32Int32Int32Int32Int32;
   1652      MOZ_ASSERT(*abiType == ToABIType(SASigTableInit));
   1653      return FuncCast(Instance::tableInit, *abiType);
   1654    case SymbolicAddress::TableGet:
   1655      *abiType = Args_General_GeneralInt32Int32;
   1656      MOZ_ASSERT(*abiType == ToABIType(SASigTableGet));
   1657      return FuncCast(Instance::tableGet, *abiType);
   1658    case SymbolicAddress::TableGrow:
   1659      *abiType = Args_Int32_GeneralGeneralInt32Int32;
   1660      MOZ_ASSERT(*abiType == ToABIType(SASigTableGrow));
   1661      return FuncCast(Instance::tableGrow, *abiType);
   1662    case SymbolicAddress::TableSet:
   1663      *abiType = Args_Int32_GeneralInt32GeneralInt32;
   1664      MOZ_ASSERT(*abiType == ToABIType(SASigTableSet));
   1665      return FuncCast(Instance::tableSet, *abiType);
   1666    case SymbolicAddress::TableSize:
   1667      *abiType = Args_Int32_GeneralInt32;
   1668      MOZ_ASSERT(*abiType == ToABIType(SASigTableSize));
   1669      return FuncCast(Instance::tableSize, *abiType);
   1670    case SymbolicAddress::RefFunc:
   1671      *abiType = Args_General_GeneralInt32;
   1672      MOZ_ASSERT(*abiType == ToABIType(SASigRefFunc));
   1673      return FuncCast(Instance::refFunc, *abiType);
   1674    case SymbolicAddress::PostBarrierEdge:
   1675      *abiType = Args_Int32_GeneralGeneral;
   1676      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierEdge));
   1677      return FuncCast(Instance::postBarrierEdge, *abiType);
   1678    case SymbolicAddress::PostBarrierEdgePrecise:
   1679      *abiType = Args_Int32_GeneralGeneralGeneral;
   1680      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierEdgePrecise));
   1681      return FuncCast(Instance::postBarrierEdgePrecise, *abiType);
   1682    case SymbolicAddress::PostBarrierWholeCell:
   1683      *abiType = Args_Int32_GeneralGeneral;
   1684      MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierWholeCell));
   1685      return FuncCast(Instance::postBarrierWholeCell, *abiType);
   1686    case SymbolicAddress::StructNewIL_true:
   1687      *abiType = Args_General_GeneralInt32General;
   1688      MOZ_ASSERT(*abiType == ToABIType(SASigStructNewIL_true));
   1689      return FuncCast(Instance::structNewIL<true>, *abiType);
   1690    case SymbolicAddress::StructNewIL_false:
   1691      *abiType = Args_General_GeneralInt32General;
   1692      MOZ_ASSERT(*abiType == ToABIType(SASigStructNewIL_false));
   1693      return FuncCast(Instance::structNewIL<false>, *abiType);
   1694    case SymbolicAddress::StructNewOOL_true:
   1695      *abiType = Args_General_GeneralInt32General;
   1696      MOZ_ASSERT(*abiType == ToABIType(SASigStructNewOOL_true));
   1697      return FuncCast(Instance::structNewOOL<true>, *abiType);
   1698    case SymbolicAddress::StructNewOOL_false:
   1699      *abiType = Args_General_GeneralInt32General;
   1700      MOZ_ASSERT(*abiType == ToABIType(SASigStructNewOOL_false));
   1701      return FuncCast(Instance::structNewOOL<false>, *abiType);
   1702    case SymbolicAddress::ArrayNew_true:
   1703      *abiType = Args_General_GeneralInt32Int32General;
   1704      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNew_true));
   1705      return FuncCast(Instance::arrayNew<true>, *abiType);
   1706    case SymbolicAddress::ArrayNew_false:
   1707      *abiType = Args_General_GeneralInt32Int32General;
   1708      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNew_false));
   1709      return FuncCast(Instance::arrayNew<false>, *abiType);
   1710    case SymbolicAddress::ArrayNewData:
   1711      *abiType = Args_General_GeneralInt32Int32Int32GeneralInt32;
   1712      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNewData));
   1713      return FuncCast(Instance::arrayNewData, *abiType);
   1714    case SymbolicAddress::ArrayNewElem:
   1715      *abiType = Args_General_GeneralInt32Int32Int32GeneralInt32;
   1716      MOZ_ASSERT(*abiType == ToABIType(SASigArrayNewElem));
   1717      return FuncCast(Instance::arrayNewElem, *abiType);
   1718    case SymbolicAddress::ArrayInitData:
   1719      *abiType = Args_Int32_GeneralGeneralInt32Int32Int32Int32;
   1720      MOZ_ASSERT(*abiType == ToABIType(SASigArrayInitData));
   1721      return FuncCast(Instance::arrayInitData, *abiType);
   1722    case SymbolicAddress::ArrayInitElem:
   1723      *abiType = Args_Int32_GeneralGeneralInt32Int32Int32Int32Int32;
   1724      MOZ_ASSERT(*abiType == ToABIType(SASigArrayInitElem));
   1725      return FuncCast(Instance::arrayInitElem, *abiType);
   1726    case SymbolicAddress::ArrayCopy:
   1727      *abiType = Args_Int32_GeneralGeneralInt32GeneralInt32Int32Int32;
   1728      MOZ_ASSERT(*abiType == ToABIType(SASigArrayCopy));
   1729      return FuncCast(Instance::arrayCopy, *abiType);
   1730    case SymbolicAddress::SlotsToAllocKindBytesTable:
   1731      return (void*)gc::slotsToAllocKindBytes;
   1732    case SymbolicAddress::ExceptionNew:
   1733      *abiType = Args_General2;
   1734      MOZ_ASSERT(*abiType == ToABIType(SASigExceptionNew));
   1735      return FuncCast(Instance::exceptionNew, *abiType);
   1736    case SymbolicAddress::ThrowException:
   1737      *abiType = Args_Int32_GeneralGeneral;
   1738      MOZ_ASSERT(*abiType == ToABIType(SASigThrowException));
   1739      return FuncCast(Instance::throwException, *abiType);
   1740 
   1741 #ifdef ENABLE_WASM_JSPI
   1742    case SymbolicAddress::UpdateSuspenderState:
   1743      *abiType = Args_Int32_GeneralGeneralInt32;
   1744      MOZ_ASSERT(*abiType == ToABIType(SASigUpdateSuspenderState));
   1745      return FuncCast(UpdateSuspenderState, *abiType);
   1746 #endif
   1747 
   1748 #ifdef WASM_CODEGEN_DEBUG
   1749    case SymbolicAddress::PrintI32:
   1750      *abiType = Args_General1;
   1751      return FuncCast(PrintI32, *abiType);
   1752    case SymbolicAddress::PrintPtr:
   1753      *abiType = Args_General1;
   1754      return FuncCast(PrintPtr, *abiType);
   1755    case SymbolicAddress::PrintF32:
   1756      *abiType = Args_Int_Float32;
   1757      return FuncCast(PrintF32, *abiType);
   1758    case SymbolicAddress::PrintF64:
   1759      *abiType = Args_Int_Double;
   1760      return FuncCast(PrintF64, *abiType);
   1761    case SymbolicAddress::PrintText:
   1762      *abiType = Args_General1;
   1763      return FuncCast(PrintText, *abiType);
   1764 #endif
   1765 #define VISIT_BUILTIN_FUNC(op, export, sa_name, abitype, needs_thunk, entry, \
   1766                           ...)                                              \
   1767  case SymbolicAddress::sa_name:                                             \
   1768    *abiType = abitype;                                                      \
   1769    return FuncCast(entry, *abiType);
   1770      FOR_EACH_BUILTIN_MODULE_FUNC(VISIT_BUILTIN_FUNC)
   1771 #undef VISIT_BUILTIN_FUNC
   1772    case SymbolicAddress::Limit:
   1773      break;
   1774  }
   1775 
   1776  MOZ_CRASH("Bad SymbolicAddress");
   1777 }
   1778 
   1779 bool wasm::IsRoundingFunction(SymbolicAddress callee, jit::RoundingMode* mode) {
   1780  switch (callee) {
   1781    case SymbolicAddress::FloorD:
   1782    case SymbolicAddress::FloorF:
   1783      *mode = jit::RoundingMode::Down;
   1784      return true;
   1785    case SymbolicAddress::CeilD:
   1786    case SymbolicAddress::CeilF:
   1787      *mode = jit::RoundingMode::Up;
   1788      return true;
   1789    case SymbolicAddress::TruncD:
   1790    case SymbolicAddress::TruncF:
   1791      *mode = jit::RoundingMode::TowardsZero;
   1792      return true;
   1793    case SymbolicAddress::NearbyIntD:
   1794    case SymbolicAddress::NearbyIntF:
   1795      *mode = jit::RoundingMode::NearestTiesToEven;
   1796      return true;
   1797    default:
   1798      return false;
   1799  }
   1800 }
   1801 
// Returns whether calls to `sym` must be routed through a builtin thunk
// (which handles the exit from, and re-entry into, the wasm activation).
// Must be exhaustive over SymbolicAddress: an unhandled value crashes below.
bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
  // Also see "The Wasm Builtin ABIs" in WasmFrame.h.
  switch (sym) {
    // No thunk, because they do their work within the activation
    case SymbolicAddress::HandleThrow:  // GenerateThrowStub
    case SymbolicAddress::HandleTrap:   // GenerateTrapExit
      return false;

    // No thunk, because some work has to be done within the activation before
    // the activation exit: when called, arbitrary wasm registers are live and
    // must be saved, and the stack pointer may not be aligned for any ABI.
    case SymbolicAddress::HandleDebugTrap:      // GenerateDebugStub
    case SymbolicAddress::HandleRequestTierUp:  // GenerateRequestTierUpStub
    // (Note: the two cases above fall through to the `return false` below.)

    // No thunk, because their caller manages the activation exit explicitly
    case SymbolicAddress::CallImport_General:      // GenerateImportInterpExit
    case SymbolicAddress::CoerceInPlace_ToInt32:   // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToNumber:  // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToBigInt:  // GenerateImportJitExit
    case SymbolicAddress::BoxValue_Anyref:         // GenerateImportJitExit
      return false;

#ifdef WASM_CODEGEN_DEBUG
    // No thunk, because they call directly into C++ code that does not
    // interact with the rest of the VM at all.
    case SymbolicAddress::PrintI32:  // Debug stub printers
    case SymbolicAddress::PrintPtr:
    case SymbolicAddress::PrintF32:
    case SymbolicAddress::PrintF64:
    case SymbolicAddress::PrintText:
      return false;
#endif

    // No thunk because they're just data, not code
    case SymbolicAddress::SlotsToAllocKindBytesTable:
      return false;

    // Everyone else gets a thunk to handle the exit from the activation
    case SymbolicAddress::ToInt32:
    case SymbolicAddress::DivI64:
    case SymbolicAddress::UDivI64:
    case SymbolicAddress::ModI64:
    case SymbolicAddress::UModI64:
    case SymbolicAddress::TruncateDoubleToUint64:
    case SymbolicAddress::TruncateDoubleToInt64:
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
    case SymbolicAddress::Uint64ToDouble:
    case SymbolicAddress::Uint64ToFloat32:
    case SymbolicAddress::Int64ToDouble:
    case SymbolicAddress::Int64ToFloat32:
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
    case SymbolicAddress::aeabi_uidivmod:
#endif
    case SymbolicAddress::AllocateBigInt:
    case SymbolicAddress::ModD:
    case SymbolicAddress::SinNativeD:
    case SymbolicAddress::SinFdlibmD:
    case SymbolicAddress::CosNativeD:
    case SymbolicAddress::CosFdlibmD:
    case SymbolicAddress::TanNativeD:
    case SymbolicAddress::TanFdlibmD:
    case SymbolicAddress::ASinD:
    case SymbolicAddress::ACosD:
    case SymbolicAddress::ATanD:
    case SymbolicAddress::CeilD:
    case SymbolicAddress::CeilF:
    case SymbolicAddress::FloorD:
    case SymbolicAddress::FloorF:
    case SymbolicAddress::TruncD:
    case SymbolicAddress::TruncF:
    case SymbolicAddress::NearbyIntD:
    case SymbolicAddress::NearbyIntF:
    case SymbolicAddress::ExpD:
    case SymbolicAddress::LogD:
    case SymbolicAddress::PowD:
    case SymbolicAddress::ATan2D:
    case SymbolicAddress::ArrayMemMove:
    case SymbolicAddress::ArrayRefsMove:
    case SymbolicAddress::MemoryGrowM32:
    case SymbolicAddress::MemoryGrowM64:
    case SymbolicAddress::MemorySizeM32:
    case SymbolicAddress::MemorySizeM64:
    case SymbolicAddress::WaitI32M32:
    case SymbolicAddress::WaitI32M64:
    case SymbolicAddress::WaitI64M32:
    case SymbolicAddress::WaitI64M64:
    case SymbolicAddress::WakeM32:
    case SymbolicAddress::WakeM64:
    case SymbolicAddress::CoerceInPlace_JitEntry:
    case SymbolicAddress::ReportV128JSCall:
    case SymbolicAddress::MemCopyM32:
    case SymbolicAddress::MemCopySharedM32:
    case SymbolicAddress::MemCopyM64:
    case SymbolicAddress::MemCopySharedM64:
    case SymbolicAddress::MemCopyAny:
    case SymbolicAddress::DataDrop:
    case SymbolicAddress::MemFillM32:
    case SymbolicAddress::MemFillSharedM32:
    case SymbolicAddress::MemFillM64:
    case SymbolicAddress::MemFillSharedM64:
    case SymbolicAddress::MemDiscardM32:
    case SymbolicAddress::MemDiscardSharedM32:
    case SymbolicAddress::MemDiscardM64:
    case SymbolicAddress::MemDiscardSharedM64:
    case SymbolicAddress::MemInitM32:
    case SymbolicAddress::MemInitM64:
    case SymbolicAddress::TableCopy:
    case SymbolicAddress::ElemDrop:
    case SymbolicAddress::TableFill:
    case SymbolicAddress::TableGet:
    case SymbolicAddress::TableGrow:
    case SymbolicAddress::TableInit:
    case SymbolicAddress::TableSet:
    case SymbolicAddress::TableSize:
    case SymbolicAddress::RefFunc:
    case SymbolicAddress::PostBarrierEdge:
    case SymbolicAddress::PostBarrierEdgePrecise:
    case SymbolicAddress::PostBarrierWholeCell:
    case SymbolicAddress::ExceptionNew:
    case SymbolicAddress::ThrowException:
    case SymbolicAddress::StructNewIL_true:
    case SymbolicAddress::StructNewIL_false:
    case SymbolicAddress::StructNewOOL_true:
    case SymbolicAddress::StructNewOOL_false:
    case SymbolicAddress::ArrayNew_true:
    case SymbolicAddress::ArrayNew_false:
    case SymbolicAddress::ArrayNewData:
    case SymbolicAddress::ArrayNewElem:
    case SymbolicAddress::ArrayInitData:
    case SymbolicAddress::ArrayInitElem:
    case SymbolicAddress::ArrayCopy:
#ifdef ENABLE_WASM_JSPI
    case SymbolicAddress::UpdateSuspenderState:
#endif
      return true;

// Builtin-module functions carry their thunk requirement in the
// FOR_EACH_BUILTIN_MODULE_FUNC table (the `needs_thunk` column).
#define VISIT_BUILTIN_FUNC(op, export, sa_name, sa_type, needs_thunk, ...) \
  case SymbolicAddress::sa_name:                                           \
    return needs_thunk;
      FOR_EACH_BUILTIN_MODULE_FUNC(VISIT_BUILTIN_FUNC)
#undef VISIT_BUILTIN_FUNC
    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("unexpected symbolic address");
}
   1951 
   1952 static bool NeedsDynamicSwitchToMainStack(SymbolicAddress sym) {
   1953  MOZ_ASSERT(NeedsBuiltinThunk(sym));
   1954  switch (sym) {
   1955 #if ENABLE_WASM_JSPI
   1956    // These builtins must run on the suspendable so that they can access the
   1957    // wasm::Context::activeSuspender().
   1958    case SymbolicAddress::UpdateSuspenderState:
   1959    case SymbolicAddress::CurrentSuspender:
   1960      return false;
   1961 #endif
   1962 
   1963    // Nothing else should be running on a suspendable stack right now.
   1964    default:
   1965      return true;
   1966  }
   1967 }
   1968 
   1969 // ============================================================================
   1970 // [SMDOC] JS Fast Wasm Imports
   1971 //
   1972 // JS builtins that can be imported by wasm modules and called efficiently
   1973 // through thunks. These thunks conform to the internal wasm ABI and thus can be
   1974 // patched in for import calls. Calling a JS builtin through a thunk is much
   1975 // faster than calling out through the generic import call trampoline which will
   1976 // end up in the slowest C++ Instance::callImport path.
   1977 //
   1978 // Each JS builtin can have several overloads. These must all be enumerated in
   1979 // PopulateTypedNatives() so they can be included in the process-wide thunk set.
// In addition to the traditional overloading based on types, every builtin
   1981 // can also have a version implemented by fdlibm or the native math library.
   1982 // This is useful for fingerprinting resistance.
   1983 
   1984 #define FOR_EACH_SIN_COS_TAN_NATIVE(_) \
   1985  _(math_sin, MathSin)                 \
   1986  _(math_tan, MathTan)                 \
   1987  _(math_cos, MathCos)
   1988 
   1989 #define FOR_EACH_UNARY_NATIVE(_) \
   1990  _(math_exp, MathExp)           \
   1991  _(math_log, MathLog)           \
   1992  _(math_asin, MathASin)         \
   1993  _(math_atan, MathATan)         \
   1994  _(math_acos, MathACos)         \
   1995  _(math_log10, MathLog10)       \
   1996  _(math_log2, MathLog2)         \
   1997  _(math_log1p, MathLog1P)       \
   1998  _(math_expm1, MathExpM1)       \
   1999  _(math_sinh, MathSinH)         \
   2000  _(math_tanh, MathTanH)         \
   2001  _(math_cosh, MathCosH)         \
   2002  _(math_asinh, MathASinH)       \
   2003  _(math_atanh, MathATanH)       \
   2004  _(math_acosh, MathACosH)       \
   2005  _(math_sign, MathSign)         \
   2006  _(math_trunc, MathTrunc)       \
   2007  _(math_cbrt, MathCbrt)
   2008 
   2009 #define FOR_EACH_BINARY_NATIVE(_) \
   2010  _(ecmaAtan2, MathATan2)         \
   2011  _(ecmaHypot, MathHypot)         \
   2012  _(ecmaPow, MathPow)
   2013 
   2014 #define DEFINE_SIN_COS_TAN_FLOAT_WRAPPER(func, _) \
   2015  static float func##_native_impl_f32(float x) {  \
   2016    return float(func##_native_impl(double(x)));  \
   2017  }                                               \
   2018  static float func##_fdlibm_impl_f32(float x) {  \
   2019    return float(func##_fdlibm_impl(double(x)));  \
   2020  }
   2021 
   2022 #define DEFINE_UNARY_FLOAT_WRAPPER(func, _) \
   2023  static float func##_impl_f32(float x) {   \
   2024    return float(func##_impl(double(x)));   \
   2025  }
   2026 
   2027 #define DEFINE_BINARY_FLOAT_WRAPPER(func, _)  \
   2028  static float func##_f32(float x, float y) { \
   2029    return float(func(double(x), double(y))); \
   2030  }
   2031 
   2032 FOR_EACH_SIN_COS_TAN_NATIVE(DEFINE_SIN_COS_TAN_FLOAT_WRAPPER)
   2033 FOR_EACH_UNARY_NATIVE(DEFINE_UNARY_FLOAT_WRAPPER)
   2034 FOR_EACH_BINARY_NATIVE(DEFINE_BINARY_FLOAT_WRAPPER)
   2035 
   2036 #undef DEFINE_UNARY_FLOAT_WRAPPER
   2037 #undef DEFINE_BINARY_FLOAT_WRAPPER
   2038 
   2039 struct TypedNative {
   2040  InlinableNative native;
   2041  ABIFunctionType abiType;
   2042  enum class FdlibmImpl : uint8_t { No, Yes } fdlibm;
   2043 
   2044  TypedNative(InlinableNative native, ABIFunctionType abiType,
   2045              FdlibmImpl fdlibm)
   2046      : native(native), abiType(abiType), fdlibm(fdlibm) {}
   2047 
   2048  using Lookup = TypedNative;
   2049  static HashNumber hash(const Lookup& l) {
   2050    return HashGeneric(uint32_t(l.native), uint32_t(l.abiType),
   2051                       uint32_t(l.fdlibm));
   2052  }
   2053  static bool match(const TypedNative& lhs, const Lookup& rhs) {
   2054    return lhs.native == rhs.native && lhs.abiType == rhs.abiType &&
   2055           lhs.fdlibm == rhs.fdlibm;
   2056  }
   2057 };
   2058 
   2059 using TypedNativeToFuncPtrMap =
   2060    HashMap<TypedNative, void*, TypedNative, SystemAllocPolicy>;
   2061 
   2062 static bool PopulateTypedNatives(TypedNativeToFuncPtrMap* typedNatives) {
   2063 #define ADD_OVERLOAD(funcName, native, abiType, fdlibm)                   \
   2064  if (!typedNatives->putNew(TypedNative(InlinableNative::native, abiType, \
   2065                                        TypedNative::FdlibmImpl::fdlibm), \
   2066                            FuncCast(funcName, abiType)))                 \
   2067    return false;
   2068 
   2069 #define ADD_SIN_COS_TAN_OVERLOADS(funcName, native)                          \
   2070  ADD_OVERLOAD(funcName##_native_impl, native, Args_Double_Double, No)       \
   2071  ADD_OVERLOAD(funcName##_fdlibm_impl, native, Args_Double_Double, Yes)      \
   2072  ADD_OVERLOAD(funcName##_native_impl_f32, native, Args_Float32_Float32, No) \
   2073  ADD_OVERLOAD(funcName##_fdlibm_impl_f32, native, Args_Float32_Float32, Yes)
   2074 
   2075 #define ADD_UNARY_OVERLOADS(funcName, native)                   \
   2076  ADD_OVERLOAD(funcName##_impl, native, Args_Double_Double, No) \
   2077  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32, No)
   2078 
   2079 #define ADD_BINARY_OVERLOADS(funcName, native)                 \
   2080  ADD_OVERLOAD(funcName, native, Args_Double_DoubleDouble, No) \
   2081  ADD_OVERLOAD(funcName##_f32, native, Args_Float32_Float32Float32, No)
   2082 
   2083  FOR_EACH_SIN_COS_TAN_NATIVE(ADD_SIN_COS_TAN_OVERLOADS)
   2084  FOR_EACH_UNARY_NATIVE(ADD_UNARY_OVERLOADS)
   2085  FOR_EACH_BINARY_NATIVE(ADD_BINARY_OVERLOADS)
   2086 
   2087 #undef ADD_UNARY_OVERLOADS
   2088 #undef ADD_BINARY_OVERLOADS
   2089 
   2090  return true;
   2091 }
   2092 
   2093 #undef FOR_EACH_UNARY_NATIVE
   2094 #undef FOR_EACH_BINARY_NATIVE
   2095 
   2096 // ============================================================================
   2097 // [SMDOC] Process-wide builtin thunk set
   2098 //
   2099 // Thunks are inserted between wasm calls and the C++ callee and achieve two
   2100 // things:
   2101 //  - bridging the few differences between the internal wasm ABI and the
   2102 //    external native ABI (viz. float returns on x86 and soft-fp ARM)
   2103 //  - executing an exit prologue/epilogue which in turn allows any profiling
   2104 //    iterator to see the full stack up to the wasm operation that called out
   2105 //
   2106 // Thunks are created for two kinds of C++ callees, enumerated above:
   2107 //  - SymbolicAddress: for statically compiled calls in the wasm module
   2108 //  - Imported JS builtins: optimized calls to imports
   2109 //
   2110 // All thunks are created up front, lazily, when the first wasm module is
   2111 // compiled in the process. Thunks are kept alive until the JS engine shuts down
   2112 // in the process. No thunks are created at runtime after initialization. This
   2113 // simple scheme allows several simplifications:
   2114 //  - no reference counting to keep thunks alive
   2115 //  - no problems toggling W^X permissions which, because of multiple executing
   2116 //    threads, would require each thunk allocation to be on its own page
   2117 // The cost for creating all thunks at once is relatively low since all thunks
   2118 // fit within the smallest executable-code allocation quantum (64k).
   2119 
// Maps each typed-native overload to the index of its thunk's code range.
using TypedNativeToCodeRangeMap =
    HashMap<TypedNative, uint32_t, TypedNative, SystemAllocPolicy>;

// Per-SymbolicAddress code-range index; UINT32_MAX marks "no thunk needed".
using SymbolicAddressToCodeRangeArray =
    EnumeratedArray<SymbolicAddress, uint32_t, size_t(SymbolicAddress::Limit)>;
   2125 
   2126 struct BuiltinThunks {
   2127  uint8_t* codeBase;
   2128  size_t codeSize;
   2129  CodeRangeVector codeRanges;
   2130  TypedNativeToCodeRangeMap typedNativeToCodeRange;
   2131  SymbolicAddressToCodeRangeArray symbolicAddressToCodeRange;
   2132  uint32_t provisionalLazyJitEntryOffset;
   2133 
   2134  BuiltinThunks() : codeBase(nullptr), codeSize(0) {}
   2135 
   2136  ~BuiltinThunks() {
   2137    if (codeBase) {
   2138      DeallocateExecutableMemory(codeBase, codeSize);
   2139    }
   2140  }
   2141 };
   2142 
// Serializes one-time creation of the process-wide builtin thunks.
MOZ_RUNINIT Mutex initBuiltinThunks(mutexid::WasmInitBuiltinThunks);
// Points at the process-wide thunk set once initialized; atomic so it can be
// read without holding initBuiltinThunks.
mozilla::Atomic<const BuiltinThunks*> builtinThunks;
   2145 
   2146 bool wasm::EnsureBuiltinThunksInitialized() {
   2147  AutoMarkJitCodeWritableForThread writable;
   2148  return EnsureBuiltinThunksInitialized(writable);
   2149 }
   2150 
   2151 bool wasm::EnsureBuiltinThunksInitialized(
   2152    AutoMarkJitCodeWritableForThread& writable) {
   2153  LockGuard<Mutex> guard(initBuiltinThunks);
   2154  if (builtinThunks) {
   2155    return true;
   2156  }
   2157 
   2158  auto thunks = MakeUnique<BuiltinThunks>();
   2159  if (!thunks) {
   2160    return false;
   2161  }
   2162 
   2163  LifoAlloc lifo(BUILTIN_THUNK_LIFO_SIZE, js::MallocArena);
   2164  TempAllocator tempAlloc(&lifo);
   2165  WasmMacroAssembler masm(tempAlloc);
   2166  AutoCreatedBy acb(masm, "wasm::EnsureBuiltinThunksInitialized");
   2167 
   2168  for (auto sym : MakeEnumeratedRange(SymbolicAddress::Limit)) {
   2169    if (!NeedsBuiltinThunk(sym)) {
   2170      thunks->symbolicAddressToCodeRange[sym] = UINT32_MAX;
   2171      continue;
   2172    }
   2173 
   2174    uint32_t codeRangeIndex = thunks->codeRanges.length();
   2175    thunks->symbolicAddressToCodeRange[sym] = codeRangeIndex;
   2176 
   2177    ABIFunctionType abiType;
   2178    void* funcPtr = AddressOf(sym, &abiType);
   2179 
   2180    ExitReason exitReason(sym);
   2181 
   2182    // All thunked builtins must use the wasm ABI.
   2183    MOZ_ASSERT(ABIForBuiltin(sym) == ABIKind::Wasm);
   2184 
   2185    CallableOffsets offsets;
   2186    if (!GenerateBuiltinThunk(masm, abiType, NeedsDynamicSwitchToMainStack(sym),
   2187                              exitReason, funcPtr, &offsets)) {
   2188      return false;
   2189    }
   2190    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
   2191      return false;
   2192    }
   2193  }
   2194 
   2195  TypedNativeToFuncPtrMap typedNatives;
   2196  if (!PopulateTypedNatives(&typedNatives)) {
   2197    return false;
   2198  }
   2199 
   2200  for (TypedNativeToFuncPtrMap::Range r = typedNatives.all(); !r.empty();
   2201       r.popFront()) {
   2202    TypedNative typedNative = r.front().key();
   2203 
   2204    uint32_t codeRangeIndex = thunks->codeRanges.length();
   2205    if (!thunks->typedNativeToCodeRange.putNew(typedNative, codeRangeIndex)) {
   2206      return false;
   2207    }
   2208 
   2209    ABIFunctionType abiType = typedNative.abiType;
   2210    void* funcPtr = r.front().value();
   2211 
   2212    ExitReason exitReason = ExitReason::Fixed::BuiltinNative;
   2213 
   2214    CallableOffsets offsets;
   2215    if (!GenerateBuiltinThunk(masm, abiType, /*dynamicSwitchToMainStack*/ true,
   2216                              exitReason, funcPtr, &offsets)) {
   2217      return false;
   2218    }
   2219    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
   2220      return false;
   2221    }
   2222  }
   2223 
   2224  // Provisional lazy JitEntry stub: This is a shared stub that can be installed
   2225  // in the jit-entry jump table.  It uses the JIT ABI and when invoked will
   2226  // retrieve (via TlsContext()) and invoke the context-appropriate
   2227  // invoke-from-interpreter jit stub, thus serving as the initial, unoptimized
   2228  // jit-entry stub for any exported wasm function that has a jit-entry.
   2229 
   2230 #ifdef DEBUG
   2231  // We need to allow this machine code to bake in a C++ code pointer, so we
   2232  // disable the wasm restrictions while generating this stub.
   2233  JitContext jitContext;
   2234  bool oldFlag = jitContext.setIsCompilingWasm(false);
   2235 #endif
   2236 
   2237  Offsets provisionalLazyJitEntryOffsets;
   2238  if (!GenerateProvisionalLazyJitEntryStub(masm,
   2239                                           &provisionalLazyJitEntryOffsets)) {
   2240    return false;
   2241  }
   2242  thunks->provisionalLazyJitEntryOffset = provisionalLazyJitEntryOffsets.begin;
   2243 
   2244 #ifdef DEBUG
   2245  jitContext.setIsCompilingWasm(oldFlag);
   2246 #endif
   2247 
   2248  masm.finish();
   2249  if (masm.oom()) {
   2250    return false;
   2251  }
   2252 
   2253  size_t allocSize = AlignBytes(masm.bytesNeeded(), ExecutableCodePageSize);
   2254 
   2255  thunks->codeSize = allocSize;
   2256  thunks->codeBase = (uint8_t*)AllocateExecutableMemory(
   2257      allocSize, ProtectionSetting::Writable, MemCheckKind::MakeUndefined);
   2258  if (!thunks->codeBase) {
   2259    return false;
   2260  }
   2261 
   2262  masm.executableCopy(thunks->codeBase);
   2263  memset(thunks->codeBase + masm.bytesNeeded(), 0,
   2264         allocSize - masm.bytesNeeded());
   2265 
   2266  masm.processCodeLabels(thunks->codeBase);
   2267  PatchDebugSymbolicAccesses(thunks->codeBase, masm);
   2268 
   2269  MOZ_ASSERT(masm.callSites().empty());
   2270  MOZ_ASSERT(masm.callSiteTargets().empty());
   2271  MOZ_ASSERT(masm.trapSites().empty());
   2272  MOZ_ASSERT(masm.tryNotes().empty());
   2273  MOZ_ASSERT(masm.codeRangeUnwindInfos().empty());
   2274 
   2275  if (!ExecutableAllocator::makeExecutableAndFlushICache(thunks->codeBase,
   2276                                                         thunks->codeSize)) {
   2277    return false;
   2278  }
   2279 
   2280  builtinThunks = thunks.release();
   2281  return true;
   2282 }
   2283 
   2284 void wasm::ReleaseBuiltinThunks() {
   2285  if (builtinThunks) {
   2286    const BuiltinThunks* ptr = builtinThunks;
   2287    js_delete(const_cast<BuiltinThunks*>(ptr));
   2288    builtinThunks = nullptr;
   2289  }
   2290 }
   2291 
   2292 void* wasm::SymbolicAddressTarget(SymbolicAddress sym) {
   2293  MOZ_ASSERT(builtinThunks);
   2294 
   2295  ABIFunctionType abiType;
   2296  void* funcPtr = AddressOf(sym, &abiType);
   2297 
   2298  if (!NeedsBuiltinThunk(sym)) {
   2299    return funcPtr;
   2300  }
   2301 
   2302  const BuiltinThunks& thunks = *builtinThunks;
   2303  uint32_t codeRangeIndex = thunks.symbolicAddressToCodeRange[sym];
   2304  return thunks.codeBase + thunks.codeRanges[codeRangeIndex].begin();
   2305 }
   2306 
   2307 void* wasm::ProvisionalLazyJitEntryStub() {
   2308  MOZ_ASSERT(builtinThunks);
   2309 
   2310  const BuiltinThunks& thunks = *builtinThunks;
   2311  return thunks.codeBase + thunks.provisionalLazyJitEntryOffset;
   2312 }
   2313 
   2314 static Maybe<ABIFunctionType> ToBuiltinABIFunctionType(
   2315    const FuncType& funcType) {
   2316  const ValTypeVector& args = funcType.args();
   2317  const ValTypeVector& results = funcType.results();
   2318 
   2319  if (results.length() != 1) {
   2320    return Nothing();
   2321  }
   2322 
   2323  if ((args.length() + 1) > (sizeof(uint32_t) * 8 / ABITypeArgShift)) {
   2324    return Nothing();
   2325  }
   2326 
   2327  uint32_t abiType = 0;
   2328  for (size_t i = 0; i < args.length(); i++) {
   2329    switch (args[i].kind()) {
   2330      case ValType::F32:
   2331        abiType <<= ABITypeArgShift;
   2332        abiType |= uint32_t(ABIType::Float32);
   2333        break;
   2334      case ValType::F64:
   2335        abiType <<= ABITypeArgShift;
   2336        abiType |= uint32_t(ABIType::Float64);
   2337        break;
   2338      default:
   2339        return Nothing();
   2340    }
   2341  }
   2342 
   2343  abiType <<= ABITypeArgShift;
   2344  switch (results[0].kind()) {
   2345    case ValType::F32:
   2346      abiType |= uint32_t(ABIType::Float32);
   2347      break;
   2348    case ValType::F64:
   2349      abiType |= uint32_t(ABIType::Float64);
   2350      break;
   2351    default:
   2352      return Nothing();
   2353  }
   2354 
   2355  return Some(ABIFunctionType(abiType));
   2356 }
   2357 
   2358 void* wasm::MaybeGetTypedNative(JSFunction* f, const FuncType& funcType) {
   2359  MOZ_ASSERT(builtinThunks);
   2360 
   2361  if (!f->isNativeFun() || !f->hasJitInfo() ||
   2362      f->jitInfo()->type() != JSJitInfo::InlinableNative) {
   2363    return nullptr;
   2364  }
   2365 
   2366  Maybe<ABIFunctionType> abiType = ToBuiltinABIFunctionType(funcType);
   2367  if (!abiType) {
   2368    return nullptr;
   2369  }
   2370 
   2371  const BuiltinThunks& thunks = *builtinThunks;
   2372 
   2373  // If this function must use the fdlibm implementation first try to lookup
   2374  // the fdlibm version. If that version doesn't exist we still fallback to
   2375  // the normal native.
   2376  if (math_use_fdlibm_for_sin_cos_tan() ||
   2377      f->realm()->creationOptions().alwaysUseFdlibm()) {
   2378    TypedNative typedNative(f->jitInfo()->inlinableNative, *abiType,
   2379                            TypedNative::FdlibmImpl::Yes);
   2380    auto p =
   2381        thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(typedNative);
   2382    if (p) {
   2383      return thunks.codeBase + thunks.codeRanges[p->value()].begin();
   2384    }
   2385  }
   2386 
   2387  TypedNative typedNative(f->jitInfo()->inlinableNative, *abiType,
   2388                          TypedNative::FdlibmImpl::No);
   2389  auto p = thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(typedNative);
   2390  if (!p) {
   2391    return nullptr;
   2392  }
   2393 
   2394  return thunks.codeBase + thunks.codeRanges[p->value()].begin();
   2395 }
   2396 
   2397 bool wasm::LookupBuiltinThunk(void* pc, const CodeRange** codeRange,
   2398                              const uint8_t** codeBase) {
   2399  if (!builtinThunks) {
   2400    return false;
   2401  }
   2402 
   2403  const BuiltinThunks& thunks = *builtinThunks;
   2404  if (pc < thunks.codeBase || pc >= thunks.codeBase + thunks.codeSize) {
   2405    return false;
   2406  }
   2407 
   2408  *codeBase = thunks.codeBase;
   2409 
   2410  CodeRange::OffsetInCode target((uint8_t*)pc - thunks.codeBase);
   2411  *codeRange = LookupInSorted(thunks.codeRanges, target);
   2412 
   2413  return !!*codeRange;
   2414 }