tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WasmBCStkMgmt-inl.h (32458B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 *
      4 * Copyright 2016 Mozilla Foundation
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *     http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 // This is an INTERNAL header for Wasm baseline compiler: inline methods in the
     20 // compiler for Stk values and value stack management.
     21 
     22 #ifndef wasm_wasm_baseline_stk_mgmt_inl_h
     23 #define wasm_wasm_baseline_stk_mgmt_inl_h
     24 
     25 namespace js {
     26 namespace wasm {
     27 
     28 #ifdef DEBUG
     29 size_t BaseCompiler::countMemRefsOnStk() {
     30  size_t nRefs = 0;
     31  for (Stk& v : stk_) {
     32    if (v.kind() == Stk::MemRef) {
     33      nRefs++;
     34    }
     35  }
     36  return nRefs;
     37 }
     38 
     39 bool BaseCompiler::hasLiveRegsOnStk() {
     40  for (Stk& v : stk_) {
     41    if (v.isReg()) {
     42      return true;
     43    }
     44  }
     45  return false;
     46 }
     47 #endif
     48 
// Push |item| onto the value stack via the matching single-arg Stk
// constructor.
template <typename T>
void BaseCompiler::push(T item) {
  // None of the single-arg Stk constructors create a Stk::MemRef, so
  // there's no need to increment stackMapGenerator_.memRefsOnStk here.
  stk_.infallibleEmplaceBack(Stk(item));
}

// Push a constant reference value.  Goes through Stk::StkRef rather than a
// Stk constructor overload.
void BaseCompiler::pushConstRef(intptr_t v) {
  stk_.infallibleEmplaceBack(Stk::StkRef(v));
}
     59 
// Loaders for scalar stack entries.  Each quartet (Const/Mem/Local/Register)
// materializes one representation of a Stk entry into a destination register;
// they are the building blocks of the type-dispatching loadI32/loadI64/
// loadRef/loadF64/loadF32 functions below.

// i32 from an immediate constant.
void BaseCompiler::loadConstI32(const Stk& src, RegI32 dest) {
  moveImm32(src.i32val(), dest);
}

// i32 from a spilled stack slot at offset src.offs().
void BaseCompiler::loadMemI32(const Stk& src, RegI32 dest) {
  fr.loadStackI32(src.offs(), dest);
}

// i32 from a local variable's frame slot.
void BaseCompiler::loadLocalI32(const Stk& src, RegI32 dest) {
  fr.loadLocalI32(localFromSlot(src.slot(), MIRType::Int32), dest);
}

// i32 register-to-register move.
void BaseCompiler::loadRegisterI32(const Stk& src, RegI32 dest) {
  moveI32(src.i32reg(), dest);
}

// i64 from an immediate constant.
void BaseCompiler::loadConstI64(const Stk& src, RegI64 dest) {
  moveImm64(src.i64val(), dest);
}

// i64 from a spilled stack slot.
void BaseCompiler::loadMemI64(const Stk& src, RegI64 dest) {
  fr.loadStackI64(src.offs(), dest);
}

// i64 from a local variable's frame slot.
void BaseCompiler::loadLocalI64(const Stk& src, RegI64 dest) {
  fr.loadLocalI64(localFromSlot(src.slot(), MIRType::Int64), dest);
}

// i64 register-to-register move.
void BaseCompiler::loadRegisterI64(const Stk& src, RegI64 dest) {
  moveI64(src.i64reg(), dest);
}

// Reference from an immediate constant.
void BaseCompiler::loadConstRef(const Stk& src, RegRef dest) {
  moveImmRef(src.refval(), dest);
}

// Reference from a spilled stack slot.
void BaseCompiler::loadMemRef(const Stk& src, RegRef dest) {
  fr.loadStackRef(src.offs(), dest);
}

// Reference from a local variable's frame slot.
void BaseCompiler::loadLocalRef(const Stk& src, RegRef dest) {
  fr.loadLocalRef(localFromSlot(src.slot(), MIRType::WasmAnyRef), dest);
}

// Reference register-to-register move.
void BaseCompiler::loadRegisterRef(const Stk& src, RegRef dest) {
  moveRef(src.refReg(), dest);
}

// f64 constant: read the payload out of the Stk entry, then emit a
// constant-pool load.
void BaseCompiler::loadConstF64(const Stk& src, RegF64 dest) {
  double d;
  src.f64val(&d);
  masm.loadConstantDouble(d, dest);
}

// f64 from a spilled stack slot.
void BaseCompiler::loadMemF64(const Stk& src, RegF64 dest) {
  fr.loadStackF64(src.offs(), dest);
}

// f64 from a local variable's frame slot.
void BaseCompiler::loadLocalF64(const Stk& src, RegF64 dest) {
  fr.loadLocalF64(localFromSlot(src.slot(), MIRType::Double), dest);
}

// f64 register-to-register move.
void BaseCompiler::loadRegisterF64(const Stk& src, RegF64 dest) {
  moveF64(src.f64reg(), dest);
}

// f32 constant: read the payload, then emit a constant-pool load.
void BaseCompiler::loadConstF32(const Stk& src, RegF32 dest) {
  float f;
  src.f32val(&f);
  masm.loadConstantFloat32(f, dest);
}

// f32 from a spilled stack slot.
void BaseCompiler::loadMemF32(const Stk& src, RegF32 dest) {
  fr.loadStackF32(src.offs(), dest);
}

// f32 from a local variable's frame slot.
void BaseCompiler::loadLocalF32(const Stk& src, RegF32 dest) {
  fr.loadLocalF32(localFromSlot(src.slot(), MIRType::Float32), dest);
}

// f32 register-to-register move.
void BaseCompiler::loadRegisterF32(const Stk& src, RegF32 dest) {
  moveF32(src.f32reg(), dest);
}
    143 
#ifdef ENABLE_WASM_SIMD
// v128 constant: copy the 16 payload bytes out of the Stk entry and emit a
// SIMD constant load.
void BaseCompiler::loadConstV128(const Stk& src, RegV128 dest) {
  V128 f;
  src.v128val(&f);
  masm.loadConstantSimd128(SimdConstant::CreateX16((int8_t*)f.bytes), dest);
}

// v128 from a spilled stack slot.
void BaseCompiler::loadMemV128(const Stk& src, RegV128 dest) {
  fr.loadStackV128(src.offs(), dest);
}

// v128 from a local variable's frame slot.
void BaseCompiler::loadLocalV128(const Stk& src, RegV128 dest) {
  fr.loadLocalV128(localFromSlot(src.slot(), MIRType::Simd128), dest);
}

// v128 register-to-register move.
void BaseCompiler::loadRegisterV128(const Stk& src, RegV128 dest) {
  moveV128(src.v128reg(), dest);
}
#endif
    163 
    164 void BaseCompiler::loadI32(const Stk& src, RegI32 dest) {
    165  switch (src.kind()) {
    166    case Stk::ConstI32:
    167      loadConstI32(src, dest);
    168      break;
    169    case Stk::MemI32:
    170      loadMemI32(src, dest);
    171      break;
    172    case Stk::LocalI32:
    173      loadLocalI32(src, dest);
    174      break;
    175    case Stk::RegisterI32:
    176      loadRegisterI32(src, dest);
    177      break;
    178    default:
    179      MOZ_CRASH("Compiler bug: Expected I32 on stack");
    180  }
    181 }
    182 
    183 void BaseCompiler::loadI64(const Stk& src, RegI64 dest) {
    184  switch (src.kind()) {
    185    case Stk::ConstI64:
    186      loadConstI64(src, dest);
    187      break;
    188    case Stk::MemI64:
    189      loadMemI64(src, dest);
    190      break;
    191    case Stk::LocalI64:
    192      loadLocalI64(src, dest);
    193      break;
    194    case Stk::RegisterI64:
    195      loadRegisterI64(src, dest);
    196      break;
    197    default:
    198      MOZ_CRASH("Compiler bug: Expected I64 on stack");
    199  }
    200 }
    201 
#if !defined(JS_PUNBOX64)
// 32-bit targets only: load just the low 32 bits of an i64 stack entry.
void BaseCompiler::loadI64Low(const Stk& src, RegI32 dest) {
  switch (src.kind()) {
    case Stk::ConstI64:
      // Truncation keeps the low word of the constant.
      moveImm32(int32_t(src.i64val()), dest);
      break;
    case Stk::MemI64:
      fr.loadStackI64Low(src.offs(), dest);
      break;
    case Stk::LocalI64:
      fr.loadLocalI64Low(localFromSlot(src.slot(), MIRType::Int64), dest);
      break;
    case Stk::RegisterI64:
      moveI32(RegI32(src.i64reg().low), dest);
      break;
    default:
      MOZ_CRASH("Compiler bug: Expected I64 on stack");
  }
}

// 32-bit targets only: load just the high 32 bits of an i64 stack entry.
void BaseCompiler::loadI64High(const Stk& src, RegI32 dest) {
  switch (src.kind()) {
    case Stk::ConstI64:
      // Arithmetic shift extracts the high word of the constant.
      moveImm32(int32_t(src.i64val() >> 32), dest);
      break;
    case Stk::MemI64:
      fr.loadStackI64High(src.offs(), dest);
      break;
    case Stk::LocalI64:
      fr.loadLocalI64High(localFromSlot(src.slot(), MIRType::Int64), dest);
      break;
    case Stk::RegisterI64:
      moveI32(RegI32(src.i64reg().high), dest);
      break;
    default:
      MOZ_CRASH("Compiler bug: Expected I64 on stack");
  }
}
#endif
    241 
    242 void BaseCompiler::loadF64(const Stk& src, RegF64 dest) {
    243  switch (src.kind()) {
    244    case Stk::ConstF64:
    245      loadConstF64(src, dest);
    246      break;
    247    case Stk::MemF64:
    248      loadMemF64(src, dest);
    249      break;
    250    case Stk::LocalF64:
    251      loadLocalF64(src, dest);
    252      break;
    253    case Stk::RegisterF64:
    254      loadRegisterF64(src, dest);
    255      break;
    256    default:
    257      MOZ_CRASH("Compiler bug: expected F64 on stack");
    258  }
    259 }
    260 
    261 void BaseCompiler::loadF32(const Stk& src, RegF32 dest) {
    262  switch (src.kind()) {
    263    case Stk::ConstF32:
    264      loadConstF32(src, dest);
    265      break;
    266    case Stk::MemF32:
    267      loadMemF32(src, dest);
    268      break;
    269    case Stk::LocalF32:
    270      loadLocalF32(src, dest);
    271      break;
    272    case Stk::RegisterF32:
    273      loadRegisterF32(src, dest);
    274      break;
    275    default:
    276      MOZ_CRASH("Compiler bug: expected F32 on stack");
    277  }
    278 }
    279 
#ifdef ENABLE_WASM_SIMD
// Materialize a v128 stack entry into |dest|, whatever its representation.
void BaseCompiler::loadV128(const Stk& src, RegV128 dest) {
  switch (src.kind()) {
    case Stk::ConstV128:
      loadConstV128(src, dest);
      break;
    case Stk::MemV128:
      loadMemV128(src, dest);
      break;
    case Stk::LocalV128:
      loadLocalV128(src, dest);
      break;
    case Stk::RegisterV128:
      loadRegisterV128(src, dest);
      break;
    default:
      MOZ_CRASH("Compiler bug: expected V128 on stack");
  }
}
#endif
    300 
    301 void BaseCompiler::loadRef(const Stk& src, RegRef dest) {
    302  switch (src.kind()) {
    303    case Stk::ConstRef:
    304      loadConstRef(src, dest);
    305      break;
    306    case Stk::MemRef:
    307      loadMemRef(src, dest);
    308      break;
    309    case Stk::LocalRef:
    310      loadLocalRef(src, dest);
    311      break;
    312    case Stk::RegisterRef:
    313      loadRegisterRef(src, dest);
    314      break;
    315    default:
    316      MOZ_CRASH("Compiler bug: expected ref on stack");
    317  }
    318 }
    319 
// Load the reference |depth| entries down from the stack top into |dest|
// without popping anything.  depth == 0 is the stack top.
void BaseCompiler::peekRefAt(uint32_t depth, RegRef dest) {
  MOZ_ASSERT(depth < stk_.length());
  Stk& src = peek(stk_.length() - depth - 1);
  loadRef(src, dest);
}
    325 
    326 // Flush all local and register value stack elements to memory.
    327 //
    328 // TODO / OPTIMIZE: As this is fairly expensive and causes worse
    329 // code to be emitted subsequently, it is useful to avoid calling
    330 // it.  (Bug 1316802)
    331 //
    332 // Some optimization has been done already.  Remaining
    333 // opportunities:
    334 //
    335 //  - It would be interesting to see if we can specialize it
    336 //    before calls with particularly simple signatures, or where
    337 //    we can do parallel assignment of register arguments, or
    338 //    similar.  See notes in emitCall().
    339 //
    340 //  - Operations that need specific registers: multiply, quotient,
    341 //    remainder, will tend to sync because the registers we need
    342 //    will tend to be allocated.  We may be able to avoid that by
    343 //    prioritizing registers differently (takeLast instead of
    344 //    takeFirst) but we may also be able to allocate an unused
    345 //    register on demand to free up one we need, thus avoiding the
    346 //    sync.  That type of fix would go into needI32().
    347 
// Flush every local and register entry of the value stack to memory,
// rewriting each entry as a Mem* entry that records its spill offset.
// Constants and entries that are already in memory are left alone.
void BaseCompiler::sync() {
  size_t start = 0;
  size_t lim = stk_.length();

  // Find the topmost already-spilled entry; entries below it must also be
  // spilled, so the pass below can start just above it.
  for (size_t i = lim; i > 0; i--) {
    // Memory opcodes are first in the enum, single check against MemLast is
    // fine.
    if (stk_[i - 1].kind() <= Stk::MemLast) {
      start = i;
      break;
    }
  }

  // Spill bottom-up so the CPU-stack layout matches stack order.
  for (size_t i = start; i < lim; i++) {
    Stk& v = stk_[i];
    switch (v.kind()) {
      case Stk::LocalI32: {
        ScratchI32 scratch(*this);
        loadLocalI32(v, scratch);
        uint32_t offs = fr.pushGPR(scratch);
        v.setOffs(Stk::MemI32, offs);
        break;
      }
      case Stk::RegisterI32: {
        // The register is released once its value is safely in memory.
        uint32_t offs = fr.pushGPR(v.i32reg());
        freeI32(v.i32reg());
        v.setOffs(Stk::MemI32, offs);
        break;
      }
      case Stk::LocalI64: {
        ScratchI32 scratch(*this);
#ifdef JS_PUNBOX64
        loadI64(v, fromI32(scratch));
        uint32_t offs = fr.pushGPR(scratch);
#else
        // 32-bit targets: push high word first; the recorded offset is that
        // of the second (low-word) push.
        fr.loadLocalI64High(localFromSlot(v.slot(), MIRType::Int64), scratch);
        fr.pushGPR(scratch);
        fr.loadLocalI64Low(localFromSlot(v.slot(), MIRType::Int64), scratch);
        uint32_t offs = fr.pushGPR(scratch);
#endif
        v.setOffs(Stk::MemI64, offs);
        break;
      }
      case Stk::RegisterI64: {
#ifdef JS_PUNBOX64
        uint32_t offs = fr.pushGPR(v.i64reg().reg);
        freeI64(v.i64reg());
#else
        // High word first, as in the LocalI64 case above.
        fr.pushGPR(v.i64reg().high);
        uint32_t offs = fr.pushGPR(v.i64reg().low);
        freeI64(v.i64reg());
#endif
        v.setOffs(Stk::MemI64, offs);
        break;
      }
      case Stk::LocalF64: {
        ScratchF64 scratch(*this);
        loadF64(v, scratch);
        uint32_t offs = fr.pushDouble(scratch);
        v.setOffs(Stk::MemF64, offs);
        break;
      }
      case Stk::RegisterF64: {
        uint32_t offs = fr.pushDouble(v.f64reg());
        freeF64(v.f64reg());
        v.setOffs(Stk::MemF64, offs);
        break;
      }
      case Stk::LocalF32: {
        ScratchF32 scratch(*this);
        loadF32(v, scratch);
        uint32_t offs = fr.pushFloat32(scratch);
        v.setOffs(Stk::MemF32, offs);
        break;
      }
      case Stk::RegisterF32: {
        uint32_t offs = fr.pushFloat32(v.f32reg());
        freeF32(v.f32reg());
        v.setOffs(Stk::MemF32, offs);
        break;
      }
#ifdef ENABLE_WASM_SIMD
      case Stk::LocalV128: {
        ScratchV128 scratch(*this);
        loadV128(v, scratch);
        uint32_t offs = fr.pushV128(scratch);
        v.setOffs(Stk::MemV128, offs);
        break;
      }
      case Stk::RegisterV128: {
        uint32_t offs = fr.pushV128(v.v128reg());
        freeV128(v.v128reg());
        v.setOffs(Stk::MemV128, offs);
        break;
      }
#endif
      case Stk::LocalRef: {
        ScratchRef scratch(*this);
        loadLocalRef(v, scratch);
        uint32_t offs = fr.pushGPR(scratch);
        v.setOffs(Stk::MemRef, offs);
        // References spilled to memory are tracked for stack maps.
        stackMapGenerator_.memRefsOnStk++;
        break;
      }
      case Stk::RegisterRef: {
        uint32_t offs = fr.pushGPR(v.refReg());
        freeRef(v.refReg());
        v.setOffs(Stk::MemRef, offs);
        stackMapGenerator_.memRefsOnStk++;
        break;
      }
      default: {
        // Constants and Mem* entries: nothing to spill.
        break;
      }
    }
  }
}
    465 
    466 // This is an optimization used to avoid calling sync() for
    467 // setLocal(): if the local does not exist unresolved on the stack
    468 // then we can skip the sync.
    469 
    470 bool BaseCompiler::hasLocal(uint32_t slot) {
    471  for (size_t i = stk_.length(); i > 0; i--) {
    472    // Memory opcodes are first in the enum, single check against MemLast is
    473    // fine.
    474    Stk::Kind kind = stk_[i - 1].kind();
    475    if (kind <= Stk::MemLast) {
    476      return false;
    477    }
    478 
    479    // Local opcodes follow memory opcodes in the enum, single check against
    480    // LocalLast is sufficient.
    481    if (kind <= Stk::LocalLast && stk_[i - 1].slot() == slot) {
    482      return true;
    483    }
    484  }
    485  return false;
    486 }
    487 
    488 void BaseCompiler::syncLocal(uint32_t slot) {
    489  if (hasLocal(slot)) {
    490    sync();  // TODO / OPTIMIZE: Improve this?  (Bug 1316817)
    491  }
    492 }
    493 
    494 // Push the register r onto the stack.
    495 
    496 void BaseCompiler::pushAny(AnyReg r) {
    497  switch (r.tag) {
    498    case AnyReg::I32: {
    499      pushI32(r.i32());
    500      break;
    501    }
    502    case AnyReg::I64: {
    503      pushI64(r.i64());
    504      break;
    505    }
    506    case AnyReg::F32: {
    507      pushF32(r.f32());
    508      break;
    509    }
    510    case AnyReg::F64: {
    511      pushF64(r.f64());
    512      break;
    513    }
    514 #ifdef ENABLE_WASM_SIMD
    515    case AnyReg::V128: {
    516      pushV128(r.v128());
    517      break;
    518    }
    519 #endif
    520    case AnyReg::REF: {
    521      pushRef(r.ref());
    522      break;
    523    }
    524  }
    525 }
    526 
// Push a live register onto the value stack.  Each variant asserts the
// register is currently allocated (not on the free list); the stack entry
// takes over ownership of the register.

void BaseCompiler::pushI32(RegI32 r) {
  MOZ_ASSERT(!isAvailableI32(r));
  push(Stk(r));
}

void BaseCompiler::pushI64(RegI64 r) {
  MOZ_ASSERT(!isAvailableI64(r));
  push(Stk(r));
}

void BaseCompiler::pushRef(RegRef r) {
  MOZ_ASSERT(!isAvailableRef(r));
  push(Stk(r));
}

// Pointer-sized values are represented as i64 on 64-bit targets and i32 on
// 32-bit targets.
void BaseCompiler::pushPtr(RegPtr r) {
  MOZ_ASSERT(!isAvailablePtr(r));
#ifdef JS_64BIT
  pushI64(RegI64(Register64(r)));
#else
  pushI32(RegI32(r));
#endif
}

void BaseCompiler::pushF64(RegF64 r) {
  MOZ_ASSERT(!isAvailableF64(r));
  push(Stk(r));
}

void BaseCompiler::pushF32(RegF32 r) {
  MOZ_ASSERT(!isAvailableF32(r));
  push(Stk(r));
}

#ifdef ENABLE_WASM_SIMD
void BaseCompiler::pushV128(RegV128 r) {
  MOZ_ASSERT(!isAvailableV128(r));
  push(Stk(r));
}
#endif
    567 
    568 // Push the value onto the stack.  PushI32 can also take uint32_t, and PushI64
    569 // can take uint64_t; the semantics are the same.  Appropriate sign extension
    570 // for a 32-bit value on a 64-bit architecture happens when the value is
    571 // popped, see the definition of moveImm32 below.
    572 
// Push a constant onto the value stack.  No code is emitted here; the
// constant is materialized when consumed.

void BaseCompiler::pushI32(int32_t v) { push(Stk(v)); }

void BaseCompiler::pushI64(int64_t v) { push(Stk(v)); }

// Refs go through pushConstRef (Stk::StkRef) rather than a Stk constructor.
void BaseCompiler::pushRef(intptr_t v) { pushConstRef(v); }

// Pointer-sized constants map to i64/i32 per target word size.
void BaseCompiler::pushPtr(intptr_t v) {
#ifdef JS_64BIT
  pushI64(v);
#else
  pushI32(v);
#endif
}

void BaseCompiler::pushF64(double v) { push(Stk(v)); }

void BaseCompiler::pushF32(float v) { push(Stk(v)); }

#ifdef ENABLE_WASM_SIMD
void BaseCompiler::pushV128(V128 v) { push(Stk(v)); }
#endif
    594 
    595 // Push the local slot onto the stack.  The slot will not be read
    596 // here; it will be read when it is consumed, or when a side
    597 // effect to the slot forces its value to be saved.
    598 
// Push a lazy reference to local |slot|.  The local's value is read only
// when the entry is consumed, or when a write to the slot forces a sync.

void BaseCompiler::pushLocalI32(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalI32, slot));
}

void BaseCompiler::pushLocalI64(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalI64, slot));
}

void BaseCompiler::pushLocalRef(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalRef, slot));
}

void BaseCompiler::pushLocalF64(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalF64, slot));
}

void BaseCompiler::pushLocalF32(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalF32, slot));
}

#ifdef ENABLE_WASM_SIMD
void BaseCompiler::pushLocalV128(uint32_t slot) {
  stk_.infallibleEmplaceBack(Stk(Stk::LocalV128, slot));
}
#endif
    624 
// Zero-extend the i32 in |rs| and push the result as an i64.
// NOTE(review): widenI32 appears to obtain an i64 register from/around rs
// (rs is not freed here) — confirm its ownership semantics in the register
// allocator before relying on rs afterward.
void BaseCompiler::pushU32AsI64(RegI32 rs) {
  RegI64 rd = widenI32(rs);
  masm.move32To64ZeroExtend(rs, rd);
  pushI64(rd);
}
    630 
// Pop the stack top into the member of |specific| that matches the entry's
// type, and return it wrapped as an AnyReg.
AnyReg BaseCompiler::popAny(AnyReg specific) {
  switch (stk_.back().kind()) {
    case Stk::MemI32:
    case Stk::LocalI32:
    case Stk::RegisterI32:
    case Stk::ConstI32:
      return AnyReg(popI32(specific.i32()));

    case Stk::MemI64:
    case Stk::LocalI64:
    case Stk::RegisterI64:
    case Stk::ConstI64:
      return AnyReg(popI64(specific.i64()));

    case Stk::MemF32:
    case Stk::LocalF32:
    case Stk::RegisterF32:
    case Stk::ConstF32:
      return AnyReg(popF32(specific.f32()));

    case Stk::MemF64:
    case Stk::LocalF64:
    case Stk::RegisterF64:
    case Stk::ConstF64:
      return AnyReg(popF64(specific.f64()));

#ifdef ENABLE_WASM_SIMD
    case Stk::MemV128:
    case Stk::LocalV128:
    case Stk::RegisterV128:
    case Stk::ConstV128:
      return AnyReg(popV128(specific.v128()));
#endif

    case Stk::MemRef:
    case Stk::LocalRef:
    case Stk::RegisterRef:
    case Stk::ConstRef:
      return AnyReg(popRef(specific.ref()));

    case Stk::Unknown:
      MOZ_CRASH();

    default:
      MOZ_CRASH();
  }
}
    678 
// Pop the stack top into a freshly allocated register of the matching type
// and return it wrapped as an AnyReg.
AnyReg BaseCompiler::popAny() {
  switch (stk_.back().kind()) {
    case Stk::MemI32:
    case Stk::LocalI32:
    case Stk::RegisterI32:
    case Stk::ConstI32:
      return AnyReg(popI32());

    case Stk::MemI64:
    case Stk::LocalI64:
    case Stk::RegisterI64:
    case Stk::ConstI64:
      return AnyReg(popI64());

    case Stk::MemF32:
    case Stk::LocalF32:
    case Stk::RegisterF32:
    case Stk::ConstF32:
      return AnyReg(popF32());

    case Stk::MemF64:
    case Stk::LocalF64:
    case Stk::RegisterF64:
    case Stk::ConstF64:
      return AnyReg(popF64());

#ifdef ENABLE_WASM_SIMD
    case Stk::MemV128:
    case Stk::LocalV128:
    case Stk::RegisterV128:
    case Stk::ConstV128:
      return AnyReg(popV128());
#endif

    case Stk::MemRef:
    case Stk::LocalRef:
    case Stk::RegisterRef:
    case Stk::ConstRef:
      return AnyReg(popRef());

    case Stk::Unknown:
      MOZ_CRASH();

    default:
      MOZ_CRASH();
  }
}
    726 
    727 // Call only from other popI32() variants.
    728 // v must be the stack top.  May pop the CPU stack.
    729 
    730 void BaseCompiler::popI32(const Stk& v, RegI32 dest) {
    731  MOZ_ASSERT(&v == &stk_.back());
    732  switch (v.kind()) {
    733    case Stk::ConstI32:
    734      loadConstI32(v, dest);
    735      break;
    736    case Stk::LocalI32:
    737      loadLocalI32(v, dest);
    738      break;
    739    case Stk::MemI32:
    740      fr.popGPR(dest);
    741      break;
    742    case Stk::RegisterI32:
    743      loadRegisterI32(v, dest);
    744      break;
    745    default:
    746      MOZ_CRASH("Compiler bug: expected int on stack");
    747  }
    748 }
    749 
    750 RegI32 BaseCompiler::popI32() {
    751  Stk& v = stk_.back();
    752  RegI32 r;
    753  if (v.kind() == Stk::RegisterI32) {
    754    r = v.i32reg();
    755  } else {
    756    popI32(v, (r = needI32()));
    757  }
    758  stk_.popBack();
    759  return r;
    760 }
    761 
    762 RegI32 BaseCompiler::popI32(RegI32 specific) {
    763  Stk& v = stk_.back();
    764 
    765  if (!(v.kind() == Stk::RegisterI32 && v.i32reg() == specific)) {
    766    needI32(specific);
    767    popI32(v, specific);
    768    if (v.kind() == Stk::RegisterI32) {
    769      freeI32(v.i32reg());
    770    }
    771  }
    772 
    773  stk_.popBack();
    774  return specific;
    775 }
    776 
    777 #ifdef ENABLE_WASM_SIMD
    778 // Call only from other popV128() variants.
    779 // v must be the stack top.  May pop the CPU stack.
    780 
    781 void BaseCompiler::popV128(const Stk& v, RegV128 dest) {
    782  MOZ_ASSERT(&v == &stk_.back());
    783  switch (v.kind()) {
    784    case Stk::ConstV128:
    785      loadConstV128(v, dest);
    786      break;
    787    case Stk::LocalV128:
    788      loadLocalV128(v, dest);
    789      break;
    790    case Stk::MemV128:
    791      fr.popV128(dest);
    792      break;
    793    case Stk::RegisterV128:
    794      loadRegisterV128(v, dest);
    795      break;
    796    default:
    797      MOZ_CRASH("Compiler bug: expected int on stack");
    798  }
    799 }
    800 
    801 RegV128 BaseCompiler::popV128() {
    802  Stk& v = stk_.back();
    803  RegV128 r;
    804  if (v.kind() == Stk::RegisterV128) {
    805    r = v.v128reg();
    806  } else {
    807    popV128(v, (r = needV128()));
    808  }
    809  stk_.popBack();
    810  return r;
    811 }
    812 
    813 RegV128 BaseCompiler::popV128(RegV128 specific) {
    814  Stk& v = stk_.back();
    815 
    816  if (!(v.kind() == Stk::RegisterV128 && v.v128reg() == specific)) {
    817    needV128(specific);
    818    popV128(v, specific);
    819    if (v.kind() == Stk::RegisterV128) {
    820      freeV128(v.v128reg());
    821    }
    822  }
    823 
    824  stk_.popBack();
    825  return specific;
    826 }
    827 #endif
    828 
    829 // Call only from other popI64() variants.
    830 // v must be the stack top.  May pop the CPU stack.
    831 
// Load the i64 stack-top entry |v| into |dest|, popping the CPU stack if
// the value had been spilled.  Does not pop the value stack itself.
void BaseCompiler::popI64(const Stk& v, RegI64 dest) {
  MOZ_ASSERT(&v == &stk_.back());
  switch (v.kind()) {
    case Stk::ConstI64:
      loadConstI64(v, dest);
      break;
    case Stk::LocalI64:
      loadLocalI64(v, dest);
      break;
    case Stk::MemI64:
#ifdef JS_PUNBOX64
      fr.popGPR(dest.reg);
#else
      // Low word was pushed last (see sync()), so it pops first.
      fr.popGPR(dest.low);
      fr.popGPR(dest.high);
#endif
      break;
    case Stk::RegisterI64:
      loadRegisterI64(v, dest);
      break;
    default:
      MOZ_CRASH("Compiler bug: expected long on stack");
  }
}
    856 
    857 RegI64 BaseCompiler::popI64() {
    858  Stk& v = stk_.back();
    859  RegI64 r;
    860  if (v.kind() == Stk::RegisterI64) {
    861    r = v.i64reg();
    862  } else {
    863    popI64(v, (r = needI64()));
    864  }
    865  stk_.popBack();
    866  return r;
    867 }
    868 
    869 // Note, the stack top can be in one half of "specific" on 32-bit
    870 // systems.  We can optimize, but for simplicity, if the register
    871 // does not match exactly, then just force the stack top to memory
    872 // and then read it back in.
    873 
    874 RegI64 BaseCompiler::popI64(RegI64 specific) {
    875  Stk& v = stk_.back();
    876 
    877  if (!(v.kind() == Stk::RegisterI64 && v.i64reg() == specific)) {
    878    needI64(specific);
    879    popI64(v, specific);
    880    if (v.kind() == Stk::RegisterI64) {
    881      freeI64(v.i64reg());
    882    }
    883  }
    884 
    885  stk_.popBack();
    886  return specific;
    887 }
    888 
    889 // Call only from other popRef() variants.
    890 // v must be the stack top.  May pop the CPU stack.
    891 
// Load the reference stack-top entry |v| into |dest|, popping the CPU stack
// if the value had been spilled.  Does not pop the value stack itself, and
// does not adjust memRefsOnStk (the callers do that).
void BaseCompiler::popRef(const Stk& v, RegRef dest) {
  MOZ_ASSERT(&v == &stk_.back());
  switch (v.kind()) {
    case Stk::ConstRef:
      loadConstRef(v, dest);
      break;
    case Stk::LocalRef:
      loadLocalRef(v, dest);
      break;
    case Stk::MemRef:
      fr.popGPR(dest);
      break;
    case Stk::RegisterRef:
      loadRegisterRef(v, dest);
      break;
    default:
      MOZ_CRASH("Compiler bug: expected ref on stack");
  }
}
    911 
// Pop the reference stack top into the register |specific|, acquiring it and
// releasing the entry's old register if necessary.  Keeps the stack-map ref
// count in sync when a spilled ref leaves the stack.
RegRef BaseCompiler::popRef(RegRef specific) {
  Stk& v = stk_.back();

  if (!(v.kind() == Stk::RegisterRef && v.refReg() == specific)) {
    needRef(specific);
    popRef(v, specific);
    if (v.kind() == Stk::RegisterRef) {
      freeRef(v.refReg());
    }
  }

  stk_.popBack();
  // NOTE(review): v is read after popBack(); this relies on popBack() not
  // destroying the element's storage — confirm the Vector's semantics.
  if (v.kind() == Stk::MemRef) {
    stackMapGenerator_.memRefsOnStk--;
  }
  return specific;
}
    929 
// Pop the reference stack top into a register: reuse the entry's own
// register when it has one, otherwise allocate one and load into it.  Keeps
// the stack-map ref count in sync when a spilled ref leaves the stack.
RegRef BaseCompiler::popRef() {
  Stk& v = stk_.back();
  RegRef r;
  if (v.kind() == Stk::RegisterRef) {
    r = v.refReg();
  } else {
    popRef(v, (r = needRef()));
  }
  stk_.popBack();
  // NOTE(review): v is read after popBack(); this relies on popBack() not
  // destroying the element's storage — confirm the Vector's semantics.
  if (v.kind() == Stk::MemRef) {
    stackMapGenerator_.memRefsOnStk--;
  }
  return r;
}
    944 
    945 // Call only from other popPtr() variants.
    946 // v must be the stack top.  May pop the CPU stack.
    947 
// Pointer-sized pops: delegate to the i64 pops on 64-bit targets and to the
// i32 pops on 32-bit targets.

void BaseCompiler::popPtr(const Stk& v, RegPtr dest) {
#ifdef JS_64BIT
  popI64(v, RegI64(Register64(dest)));
#else
  popI32(v, RegI32(dest));
#endif
}

RegPtr BaseCompiler::popPtr(RegPtr specific) {
#ifdef JS_64BIT
  return RegPtr(popI64(RegI64(Register64(specific))).reg);
#else
  return RegPtr(popI32(RegI32(specific)));
#endif
}

RegPtr BaseCompiler::popPtr() {
#ifdef JS_64BIT
  return RegPtr(popI64().reg);
#else
  return RegPtr(popI32());
#endif
}
    971 
    972 // Call only from other popF64() variants.
    973 // v must be the stack top.  May pop the CPU stack.
    974 
    975 void BaseCompiler::popF64(const Stk& v, RegF64 dest) {
    976  MOZ_ASSERT(&v == &stk_.back());
    977  switch (v.kind()) {
    978    case Stk::ConstF64:
    979      loadConstF64(v, dest);
    980      break;
    981    case Stk::LocalF64:
    982      loadLocalF64(v, dest);
    983      break;
    984    case Stk::MemF64:
    985      fr.popDouble(dest);
    986      break;
    987    case Stk::RegisterF64:
    988      loadRegisterF64(v, dest);
    989      break;
    990    default:
    991      MOZ_CRASH("Compiler bug: expected double on stack");
    992  }
    993 }
    994 
    995 RegF64 BaseCompiler::popF64() {
    996  Stk& v = stk_.back();
    997  RegF64 r;
    998  if (v.kind() == Stk::RegisterF64) {
    999    r = v.f64reg();
   1000  } else {
   1001    popF64(v, (r = needF64()));
   1002  }
   1003  stk_.popBack();
   1004  return r;
   1005 }
   1006 
   1007 RegF64 BaseCompiler::popF64(RegF64 specific) {
   1008  Stk& v = stk_.back();
   1009 
   1010  if (!(v.kind() == Stk::RegisterF64 && v.f64reg() == specific)) {
   1011    needF64(specific);
   1012    popF64(v, specific);
   1013    if (v.kind() == Stk::RegisterF64) {
   1014      freeF64(v.f64reg());
   1015    }
   1016  }
   1017 
   1018  stk_.popBack();
   1019  return specific;
   1020 }
   1021 
   1022 // Call only from other popF32() variants.
   1023 // v must be the stack top.  May pop the CPU stack.
   1024 
   1025 void BaseCompiler::popF32(const Stk& v, RegF32 dest) {
   1026  MOZ_ASSERT(&v == &stk_.back());
   1027  switch (v.kind()) {
   1028    case Stk::ConstF32:
   1029      loadConstF32(v, dest);
   1030      break;
   1031    case Stk::LocalF32:
   1032      loadLocalF32(v, dest);
   1033      break;
   1034    case Stk::MemF32:
   1035      fr.popFloat32(dest);
   1036      break;
   1037    case Stk::RegisterF32:
   1038      loadRegisterF32(v, dest);
   1039      break;
   1040    default:
   1041      MOZ_CRASH("Compiler bug: expected float on stack");
   1042  }
   1043 }
   1044 
   1045 RegF32 BaseCompiler::popF32() {
   1046  Stk& v = stk_.back();
   1047  RegF32 r;
   1048  if (v.kind() == Stk::RegisterF32) {
   1049    r = v.f32reg();
   1050  } else {
   1051    popF32(v, (r = needF32()));
   1052  }
   1053  stk_.popBack();
   1054  return r;
   1055 }
   1056 
   1057 RegF32 BaseCompiler::popF32(RegF32 specific) {
   1058  Stk& v = stk_.back();
   1059 
   1060  if (!(v.kind() == Stk::RegisterF32 && v.f32reg() == specific)) {
   1061    needF32(specific);
   1062    popF32(v, specific);
   1063    if (v.kind() == Stk::RegisterF32) {
   1064      freeF32(v.f32reg());
   1065    }
   1066  }
   1067 
   1068  stk_.popBack();
   1069  return specific;
   1070 }
   1071 
   1072 bool BaseCompiler::hasConst() const {
   1073  const Stk& v = stk_.back();
   1074  switch (v.kind()) {
   1075    case Stk::ConstI32:
   1076    case Stk::ConstI64:
   1077    case Stk::ConstF32:
   1078    case Stk::ConstF64:
   1079 #ifdef ENABLE_WASM_SIMD
   1080    case Stk::ConstV128:
   1081 #endif
   1082    case Stk::ConstRef:
   1083      return true;
   1084    default:
   1085      return false;
   1086  }
   1087 }
   1088 
   1089 bool BaseCompiler::popConst(int32_t* c) {
   1090  Stk& v = stk_.back();
   1091  if (v.kind() != Stk::ConstI32) {
   1092    return false;
   1093  }
   1094  *c = v.i32val();
   1095  stk_.popBack();
   1096  return true;
   1097 }
   1098 
   1099 bool BaseCompiler::popConst(int64_t* c) {
   1100  Stk& v = stk_.back();
   1101  if (v.kind() != Stk::ConstI64) {
   1102    return false;
   1103  }
   1104  *c = v.i64val();
   1105  stk_.popBack();
   1106  return true;
   1107 }
   1108 
   1109 bool BaseCompiler::peekConst(int32_t* c) {
   1110  Stk& v = stk_.back();
   1111  if (v.kind() != Stk::ConstI32) {
   1112    return false;
   1113  }
   1114  *c = v.i32val();
   1115  return true;
   1116 }
   1117 
   1118 bool BaseCompiler::peekConst(int64_t* c) {
   1119  Stk& v = stk_.back();
   1120  if (v.kind() != Stk::ConstI64) {
   1121    return false;
   1122  }
   1123  *c = v.i64val();
   1124  return true;
   1125 }
   1126 
   1127 bool BaseCompiler::peek2xConst(int32_t* c0, int32_t* c1) {
   1128  MOZ_ASSERT(stk_.length() >= 2);
   1129  const Stk& v0 = *(stk_.end() - 1);
   1130  const Stk& v1 = *(stk_.end() - 2);
   1131  if (v0.kind() != Stk::ConstI32 || v1.kind() != Stk::ConstI32) {
   1132    return false;
   1133  }
   1134  *c0 = v0.i32val();
   1135  *c1 = v1.i32val();
   1136  return true;
   1137 }
   1138 
   1139 bool BaseCompiler::popConstPositivePowerOfTwo(int32_t* c, uint_fast8_t* power,
   1140                                              int32_t cutoff) {
   1141  Stk& v = stk_.back();
   1142  if (v.kind() != Stk::ConstI32) {
   1143    return false;
   1144  }
   1145  *c = v.i32val();
   1146  if (*c <= cutoff || !mozilla::IsPowerOfTwo(static_cast<uint32_t>(*c))) {
   1147    return false;
   1148  }
   1149  *power = mozilla::FloorLog2(*c);
   1150  stk_.popBack();
   1151  return true;
   1152 }
   1153 
   1154 bool BaseCompiler::popConstPositivePowerOfTwo(int64_t* c, uint_fast8_t* power,
   1155                                              int64_t cutoff) {
   1156  Stk& v = stk_.back();
   1157  if (v.kind() != Stk::ConstI64) {
   1158    return false;
   1159  }
   1160  *c = v.i64val();
   1161  if (*c <= cutoff || !mozilla::IsPowerOfTwo(static_cast<uint64_t>(*c))) {
   1162    return false;
   1163  }
   1164  *power = mozilla::FloorLog2(*c);
   1165  stk_.popBack();
   1166  return true;
   1167 }
   1168 
// Pop two i32s: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xI32(RegI32* r0, RegI32* r1) {
  *r1 = popI32();
  *r0 = popI32();
}
   1173 
// Pop two i64s: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xI64(RegI64* r0, RegI64* r1) {
  *r1 = popI64();
  *r0 = popI64();
}
   1178 
// Pop two f32s: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xF32(RegF32* r0, RegF32* r1) {
  *r1 = popF32();
  *r0 = popF32();
}
   1183 
// Pop two f64s: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xF64(RegF64* r0, RegF64* r1) {
  *r1 = popF64();
  *r0 = popF64();
}
   1188 
#ifdef ENABLE_WASM_SIMD
// Pop two v128s: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xV128(RegV128* r0, RegV128* r1) {
  *r1 = popV128();
  *r0 = popV128();
}
#endif
   1195 
// Pop two refs: *r1 receives the stack top, *r0 the value beneath it.
void BaseCompiler::pop2xRef(RegRef* r0, RegRef* r1) {
  *r1 = popRef();
  *r0 = popRef();
}
   1200 
   1201 // Pop to a specific register
// Pop the i32 stack top into |specific|.  The register is freed first so
// that popI32(specific) can (re-)reserve it.
RegI32 BaseCompiler::popI32ToSpecific(RegI32 specific) {
  freeI32(specific);
  return popI32(specific);
}
   1206 
// Pop the i64 stack top into |specific|.  The register is freed first so
// that popI64(specific) can (re-)reserve it.
RegI64 BaseCompiler::popI64ToSpecific(RegI64 specific) {
  freeI64(specific);
  return popI64(specific);
}
   1211 
// Pop a memory address as a 64-bit value.  An i64 address pops directly; an
// i32 address is widened to 64 bits.
RegI64 BaseCompiler::popAddressToInt64(AddressType addressType) {
  if (addressType == AddressType::I64) {
    return popI64();
  }

  MOZ_ASSERT(addressType == AddressType::I32);
#ifdef JS_64BIT
  // NOTE(review): no explicit zero-extension here — presumably i32 values
  // already have zeroed high bits on 64-bit targets; confirm against the
  // platform's i32 register convention.
  return RegI64(Register64(popI32()));
#else
  // On 32-bit targets an i64 is a register pair; zero the high word
  // explicitly to get the zero-extended address.
  RegI32 lowPart = popI32();
  RegI32 highPart = needI32();
  masm.xor32(highPart, highPart);
  return RegI64(Register64(highPart, lowPart));
#endif
}
   1227 
// Pop a table address as an i32.  An i32 address is used as-is; an i64
// address is clamped into i32 range by wasmClampTable64Address.
RegI32 BaseCompiler::popTableAddressToClampedInt32(AddressType addressType) {
  if (addressType == AddressType::I32) {
    return popI32();
  }

  MOZ_ASSERT(addressType == AddressType::I64);
  RegI64 val = popI64();
  // NOTE(review): narrowI64 appears to hand back (part of) |val|'s register
  // for reuse as the clamp destination — confirm its aliasing contract.
  RegI32 clamped = narrowI64(val);
  masm.wasmClampTable64Address(val, clamped);
  return clamped;
}
   1239 
   1240 void BaseCompiler::replaceTableAddressWithClampedInt32(
   1241    AddressType addressType) {
   1242  if (addressType == AddressType::I32) {
   1243    return;
   1244  }
   1245 
   1246  pushI32(popTableAddressToClampedInt32(addressType));
   1247 }
   1248 
#ifdef JS_CODEGEN_ARM
// Pop an I64 as a valid register pair (presumably ARM requires particular
// register pairings for some i64 operations — see needI64Pair()).
RegI64 BaseCompiler::popI64Pair() {
  RegI64 r = needI64Pair();
  popI64ToSpecific(r);
  return r;
}
#endif
   1257 
   1258 // Pop an I64 but narrow it and return the narrowed part.
   1259 RegI32 BaseCompiler::popI64ToI32() {
   1260  RegI64 r = popI64();
   1261  return narrowI64(r);
   1262 }
   1263 
// Pop an i64 into the wide register whose low part is |specific|, then
// return that low part.
// NOTE(review): assumes widenI64/narrowI64 round-trip aliases the same
// underlying register as |specific| — confirm.
RegI32 BaseCompiler::popI64ToSpecificI32(RegI32 specific) {
  RegI64 rd = widenI32(specific);
  popI64ToSpecific(rd);
  return narrowI64(rd);
}
   1269 
   1270 bool BaseCompiler::peekLocal(uint32_t* local) {
   1271  Stk& v = stk_.back();
   1272  // See hasLocal() for documentation of this logic.
   1273  if (v.kind() <= Stk::MemLast || v.kind() > Stk::LocalLast) {
   1274    return false;
   1275  }
   1276  *local = v.slot();
   1277  return true;
   1278 }
   1279 
   1280 size_t BaseCompiler::stackConsumed(size_t numval) {
   1281  size_t size = 0;
   1282  MOZ_ASSERT(numval <= stk_.length());
   1283  for (uint32_t i = stk_.length() - 1; numval > 0; numval--, i--) {
   1284    Stk& v = stk_[i];
   1285    switch (v.kind()) {
   1286      case Stk::MemRef:
   1287        size += BaseStackFrame::StackSizeOfPtr;
   1288        break;
   1289      case Stk::MemI32:
   1290        size += BaseStackFrame::StackSizeOfPtr;
   1291        break;
   1292      case Stk::MemI64:
   1293        size += BaseStackFrame::StackSizeOfInt64;
   1294        break;
   1295      case Stk::MemF64:
   1296        size += BaseStackFrame::StackSizeOfDouble;
   1297        break;
   1298      case Stk::MemF32:
   1299        size += BaseStackFrame::StackSizeOfFloat;
   1300        break;
   1301 #ifdef ENABLE_WASM_SIMD
   1302      case Stk::MemV128:
   1303        size += BaseStackFrame::StackSizeOfV128;
   1304        break;
   1305 #endif
   1306      default:
   1307        break;
   1308    }
   1309  }
   1310  return size;
   1311 }
   1312 
   1313 void BaseCompiler::popValueStackTo(uint32_t stackSize) {
   1314  for (uint32_t i = stk_.length(); i > stackSize; i--) {
   1315    Stk& v = stk_[i - 1];
   1316    switch (v.kind()) {
   1317      case Stk::RegisterI32:
   1318        freeI32(v.i32reg());
   1319        break;
   1320      case Stk::RegisterI64:
   1321        freeI64(v.i64reg());
   1322        break;
   1323      case Stk::RegisterF64:
   1324        freeF64(v.f64reg());
   1325        break;
   1326      case Stk::RegisterF32:
   1327        freeF32(v.f32reg());
   1328        break;
   1329 #ifdef ENABLE_WASM_SIMD
   1330      case Stk::RegisterV128:
   1331        freeV128(v.v128reg());
   1332        break;
   1333 #endif
   1334      case Stk::RegisterRef:
   1335        freeRef(v.refReg());
   1336        break;
   1337      case Stk::MemRef:
   1338        stackMapGenerator_.memRefsOnStk--;
   1339        break;
   1340      default:
   1341        break;
   1342    }
   1343  }
   1344  stk_.shrinkTo(stackSize);
   1345 }
   1346 
// Discard the topmost |items| entries of the value stack; see
// popValueStackTo() for the register and stack-map bookkeeping.
void BaseCompiler::popValueStackBy(uint32_t items) {
  popValueStackTo(stk_.length() - items);
}
   1350 
// Drop the top value.  If it lives on the CPU stack, release its stack
// bytes first, then unwind the value-stack bookkeeping.
void BaseCompiler::dropValue() {
  if (peek(0).isMem()) {
    fr.popBytes(stackConsumed(1));
  }
  popValueStackBy(1);
}
   1357 
   1358 // Peek at the stack, for calls.
   1359 
// Return the stack entry |relativeDepth| positions down from the top
// (0 = the top itself) without popping anything.
Stk& BaseCompiler::peek(uint32_t relativeDepth) {
  return stk_[stk_.length() - 1 - relativeDepth];
}
   1363 }  // namespace wasm
   1364 }  // namespace js
   1365 
   1366 #endif  // wasm_wasm_baseline_stk_mgmt_inl_h