tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WasmGC.cpp (12897B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 * vim: set ts=8 sts=2 et sw=2 tw=80:
      3 *
      4 * Copyright 2019 Mozilla Foundation
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *     http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 #include "wasm/WasmGC.h"
     20 #include "wasm/WasmInstance.h"
     21 #include "jit/MacroAssembler-inl.h"
     22 
     23 using namespace js;
     24 using namespace js::jit;
     25 using namespace js::wasm;
     26 
     27 // Generate a stackmap for a function's stack-overflow-at-entry trap, with
     28 // the structure:
     29 //
     30 //    <reg dump area>
     31 //    |       ++ <space reserved before trap, if any>
     32 //    |               ++ <space for Frame>
     33 //    |                       ++ <inbound arg area>
     34 //    |                                           |
     35 //    Lowest Addr                                 Highest Addr
     36 //
     37 // The caller owns the resulting stackmap.  This assumes a grow-down stack.
     38 //
     39 // For non-debug builds, if the stackmap would contain no pointers, no
     40 // stackmap is created, and nullptr is returned.  For a debug build, a
     41 // stackmap is always created and returned.
     42 //
     43 // The "space reserved before trap" is the space reserved by
     44 // MacroAssembler::wasmReserveStackChecked, in the case where the frame is
     45 // "small", as determined by that function.
bool wasm::CreateStackMapForFunctionEntryTrap(
    const wasm::ArgTypeVector& argTypes, const RegisterOffsets& trapExitLayout,
    size_t trapExitLayoutWords, size_t nBytesReservedBeforeTrap,
    size_t nInboundStackArgBytes, wasm::StackMaps& stackMaps,
    wasm::StackMap** result) {
  // Ensure this is defined on all return paths.
  *result = nullptr;

  // The size of the wasm::Frame itself.
  const size_t nFrameBytes = sizeof(wasm::Frame);

  // The size of the register dump (trap) area.
  const size_t trapExitLayoutBytes = trapExitLayoutWords * sizeof(void*);

  // The stack map owns any alignment padding for incoming stack args.
  MOZ_ASSERT(nInboundStackArgBytes % sizeof(void*) == 0);
  const size_t nInboundStackArgBytesAligned =
      AlignStackArgAreaSize(nInboundStackArgBytes);
  const size_t numStackArgWords = nInboundStackArgBytesAligned / sizeof(void*);

  // This is the total number of bytes covered by the map: reg dump area,
  // optional pre-trap reserved space, the Frame, and the inbound arg area
  // (see the layout diagram above the function).
  const size_t nTotalBytes = trapExitLayoutBytes + nBytesReservedBeforeTrap +
                             nFrameBytes + nInboundStackArgBytesAligned;

#ifndef DEBUG
  // Scan the ABI-assigned argument types for any GC references.
  bool hasRefs = false;
  for (ABIArgIter i(argTypes, ABIKind::Wasm); !i.done(); i++) {
    if (i.mirType() == MIRType::WasmAnyRef) {
      hasRefs = true;
      break;
    }
  }

  // There are no references, and this is a non-debug build, so don't bother
  // building the stackmap.
  if (!hasRefs) {
    return true;
  }
#endif

  wasm::StackMap* stackMap = stackMaps.create(nTotalBytes / sizeof(void*));
  if (!stackMap) {
    return false;
  }
  stackMap->setExitStubWords(trapExitLayoutWords);
  stackMap->setFrameOffsetFromTop(nFrameBytes / sizeof(void*) +
                                  numStackArgWords);

  // REG DUMP AREA
  // Mark the words in the register dump area whose saved registers hold
  // WasmAnyRef-typed arguments.
  wasm::ExitStubMapVector trapExitExtras;
  if (!GenerateStackmapEntriesForTrapExit(
          argTypes, trapExitLayout, trapExitLayoutWords, &trapExitExtras)) {
    return false;
  }
  MOZ_ASSERT(trapExitExtras.length() == trapExitLayoutWords);

  for (size_t i = 0; i < trapExitLayoutWords; i++) {
    if (trapExitExtras[i]) {
      stackMap->set(i, wasm::StackMap::AnyRef);
    }
  }

  // INBOUND ARG AREA
  // Word index of the start of the inbound arg area within the map; the reg
  // dump area, the pre-trap reserved space, and the Frame all precede it.
  const size_t stackArgOffset =
      (trapExitLayoutBytes + nBytesReservedBeforeTrap + nFrameBytes) /
      sizeof(void*);
  for (ABIArgIter i(argTypes, ABIKind::Wasm); !i.done(); i++) {
    ABIArg argLoc = *i;
    if (argLoc.kind() == ABIArg::Stack &&
        argTypes[i.index()] == MIRType::WasmAnyRef) {
      uint32_t offset = argLoc.offsetFromArgBase();
      MOZ_ASSERT(offset < nInboundStackArgBytes);
      MOZ_ASSERT(offset % sizeof(void*) == 0);
      stackMap->set(stackArgOffset + offset / sizeof(void*),
                    wasm::StackMap::AnyRef);
    }
  }

#ifdef DEBUG
  // The words covering the Frame itself must never be marked as refs.
  for (uint32_t i = 0; i < nFrameBytes / sizeof(void*); i++) {
    MOZ_ASSERT(stackMap->get(stackMap->header.numMappedWords -
                             stackMap->header.frameOffsetFromTop + i) ==
               StackMap::Kind::POD);
  }
#endif

  *result = stackMaps.finalize(stackMap);
  return true;
}
    135 
    136 bool wasm::GenerateStackmapEntriesForTrapExit(
    137    const ArgTypeVector& args, const RegisterOffsets& trapExitLayout,
    138    const size_t trapExitLayoutNumWords, ExitStubMapVector* extras) {
    139  MOZ_ASSERT(extras->empty());
    140 
    141  if (!extras->appendN(false, trapExitLayoutNumWords)) {
    142    return false;
    143  }
    144 
    145  for (ABIArgIter i(args, ABIKind::Wasm); !i.done(); i++) {
    146    if (!i->argInRegister() || i.mirType() != MIRType::WasmAnyRef) {
    147      continue;
    148    }
    149 
    150    size_t offsetFromTop = trapExitLayout.getOffset(i->gpr());
    151 
    152    // If this doesn't hold, the associated register wasn't saved by
    153    // the trap exit stub.  Better to crash now than much later, in
    154    // some obscure place, and possibly with security consequences.
    155    MOZ_RELEASE_ASSERT(offsetFromTop < trapExitLayoutNumWords);
    156 
    157    // offsetFromTop is an offset in words down from the highest
    158    // address in the exit stub save area.  Switch it around to be an
    159    // offset up from the bottom of the (integer register) save area.
    160    size_t offsetFromBottom = trapExitLayoutNumWords - 1 - offsetFromTop;
    161 
    162    (*extras)[offsetFromBottom] = true;
    163  }
    164 
    165  return true;
    166 }
    167 
// Emit a guard that branches to `skipBarrier` whenever the pre-write barrier
// is unnecessary: either no incremental GC barrier is currently needed, or
// the value presently stored at `addr` is not a GC thing.  Clobbers
// `scratch`.  If `trapSiteDesc` is supplied, the load of the previous value
// is additionally recorded as a potential null-pointer-dereference trap
// site.
template <class Addr>
void wasm::EmitWasmPreBarrierGuard(MacroAssembler& masm, Register instance,
                                   Register scratch, Addr addr,
                                   Label* skipBarrier,
                                   MaybeTrapSiteDesc trapSiteDesc) {
  // If no incremental GC has started, we don't need the barrier.
  masm.loadPtr(
      Address(instance, Instance::offsetOfAddressOfNeedsIncrementalBarrier()),
      scratch);
  masm.branchTest32(Assembler::Zero, Address(scratch, 0), Imm32(0x1),
                    skipBarrier);

  // If the previous value is not a GC thing, we don't need the barrier.
  FaultingCodeOffset fco = masm.loadPtr(addr, scratch);
  masm.branchWasmAnyRefIsGCThing(false, scratch, skipBarrier);

  // Emit metadata for a potential null access when reading the previous value.
  if (trapSiteDesc) {
    masm.append(wasm::Trap::NullPointerDereference,
                TrapMachineInsnForLoadWord(), fco.get(), *trapSiteDesc);
  }
}
    190 
// Explicit instantiations for the two addressing modes callers use.
template void wasm::EmitWasmPreBarrierGuard<Address>(
    MacroAssembler& masm, Register instance, Register scratch, Address addr,
    Label* skipBarrier, MaybeTrapSiteDesc trapSiteDesc);
template void wasm::EmitWasmPreBarrierGuard<BaseIndex>(
    MacroAssembler& masm, Register instance, Register scratch, BaseIndex addr,
    Label* skipBarrier, MaybeTrapSiteDesc trapSiteDesc);
    197 
// Emit a call to the instance's pre-write barrier stub for the location
// `valueAddr + valueOffset`.  `valueAddr` must be PreBarrierReg, the
// register the stub expects its input in.  Any nonzero offset is folded
// into PreBarrierReg before the call and subtracted back out afterwards,
// so `valueAddr` holds its original value on return.  Clobbers `scratch`.
void wasm::EmitWasmPreBarrierCallImmediate(MacroAssembler& masm,
                                           Register instance, Register scratch,
                                           Register valueAddr,
                                           size_t valueOffset) {
  MOZ_ASSERT(valueAddr == PreBarrierReg);

  // Add the offset to the PreBarrierReg, if any.
  if (valueOffset != 0) {
    masm.addPtr(Imm32(valueOffset), valueAddr);
  }

#if defined(DEBUG) && defined(JS_CODEGEN_ARM64)
  // The prebarrier assumes that x28 == sp.
  Label ok;
  masm.Cmp(sp, vixl::Operand(x28));
  masm.B(&ok, Assembler::Equal);
  masm.breakpoint();
  masm.bind(&ok);
#endif

  // Load and call the pre-write barrier code. It will preserve all volatile
  // registers.
  masm.loadPtr(Address(instance, Instance::offsetOfPreBarrierCode()), scratch);
  masm.call(scratch);

  // Remove the offset we folded into PreBarrierReg, if any.
  if (valueOffset != 0) {
    masm.subPtr(Imm32(valueOffset), valueAddr);
  }
}
    228 
// Emit a call to the instance's pre-write barrier stub for a base+index
// address.  `addr.base` must be PreBarrierReg; the effective address is
// computed into PreBarrierReg (where the stub expects it), and the original
// base value is saved in `scratch2` and restored after the call.  Clobbers
// `scratch1` and `scratch2`.
void wasm::EmitWasmPreBarrierCallIndex(MacroAssembler& masm, Register instance,
                                       Register scratch1, Register scratch2,
                                       BaseIndex addr) {
  MOZ_ASSERT(addr.base == PreBarrierReg);

  // Save the original base so we can restore it later.
  masm.movePtr(AsRegister(addr.base), scratch2);

  // Compute the final address into PrebarrierReg, as the barrier expects it
  // there.
  masm.computeEffectiveAddress(addr, PreBarrierReg);

#if defined(DEBUG) && defined(JS_CODEGEN_ARM64)
  // The prebarrier assumes that x28 == sp.
  Label ok;
  masm.Cmp(sp, vixl::Operand(x28));
  masm.B(&ok, Assembler::Equal);
  masm.breakpoint();
  masm.bind(&ok);
#endif

  // Load and call the pre-write barrier code. It will preserve all volatile
  // registers.
  masm.loadPtr(Address(instance, Instance::offsetOfPreBarrierCode()), scratch1);
  masm.call(scratch1);

  // Restore the original base
  masm.movePtr(scratch2, AsRegister(addr.base));
}
    258 
// Emit a guard that branches to `skipBarrier` whenever the post-write
// barrier is unnecessary: the containing object (if one is supplied) is
// itself in the nursery, or the stored value does not point at a nursery
// cell.  Clobbers `otherScratch`.
void wasm::EmitWasmPostBarrierGuard(MacroAssembler& masm,
                                    const mozilla::Maybe<Register>& object,
                                    Register otherScratch, Register setValue,
                                    Label* skipBarrier) {
  // If there is a containing object and it is in the nursery, no barrier.
  if (object) {
    masm.branchPtrInNurseryChunk(Assembler::Equal, *object, otherScratch,
                                 skipBarrier);
  }

  // If the pointer being stored is to a tenured object, no barrier.
  masm.branchWasmAnyRefIsNurseryCell(false, setValue, otherScratch,
                                     skipBarrier);
}
    273 
// Emit a check against the instance's last-buffered-whole-cell cache:
// branches to `skipBarrier` when `object` is the most recently buffered
// whole cell, in which case no further post-barrier work is needed.
// Clobbers `temp`.
void wasm::CheckWholeCellLastElementCache(MacroAssembler& masm,
                                          Register instance, Register object,
                                          Register temp, Label* skipBarrier) {
  masm.loadPtr(
      Address(instance,
              wasm::Instance::offsetOfAddressOfLastBufferedWholeCell()),
      temp);
  masm.branchPtr(Assembler::Equal, Address(temp, 0), object, skipBarrier);
}
    283 
#ifdef DEBUG
// Debug-only predicate: returns true if the bytes immediately preceding
// `nextPC` decode as an instruction that can legitimately sit just before a
// stackmap key -- per the encodings matched below, a trap/break instruction
// or a (direct or indirect) call.  Used by StackMaps::checkInvariants to
// sanity-check recorded code offsets.  On MIPS64/LOONG64 this is not yet
// implemented and conservatively returns true.
bool wasm::IsPlausibleStackMapKey(const uint8_t* nextPC) {
#  if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_X86)
  // x86/x64 instructions are variable-length, so probe the distinct
  // encodings at their respective distances back from nextPC.
  const uint8_t* insn = nextPC;
  return (insn[-2] == 0x0F && insn[-1] == 0x0B) ||           // ud2
         (insn[-2] == 0xFF && (insn[-1] & 0xF8) == 0xD0) ||  // call *%r_
         insn[-5] == 0xE8;                                   // call simm32

#  elif defined(JS_CODEGEN_ARM)
  const uint32_t* insn = (const uint32_t*)nextPC;
  return ((uintptr_t(insn) & 3) == 0) &&            // must be ARM, not Thumb
         (insn[-1] == 0xe7f000f0 ||                 // udf
          (insn[-1] & 0xfffffff0) == 0xe12fff30 ||  // blx reg (ARM, enc A1)
          (insn[-1] & 0x0f000000) == 0x0b000000);  // bl.cc simm24 (ARM, enc A1)

#  elif defined(JS_CODEGEN_ARM64)
  const uint32_t hltInsn = 0xd4a00000;
  const uint32_t* insn = (const uint32_t*)nextPC;
  return ((uintptr_t(insn) & 3) == 0) &&
         (insn[-1] == hltInsn ||                    // hlt
          (insn[-1] & 0xfffffc1f) == 0xd63f0000 ||  // blr reg
          (insn[-1] & 0xfc000000) == 0x94000000);   // bl simm26

#  elif defined(JS_CODEGEN_MIPS64)
  // TODO (bug 1699696): Implement this.  As for the platforms above, we need to
  // enumerate all code sequences that can precede the stackmap location.
  return true;
#  elif defined(JS_CODEGEN_LOONG64)
  // TODO(loong64): Implement IsValidStackMapKey.
  return true;
#  elif defined(JS_CODEGEN_RISCV64)
  const uint32_t* insn = reinterpret_cast<const uint32_t*>(nextPC);
  return (((uintptr_t(insn) & 3) == 0) &&
          ((insn[-1] == 0x00006037 && insn[-2] == 0x00100073) ||  // break;
           ((insn[-1] & kBaseOpcodeMask) == JALR) ||              // jalr
           ((insn[-1] & kBaseOpcodeMask) == JAL) ||               // jal
           ((insn[-2] & kBaseOpcodeMask) == JAL &&
            insn[-1] == 0x00000013 /* addi zero, zero, 0 */) ||  // jal; nop
           (insn[-1] == 0x00100073 &&
            (insn[-2] & kITypeMask) == RO_CSRRWI)));  // wasm trap
#  else
  MOZ_CRASH("IsValidStackMapKey: requires implementation on this platform");
#  endif
}
#endif
    329 
// Debug-only sanity check over the whole stackmap table: every recorded
// code offset, rebased against `base`, must point just past a plausible
// trap or call instruction.  No-op in non-debug builds.
void StackMaps::checkInvariants(const uint8_t* base) const {
#ifdef DEBUG
  // Check that each entry in the stackmap structure points to a plausible
  // instruction.
  for (auto iter = codeOffsetToStackMap_.iter(); !iter.done(); iter.next()) {
    MOZ_ASSERT(IsPlausibleStackMapKey(base + iter.get().key()),
               "wasm stackmap does not reference a valid insn");
  }
#endif
}