WasmCodegenTypes.cpp (15092B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2021 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmCodegenTypes.h"

#include "mozilla/PodOperations.h"
#include "wasm/WasmExprType.h"
#include "wasm/WasmStubs.h"
#include "wasm/WasmSummarizeInsn.h"
#include "wasm/WasmTypeDef.h"
#include "wasm/WasmValidate.h"
#include "wasm/WasmValue.h"

using mozilla::MakeEnumeratedRange;
using mozilla::PodZero;

using namespace js;
using namespace js::wasm;

// Wraps a function signature's argument list, additionally recording whether
// any results are returned on the stack (in which case the ABI passes a
// hidden stack-results pointer argument).
ArgTypeVector::ArgTypeVector(const FuncType& funcType)
    : args_(funcType.args()),
      hasStackResults_(ABIResultIter::HasStackResults(
          ResultType::Vector(funcType.results()))) {}

// Find the trap site recorded for the instruction at `trapInstructionOffset`.
// `pcOffsets_` is searched with an exact-match binary search (so it must be
// sorted, which checkInvariants() below verifies). On a hit, fills `*trapOut`
// with the bytecode offset and, if this site was produced by an inlined call,
// the inlined-caller offset chain resolved through `inliningContext`; returns
// false if no site is recorded at that offset.
bool TrapSitesForKind::lookup(uint32_t trapInstructionOffset,
                              const InliningContext& inliningContext,
                              TrapSite* trapOut) const {
  size_t lowerBound = 0;
  size_t upperBound = pcOffsets_.length();

  size_t match;
  if (BinarySearch(pcOffsets_, lowerBound, upperBound, trapInstructionOffset,
                   &match)) {
    TrapSite site;
    // bytecodeOffsets_ is a parallel array to pcOffsets_.
    site.bytecodeOffset = bytecodeOffsets_[match];
    // Only trap sites inside inlined functions have an entry in this map;
    // readonlyThreadsafeLookup allows lookup without locking.
    if (auto inlinedCallerOffsetsIndex =
            inlinedCallerOffsetsMap_.readonlyThreadsafeLookup(match)) {
      site.inlinedCallerOffsets =
          inliningContext[inlinedCallerOffsetsIndex->value()];
    } else {
      site.inlinedCallerOffsets = nullptr;
    }
    *trapOut = site;
    return true;
  }
  return false;
}

#ifdef DEBUG
// Debug-only pretty-printer for Trap kinds (used in assertions/diagnostics).
const char* wasm::ToString(Trap trap) {
  switch (trap) {
    case Trap::Unreachable:
      return "Unreachable";
    case Trap::IntegerOverflow:
      return "IntegerOverflow";
    case Trap::InvalidConversionToInteger:
      return "InvalidConversionToInteger";
    case Trap::IntegerDivideByZero:
      return "IntegerDivideByZero";
    case Trap::OutOfBounds:
      return "OutOfBounds";
    case Trap::UnalignedAccess:
      return "UnalignedAccess";
    case Trap::IndirectCallToNull:
      return "IndirectCallToNull";
    case Trap::IndirectCallBadSig:
      return "IndirectCallBadSig";
    case Trap::NullPointerDereference:
      return "NullPointerDereference";
    case Trap::BadCast:
      return "BadCast";
    case Trap::StackOverflow:
      return "StackOverflow";
    case Trap::CheckInterrupt:
      return "CheckInterrupt";
    case Trap::ThrowReported:
      return "ThrowReported";
    case Trap::Limit:
      return "Limit";
    default:
      return "Unknown";
  }
}

// Debug-only pretty-printer for the machine-instruction classification that a
// trap site is expected to point at (see SummarizeTrapInstruction).
const char* wasm::ToString(TrapMachineInsn tmi) {
  switch (tmi) {
    case TrapMachineInsn::OfficialUD:
      return "OfficialUD";
    case TrapMachineInsn::Load8:
      return "Load8";
    case TrapMachineInsn::Load16:
      return "Load16";
    case TrapMachineInsn::Load32:
      return "Load32";
    case TrapMachineInsn::Load64:
      return "Load64";
    case TrapMachineInsn::Load128:
      return "Load128";
    case TrapMachineInsn::Store8:
      return "Store8";
    case TrapMachineInsn::Store16:
      return "Store16";
    case TrapMachineInsn::Store32:
      return "Store32";
    case TrapMachineInsn::Store64:
      return "Store64";
    case TrapMachineInsn::Store128:
      return "Store128";
    case TrapMachineInsn::Atomic:
      return "Atomic";
    default:
      return "Unknown";
  }
}
#endif  // DEBUG

// Debug-only sanity checks on the trap-site tables: parallel arrays must have
// equal lengths, pc offsets must be strictly increasing (required by the
// binary search in lookup()), and — on supported targets — each recorded pc
// must actually point at an instruction of the expected kind in the compiled
// code starting at `codeBase`. No-op in release builds.
void TrapSitesForKind::checkInvariants(const uint8_t* codeBase) const {
#ifdef DEBUG
  MOZ_ASSERT(machineInsns_.length() == pcOffsets_.length());
  MOZ_ASSERT(pcOffsets_.length() == bytecodeOffsets_.length());

  // Strictly increasing also implies every pcOffset is > 0.
  uint32_t last = 0;
  for (uint32_t pcOffset : pcOffsets_) {
    MOZ_ASSERT(pcOffset > last);
    last = pcOffset;
  }

#  if (defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_X86) ||    \
       defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_ARM) ||  \
       defined(JS_CODEGEN_LOONG64) || defined(JS_CODEGEN_MIPS64))
  // Check that each trapsite is associated with a plausible instruction. The
  // required instruction kind depends on the trapsite kind.
  //
  // NOTE: currently enabled on x86_{32,64}, arm{32,64}, loongson64 and mips64.
  // Ideally it should be extended to riscv64 too.
  //
  for (uint32_t i = 0; i < length(); i++) {
    uint32_t pcOffset = pcOffsets_[i];
    TrapMachineInsn expected = machineInsns_[i];

    const uint8_t* insnAddr = codeBase + uintptr_t(pcOffset);
    // `expected` describes the kind of instruction we expect to see at
    // `insnAddr`. Find out what is actually there and check it matches.
    mozilla::Maybe<TrapMachineInsn> actual = SummarizeTrapInstruction(insnAddr);
    bool valid = actual.isSome() && actual.value() == expected;
    // This is useful for diagnosing validation failures.
    // if (!valid) {
    //   fprintf(stderr,
    //           "FAIL: reason=%-22s expected=%-12s "
    //           "pcOffset=%-5u addr= %p\n",
    //           ToString(trap), ToString(expected),
    //           pcOffset, insnAddr);
    //   if (actual.isSome()) {
    //     fprintf(stderr, "FAIL: identified as %s\n",
    //             actual.isSome() ? ToString(actual.value())
    //                             : "(insn not identified)");
    //   }
    // }
    MOZ_ASSERT(valid, "wasm trapsite does not reference a valid insn");
  }

  // Every inlined-caller map entry must refer to a valid trap-site index and
  // carry a real inlining chain.
  for (auto iter = inlinedCallerOffsetsMap_.iter(); !iter.done(); iter.next()) {
    MOZ_ASSERT(iter.get().key() < length());
    MOZ_ASSERT(!iter.get().value().isNone());
  }
#  endif
#endif
}

// CodeRange for stubs that have no return point and no function index
// (far-jump islands, trap exits, the throw stub). The union payload is
// zeroed since none of its fields apply.
CodeRange::CodeRange(Kind kind, Offsets offsets)
    : begin_(offsets.begin), ret_(0), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(begin_ <= end_);
  PodZero(&u);
#ifdef DEBUG
  switch (kind_) {
    case FarJumpIsland:
    case TrapExit:
    case Throw:
      break;
    default:
      MOZ_CRASH("should use more specific constructor");
  }
#endif
}

// CodeRange for an entry stub associated with a particular function. The
// func sub-struct is zeroed: entries have no unchecked-call/tier entry
// points of their own.
CodeRange::CodeRange(Kind kind, uint32_t funcIndex, Offsets offsets)
    : begin_(offsets.begin), ret_(0), end_(offsets.end), kind_(kind) {
  u.funcIndex_ = funcIndex;
  u.func.beginToUncheckedCallEntry_ = 0;
  u.func.beginToTierEntry_ = 0;
  u.func.hasUnwindInfo_ = false;
  MOZ_ASSERT(isEntry());
  MOZ_ASSERT(begin_ <= end_);
}

// CodeRange for callable stubs that have a return point but no function
// index (debug stub, builtin thunks, tier-up/metrics stubs).
CodeRange::CodeRange(Kind kind, CallableOffsets offsets)
    : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  PodZero(&u);
#ifdef DEBUG
  switch (kind_) {
    case DebugStub:
    case BuiltinThunk:
    case RequestTierUpStub:
    case UpdateCallRefMetricsStub:
      break;
    default:
      MOZ_CRASH("should use more specific constructor");
  }
#endif
}

// CodeRange for a per-function callable stub (import exit or JIT entry).
CodeRange::CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets offsets)
    : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(isImportExit() || isJitEntry());
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  u.funcIndex_ = funcIndex;
  u.func.beginToUncheckedCallEntry_ = 0;
  u.func.beginToTierEntry_ = 0;
  u.func.hasUnwindInfo_ = false;
}
// CodeRange for an import JIT-exit stub. Stores the offset (relative to
// begin_) of the entry point that skips the fallback check.
CodeRange::CodeRange(Kind kind, uint32_t funcIndex, ImportOffsets offsets)
    : begin_(offsets.begin), ret_(offsets.ret), end_(offsets.end), kind_(kind) {
  MOZ_ASSERT(isImportJitExit());
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  uint32_t entry = offsets.afterFallbackCheck;
  MOZ_ASSERT(begin_ <= entry && entry <= ret_);
  u.funcIndex_ = funcIndex;
  u.jitExitEntry_ = entry - begin_;
}

// CodeRange for an actual wasm function body. The unchecked-call and tier
// entry points are stored as 16-bit deltas from begin_, hence the UINT16_MAX
// assertions below.
CodeRange::CodeRange(uint32_t funcIndex, FuncOffsets offsets,
                     bool hasUnwindInfo)
    : begin_(offsets.begin),
      ret_(offsets.ret),
      end_(offsets.end),
      kind_(Function) {
  MOZ_ASSERT(begin_ < ret_);
  MOZ_ASSERT(ret_ < end_);
  MOZ_ASSERT(offsets.uncheckedCallEntry - begin_ <= UINT16_MAX);
  MOZ_ASSERT(offsets.tierEntry - begin_ <= UINT16_MAX);
  u.funcIndex_ = funcIndex;
  u.func.beginToUncheckedCallEntry_ = offsets.uncheckedCallEntry - begin_;
  u.func.beginToTierEntry_ = offsets.tierEntry - begin_;
  u.func.hasUnwindInfo_ = hasUnwindInfo;
}

// Binary-search a sorted CodeRangeVector for the range containing `target`;
// returns nullptr if no range matches.
const CodeRange* wasm::LookupInSorted(const CodeRangeVector& codeRanges,
                                      CodeRange::OffsetInCode target) {
  size_t lowerBound = 0;
  size_t upperBound = codeRanges.length();

  size_t match;
  if (!BinarySearch(codeRanges, lowerBound, upperBound, target, &match)) {
    return nullptr;
  }

  return &codeRanges[match];
}

// Find the call site whose return address is at `returnAddressOffset` via
// exact-match binary search over the sorted return-address offsets. On a hit,
// materializes the CallSite (resolving inlining data through
// `inliningContext`) into `*callSite` and returns true.
bool CallSites::lookup(uint32_t returnAddressOffset,
                       const InliningContext& inliningContext,
                       CallSite* callSite) const {
  size_t lowerBound = 0;
  size_t upperBound = returnAddressOffsets_.length();

  size_t match;
  if (BinarySearch(returnAddressOffsets_, lowerBound, upperBound,
                   returnAddressOffset, &match)) {
    *callSite = get(match, inliningContext);
    return true;
  }
  return false;
}

// Signature-check id for asm.js: a dedicated kind with no payload, since
// asm.js tables need no runtime signature check (see below).
CallIndirectId CallIndirectId::forAsmJSFunc() {
  return CallIndirectId(CallIndirectIdKind::AsmJS);
}

// Compute the call_indirect signature-check id for function `funcIndex`.
// Returns a default (no-op) id for functions that can never be referenced
// from a table (canRefFunc() is false); otherwise delegates to forFuncType
// with the function's type index.
CallIndirectId CallIndirectId::forFunc(const CodeMetadata& codeMeta,
                                       uint32_t funcIndex) {
  // asm.js tables are homogenous and don't require a signature check
  if (codeMeta.isAsmJS()) {
    return CallIndirectId::forAsmJSFunc();
  }

  FuncDesc func = codeMeta.funcs[funcIndex];
  if (!func.canRefFunc()) {
    return CallIndirectId();
  }
  return CallIndirectId::forFuncType(codeMeta,
                                     codeMeta.funcs[funcIndex].typeIndex);
}

// Compute the signature-check id for the function type `funcTypeIndex`.
// Small types get an immediate id that can be compared inline; others are
// checked via the type definition stored in instance data, recording whether
// the type has a supertype (which affects the check the JIT must emit).
CallIndirectId CallIndirectId::forFuncType(const CodeMetadata& codeMeta,
                                           uint32_t funcTypeIndex) {
  // asm.js tables are homogenous and don't require a signature check
  if (codeMeta.isAsmJS()) {
    return CallIndirectId::forAsmJSFunc();
  }

  const TypeDef& typeDef = codeMeta.types->type(funcTypeIndex);
  const FuncType& funcType = typeDef.funcType();
  CallIndirectId callIndirectId;
  if (funcType.hasImmediateTypeId()) {
    callIndirectId.kind_ = CallIndirectIdKind::Immediate;
    callIndirectId.immediate_ = funcType.immediateTypeId();
  } else {
    callIndirectId.kind_ = CallIndirectIdKind::Global;
    callIndirectId.global_.instanceDataOffset_ =
        codeMeta.offsetOfTypeDef(funcTypeIndex);
    callIndirectId.global_.hasSuperType_ = typeDef.superTypeDef() != nullptr;
  }
  return callIndirectId;
}

// CalleeDesc factories: each builds a descriptor for one kind of call target,
// populating only the union members relevant to that kind.

// Direct call to a wasm function by index.
CalleeDesc CalleeDesc::function(uint32_t funcIndex) {
  CalleeDesc c;
  c.which_ = Func;
  c.u.funcIndex_ = funcIndex;
  return c;
}
// Call through an imported function's instance data slot.
CalleeDesc CalleeDesc::import(uint32_t instanceDataOffset) {
  CalleeDesc c;
  c.which_ = Import;
  c.u.import.instanceDataOffset_ = instanceDataOffset;
  return c;
}
// call_indirect through a wasm table: records the table's instance-data
// location, its length bounds, and the signature-check id to apply.
CalleeDesc CalleeDesc::wasmTable(const CodeMetadata& codeMeta,
                                 const TableDesc& desc, uint32_t tableIndex,
                                 CallIndirectId callIndirectId) {
  CalleeDesc c;
  c.which_ = WasmTable;
  c.u.table.instanceDataOffset_ =
      codeMeta.offsetOfTableInstanceData(tableIndex);
  c.u.table.minLength_ = desc.initialLength();
  c.u.table.maxLength_ = desc.maximumLength();
  c.u.table.callIndirectId_ = callIndirectId;
  return c;
}
// Call through an asm.js table; no length bounds or signature check needed
// (asm.js tables are homogenous).
CalleeDesc CalleeDesc::asmJSTable(const CodeMetadata& codeMeta,
                                  uint32_t tableIndex) {
  CalleeDesc c;
  c.which_ = AsmJSTable;
  c.u.table.instanceDataOffset_ =
      codeMeta.offsetOfTableInstanceData(tableIndex);
  return c;
}
// Call to a runtime builtin by symbolic address.
CalleeDesc CalleeDesc::builtin(SymbolicAddress callee) {
  CalleeDesc c;
  c.which_ = Builtin;
  c.u.builtin_ = callee;
  return c;
}
// Call to a builtin that receives the Instance as an implicit argument.
CalleeDesc CalleeDesc::builtinInstanceMethod(SymbolicAddress callee) {
  CalleeDesc c;
  c.which_ = BuiltinInstanceMethod;
  c.u.builtin_ = callee;
  return c;
}
// call_ref through a function reference; no payload needed.
CalleeDesc CalleeDesc::wasmFuncRef() {
  CalleeDesc c;
  c.which_ = FuncRef;
  return c;
}

// Accumulate another compilation unit's statistics into this one.
void CompileStats::merge(const CompileStats& other) {
  MOZ_ASSERT(&other != this);
  numFuncs += other.numFuncs;
  bytecodeSize += other.bytecodeSize;
  inlinedDirectCallCount += other.inlinedDirectCallCount;
  inlinedCallRefCount += other.inlinedCallRefCount;
  inlinedDirectCallBytecodeSize += other.inlinedDirectCallBytecodeSize;
  inlinedCallRefBytecodeSize += other.inlinedCallRefBytecodeSize;
  numInliningBudgetOverruns += other.numInliningBudgetOverruns;
  numLargeFunctionBackoffs += other.numLargeFunctionBackoffs;
}

// Merge both the base CompileStats counters and the link-time code-size
// counters.
void CompileAndLinkStats::merge(const CompileAndLinkStats& other) {
  MOZ_ASSERT(&other != this);
  CompileStats::merge(other);
  codeBytesMapped += other.codeBytesMapped;
  codeBytesUsed += other.codeBytesUsed;
}

// Emit all collected statistics to the wasmPerf JIT spew log. Compiled out
// entirely when JS_JITSPEW is not defined.
void CompileAndLinkStats::print() const {
#ifdef JS_JITSPEW
  // To see the statistics printed here:
  // * configure with --enable-jitspew or --enable-debug
  // * run with MOZ_LOG=wasmPerf:3
  // * this works for both JS builds and full browser builds
  JS_LOG(wasmPerf, Info, " %7zu functions compiled", numFuncs);
  JS_LOG(wasmPerf, Info, " %7zu bytecode bytes compiled", bytecodeSize);
  JS_LOG(wasmPerf, Info, " %7zu direct-calls inlined",
         inlinedDirectCallCount);
  JS_LOG(wasmPerf, Info, " %7zu call_ref-calls inlined",
         inlinedCallRefCount);
  JS_LOG(wasmPerf, Info, " %7zu direct-call bytecodes inlined",
         inlinedDirectCallBytecodeSize);
  JS_LOG(wasmPerf, Info, " %7zu call_ref-call bytecodes inlined",
         inlinedCallRefBytecodeSize);
  JS_LOG(wasmPerf, Info, " %7zu functions overran inlining budget",
         numInliningBudgetOverruns);
  JS_LOG(wasmPerf, Info, " %7zu functions needed large-function backoff",
         numLargeFunctionBackoffs);
  JS_LOG(wasmPerf, Info, " %7zu bytes mmap'd for code storage",
         codeBytesMapped);
  JS_LOG(wasmPerf, Info, " %7zu bytes actually used for code storage",
         codeBytesUsed);

  size_t inlinedTotalBytecodeSize =
      inlinedDirectCallBytecodeSize + inlinedCallRefBytecodeSize;

  // This value will be 0.0 if inlining did not cause any code expansion. A
  // value of 1.0 means inlining doubled the total amount of bytecode, 2.0
  // means tripled it, etc. Take care not to compute 0.0 / 0.0 as that is,
  // confusingly, -nan.
  float inliningExpansion =
      inlinedTotalBytecodeSize == 0
          ? 0.0
          : float(inlinedTotalBytecodeSize) / float(bytecodeSize);

  // This is always between 0.0 and 1.0.
  float codeSpaceUseRatio =
      codeBytesUsed == 0 ? 0.0 : float(codeBytesUsed) / float(codeBytesMapped);

  JS_LOG(wasmPerf, Info, " %5.1f%% bytecode expansion caused by inlining",
         inliningExpansion * 100.0);
  JS_LOG(wasmPerf, Info, " %4.1f%% of mapped code space used",
         codeSpaceUseRatio * 100.0);
#endif
}