// WasmInstance.h
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef wasm_instance_h
#define wasm_instance_h

#include "mozilla/Atomics.h"
#include "mozilla/Maybe.h"

#include <functional>

#include "gc/Barrier.h"
#include "js/shadow/Zone.h"  // for BarrierState
#include "js/Stack.h"        // JS::NativeStackLimit
#include "js/TypeDecls.h"
#include "vm/SharedMem.h"
#include "wasm/WasmExprType.h"  // for ResultType
#include "wasm/WasmLog.h"       // for PrintCallback
#include "wasm/WasmModuleTypes.h"
#include "wasm/WasmShareable.h"  // for SeenSet
#include "wasm/WasmTypeDecls.h"
#include "wasm/WasmValue.h"

namespace js {

class SharedArrayRawBuffer;
class WasmBreakpointSite;

class WasmGcObject;
class WasmStructObject;
class WasmArrayObject;

struct AllocationMetadataBuilder;

namespace gc {
class StoreBuffer;
}  // namespace gc

namespace wasm {

// Forward declarations of types whose full definitions live in other wasm
// headers; only pointers/references to them appear in this header.
struct CodeTailMetadata;
struct FuncDefInstanceData;
class FuncImport;
struct FuncImportInstanceData;
struct FuncExportInstanceData;
struct MemoryDesc;
struct MemoryInstanceData;
class GlobalDesc;
struct TableDesc;
struct TableInstanceData;
struct TagDesc;
struct TagInstanceData;
struct TypeDefInstanceData;
struct CallRefMetrics;
class WasmFrameIter;

// Instance represents a wasm instance and provides all the support for runtime
// execution of code in the instance. Instances share various immutable data
// structures with the Module from which they were instantiated and other
// instances instantiated from the same Module. However, an Instance has no
// direct reference to its source Module which allows a Module to be destroyed
// while it still has live Instances.
//
// The instance's code may be shared among multiple instances.
//
// An Instance is also known as a 'TlsData'. They used to be separate objects,
// but have now been unified. Extant references to 'TlsData' will be cleaned
// up over time.
class alignas(16) Instance {
  // NOTE: The first fields of Instance are reserved for commonly accessed data
  // from the JIT, such that they have as small an offset as possible. See the
  // next note for the end of this region.
  //
  // The declaration order of these fields is load-bearing: JIT-compiled code
  // addresses them via the offsetOf*() constants below, so do not reorder
  // them without auditing those uses.

  // Pointer to the base of memory 0 (or null if there are no memories). This
  // is always in sync with the MemoryInstanceData for memory 0.
  uint8_t* memory0Base_;

  // Bounds check limit in bytes for memory 0. If there is no memory 0, this
  // value will be zero.
  //
  // This is 64 bits on 64-bit systems so as to allow for heap lengths up to
  // and beyond 4GB, and 32 bits on 32-bit systems, where memories are limited
  // to 2GB.
  //
  // See "Linear memory addresses and bounds checking" in WasmMemory.cpp.
  uintptr_t memory0BoundsCheckLimit_;

  // Null or a pointer to a per-module builtin stub that will invoke the Debug
  // Trap Handler.
  void* debugStub_;

  // The containing JS::Realm.
  JS::Realm* realm_;

  // The containing JSContext.
  JSContext* cx_;

  // The pending exception that was found during stack unwinding after a
  // throw.
  //
  //   - Only non-null while unwinding the control stack from a wasm-exit stub
  //     until the nearest enclosing Wasm try-catch or try-delegate block.
  //   - Set by wasm::HandleThrow, unset by Instance::consumePendingException.
  //   - If the unwind target is a `try-delegate`, it is unset by the delegated
  //     try-catch block or function body block.
  GCPtr<AnyRef> pendingException_;
  // The tag object of the pending exception.
  GCPtr<AnyRef> pendingExceptionTag_;

  // Set to 1 when wasm should call CheckForInterrupt.
  mozilla::Atomic<uint32_t, mozilla::Relaxed> interrupt_;

  // The address of the realm()->zone()->needsIncrementalBarrier(). This is
  // specific to this instance and not a process wide field, and so it cannot
  // be linked into code.
  const JS::shadow::Zone::BarrierState* addressOfNeedsIncrementalBarrier_;

  // An array of AllocSites allocated for Wasm GC operations such as
  // struct.new, array.new, etc.
  js::gc::AllocSite* allocSites_;

 public:
  // NOTE: All fields commonly accessed by the JIT must be above this method,
  // and this method adapted for the last field present. This method is used
  // to assert that we can use compact offsets on x86(-64) for these fields.
  // We cannot have the assertion here, due to C++ 'offsetof' rules.
  static constexpr size_t offsetOfLastCommonJitField() {
    return offsetof(Instance, allocSites_);
  }

  // The number of baseline scratch storage words available.
  static constexpr size_t N_BASELINE_SCRATCH_WORDS = 4;

 private:
  // When compiling with tiering, the jumpTable has one entry for each
  // baseline-compiled function.
  void** jumpTable_;

  // 4 words of scratch storage for the baseline compiler, which can't always
  // use the stack for this.
  uintptr_t baselineScratchWords_[N_BASELINE_SCRATCH_WORDS];

  // The class_ of WasmValueBox, this is a per-process value. We could patch
  // this into code, but the only use-sites are register restricted and cannot
  // easily use a symbolic address.
  const JSClass* valueBoxClass_;

  // Address of the JitRuntime's exception handler trampoline
  void* jsJitExceptionHandler_;

  // Address of the JitRuntime's object prebarrier trampoline
  void* preBarrierCode_;

  // Address of the store buffer for this instance
  gc::StoreBuffer* storeBuffer_;

  // Weak pointer to WasmInstanceObject that owns this instance
  WeakHeapPtr<WasmInstanceObject*> object_;

  // The wasm::Code for this instance
  const SharedCode code_;

  // The tables for this instance, if any
  const SharedTableVector tables_;

  // Passive data segments for use with bulk memory instructions
  DataSegmentVector passiveDataSegments_;

  // Passive elem segments for use with tables
  InstanceElemSegmentVector passiveElemSegments_;

  // The wasm::DebugState for this instance, if any
  const UniqueDebugState maybeDebug_;

  // If debugging, this is a per-funcIndex bit table denoting whether debugging
  // is currently enabled for the function within the instance. The flag is set
  // if any breakpoint or function entry or exit point needs to be visited. It
  // is OK to conservatively set this flag, but there is very significant
  // overhead to taking a breakpoint trap, so managing it precisely is
  // worthwhile.
  uint32_t* debugFilter_;

  // A pointer to an array of metrics for all the call_ref's in this instance.
  // This is only used with lazy tiering for collecting speculative inlining
  // information.
  CallRefMetrics* callRefMetrics_;

  // The exclusive maximum index of a global that has been initialized so far.
  uint32_t maxInitializedGlobalsIndexPlus1_;

  // Pointer that should be freed (due to padding before the Instance).
  void* allocatedBase_;

  // Fields from the JS context for memory allocation, stashed on the instance
  // so it can be accessed from JIT code efficiently.
  const void* addressOfNurseryPosition_;
#ifdef JS_GC_ZEAL
  const void* addressOfGCZealModeBits_;
#endif
  const js::AllocationMetadataBuilder* allocationMetadataBuilder_;

  // A copy of the runtime's addressOfLastBufferedWholeCell, used for
  // whole-cell store buffer entries.
  const void* addressOfLastBufferedWholeCell_;

  // Pointer to a per-module builtin stub that will request tier-up for the
  // wasm function that calls it.
  void* requestTierUpStub_ = nullptr;

  // Pointer to a per-module builtin stub that does the OOL component of a
  // call-ref metrics update.
  void* updateCallRefMetricsStub_ = nullptr;

  // The data must be the last field. Globals for the module start here
  // and are inline in this structure. 16-byte alignment is required for SIMD
  // data.
  alignas(16) char data_;

  // Internal helpers: accessors into the inline instance data area for the
  // per-definition bookkeeping structures.
  FuncDefInstanceData* funcDefInstanceData(uint32_t funcIndex) const;
  TypeDefInstanceData* typeDefInstanceData(uint32_t typeIndex) const;
  const void* addressOfGlobalCell(const GlobalDesc& globalDesc) const;
  FuncImportInstanceData& funcImportInstanceData(uint32_t funcIndex);
  FuncExportInstanceData& funcExportInstanceData(uint32_t funcExportIndex);
  MemoryInstanceData& memoryInstanceData(uint32_t memoryIndex) const;
  TableInstanceData& tableInstanceData(uint32_t tableIndex) const;
  TagInstanceData& tagInstanceData(uint32_t tagIndex) const;

  // Only WasmInstanceObject can call the private trace function.
  friend class js::WasmInstanceObject;
  void tracePrivate(JSTracer* trc);

  // Dispatches a call from wasm to the imported function at funcImportIndex;
  // presumably argv carries the arguments/results in ABI form — see the
  // definition in WasmInstance.cpp for the exact contract.
  bool callImport(JSContext* cx, uint32_t funcImportIndex, unsigned argc,
                  uint64_t* argv);

  // Construction is private; use the static create() factory below, which
  // also accounts for instanceDataLength and alignment padding.
  Instance(JSContext* cx, Handle<WasmInstanceObject*> object,
           const SharedCode& code, SharedTableVector&& tables,
           UniqueDebugState maybeDebug);
  ~Instance();

 public:
  static Instance* create(JSContext* cx, Handle<WasmInstanceObject*> object,
                          const SharedCode& code, uint32_t instanceDataLength,
                          SharedTableVector&& tables,
                          UniqueDebugState maybeDebug);
  static void destroy(Instance* instance);

  bool init(JSContext* cx, const JSObjectVector& funcImports,
            const ValVector& globalImportValues,
            Handle<WasmMemoryObjectVector> memories,
            const WasmGlobalObjectVector& globalObjs,
            const WasmTagObjectVector& tagObjs,
            const DataSegmentVector& dataSegments,
            const ModuleElemSegmentVector& elemSegments);

  // Trace any GC roots on the stack, for the frame associated with |wfi|,
  // whose next instruction to execute is |nextPC|.
  //
  // For consistency checking of StackMap sizes in debug builds, this also
  // takes |highestByteVisitedInPrevFrame|, which is the address of the
  // highest byte scanned in the frame below this one on the stack, and in
  // turn it returns the address of the highest byte scanned in this frame.
  //
  // The method does not assert RootMarkingPhase since it can be used to trace
  // suspended stacks.
  uintptr_t traceFrame(JSTracer* trc, const wasm::WasmFrameIter& wfi,
                       uint8_t* nextPC,
                       uintptr_t highestByteVisitedInPrevFrame);
  void updateFrameForMovingGC(const wasm::WasmFrameIter& wfi, uint8_t* nextPC,
                              Nursery& nursery);

  // Offsets of the fields above, used by JIT-compiled code and stub
  // generators to address Instance fields directly.

  static constexpr size_t offsetOfMemory0Base() {
    return offsetof(Instance, memory0Base_);
  }
  static constexpr size_t offsetOfMemory0BoundsCheckLimit() {
    return offsetof(Instance, memory0BoundsCheckLimit_);
  }
  static constexpr size_t offsetOfDebugStub() {
    return offsetof(Instance, debugStub_);
  }
  static constexpr size_t offsetOfRequestTierUpStub() {
    return offsetof(Instance, requestTierUpStub_);
  }
  static constexpr size_t offsetOfUpdateCallRefMetricsStub() {
    return offsetof(Instance, updateCallRefMetricsStub_);
  }

  static constexpr size_t offsetOfRealm() { return offsetof(Instance, realm_); }
  static constexpr size_t offsetOfCx() { return offsetof(Instance, cx_); }
  static constexpr size_t offsetOfValueBoxClass() {
    return offsetof(Instance, valueBoxClass_);
  }
  static constexpr size_t offsetOfPendingException() {
    return offsetof(Instance, pendingException_);
  }
  static constexpr size_t offsetOfPendingExceptionTag() {
    return offsetof(Instance, pendingExceptionTag_);
  }
  static constexpr size_t offsetOfInterrupt() {
    return offsetof(Instance, interrupt_);
  }
  static constexpr size_t offsetOfAllocSites() {
    return offsetof(Instance, allocSites_);
  }
  static constexpr size_t offsetOfAllocationMetadataBuilder() {
    return offsetof(Instance, allocationMetadataBuilder_);
  }
  static constexpr size_t offsetOfAddressOfLastBufferedWholeCell() {
    return offsetof(Instance, addressOfLastBufferedWholeCell_);
  }
  static constexpr size_t offsetOfAddressOfNeedsIncrementalBarrier() {
    return offsetof(Instance, addressOfNeedsIncrementalBarrier_);
  }
  static constexpr size_t offsetOfJumpTable() {
    return offsetof(Instance, jumpTable_);
  }
  static constexpr size_t offsetOfBaselineScratchWords() {
    return offsetof(Instance, baselineScratchWords_);
  }
  static constexpr size_t sizeOfBaselineScratchWords() {
    return sizeof(baselineScratchWords_);
  }
  static constexpr size_t offsetOfJSJitExceptionHandler() {
    return offsetof(Instance, jsJitExceptionHandler_);
  }
  static constexpr size_t offsetOfPreBarrierCode() {
    return offsetof(Instance, preBarrierCode_);
  }
  static constexpr size_t offsetOfDebugFilter() {
    return offsetof(Instance, debugFilter_);
  }
  static constexpr size_t offsetOfCallRefMetrics() {
    return offsetof(Instance, callRefMetrics_);
  }
  static constexpr size_t offsetOfData() { return offsetof(Instance, data_); }
  // Offset of a location within the inline data area (the module's globals).
  static constexpr size_t offsetInData(size_t offset) {
    return offsetOfData() + offset;
  }
  static constexpr size_t offsetOfAddressOfNurseryPosition() {
    return offsetof(Instance, addressOfNurseryPosition_);
  }
#ifdef JS_GC_ZEAL
  static constexpr size_t offsetOfAddressOfGCZealModeBits() {
    return offsetof(Instance, addressOfGCZealModeBits_);
  }
#endif

  // Simple accessors for the fields above.
  JSContext* cx() const { return cx_; }
  void* debugStub() const { return debugStub_; }
  void setDebugStub(void* newStub) { debugStub_ = newStub; }
  void setRequestTierUpStub(void* newStub) { requestTierUpStub_ = newStub; }
  void setUpdateCallRefMetricsStub(void* newStub) {
    updateCallRefMetricsStub_ = newStub;
  }
  JS::Realm* realm() const { return realm_; }
  bool debugEnabled() const { return !!maybeDebug_; }
  DebugState& debug() { return *maybeDebug_; }
  uint8_t* data() const { return (uint8_t*)&data_; }
  const SharedTableVector& tables() const { return tables_; }
  SharedMem<uint8_t*> memoryBase(uint32_t memoryIndex) const;
  WasmMemoryObject* memory(uint32_t memoryIndex) const;
  size_t memoryMappedSize(uint32_t memoryIndex) const;
  SharedArrayRawBuffer* sharedMemoryBuffer(
      uint32_t memoryIndex) const;  // never null
  bool memoryAccessInGuardRegion(const uint8_t* addr, unsigned numBytes) const;

  // Methods to set, test and clear the interrupt fields. Both interrupt
  // fields are Relaxed and so no consistency/ordering can be assumed.

  void setInterrupt();
  bool isInterrupted() const;
  void resetInterrupt();

  void setAllocationMetadataBuilder(
      const js::AllocationMetadataBuilder* allocationMetadataBuilder) {
    allocationMetadataBuilder_ = allocationMetadataBuilder;
  }

  // Lazy-tiering hotness support: per-function counters plus submission of
  // collected call_ref metrics as inlining hints.
  int32_t computeInitialHotnessCounter(uint32_t funcIndex,
                                       size_t codeSectionSize);
  void resetHotnessCounter(uint32_t funcIndex);
  int32_t readHotnessCounter(uint32_t funcIndex) const;
  void submitCallRefHints(uint32_t funcIndex);

  bool debugFilter(uint32_t funcIndex) const;
  void setDebugFilter(uint32_t funcIndex, bool value);

  const Code& code() const { return *code_; }
  inline const CodeMetadata& codeMeta() const;
  inline const CodeTailMetadata& codeTailMeta() const;
  inline const CodeMetadataForAsmJS* codeMetaForAsmJS() const;
  inline bool isAsmJS() const;

  // This method returns a pointer to the GC object that owns this Instance.
  // Instances may be reached via weak edges (e.g., Realm::instances_)
  // so this performs a read-barrier on the returned object unless the barrier
  // is explicitly waived.

  WasmInstanceObject* object() const;
  WasmInstanceObject* objectUnbarriered() const;

  // Get or create the exported function wrapper for a function index.

  [[nodiscard]] bool getExportedFunction(JSContext* cx, uint32_t funcIndex,
                                         MutableHandleFunction result);

  // Execute the given export given the JS call arguments, storing the return
  // value in args.rval.

  [[nodiscard]] bool callExport(JSContext* cx, uint32_t funcIndex,
                                const CallArgs& args,
                                CoercionLevel level = CoercionLevel::Spec);

  // Exception handling support

  void setPendingException(Handle<WasmExceptionObject*> exn);

  // Constant expression support

  void constantGlobalGet(uint32_t globalIndex, MutableHandleVal result);
  WasmStructObject* constantStructNewDefault(JSContext* cx, uint32_t typeIndex);
  WasmArrayObject* constantArrayNewDefault(JSContext* cx, uint32_t typeIndex,
                                           uint32_t numElements);

  // Return the name associated with a given function index, or generate one
  // if none was given by the module.

  JSAtom* getFuncDisplayAtom(JSContext* cx, uint32_t funcIndex) const;
  void ensureProfilingLabels(bool profilingEnabled) const;

  // Called by Wasm(Memory|Table)Object when a moving resize occurs:

  void onMovingGrowMemory(const WasmMemoryObject* memory);
  void onMovingGrowTable(const Table* table);

  bool initSegments(JSContext* cx, const DataSegmentVector& dataSegments,
                    const ModuleElemSegmentVector& elemSegments);

  // Called to apply a single ElemSegment at a given offset, assuming
  // that all bounds validation has already been performed.
  [[nodiscard]] bool initElems(JSContext* cx, uint32_t tableIndex,
                               const ModuleElemSegment& seg,
                               uint32_t dstOffset);

  // Iterates through elements of a ModuleElemSegment containing functions.
  // Unlike iterElemsAnyrefs, this method can get function data (instance and
  // code pointers) without creating intermediate JSFunctions.
  //
  // NOTE: This method only works for element segments that use the index
  // encoding. If the expression encoding is used, you must use
  // iterElemsAnyrefs.
  //
  // Signature for onFunc:
  //
  //   (uint32_t index, void* code, Instance* instance) -> bool
  //
  template <typename F>
  [[nodiscard]] bool iterElemsFunctions(const ModuleElemSegment& seg,
                                        const F& onFunc);

  // Iterates through elements of a ModuleElemSegment. This method works for
  // any type of wasm ref and both element segment encodings. As required by
  // AnyRef, any functions will be wrapped in JSFunction - if possible, you
  // should use iterElemsFunctions to avoid this.
  //
  // Signature for onAnyRef:
  //
  //   (uint32_t index, AnyRef ref) -> bool
  //
  template <typename F>
  [[nodiscard]] bool iterElemsAnyrefs(JSContext* cx,
                                      const ModuleElemSegment& seg,
                                      const F& onAnyRef);

  // Debugger support:

  JSString* createDisplayURL(JSContext* cx);
  WasmBreakpointSite* getOrCreateBreakpointSite(JSContext* cx, uint32_t offset);
  void destroyBreakpointSite(JS::GCContext* gcx, uint32_t offset);

  // about:memory reporting:

  void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf,
                     SeenSet<CodeMetadata>* seenCodeMeta,
                     SeenSet<CodeMetadataForAsmJS>* seenCodeMetaForAsmJS,
                     SeenSet<Code>* seenCode, SeenSet<Table>* seenTables,
                     size_t* code, size_t* data) const;

  // Wasm disassembly support

  void disassembleExport(JSContext* cx, uint32_t funcIndex, Tier tier,
                         PrintCallback printString) const;

 public:
  // Functions to be called directly from wasm code. The m32/m64 suffixes
  // select the 32-bit vs 64-bit (memory64) address-type variants, and the
  // *Shared variants operate on shared memories.
  static int32_t callImport_general(Instance*, int32_t, int32_t, uint64_t*);
  static uint32_t memoryGrow_m32(Instance* instance, uint32_t delta,
                                 uint32_t memoryIndex);
  static uint64_t memoryGrow_m64(Instance* instance, uint64_t delta,
                                 uint32_t memoryIndex);
  static uint32_t memorySize_m32(Instance* instance, uint32_t memoryIndex);
  static uint64_t memorySize_m64(Instance* instance, uint32_t memoryIndex);
  static int32_t memCopy_m32(Instance* instance, uint32_t dstByteOffset,
                             uint32_t srcByteOffset, uint32_t len,
                             uint8_t* memBase);
  static int32_t memCopyShared_m32(Instance* instance, uint32_t dstByteOffset,
                                   uint32_t srcByteOffset, uint32_t len,
                                   uint8_t* memBase);
  static int32_t memCopy_m64(Instance* instance, uint64_t dstByteOffset,
                             uint64_t srcByteOffset, uint64_t len,
                             uint8_t* memBase);
  static int32_t memCopyShared_m64(Instance* instance, uint64_t dstByteOffset,
                                   uint64_t srcByteOffset, uint64_t len,
                                   uint8_t* memBase);
  static int32_t memCopy_any(Instance* instance, uint64_t dstByteOffset,
                             uint64_t srcByteOffset, uint64_t len,
                             uint32_t dstMemIndex, uint32_t srcMemIndex);

  static int32_t memFill_m32(Instance* instance, uint32_t byteOffset,
                             uint32_t value, uint32_t len, uint8_t* memBase);
  static int32_t memFillShared_m32(Instance* instance, uint32_t byteOffset,
                                   uint32_t value, uint32_t len,
                                   uint8_t* memBase);
  static int32_t memFill_m64(Instance* instance, uint64_t byteOffset,
                             uint32_t value, uint64_t len, uint8_t* memBase);
  static int32_t memFillShared_m64(Instance* instance, uint64_t byteOffset,
                                   uint32_t value, uint64_t len,
                                   uint8_t* memBase);
  static int32_t memInit_m32(Instance* instance, uint32_t dstOffset,
                             uint32_t srcOffset, uint32_t len,
                             uint32_t segIndex, uint32_t memIndex);
  static int32_t memInit_m64(Instance* instance, uint64_t dstOffset,
                             uint32_t srcOffset, uint32_t len,
                             uint32_t segIndex, uint32_t memIndex);
  static int32_t dataDrop(Instance* instance, uint32_t segIndex);
  static int32_t tableCopy(Instance* instance, uint32_t dstOffset,
                           uint32_t srcOffset, uint32_t len,
                           uint32_t dstTableIndex, uint32_t srcTableIndex);
  static int32_t tableFill(Instance* instance, uint32_t start, void* value,
                           uint32_t len, uint32_t tableIndex);
  static int32_t memDiscard_m32(Instance* instance, uint32_t byteOffset,
                                uint32_t byteLen, uint8_t* memBase);
  static int32_t memDiscardShared_m32(Instance* instance, uint32_t byteOffset,
                                      uint32_t byteLen, uint8_t* memBase);
  static int32_t memDiscard_m64(Instance* instance, uint64_t byteOffset,
                                uint64_t byteLen, uint8_t* memBase);
  static int32_t memDiscardShared_m64(Instance* instance, uint64_t byteOffset,
                                      uint64_t byteLen, uint8_t* memBase);
  static void* tableGet(Instance* instance, uint32_t address,
                        uint32_t tableIndex);
  static uint32_t tableGrow(Instance* instance, void* initValue, uint32_t delta,
                            uint32_t tableIndex);
  static int32_t tableSet(Instance* instance, uint32_t address, void* value,
                          uint32_t tableIndex);
  static uint32_t tableSize(Instance* instance, uint32_t tableIndex);
  static int32_t tableInit(Instance* instance, uint32_t dstOffset,
                           uint32_t srcOffset, uint32_t len, uint32_t segIndex,
                           uint32_t tableIndex);
  static int32_t elemDrop(Instance* instance, uint32_t segIndex);
  static int32_t wait_i32_m32(Instance* instance, uint32_t byteOffset,
                              int32_t value, int64_t timeout,
                              uint32_t memoryIndex);
  static int32_t wait_i32_m64(Instance* instance, uint64_t byteOffset,
                              int32_t value, int64_t timeout,
                              uint32_t memoryIndex);
  static int32_t wait_i64_m32(Instance* instance, uint32_t byteOffset,
                              int64_t value, int64_t timeout,
                              uint32_t memoryIndex);
  static int32_t wait_i64_m64(Instance* instance, uint64_t byteOffset,
                              int64_t value, int64_t timeout,
                              uint32_t memoryIndex);
  static int32_t wake_m32(Instance* instance, uint32_t byteOffset,
                          int32_t count, uint32_t memoryIndex);
  static int32_t wake_m64(Instance* instance, uint64_t byteOffset,
                          int32_t count, uint32_t memoryIndex);
  static void* refFunc(Instance* instance, uint32_t funcIndex);
  static void postBarrierEdge(Instance* instance, AnyRef* location);
  static void postBarrierEdgePrecise(Instance* instance, AnyRef* location,
                                     void* prev);
  static void postBarrierWholeCell(Instance* instance, gc::Cell* object);
  static void* exceptionNew(Instance* instance, void* exceptionArg);
  static int32_t throwException(Instance* instance, void* exceptionArg);
  template <bool ZeroFields>
  static void* structNewIL(Instance* instance, uint32_t typeDefIndex,
                           gc::AllocSite* allocSite);
  template <bool ZeroFields>
  static void* structNewOOL(Instance* instance, uint32_t typeDefIndex,
                            gc::AllocSite* allocSite);
  template <bool ZeroFields>
  static void* arrayNew(Instance* instance, uint32_t numElements,
                        uint32_t typeDefIndex, gc::AllocSite* allocSite);
  static void* arrayNewData(Instance* instance, uint32_t segByteOffset,
                            uint32_t numElements, uint32_t typeDefIndex,
                            gc::AllocSite* allocSite, uint32_t segIndex);
  static void* arrayNewElem(Instance* instance, uint32_t srcOffset,
                            uint32_t numElements, uint32_t typeDefIndex,
                            gc::AllocSite* allocSite, uint32_t segIndex);
  static int32_t arrayInitData(Instance* instance, void* array, uint32_t index,
                               uint32_t segByteOffset, uint32_t numElements,
                               uint32_t segIndex);
  static int32_t arrayInitElem(Instance* instance, void* array, uint32_t index,
                               uint32_t segOffset, uint32_t numElements,
                               uint32_t typeDefIndex, uint32_t segIndex);
  static int32_t arrayCopy(Instance* instance, void* dstArray,
                           uint32_t dstIndex, void* srcArray, uint32_t srcIndex,
                           uint32_t numElements, uint32_t elementSize);
  static int32_t refTest(Instance* instance, void* refPtr,
                         const wasm::TypeDef* typeDef);
  static int32_t intrI8VecMul(Instance* instance, uint32_t dest, uint32_t src1,
                              uint32_t src2, uint32_t len, uint8_t* memBase);

  // String builtins (JS String Builtins proposal support).
  static int32_t stringTest(Instance* instance, void* stringArg);
  static void* stringCast(Instance* instance, void* stringArg);
  static void* stringFromCharCodeArray(Instance* instance, void* arrayArg,
                                       uint32_t arrayStart, uint32_t arrayEnd);
  static int32_t stringIntoCharCodeArray(Instance* instance, void* stringArg,
                                         void* arrayArg, uint32_t arrayStart);
  static void* stringFromCharCode(Instance* instance, uint32_t charCode);
  static void* stringFromCodePoint(Instance* instance, uint32_t codePoint);
  static int32_t stringCharCodeAt(Instance* instance, void* stringArg,
                                  uint32_t index);
  static int32_t stringCodePointAt(Instance* instance, void* stringArg,
                                   uint32_t index);
  static int32_t stringLength(Instance* instance, void* stringArg);
  static void* stringConcat(Instance* instance, void* firstStringArg,
                            void* secondStringArg);
  static void* stringSubstring(Instance* instance, void* stringArg,
                               uint32_t startIndex, uint32_t endIndex);
  static int32_t stringEquals(Instance* instance, void* firstStringArg,
                              void* secondStringArg);
  static int32_t stringCompare(Instance* instance, void* firstStringArg,
                               void* secondStringArg);
};

bool ResultsToJSValue(JSContext* cx, ResultType type, void* registerResultLoc,
                      mozilla::Maybe<char*> stackResultsLoc,
                      MutableHandleValue rval,
                      CoercionLevel level = CoercionLevel::Spec);

// Report an error to `cx` and mark it as a 'trap' so that it cannot be caught
// by wasm exception handlers.
void ReportTrapError(JSContext* cx, unsigned errorNumber);

// Mark an already reported error as a 'trap' so that it cannot be caught by
// wasm exception handlers.
void MarkPendingExceptionAsTrap(JSContext* cx);

// Instance is not a GC thing itself but contains GC thing pointers. Ensure
// they are traced appropriately.
void TraceInstanceEdge(JSTracer* trc, Instance* instance, const char* name);

}  // namespace wasm
}  // namespace js

#endif  // wasm_instance_h