WasmModuleTypes.h (29251B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2021 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef wasm_module_types_h
#define wasm_module_types_h

#include "mozilla/RefPtr.h"
#include "mozilla/Span.h"

#include "js/AllocPolicy.h"
#include "js/HashTable.h"
#include "js/RefCounted.h"
#include "js/Utility.h"
#include "js/Vector.h"

#include "wasm/WasmCompileArgs.h"
#include "wasm/WasmConstants.h"
#include "wasm/WasmExprType.h"
#include "wasm/WasmInitExpr.h"
#include "wasm/WasmMemory.h"
#include "wasm/WasmSerialize.h"
#include "wasm/WasmShareable.h"
#include "wasm/WasmTypeDecls.h"
#include "wasm/WasmValType.h"
#include "wasm/WasmValue.h"

namespace js {
namespace wasm {

// Forward declaration; the full definition lives in another wasm header.
class FuncType;

// A Module can either be asm.js or wasm.

enum ModuleKind { Wasm, AsmJS };

// CacheableChars is used to cacheably store UniqueChars.
// A UniqueChars wrapper that can participate in the module serialization
// (caching) machinery; see sizeOfExcludingThis and the serializer in
// WasmSerialize.
struct CacheableChars : UniqueChars {
  CacheableChars() = default;
  // Takes ownership of `ptr` (same semantics as UniqueChars).
  explicit CacheableChars(char* ptr) : UniqueChars(ptr) {}
  // Implicit so UniqueChars results can be moved straight into containers of
  // CacheableChars.
  MOZ_IMPLICIT CacheableChars(UniqueChars&& rhs)
      : UniqueChars(std::move(rhs)) {}
  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using CacheableCharsVector = Vector<CacheableChars, 0, SystemAllocPolicy>;

// CacheableName is used to cacheably store a UTF-8 string that may contain
// null terminators in sequence.

struct CacheableName {
 private:
  // The raw UTF-8 bytes. Not null-terminated; embedded NULs are legal
  // (wasm names are length-delimited).
  UTF8Bytes bytes_;

  const char* begin() const { return (const char*)bytes_.begin(); }
  size_t length() const { return bytes_.length(); }

 public:
  CacheableName() = default;
  // Implicit so decoded UTF8Bytes can be moved straight into a name.
  MOZ_IMPLICIT CacheableName(UTF8Bytes&& rhs) : bytes_(std::move(rhs)) {}

  bool isEmpty() const { return bytes_.length() == 0; }

  // Views over the underlying bytes; the spans borrow from this object and
  // must not outlive it.
  mozilla::Span<char> utf8Bytes() { return mozilla::Span<char>(bytes_); }
  mozilla::Span<const char> utf8Bytes() const {
    return mozilla::Span<const char>(bytes_);
  }

  // Deep-copies this name into `*name`. Returns false on OOM, leaving
  // `*name` untouched in that case.
  [[nodiscard]] bool clone(CacheableName* name) const {
    UTF8Bytes bytesCopy;
    if (!bytesCopy.appendAll(bytes_)) {
      return false;
    }
    *name = CacheableName(std::move(bytesCopy));
    return true;
  }

  // Construct from a null-terminated C string (infallible/fallible forms).
  static CacheableName fromUTF8Chars(UniqueChars&& utf8Chars);
  [[nodiscard]] static bool fromUTF8Chars(const char* utf8Chars,
                                          CacheableName* name);

  // JS-facing conversions; each returns null/false on failure.
  [[nodiscard]] JSString* toJSString(JSContext* cx) const;
  [[nodiscard]] JSAtom* toAtom(JSContext* cx) const;
  [[nodiscard]] bool toPropertyKey(JSContext* cx,
                                   MutableHandleId propertyKey) const;
  [[nodiscard]] UniqueChars toQuotedString(JSContext* cx) const;

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
  WASM_DECLARE_FRIEND_SERIALIZE(CacheableName);
};

using CacheableNameVector = Vector<CacheableName, 0, SystemAllocPolicy>;

// A hash policy for names.
// HashPolicy for HashMap/HashSet keyed on name bytes. Keys and lookups are
// both non-owning spans, so the underlying storage must outlive the table.
struct NameHasher {
  using Key = mozilla::Span<const char>;
  using Lookup = mozilla::Span<const char>;

  static HashNumber hash(const Lookup& aLookup) {
    return mozilla::HashString(aLookup.data(), aLookup.Length());
  }

  static bool match(const Key& aKey, const Lookup& aLookup) {
    // Span equality compares contents element-wise, not pointer identity.
    return aKey == aLookup;
  }
};

// Import describes a single wasm import. An ImportVector describes all
// of a single module's imports.
//
// ImportVector is built incrementally by ModuleGenerator and then stored
// immutably by Module.

struct Import {
  CacheableName module;  // the import's module name
  CacheableName field;   // the import's field name within that module
  DefinitionKind kind;   // function/table/memory/global/tag

  Import() = default;
  Import(CacheableName&& module, CacheableName&& field, DefinitionKind kind)
      : module(std::move(module)), field(std::move(field)), kind(kind) {}

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using ImportVector = Vector<Import, 0, SystemAllocPolicy>;

// Export describes the export of a definition in a Module to a field in the
// export object. The Export stores the index of the exported item in the
// appropriate type-specific module data structure (function table, global
// table, table table, and - eventually - memory table).
//
// Note a single definition can be exported by multiple Exports in the
// ExportVector.
//
// ExportVector is built incrementally by ModuleGenerator and then stored
// immutably by Module.
class Export {
 public:
  // The plain-old-data part of an Export, grouped so the cache serializer can
  // copy it wholesale.
  struct CacheablePod {
    DefinitionKind kind_;
    // Index into the kind-specific module table (func/table/memory/global/
    // tag); interpretation depends on kind_.
    uint32_t index_;

    WASM_CHECK_CACHEABLE_POD(kind_, index_);
  };

 private:
  CacheableName fieldName_;
  CacheablePod pod;

 public:
  Export() = default;
  explicit Export(CacheableName&& fieldName, uint32_t index,
                  DefinitionKind kind);

  const CacheableName& fieldName() const { return fieldName_; }

  DefinitionKind kind() const { return pod.kind_; }
  // Kind-checked accessors for pod.index_; each asserts (in the out-of-line
  // definitions) that kind() matches the accessor used.
  uint32_t funcIndex() const;
  uint32_t tagIndex() const;
  uint32_t memoryIndex() const;
  uint32_t globalIndex() const;
  uint32_t tableIndex() const;

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
  WASM_DECLARE_FRIEND_SERIALIZE(Export);
};

WASM_DECLARE_CACHEABLE_POD(Export::CacheablePod);

using ExportVector = Vector<Export, 0, SystemAllocPolicy>;

// FuncFlags provides metadata for a function definition.

enum class FuncFlags : uint8_t {
  None = 0x0,
  // The function maybe be accessible by JS and needs thunks generated for it.
  // See `[SMDOC] Exported wasm functions and the jit-entry stubs` in
  // WasmJS.cpp for more information.
  Exported = 0x1,
  // The function should have thunks generated upon instantiation, not upon
  // first call. May only be set if `Exported` is set.
  Eager = 0x2,
  // The function can be the target of a ref.func instruction in the code
  // section. May only be set if `Exported` is set.
  CanRefFunc = 0x4,
};

// A FuncDesc describes a single function definition.
struct FuncDesc {
  // Bit pack to keep this struct small on 32-bit systems
  uint32_t typeIndex : 24;  // index into the module's type section
  FuncFlags flags : 8;      // see FuncFlags above

  WASM_CHECK_CACHEABLE_POD(typeIndex, flags);

  // Assert that the bit packing scheme is viable
  static_assert(MaxTypes <= (1 << 24) - 1);
  static_assert(sizeof(FuncFlags) == sizeof(uint8_t));

  FuncDesc() = default;
  explicit FuncDesc(uint32_t typeIndex)
      : typeIndex(typeIndex), flags(FuncFlags::None) {}

  // Mark this function as exported, optionally also eager and/or
  // ref.func-able. Flags are only ever accumulated, never cleared.
  void declareFuncExported(bool eager, bool canRefFunc) {
    // Set the `Exported` flag, if not set.
    flags = FuncFlags(uint8_t(flags) | uint8_t(FuncFlags::Exported));

    // Merge in the `Eager` and `CanRefFunc` flags, if they're set. Be sure
    // to not unset them if they've already been set.
    if (eager) {
      flags = FuncFlags(uint8_t(flags) | uint8_t(FuncFlags::Eager));
    }
    if (canRefFunc) {
      flags = FuncFlags(uint8_t(flags) | uint8_t(FuncFlags::CanRefFunc));
    }
  }

  bool isExported() const {
    return uint8_t(flags) & uint8_t(FuncFlags::Exported);
  }
  bool isEager() const { return uint8_t(flags) & uint8_t(FuncFlags::Eager); }
  bool canRefFunc() const {
    return uint8_t(flags) & uint8_t(FuncFlags::CanRefFunc);
  }
};

WASM_DECLARE_CACHEABLE_POD(FuncDesc);

using FuncDescVector = Vector<FuncDesc, 0, SystemAllocPolicy>;

// A [begin, begin+length) half-open range of CallRefMetrics slots belonging
// to one function; see CallRefMetrics in WasmInstanceData.h.
struct CallRefMetricsRange {
  explicit CallRefMetricsRange() {}
  explicit CallRefMetricsRange(uint32_t begin, uint32_t length)
      : begin(begin), length(length) {}

  uint32_t begin = 0;
  uint32_t length = 0;

  // Shift the range start; used when per-function ranges are concatenated
  // into a module-wide array.
  void offsetBy(uint32_t offset) { begin += offset; }

  WASM_CHECK_CACHEABLE_POD(begin, length);
};

// A [begin, begin+length) half-open range of allocation sites belonging to
// one function; same concatenation scheme as CallRefMetricsRange.
struct AllocSitesRange {
  explicit AllocSitesRange() {}
  explicit AllocSitesRange(uint32_t begin, uint32_t length)
      : begin(begin), length(length) {}

  uint32_t begin = 0;
  uint32_t length = 0;

  void offsetBy(uint32_t offset) { begin += offset; }

  WASM_CHECK_CACHEABLE_POD(begin, length);
};

// A compact plain data summary of CallRefMetrics for use by our function
// compilers. See CallRefMetrics in WasmInstanceData.h for more information.
//
// We cannot allow the metrics collected by an instance to directly be read
// from our function compilers because they contain thread-local data and are
// written into without any synchronization.
//
// Instead, CodeMetadata contains an array of CallRefHint that every instance
// writes into when it has a function that requests a tier-up. This array is
// 1:1 with the non-threadsafe CallRefMetrics that is stored on the instance.
//
// This class must be thread safe, as it's read and written from different
// threads. It is an array of up to 3 function indices, and the entire array
// can be read/written atomically. Each function index is represented in 20
// bits, and 2 of the remaining 4 bits are used to indicate the array's current
// size.
//
// Although unstated and unenforced here, it is expected that -- in the case
// where more than one function index is stored -- the func index at `.get(0)`
// is the "most important" in terms of inlining, that at `.get(1)` is the
// second most important, etc.
//
// Note that the fact that this array has 3 elements is unrelated to the value
// of CallRefMetrics::NUM_TRACKED. The target-collection mechanism will work
// properly even if CallRefMetrics::NUM_TRACKED is greater than 3, in which
// case at most only 3 targets (probably the hottest ones) will get baked into
// the CallRefHint.
class CallRefHint {
 public:
  // The raw bit representation; this is what is actually stored atomically
  // (see MutableCallRefHint below).
  using Repr = uint64_t;
  static constexpr size_t NUM_ENTRIES = 3;

 private:
  // Representation is:
  //
  //   63  61      42 41      22 21       2 1      0
  //   |    |       | |        | |        | |      |
  //   00   index#2   index#1    index#0    length
  static constexpr uint32_t ElemBits = 20;
  static constexpr uint32_t LengthBits = 2;
  static constexpr uint64_t Mask = (uint64_t(1) << ElemBits) - 1;
  static_assert(js::wasm::MaxFuncs <= Mask);
  static_assert(3 * ElemBits + LengthBits <= 8 * sizeof(Repr));

  Repr state_ = 0;

  // Internal consistency check: all bits above the occupied entries must be
  // zero.
  bool valid() const {
    // Shift out the length field and all of the entries that the length field
    // implies are occupied. What remains should be all zeroes.
    return (state_ >> (length() * ElemBits + LengthBits)) == 0;
  }

 public:
  // We omit the obvious single-argument constructor that takes a `Repr`,
  // because that is too easily confused with one that takes a function index,
  // and in any case it is not necessary.

  // Number of stored function indices (0..NUM_ENTRIES).
  uint32_t length() const { return state_ & 3; }
  bool empty() const { return length() == 0; }
  bool full() const { return length() == 3; }

  // Read the `index`-th stored function index; `index` must be < length().
  uint32_t get(uint32_t index) const {
    MOZ_ASSERT(index < length());
    uint64_t res = (state_ >> (index * ElemBits + LengthBits)) & Mask;
    return uint32_t(res);
  }
  // Overwrite the `index`-th stored function index; `index` must be
  // < length() and `funcIndex` must fit in ElemBits.
  void set(uint32_t index, uint32_t funcIndex) {
    MOZ_ASSERT(index < length());
    MOZ_ASSERT(funcIndex <= Mask);
    uint32_t shift = index * ElemBits + LengthBits;
    uint64_t c = uint64_t(Mask) << shift;
    uint64_t s = uint64_t(funcIndex) << shift;
    state_ = (state_ & ~c) | s;
  }

  // Append a function index at the end; the hint must not be full.
  void append(uint32_t funcIndex) {
    MOZ_RELEASE_ASSERT(!full());
    // We know the lowest two bits of `state_` are not 0b11, so we can
    // increment the length field by incrementing `state_` as a whole.
    state_++;
    set(length() - 1, funcIndex);
  }

  // Round-trip to/from the raw representation (used for atomic storage).
  static CallRefHint fromRepr(Repr repr) {
    CallRefHint res;
    res.state_ = repr;
    MOZ_ASSERT(res.valid());
    return res;
  }
  Repr toRepr() const { return state_; }
};

static_assert(sizeof(CallRefHint) == sizeof(CallRefHint::Repr));

// The atomically shared form of a CallRefHint, and an array thereof.
using MutableCallRefHint = mozilla::Atomic<CallRefHint::Repr>;
using MutableCallRefHints =
    mozilla::UniquePtr<MutableCallRefHint[], JS::FreePolicy>;

WASM_DECLARE_CACHEABLE_POD(CallRefMetricsRange);

using CallRefMetricsRangeVector =
    Vector<CallRefMetricsRange, 0, SystemAllocPolicy>;

WASM_DECLARE_CACHEABLE_POD(AllocSitesRange);

using AllocSitesRangeVector = Vector<AllocSitesRange, 0, SystemAllocPolicy>;

// A branch hint as decoded from the branch-hinting custom section.
enum class BranchHint : uint8_t { Unlikely = 0, Likely = 1, Invalid = 2 };

// Stores pairs of <BranchOffset, BranchHint>
struct BranchHintEntry {
  uint32_t branchOffset;
  BranchHint value;

  BranchHintEntry() = default;
  BranchHintEntry(uint32_t branchOffset, BranchHint value)
      : branchOffset(branchOffset), value(value) {}
};

// Branch hint sorted vector for a function,
// stores tuples of <BranchOffset, BranchHint>
using BranchHintVector = Vector<BranchHintEntry, 0, SystemAllocPolicy>;
using BranchHintFuncMap = HashMap<uint32_t, BranchHintVector,
                                  DefaultHasher<uint32_t>, SystemAllocPolicy>;

struct BranchHintCollection {
 private:
  // Used for lookups into the collection if a function
  // doesn't contain any hints.
  static BranchHintVector invalidVector_;

  // Map from function index to their collection of branch hints
  BranchHintFuncMap branchHintsMap_;
  // Whether the module had branch hints, but we failed to parse them. This
  // is not semantically visible to user code, but used for internal testing.
  bool failedParse_ = false;

 public:
  // Add all the branch hints for a function. Returns false on OOM.
  [[nodiscard]] bool addHintsForFunc(uint32_t functionIndex,
                                     BranchHintVector&& branchHints) {
    return branchHintsMap_.put(functionIndex, std::move(branchHints));
  }

  // Return the vector with branch hints for a funcIndex.
  // If this function doesn't contain any hints, return an empty vector.
  BranchHintVector& getHintVector(uint32_t funcIndex) const {
    if (auto hintsVector =
            branchHintsMap_.readonlyThreadsafeLookup(funcIndex)) {
      return hintsVector->value();
    }

    // If not found, return the empty invalid Vector
    return invalidVector_;
  }

  bool isEmpty() const { return branchHintsMap_.empty(); }

  // Record a parse failure and drop all hints (hints are best-effort).
  void setFailedAndClear() {
    failedParse_ = true;
    branchHintsMap_.clearAndCompact();
  }
  bool failedParse() const { return failedParse_; }
};

enum class GlobalKind { Import, Constant, Variable };

// A GlobalDesc describes a single global variable.
//
// wasm can import and export mutable and immutable globals.
//
// asm.js can import mutable and immutable globals, but a mutable global has a
// location that is private to the module, and its initial value is copied into
// that cell from the environment. asm.js cannot export globals.
class GlobalDesc {
  GlobalKind kind_;
  // Stores the value type of this global for all kinds, and the initializer
  // expression when `constant` or `variable`.
  InitExpr initial_;
  // Metadata for the global when `variable` or `import`.
  unsigned offset_;  // UINT32_MAX until setOffset() is called
  bool isMutable_;
  bool isWasm_;
  bool isExport_;
  // Metadata for the global when `import`.
  uint32_t importIndex_;

  // Private, as they have unusual semantics.

  bool isExport() const { return !isConstant() && isExport_; }
  bool isWasm() const { return !isConstant() && isWasm_; }

 public:
  GlobalDesc() = default;

  // Construct a Constant (immutable with literal initializer) or Variable
  // global.
  explicit GlobalDesc(InitExpr&& initial, bool isMutable,
                      ModuleKind kind = ModuleKind::Wasm)
      : kind_((!isMutable && initial.isLiteral()) ? GlobalKind::Constant
                                                  : GlobalKind::Variable) {
    initial_ = std::move(initial);
    if (isVariable()) {
      isMutable_ = isMutable;
      isWasm_ = kind == Wasm;
      isExport_ = false;
      offset_ = UINT32_MAX;
    }
  }

  // Construct an Import global; `initial_` carries only the value type here.
  explicit GlobalDesc(ValType type, bool isMutable, uint32_t importIndex,
                      ModuleKind kind = ModuleKind::Wasm)
      : kind_(GlobalKind::Import) {
    initial_ = InitExpr(LitVal(type));
    importIndex_ = importIndex;
    isMutable_ = isMutable;
    isWasm_ = kind == Wasm;
    isExport_ = false;
    offset_ = UINT32_MAX;
  }

  // Assign the global's offset in the instance's global data area; may only
  // be done once, and not for constants (which have no storage).
  void setOffset(unsigned offset) {
    MOZ_ASSERT(!isConstant());
    MOZ_ASSERT(offset_ == UINT32_MAX);
    offset_ = offset;
  }
  unsigned offset() const {
    MOZ_ASSERT(!isConstant());
    MOZ_ASSERT(offset_ != UINT32_MAX);
    return offset_;
  }

  // No-op for constants: they have no instance storage to expose.
  void setIsExport() {
    if (!isConstant()) {
      isExport_ = true;
    }
  }

  GlobalKind kind() const { return kind_; }
  bool isVariable() const { return kind_ == GlobalKind::Variable; }
  bool isConstant() const { return kind_ == GlobalKind::Constant; }
  bool isImport() const { return kind_ == GlobalKind::Import; }

  bool isMutable() const { return !isConstant() && isMutable_; }
  const InitExpr& initExpr() const {
    MOZ_ASSERT(!isImport());
    return initial_;
  }
  uint32_t importIndex() const {
    MOZ_ASSERT(isImport());
    return importIndex_;
  }

  LitVal constantValue() const { return initial_.literal(); }

  // If isIndirect() is true then storage for the value is not in the
  // instance's global area, but in a WasmGlobalObject::Cell hanging off a
  // WasmGlobalObject; the global area contains a pointer to the Cell.
  //
  // We don't want to indirect unless we must, so only mutable, exposed
  // globals are indirected - in all other cases we copy values into and out
  // of their module.
  //
  // Note that isIndirect() isn't equivalent to getting a WasmGlobalObject:
  // an immutable exported global will still get an object, but will not be
  // indirect.
  bool isIndirect() const {
    return isMutable() && isWasm() && (isImport() || isExport());
  }

  ValType type() const { return initial_.type(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
  WASM_DECLARE_FRIEND_SERIALIZE(GlobalDesc);
};

using GlobalDescVector = Vector<GlobalDesc, 0, SystemAllocPolicy>;

// A TagDesc represents fresh per-instance tags that are used for the
// exception handling proposal and potentially other future proposals.

// The TagOffsetVector represents the offsets in the layout of the
// data buffer stored in a Wasm exception.
using TagOffsetVector = Vector<uint32_t, 2, SystemAllocPolicy>;

class TagType : public AtomicRefCounted<TagType> {
  SharedTypeDef type_;
  // Per-argument byte offsets into the exception's data buffer.
  TagOffsetVector argOffsets_;
  // Total size of the exception's data buffer.
  uint32_t size_;

 public:
  TagType() : size_(0) {}

  // Computes argOffsets_/size_ from the tag's function type. Returns false
  // on OOM.
  [[nodiscard]] bool initialize(const SharedTypeDef& funcType);

  const TypeDef& type() const { return *type_; }
  const ValTypeVector& argTypes() const { return type_->funcType().args(); }
  const TagOffsetVector& argOffsets() const { return argOffsets_; }
  ResultType resultType() const { return ResultType::Vector(argTypes()); }

  uint32_t tagSize() const { return size_; }

  static bool matches(const TagType& a, const TagType& b) {
    // Note that this does NOT use subtyping. This is deliberate per the spec.
    return a.type_ == b.type_;
  }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
  WASM_DECLARE_FRIEND_SERIALIZE(TagType);
};

using MutableTagType = RefPtr<TagType>;
using SharedTagType = RefPtr<const TagType>;

struct TagDesc {
  TagKind kind;
  SharedTagType type;
  bool isExport;

  TagDesc() : isExport(false) {}
  TagDesc(TagKind kind, const SharedTagType& type, bool isExport = false)
      : kind(kind), type(type), isExport(isExport) {}

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using TagDescVector = Vector<TagDesc, 0, SystemAllocPolicy>;
using ElemExprOffsetVector = Vector<size_t, 0, SystemAllocPolicy>;

// This holds info about elem segments that is needed for instantiation. It
// can be dropped when the associated wasm::Module is dropped.
struct ModuleElemSegment {
  enum class Kind {
    Active,
    Passive,
    Declared,
  };

  // The type of encoding used by this element segment. 0 is an invalid value to
  // make sure we notice if we fail to correctly initialize the element segment
  // - reading from the wrong representation could be a bad time.
  enum class Encoding {
    Indices = 1,
    Expressions,
  };

  struct Expressions {
    size_t count = 0;
    Bytes exprBytes;

    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
  };

  Kind kind;
  uint32_t tableIndex;
  RefType elemType;
  mozilla::Maybe<InitExpr> offsetIfActive;

  // We store either an array of indices or the full bytecode of the element
  // expressions, depending on the encoding used for the element segment.
  Encoding encoding;
  Uint32Vector elemIndices;
  Expressions elemExpressions;

  bool active() const { return kind == Kind::Active; }

  // Only valid for active segments (offsetIfActive is engaged).
  const InitExpr& offset() const { return *offsetIfActive; }

  size_t numElements() const {
    switch (encoding) {
      case Encoding::Indices:
        return elemIndices.length();
      case Encoding::Expressions:
        return elemExpressions.count;
      default:
        MOZ_CRASH("unknown element segment encoding");
    }
  }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using ModuleElemSegmentVector = Vector<ModuleElemSegment, 0, SystemAllocPolicy>;

using InstanceElemSegment = GCVector<HeapPtr<AnyRef>, 0, SystemAllocPolicy>;
using InstanceElemSegmentVector =
    GCVector<InstanceElemSegment, 0, SystemAllocPolicy>;

// DataSegmentRange holds the initial results of decoding a data segment from
// the bytecode and is stored in the ModuleMetadata. It contains the bytecode
// bounds of the data segment, and some auxiliary information, but not the
// segment contents itself.
//
// When compilation completes, each DataSegmentRange is transformed into a
// DataSegment, which are also stored in the ModuleMetadata. DataSegment
// contains the same information as DataSegmentRange but additionally contains
// the segment contents itself. This allows non-compilation uses of wasm
// validation to avoid expensive copies.
//
// A DataSegment that is "passive" is shared between a ModuleMetadata and its
// wasm::Instances. To allow each segment to be released as soon as the last
// Instance mem.drops it and the Module (hence, also the ModuleMetadata) is
// destroyed, each DataSegment is individually atomically ref-counted.
// Sentinel memory index; must be strictly above any valid index.
constexpr uint32_t InvalidMemoryIndex = UINT32_MAX;
static_assert(InvalidMemoryIndex > MaxMemories, "Invariant");

struct DataSegmentRange {
  uint32_t memoryIndex;
  mozilla::Maybe<InitExpr> offsetIfActive;
  // Bytecode location of the segment's payload (not the payload itself).
  uint32_t bytecodeOffset;
  uint32_t length;
};

using DataSegmentRangeVector = Vector<DataSegmentRange, 0, SystemAllocPolicy>;

struct DataSegment : AtomicRefCounted<DataSegment> {
  uint32_t memoryIndex;
  mozilla::Maybe<InitExpr> offsetIfActive;
  // The segment's payload, copied out of the bytecode by init().
  Bytes bytes;

  DataSegment() = default;

  bool active() const { return !!offsetIfActive; }

  // Only valid for active segments.
  const InitExpr& offset() const { return *offsetIfActive; }

  // Populate this segment from a decoded range, cloning the offset expression
  // (if any) and copying the payload bytes out of `bytecode`. Returns false
  // on OOM.
  [[nodiscard]] bool init(const BytecodeSource& bytecode,
                          const DataSegmentRange& src) {
    memoryIndex = src.memoryIndex;
    if (src.offsetIfActive) {
      offsetIfActive.emplace();
      if (!offsetIfActive->clone(*src.offsetIfActive)) {
        return false;
      }
    }
    MOZ_ASSERT(bytes.length() == 0);
    BytecodeSpan span =
        bytecode.getSpan(BytecodeRange(src.bytecodeOffset, src.length));
    return bytes.append(span.data(), span.size());
  }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using MutableDataSegment = RefPtr<DataSegment>;
using SharedDataSegment = RefPtr<const DataSegment>;
using DataSegmentVector = Vector<SharedDataSegment, 0, SystemAllocPolicy>;

// CustomSectionRange and CustomSection are related in the same way that
// DataSegmentRange and DataSegment are: the CustomSectionRanges are stored in
// the ModuleMetadata, and are transformed into CustomSections at the end of
// compilation and stored in wasm::Module.
struct CustomSectionRange {
  BytecodeRange name;     // bytecode bounds of the section's name
  BytecodeRange payload;  // bytecode bounds of the section's payload

  WASM_CHECK_CACHEABLE_POD(name, payload);
};

WASM_DECLARE_CACHEABLE_POD(CustomSectionRange);

using CustomSectionRangeVector =
    Vector<CustomSectionRange, 0, SystemAllocPolicy>;

struct CustomSection {
  Bytes name;
  SharedBytes payload;

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

using CustomSectionVector = Vector<CustomSection, 0, SystemAllocPolicy>;

// A Name represents a string of utf8 chars embedded within the name custom
// section. The offset of a name is expressed relative to the beginning of the
// name section's payload so that Names can stored in wasm::Code, which only
// holds the name section's bytes, not the whole bytecode.

struct Name {
  // All fields are treated as cacheable POD:
  uint32_t offsetInNamePayload;
  uint32_t length;

  WASM_CHECK_CACHEABLE_POD(offsetInNamePayload, length);

  // Default state is "no name": offset is the UINT32_MAX sentinel.
  Name() : offsetInNamePayload(UINT32_MAX), length(0) {}
};

WASM_DECLARE_CACHEABLE_POD(Name);

using NameVector = Vector<Name, 0, SystemAllocPolicy>;

struct NameSection {
  Name moduleName;
  NameVector funcNames;
  // Index of the name custom section within the module's custom sections.
  uint32_t customSectionIndex;

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};

// The kind of limits to decode or convert from JS.

enum class LimitsKind {
  Memory,
  Table,
};

extern const char* ToString(LimitsKind kind);

// Represents the resizable limits of memories and tables.

struct Limits {
  // `addressType` may be I64 when memory64 is enabled.
  AddressType addressType;

  // The initial and maximum limit. The unit is pages for memories and elements
  // for tables.
  uint64_t initial;
  mozilla::Maybe<uint64_t> maximum;

  // `shared` is Shareable::False for tables but may be Shareable::True for
  // memories.
  Shareable shared;

  // `pageSize` is used only for memories. Defaults to the standard page size
  // but may be set to other values with the custom page size proposal.
  PageSize pageSize = PageSize::Standard;

  WASM_CHECK_CACHEABLE_POD(addressType, initial, maximum, shared, pageSize);

  Limits() = default;
  // Note: this constructor always produces I32 limits; callers set
  // addressType separately for memory64/table64.
  Limits(uint64_t initial, const mozilla::Maybe<uint64_t>& maximum,
         Shareable shared, PageSize pageSize)
      : addressType(AddressType::I32),
        initial(initial),
        maximum(maximum),
        shared(shared),
        pageSize(pageSize) {}
};

WASM_DECLARE_CACHEABLE_POD(Limits);

// MemoryDesc describes a memory.

struct MemoryDesc {
  // The limits of this memory
  Limits limits;
  // The index of the import if this memory is imported
  mozilla::Maybe<uint32_t> importIndex;

  WASM_CHECK_CACHEABLE_POD(limits, importIndex);

  bool isShared() const { return limits.shared == Shareable::True; }

  // Whether a backing store for this memory may move when grown.
  bool canMovingGrow() const { return limits.maximum.isNothing(); }

  // Whether the boundsCheckLimit will always fit within 32 bits. See the SMDOC
  // for "WASM Linear Memory structure".
  bool boundsCheckLimitIsAlways32Bits() const {
    return limits.maximum.isSome() &&
           limits.maximum.value() < (0x100000000 / PageSizeInBytes(pageSize()));
  }

  AddressType addressType() const { return limits.addressType; }

  PageSize pageSize() const { return limits.pageSize; }

  // The initial length of this memory in pages.
  Pages initialPages() const {
    return Pages::fromPageCount(limits.initial, pageSize());
  }

  // The maximum length of this memory in pages.
  mozilla::Maybe<Pages> maximumPages() const {
    return limits.maximum.map(
        [&](uint64_t x) { return Pages::fromPageCount(x, pageSize()); });
  }

  // The initial length of this memory in bytes.
  uint64_t initialLength() const {
    // See static_assert after MemoryDesc for why this is safe for memory32.
    MOZ_ASSERT_IF(addressType() == AddressType::I64,
                  limits.initial <= UINT64_MAX / PageSizeInBytes(pageSize()));
    return addressType() == AddressType::I64 ? initialPages().byteLength64()
                                             : initialPages().byteLength();
  }

  MemoryDesc() = default;
  explicit MemoryDesc(Limits limits)
      : limits(limits), importIndex(mozilla::Nothing()) {}
};

WASM_DECLARE_CACHEABLE_POD(MemoryDesc);

using MemoryDescVector = Vector<MemoryDesc, 1, SystemAllocPolicy>;

// We never need to worry about overflow with a Memory32 field when
// using a uint64_t.
static_assert(MaxMemory32StandardPagesValidation <=
              UINT64_MAX / StandardPageSizeBytes);
#ifdef ENABLE_WASM_CUSTOM_PAGE_SIZES
static_assert(MaxMemory32TinyPagesValidation <= UINT64_MAX);
#endif

struct TableDesc {
  Limits limits;
  RefType elemType;
  bool isImported;
  bool isExported;
  bool isAsmJS;
  // Optional initializer for the table's elements; absent means
  // default-initialized entries.
  mozilla::Maybe<InitExpr> initExpr;

  TableDesc() = default;
  TableDesc(Limits limits, RefType elemType,
            mozilla::Maybe<InitExpr>&& initExpr, bool isAsmJS,
            bool isImported = false, bool isExported = false)
      : limits(limits),
        elemType(elemType),
        isImported(isImported),
        isExported(isExported),
        isAsmJS(isAsmJS),
        initExpr(std::move(initExpr)) {}

  AddressType addressType() const { return limits.addressType; }

  // Table lengths are in elements, not bytes.
  uint64_t initialLength() const { return limits.initial; }

  mozilla::Maybe<uint64_t> maximumLength() const { return limits.maximum; }
};

using TableDescVector = Vector<TableDesc, 0, SystemAllocPolicy>;

}  // namespace wasm
}  // namespace js

#endif  // wasm_module_types_h