WasmValidate.cpp (137055B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * 4 * Copyright 2016 Mozilla Foundation 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 */ 18 19 #include "wasm/WasmValidate.h" 20 21 #include "mozilla/CheckedInt.h" 22 #include "mozilla/Span.h" 23 #include "mozilla/Utf8.h" 24 25 #include "js/Printf.h" 26 #include "js/String.h" // JS::MaxStringLength 27 #include "vm/JSContext.h" 28 #include "vm/Realm.h" 29 #include "wasm/WasmDump.h" 30 #include "wasm/WasmInitExpr.h" 31 #include "wasm/WasmOpIter.h" 32 #include "wasm/WasmTypeDecls.h" 33 34 using namespace js; 35 using namespace js::jit; 36 using namespace js::wasm; 37 38 using mozilla::AsChars; 39 using mozilla::CheckedInt; 40 using mozilla::IsUtf8; 41 using mozilla::Maybe; 42 using mozilla::Nothing; 43 using mozilla::Some; 44 using mozilla::Span; 45 46 // Misc helpers. 
// Encodes `locals` as a sequence of local entries, each a (count, type) pair
// covering a run of identical types, per the wasm binary format for function
// bodies. Returns false on OOM/encoder failure or if there are too many
// locals.
bool wasm::EncodeLocalEntries(Encoder& e, const ValTypeVector& locals) {
  if (locals.length() > MaxLocals) {
    return false;
  }

  // First pass: count the number of runs of identical consecutive types,
  // since the entry count must be written before the entries themselves.
  uint32_t numLocalEntries = 0;
  if (locals.length()) {
    ValType prev = locals[0];
    numLocalEntries++;
    for (ValType t : locals) {
      if (t != prev) {
        numLocalEntries++;
        prev = t;
      }
    }
  }

  if (!e.writeVarU32(numLocalEntries)) {
    return false;
  }

  // Second pass: emit each run as (count, type).
  if (numLocalEntries) {
    ValType prev = locals[0];
    uint32_t count = 1;
    // Note `count` is bumped by the loop increment clause, so after a run
    // boundary resets it to 0 it immediately becomes 1 for the new run.
    for (uint32_t i = 1; i < locals.length(); i++, count++) {
      if (prev != locals[i]) {
        if (!e.writeVarU32(count)) {
          return false;
        }
        if (!e.writeValType(prev)) {
          return false;
        }
        prev = locals[i];
        count = 0;
      }
    }
    // Flush the final run.
    if (!e.writeVarU32(count)) {
      return false;
    }
    if (!e.writeValType(prev)) {
      return false;
    }
  }

  return true;
}

// Decodes the local entries of a function body into `locals`, seeding the
// vector with the function's parameter types first (locals are indexed after
// params in wasm). Fails with a decode error on malformed input or if the
// total local count would exceed MaxLocals.
bool wasm::DecodeLocalEntriesWithParams(Decoder& d,
                                        const CodeMetadata& codeMeta,
                                        uint32_t funcIndex,
                                        ValTypeVector* locals) {
  uint32_t numLocalEntries;
  if (!d.readVarU32(&numLocalEntries)) {
    return d.fail("failed to read number of local entries");
  }

  // Parameters come first; entries below append after them.
  if (!locals->appendAll(codeMeta.getFuncType(funcIndex).args())) {
    return false;
  }

  for (uint32_t i = 0; i < numLocalEntries; i++) {
    uint32_t count;
    if (!d.readVarU32(&count)) {
      return d.fail("failed to read local entry count");
    }

    // Written as a subtraction so the check cannot overflow; assumes
    // locals->length() <= MaxLocals, which the param-count validation
    // elsewhere presumably guarantees — TODO confirm.
    if (MaxLocals - locals->length() < count) {
      return d.fail("too many locals");
    }

    ValType type;
    if (!d.readValType(*codeMeta.types, codeMeta.features(), &type)) {
      return false;
    }

    if (!locals->appendN(type, count)) {
      return false;
    }
  }

  return true;
}

// Decodes local entries from a body that has already passed validation, so
// reads use the unchecked/asserting Decoder paths. Can still return false on
// OOM when growing `locals`.
bool wasm::DecodeValidatedLocalEntries(const TypeContext& types, Decoder& d,
                                       ValTypeVector* locals) {
  uint32_t numLocalEntries;
  MOZ_ALWAYS_TRUE(d.readVarU32(&numLocalEntries));

  for (uint32_t i = 0; i < numLocalEntries; i++) {
    uint32_t count = d.uncheckedReadVarU32();
    // Validation already bounded the total, so only assert here.
    MOZ_ASSERT(MaxLocals - locals->length() >= count);
    if (!locals->appendN(d.uncheckedReadValType(types), count)) {
      return false;
    }
  }

  return true;
}

// Checks that result type `subType` is a subtype of `superType`, element by
// element; on mismatch reports a decode error at `opcodeOffset` and returns
// false. Also returns false on OOM while building the error message.
bool wasm::CheckIsSubtypeOf(Decoder& d, const CodeMetadata& codeMeta,
                            size_t opcodeOffset, ResultType subType,
                            ResultType superType) {
  if (subType.length() != superType.length()) {
    UniqueChars error(
        JS_smprintf("type mismatch: expected %zu values, got %zu values",
                    superType.length(), subType.length()));
    if (!error) {
      return false;
    }
    // Sanity-check agreement with the authoritative subtyping predicate.
    MOZ_ASSERT(!ResultType::isSubTypeOf(subType, superType));
    return d.fail(opcodeOffset, error.get());
  }
  for (uint32_t i = 0; i < subType.length(); i++) {
    StorageType sub = subType[i].storageType();
    StorageType super = superType[i].storageType();
    if (!CheckIsSubtypeOf(d, codeMeta, opcodeOffset, sub, super)) {
      MOZ_ASSERT(!ResultType::isSubTypeOf(subType, superType));
      return false;
    }
  }
  MOZ_ASSERT(ResultType::isSubTypeOf(subType, superType));
  return true;
}

// Checks that storage type `subType` is a subtype of `superType`; on
// mismatch formats both types and reports a decode error at `opcodeOffset`.
// Returns false on mismatch or OOM while formatting the message.
bool wasm::CheckIsSubtypeOf(Decoder& d, const CodeMetadata& codeMeta,
                            size_t opcodeOffset, StorageType subType,
                            StorageType superType) {
  if (StorageType::isSubTypeOf(subType, superType)) {
    return true;
  }

  UniqueChars subText = ToString(subType, codeMeta.types);
  if (!subText) {
    return false;
  }

  UniqueChars superText = ToString(superType, codeMeta.types);
  if (!superText) {
    return false;
  }

  UniqueChars error(
      JS_smprintf("type mismatch: expression has type %s but expected %s",
                  subText.get(), superText.get()));
  if (!error) {
    return false;
  }

  return d.fail(opcodeOffset, error.get());
}

// Function body validation.
200 201 template <class T> 202 bool wasm::ValidateOps(ValidatingOpIter& iter, T& dumper, 203 const CodeMetadata& codeMeta) { 204 while (true) { 205 OpBytes op; 206 if (!iter.readOp(&op)) { 207 return false; 208 } 209 210 // End instructions get handled differently since we don't actually want to 211 // dump the final `end`. Also, Else instructions need to have their 212 // indentation managed when dumping. 213 if (op.b0 != uint16_t(Op::End)) { 214 if (op.b0 == uint64_t(Op::Else)) { 215 dumper.endScope(); 216 } 217 dumper.dumpOpBegin(op); 218 if (op.b0 == uint64_t(Op::Else)) { 219 dumper.startScope(); 220 } 221 } 222 223 Nothing nothing; 224 NothingVector nothings{}; 225 BlockType blockType; 226 ResultType resultType; 227 228 switch (op.b0) { 229 case uint16_t(Op::End): { 230 LabelKind unusedKind; 231 if (!iter.readEnd(&unusedKind, &resultType, ¬hings, ¬hings)) { 232 return false; 233 } 234 iter.popEnd(); 235 if (iter.controlStackEmpty()) { 236 return true; 237 } 238 239 // Only dump `end` if it was not the final `end` of the expression. 
240 dumper.endScope(); 241 dumper.dumpOpBegin(op); 242 243 break; 244 } 245 case uint16_t(Op::Nop): { 246 if (!iter.readNop()) { 247 return false; 248 } 249 break; 250 } 251 case uint16_t(Op::Drop): { 252 if (!iter.readDrop()) { 253 return false; 254 } 255 break; 256 } 257 case uint16_t(Op::Call): { 258 uint32_t funcIndex; 259 NothingVector unusedArgs{}; 260 if (!iter.readCall(&funcIndex, &unusedArgs)) { 261 return false; 262 } 263 dumper.dumpFuncIndex(funcIndex); 264 break; 265 } 266 case uint16_t(Op::CallIndirect): { 267 uint32_t funcTypeIndex, tableIndex; 268 NothingVector unusedArgs{}; 269 if (!iter.readCallIndirect(&funcTypeIndex, &tableIndex, ¬hing, 270 &unusedArgs)) { 271 return false; 272 } 273 dumper.dumpTableIndex(tableIndex); 274 dumper.dumpTypeIndex(funcTypeIndex, /*asTypeUse=*/true); 275 break; 276 } 277 case uint16_t(Op::ReturnCall): { 278 uint32_t funcIndex; 279 NothingVector unusedArgs{}; 280 if (!iter.readReturnCall(&funcIndex, &unusedArgs)) { 281 return false; 282 } 283 dumper.dumpFuncIndex(funcIndex); 284 break; 285 } 286 case uint16_t(Op::ReturnCallIndirect): { 287 uint32_t funcTypeIndex, tableIndex; 288 NothingVector unusedArgs{}; 289 if (!iter.readReturnCallIndirect(&funcTypeIndex, &tableIndex, ¬hing, 290 &unusedArgs)) { 291 return false; 292 } 293 dumper.dumpTableIndex(tableIndex); 294 dumper.dumpTypeIndex(funcTypeIndex, /*asTypeUse=*/true); 295 break; 296 } 297 case uint16_t(Op::CallRef): { 298 uint32_t funcTypeIndex; 299 NothingVector unusedArgs{}; 300 if (!iter.readCallRef(&funcTypeIndex, ¬hing, &unusedArgs)) { 301 return false; 302 } 303 dumper.dumpTypeIndex(funcTypeIndex); 304 break; 305 } 306 case uint16_t(Op::ReturnCallRef): { 307 uint32_t funcTypeIndex; 308 NothingVector unusedArgs{}; 309 if (!iter.readReturnCallRef(&funcTypeIndex, ¬hing, &unusedArgs)) { 310 return false; 311 } 312 dumper.dumpTypeIndex(funcTypeIndex); 313 break; 314 } 315 case uint16_t(Op::I32Const): { 316 int32_t constant; 317 if (!iter.readI32Const(&constant)) { 318 
return false; 319 } 320 dumper.dumpI32Const(constant); 321 break; 322 } 323 case uint16_t(Op::I64Const): { 324 int64_t constant; 325 if (!iter.readI64Const(&constant)) { 326 return false; 327 } 328 dumper.dumpI64Const(constant); 329 break; 330 } 331 case uint16_t(Op::F32Const): { 332 float constant; 333 if (!iter.readF32Const(&constant)) { 334 return false; 335 } 336 dumper.dumpF32Const(constant); 337 break; 338 } 339 case uint16_t(Op::F64Const): { 340 double constant; 341 if (!iter.readF64Const(&constant)) { 342 return false; 343 } 344 dumper.dumpF64Const(constant); 345 break; 346 } 347 case uint16_t(Op::LocalGet): { 348 uint32_t localIndex; 349 if (!iter.readGetLocal(&localIndex)) { 350 return false; 351 } 352 dumper.dumpLocalIndex(localIndex); 353 break; 354 } 355 case uint16_t(Op::LocalSet): { 356 uint32_t localIndex; 357 if (!iter.readSetLocal(&localIndex, ¬hing)) { 358 return false; 359 } 360 dumper.dumpLocalIndex(localIndex); 361 break; 362 } 363 case uint16_t(Op::LocalTee): { 364 uint32_t localIndex; 365 if (!iter.readTeeLocal(&localIndex, ¬hing)) { 366 return false; 367 } 368 dumper.dumpLocalIndex(localIndex); 369 break; 370 } 371 case uint16_t(Op::GlobalGet): { 372 uint32_t globalIndex; 373 if (!iter.readGetGlobal(&globalIndex)) { 374 return false; 375 } 376 dumper.dumpGlobalIndex(globalIndex); 377 break; 378 } 379 case uint16_t(Op::GlobalSet): { 380 uint32_t globalIndex; 381 if (!iter.readSetGlobal(&globalIndex, ¬hing)) { 382 return false; 383 } 384 dumper.dumpGlobalIndex(globalIndex); 385 break; 386 } 387 case uint16_t(Op::TableGet): { 388 uint32_t tableIndex; 389 if (!iter.readTableGet(&tableIndex, ¬hing)) { 390 return false; 391 } 392 dumper.dumpTableIndex(tableIndex); 393 break; 394 } 395 case uint16_t(Op::TableSet): { 396 uint32_t tableIndex; 397 if (!iter.readTableSet(&tableIndex, ¬hing, ¬hing)) { 398 return false; 399 } 400 dumper.dumpTableIndex(tableIndex); 401 break; 402 } 403 case uint16_t(Op::SelectNumeric): { 404 StackType unused; 405 if 
(!iter.readSelect(/*typed*/ false, &unused, ¬hing, ¬hing, 406 ¬hing)) { 407 return false; 408 } 409 break; 410 } 411 case uint16_t(Op::SelectTyped): { 412 StackType type; 413 if (!iter.readSelect(/*typed*/ true, &type, ¬hing, ¬hing, 414 ¬hing)) { 415 return false; 416 } 417 dumper.dumpValType(type.valType()); 418 break; 419 } 420 case uint16_t(Op::Block): { 421 if (!iter.readBlock(&blockType)) { 422 return false; 423 } 424 dumper.dumpBlockType(blockType); 425 dumper.startScope(); 426 break; 427 } 428 case uint16_t(Op::Loop): { 429 if (!iter.readLoop(&blockType)) { 430 return false; 431 } 432 dumper.dumpBlockType(blockType); 433 dumper.startScope(); 434 break; 435 } 436 case uint16_t(Op::If): { 437 if (!iter.readIf(&blockType, ¬hing)) { 438 return false; 439 } 440 dumper.dumpBlockType(blockType); 441 dumper.startScope(); 442 break; 443 } 444 case uint16_t(Op::Else): { 445 if (!iter.readElse(&resultType, &resultType, ¬hings)) { 446 return false; 447 } 448 break; 449 } 450 case uint16_t(Op::I32Clz): 451 case uint16_t(Op::I32Ctz): 452 case uint16_t(Op::I32Popcnt): { 453 if (!iter.readUnary(ValType::I32, ¬hing)) { 454 return false; 455 } 456 break; 457 } 458 case uint16_t(Op::I64Clz): 459 case uint16_t(Op::I64Ctz): 460 case uint16_t(Op::I64Popcnt): { 461 if (!iter.readUnary(ValType::I64, ¬hing)) { 462 return false; 463 } 464 break; 465 } 466 case uint16_t(Op::F32Abs): 467 case uint16_t(Op::F32Neg): 468 case uint16_t(Op::F32Ceil): 469 case uint16_t(Op::F32Floor): 470 case uint16_t(Op::F32Sqrt): 471 case uint16_t(Op::F32Trunc): 472 case uint16_t(Op::F32Nearest): { 473 if (!iter.readUnary(ValType::F32, ¬hing)) { 474 return false; 475 } 476 break; 477 } 478 case uint16_t(Op::F64Abs): 479 case uint16_t(Op::F64Neg): 480 case uint16_t(Op::F64Ceil): 481 case uint16_t(Op::F64Floor): 482 case uint16_t(Op::F64Sqrt): 483 case uint16_t(Op::F64Trunc): 484 case uint16_t(Op::F64Nearest): { 485 if (!iter.readUnary(ValType::F64, ¬hing)) { 486 return false; 487 } 488 break; 489 } 490 case 
uint16_t(Op::I32Add): 491 case uint16_t(Op::I32Sub): 492 case uint16_t(Op::I32Mul): 493 case uint16_t(Op::I32DivS): 494 case uint16_t(Op::I32DivU): 495 case uint16_t(Op::I32RemS): 496 case uint16_t(Op::I32RemU): 497 case uint16_t(Op::I32And): 498 case uint16_t(Op::I32Or): 499 case uint16_t(Op::I32Xor): 500 case uint16_t(Op::I32Shl): 501 case uint16_t(Op::I32ShrS): 502 case uint16_t(Op::I32ShrU): 503 case uint16_t(Op::I32Rotl): 504 case uint16_t(Op::I32Rotr): { 505 if (!iter.readBinary(ValType::I32, ¬hing, ¬hing)) { 506 return false; 507 } 508 break; 509 } 510 case uint16_t(Op::I64Add): 511 case uint16_t(Op::I64Sub): 512 case uint16_t(Op::I64Mul): 513 case uint16_t(Op::I64DivS): 514 case uint16_t(Op::I64DivU): 515 case uint16_t(Op::I64RemS): 516 case uint16_t(Op::I64RemU): 517 case uint16_t(Op::I64And): 518 case uint16_t(Op::I64Or): 519 case uint16_t(Op::I64Xor): 520 case uint16_t(Op::I64Shl): 521 case uint16_t(Op::I64ShrS): 522 case uint16_t(Op::I64ShrU): 523 case uint16_t(Op::I64Rotl): 524 case uint16_t(Op::I64Rotr): { 525 if (!iter.readBinary(ValType::I64, ¬hing, ¬hing)) { 526 return false; 527 } 528 break; 529 } 530 case uint16_t(Op::F32Add): 531 case uint16_t(Op::F32Sub): 532 case uint16_t(Op::F32Mul): 533 case uint16_t(Op::F32Div): 534 case uint16_t(Op::F32Min): 535 case uint16_t(Op::F32Max): 536 case uint16_t(Op::F32CopySign): { 537 if (!iter.readBinary(ValType::F32, ¬hing, ¬hing)) { 538 return false; 539 } 540 break; 541 } 542 case uint16_t(Op::F64Add): 543 case uint16_t(Op::F64Sub): 544 case uint16_t(Op::F64Mul): 545 case uint16_t(Op::F64Div): 546 case uint16_t(Op::F64Min): 547 case uint16_t(Op::F64Max): 548 case uint16_t(Op::F64CopySign): { 549 if (!iter.readBinary(ValType::F64, ¬hing, ¬hing)) { 550 return false; 551 } 552 break; 553 } 554 case uint16_t(Op::I32Eq): 555 case uint16_t(Op::I32Ne): 556 case uint16_t(Op::I32LtS): 557 case uint16_t(Op::I32LtU): 558 case uint16_t(Op::I32LeS): 559 case uint16_t(Op::I32LeU): 560 case uint16_t(Op::I32GtS): 561 case 
uint16_t(Op::I32GtU): 562 case uint16_t(Op::I32GeS): 563 case uint16_t(Op::I32GeU): { 564 if (!iter.readComparison(ValType::I32, ¬hing, ¬hing)) { 565 return false; 566 } 567 break; 568 } 569 case uint16_t(Op::I64Eq): 570 case uint16_t(Op::I64Ne): 571 case uint16_t(Op::I64LtS): 572 case uint16_t(Op::I64LtU): 573 case uint16_t(Op::I64LeS): 574 case uint16_t(Op::I64LeU): 575 case uint16_t(Op::I64GtS): 576 case uint16_t(Op::I64GtU): 577 case uint16_t(Op::I64GeS): 578 case uint16_t(Op::I64GeU): { 579 if (!iter.readComparison(ValType::I64, ¬hing, ¬hing)) { 580 return false; 581 } 582 break; 583 } 584 case uint16_t(Op::F32Eq): 585 case uint16_t(Op::F32Ne): 586 case uint16_t(Op::F32Lt): 587 case uint16_t(Op::F32Le): 588 case uint16_t(Op::F32Gt): 589 case uint16_t(Op::F32Ge): { 590 if (!iter.readComparison(ValType::F32, ¬hing, ¬hing)) { 591 return false; 592 } 593 break; 594 } 595 case uint16_t(Op::F64Eq): 596 case uint16_t(Op::F64Ne): 597 case uint16_t(Op::F64Lt): 598 case uint16_t(Op::F64Le): 599 case uint16_t(Op::F64Gt): 600 case uint16_t(Op::F64Ge): { 601 if (!iter.readComparison(ValType::F64, ¬hing, ¬hing)) { 602 return false; 603 } 604 break; 605 } 606 case uint16_t(Op::I32Eqz): { 607 if (!iter.readConversion(ValType::I32, ValType::I32, ¬hing)) { 608 return false; 609 } 610 break; 611 } 612 case uint16_t(Op::I64Eqz): 613 case uint16_t(Op::I32WrapI64): { 614 if (!iter.readConversion(ValType::I64, ValType::I32, ¬hing)) { 615 return false; 616 } 617 break; 618 } 619 case uint16_t(Op::I32TruncF32S): 620 case uint16_t(Op::I32TruncF32U): 621 case uint16_t(Op::I32ReinterpretF32): { 622 if (!iter.readConversion(ValType::F32, ValType::I32, ¬hing)) { 623 return false; 624 } 625 break; 626 } 627 case uint16_t(Op::I32TruncF64S): 628 case uint16_t(Op::I32TruncF64U): { 629 if (!iter.readConversion(ValType::F64, ValType::I32, ¬hing)) { 630 return false; 631 } 632 break; 633 } 634 case uint16_t(Op::I64ExtendI32S): 635 case uint16_t(Op::I64ExtendI32U): { 636 if 
(!iter.readConversion(ValType::I32, ValType::I64, ¬hing)) { 637 return false; 638 } 639 break; 640 } 641 case uint16_t(Op::I64TruncF32S): 642 case uint16_t(Op::I64TruncF32U): { 643 if (!iter.readConversion(ValType::F32, ValType::I64, ¬hing)) { 644 return false; 645 } 646 break; 647 } 648 case uint16_t(Op::I64TruncF64S): 649 case uint16_t(Op::I64TruncF64U): 650 case uint16_t(Op::I64ReinterpretF64): { 651 if (!iter.readConversion(ValType::F64, ValType::I64, ¬hing)) { 652 return false; 653 } 654 break; 655 } 656 case uint16_t(Op::F32ConvertI32S): 657 case uint16_t(Op::F32ConvertI32U): 658 case uint16_t(Op::F32ReinterpretI32): { 659 if (!iter.readConversion(ValType::I32, ValType::F32, ¬hing)) { 660 return false; 661 } 662 break; 663 } 664 case uint16_t(Op::F32ConvertI64S): 665 case uint16_t(Op::F32ConvertI64U): { 666 if (!iter.readConversion(ValType::I64, ValType::F32, ¬hing)) { 667 return false; 668 } 669 break; 670 } 671 case uint16_t(Op::F32DemoteF64): { 672 if (!iter.readConversion(ValType::F64, ValType::F32, ¬hing)) { 673 return false; 674 } 675 break; 676 } 677 case uint16_t(Op::F64ConvertI32S): 678 case uint16_t(Op::F64ConvertI32U): { 679 if (!iter.readConversion(ValType::I32, ValType::F64, ¬hing)) { 680 return false; 681 } 682 break; 683 } 684 case uint16_t(Op::F64ConvertI64S): 685 case uint16_t(Op::F64ConvertI64U): 686 case uint16_t(Op::F64ReinterpretI64): { 687 if (!iter.readConversion(ValType::I64, ValType::F64, ¬hing)) { 688 return false; 689 } 690 break; 691 } 692 case uint16_t(Op::F64PromoteF32): { 693 if (!iter.readConversion(ValType::F32, ValType::F64, ¬hing)) { 694 return false; 695 } 696 break; 697 } 698 case uint16_t(Op::I32Extend8S): 699 case uint16_t(Op::I32Extend16S): { 700 if (!iter.readConversion(ValType::I32, ValType::I32, ¬hing)) { 701 return false; 702 } 703 break; 704 } 705 case uint16_t(Op::I64Extend8S): 706 case uint16_t(Op::I64Extend16S): 707 case uint16_t(Op::I64Extend32S): { 708 if (!iter.readConversion(ValType::I64, ValType::I64, 
¬hing)) { 709 return false; 710 } 711 break; 712 } 713 case uint16_t(Op::I32Load8S): 714 case uint16_t(Op::I32Load8U): { 715 LinearMemoryAddress<Nothing> addr; 716 if (!iter.readLoad(ValType::I32, 1, &addr)) { 717 return false; 718 } 719 dumper.dumpLinearMemoryAddress(addr); 720 break; 721 } 722 case uint16_t(Op::I32Load16S): 723 case uint16_t(Op::I32Load16U): { 724 LinearMemoryAddress<Nothing> addr; 725 if (!iter.readLoad(ValType::I32, 2, &addr)) { 726 return false; 727 } 728 dumper.dumpLinearMemoryAddress(addr); 729 break; 730 } 731 case uint16_t(Op::I32Load): { 732 LinearMemoryAddress<Nothing> addr; 733 if (!iter.readLoad(ValType::I32, 4, &addr)) { 734 return false; 735 } 736 dumper.dumpLinearMemoryAddress(addr); 737 break; 738 } 739 case uint16_t(Op::I64Load8S): 740 case uint16_t(Op::I64Load8U): { 741 LinearMemoryAddress<Nothing> addr; 742 if (!iter.readLoad(ValType::I64, 1, &addr)) { 743 return false; 744 } 745 dumper.dumpLinearMemoryAddress(addr); 746 break; 747 } 748 case uint16_t(Op::I64Load16S): 749 case uint16_t(Op::I64Load16U): { 750 LinearMemoryAddress<Nothing> addr; 751 if (!iter.readLoad(ValType::I64, 2, &addr)) { 752 return false; 753 } 754 dumper.dumpLinearMemoryAddress(addr); 755 break; 756 } 757 case uint16_t(Op::I64Load32S): 758 case uint16_t(Op::I64Load32U): { 759 LinearMemoryAddress<Nothing> addr; 760 if (!iter.readLoad(ValType::I64, 4, &addr)) { 761 return false; 762 } 763 dumper.dumpLinearMemoryAddress(addr); 764 break; 765 } 766 case uint16_t(Op::I64Load): { 767 LinearMemoryAddress<Nothing> addr; 768 if (!iter.readLoad(ValType::I64, 8, &addr)) { 769 return false; 770 } 771 dumper.dumpLinearMemoryAddress(addr); 772 break; 773 } 774 case uint16_t(Op::F32Load): { 775 LinearMemoryAddress<Nothing> addr; 776 if (!iter.readLoad(ValType::F32, 4, &addr)) { 777 return false; 778 } 779 dumper.dumpLinearMemoryAddress(addr); 780 break; 781 } 782 case uint16_t(Op::F64Load): { 783 LinearMemoryAddress<Nothing> addr; 784 if (!iter.readLoad(ValType::F64, 8, 
&addr)) { 785 return false; 786 } 787 dumper.dumpLinearMemoryAddress(addr); 788 break; 789 } 790 case uint16_t(Op::I32Store8): { 791 LinearMemoryAddress<Nothing> addr; 792 if (!iter.readStore(ValType::I32, 1, &addr, ¬hing)) { 793 return false; 794 } 795 dumper.dumpLinearMemoryAddress(addr); 796 break; 797 } 798 case uint16_t(Op::I32Store16): { 799 LinearMemoryAddress<Nothing> addr; 800 if (!iter.readStore(ValType::I32, 2, &addr, ¬hing)) { 801 return false; 802 } 803 dumper.dumpLinearMemoryAddress(addr); 804 break; 805 } 806 case uint16_t(Op::I32Store): { 807 LinearMemoryAddress<Nothing> addr; 808 if (!iter.readStore(ValType::I32, 4, &addr, ¬hing)) { 809 return false; 810 } 811 dumper.dumpLinearMemoryAddress(addr); 812 break; 813 } 814 case uint16_t(Op::I64Store8): { 815 LinearMemoryAddress<Nothing> addr; 816 if (!iter.readStore(ValType::I64, 1, &addr, ¬hing)) { 817 return false; 818 } 819 dumper.dumpLinearMemoryAddress(addr); 820 break; 821 } 822 case uint16_t(Op::I64Store16): { 823 LinearMemoryAddress<Nothing> addr; 824 if (!iter.readStore(ValType::I64, 2, &addr, ¬hing)) { 825 return false; 826 } 827 dumper.dumpLinearMemoryAddress(addr); 828 break; 829 } 830 case uint16_t(Op::I64Store32): { 831 LinearMemoryAddress<Nothing> addr; 832 if (!iter.readStore(ValType::I64, 4, &addr, ¬hing)) { 833 return false; 834 } 835 dumper.dumpLinearMemoryAddress(addr); 836 break; 837 } 838 case uint16_t(Op::I64Store): { 839 LinearMemoryAddress<Nothing> addr; 840 if (!iter.readStore(ValType::I64, 8, &addr, ¬hing)) { 841 return false; 842 } 843 dumper.dumpLinearMemoryAddress(addr); 844 break; 845 } 846 case uint16_t(Op::F32Store): { 847 LinearMemoryAddress<Nothing> addr; 848 if (!iter.readStore(ValType::F32, 4, &addr, ¬hing)) { 849 return false; 850 } 851 dumper.dumpLinearMemoryAddress(addr); 852 break; 853 } 854 case uint16_t(Op::F64Store): { 855 LinearMemoryAddress<Nothing> addr; 856 if (!iter.readStore(ValType::F64, 8, &addr, ¬hing)) { 857 return false; 858 } 859 
dumper.dumpLinearMemoryAddress(addr); 860 break; 861 } 862 case uint16_t(Op::MemoryGrow): { 863 uint32_t memoryIndex; 864 if (!iter.readMemoryGrow(&memoryIndex, ¬hing)) { 865 return false; 866 } 867 dumper.dumpMemoryIndex(memoryIndex); 868 break; 869 } 870 case uint16_t(Op::MemorySize): { 871 uint32_t memoryIndex; 872 if (!iter.readMemorySize(&memoryIndex)) { 873 return false; 874 } 875 dumper.dumpMemoryIndex(memoryIndex); 876 break; 877 } 878 case uint16_t(Op::Br): { 879 uint32_t depth; 880 if (!iter.readBr(&depth, &resultType, ¬hings)) { 881 return false; 882 } 883 dumper.dumpBlockDepth(depth); 884 break; 885 } 886 case uint16_t(Op::BrIf): { 887 uint32_t depth; 888 if (!iter.readBrIf(&depth, &resultType, ¬hings, ¬hing)) { 889 return false; 890 } 891 dumper.dumpBlockDepth(depth); 892 break; 893 } 894 case uint16_t(Op::BrTable): { 895 Uint32Vector depths; 896 uint32_t defaultDepth; 897 if (!iter.readBrTable(&depths, &defaultDepth, &resultType, ¬hings, 898 ¬hing)) { 899 return false; 900 } 901 dumper.dumpBlockDepths(depths); 902 dumper.dumpBlockDepth(defaultDepth); 903 break; 904 } 905 case uint16_t(Op::Return): { 906 if (!iter.readReturn(¬hings)) { 907 return false; 908 } 909 break; 910 } 911 case uint16_t(Op::Unreachable): { 912 if (!iter.readUnreachable()) { 913 return false; 914 } 915 break; 916 } 917 case uint16_t(Op::GcPrefix): { 918 switch (op.b1) { 919 case uint32_t(GcOp::StructNew): { 920 uint32_t typeIndex; 921 NothingVector unusedArgs{}; 922 if (!iter.readStructNew(&typeIndex, &unusedArgs)) { 923 return false; 924 } 925 dumper.dumpTypeIndex(typeIndex); 926 break; 927 } 928 case uint32_t(GcOp::StructNewDefault): { 929 uint32_t typeIndex; 930 if (!iter.readStructNewDefault(&typeIndex)) { 931 return false; 932 } 933 dumper.dumpTypeIndex(typeIndex); 934 break; 935 } 936 case uint32_t(GcOp::StructGet): { 937 uint32_t typeIndex, fieldIndex; 938 if (!iter.readStructGet(&typeIndex, &fieldIndex, 939 FieldWideningOp::None, ¬hing)) { 940 return false; 941 } 942 
dumper.dumpTypeIndex(typeIndex); 943 dumper.dumpFieldIndex(fieldIndex); 944 break; 945 } 946 case uint32_t(GcOp::StructGetS): { 947 uint32_t typeIndex, fieldIndex; 948 if (!iter.readStructGet(&typeIndex, &fieldIndex, 949 FieldWideningOp::Signed, ¬hing)) { 950 return false; 951 } 952 dumper.dumpTypeIndex(typeIndex); 953 dumper.dumpFieldIndex(fieldIndex); 954 break; 955 } 956 case uint32_t(GcOp::StructGetU): { 957 uint32_t typeIndex, fieldIndex; 958 if (!iter.readStructGet(&typeIndex, &fieldIndex, 959 FieldWideningOp::Unsigned, ¬hing)) { 960 return false; 961 } 962 dumper.dumpTypeIndex(typeIndex); 963 dumper.dumpFieldIndex(fieldIndex); 964 break; 965 } 966 case uint32_t(GcOp::StructSet): { 967 uint32_t typeIndex, fieldIndex; 968 if (!iter.readStructSet(&typeIndex, &fieldIndex, ¬hing, 969 ¬hing)) { 970 return false; 971 } 972 dumper.dumpTypeIndex(typeIndex); 973 dumper.dumpFieldIndex(fieldIndex); 974 break; 975 } 976 case uint32_t(GcOp::ArrayNew): { 977 uint32_t typeIndex; 978 if (!iter.readArrayNew(&typeIndex, ¬hing, ¬hing)) { 979 return false; 980 } 981 dumper.dumpTypeIndex(typeIndex); 982 break; 983 } 984 case uint32_t(GcOp::ArrayNewFixed): { 985 uint32_t typeIndex, numElements; 986 if (!iter.readArrayNewFixed(&typeIndex, &numElements, ¬hings)) { 987 return false; 988 } 989 dumper.dumpTypeIndex(typeIndex); 990 dumper.dumpNumElements(numElements); 991 break; 992 } 993 case uint32_t(GcOp::ArrayNewDefault): { 994 uint32_t typeIndex; 995 if (!iter.readArrayNewDefault(&typeIndex, ¬hing)) { 996 return false; 997 } 998 dumper.dumpTypeIndex(typeIndex); 999 break; 1000 } 1001 case uint32_t(GcOp::ArrayNewData): { 1002 uint32_t typeIndex, dataIndex; 1003 if (!iter.readArrayNewData(&typeIndex, &dataIndex, ¬hing, 1004 ¬hing)) { 1005 return false; 1006 } 1007 dumper.dumpTypeIndex(typeIndex); 1008 dumper.dumpDataIndex(dataIndex); 1009 break; 1010 } 1011 case uint32_t(GcOp::ArrayNewElem): { 1012 uint32_t typeIndex, elemIndex; 1013 if (!iter.readArrayNewElem(&typeIndex, &elemIndex, 
¬hing, 1014 ¬hing)) { 1015 return false; 1016 } 1017 dumper.dumpTypeIndex(typeIndex); 1018 dumper.dumpElemIndex(elemIndex); 1019 break; 1020 } 1021 case uint32_t(GcOp::ArrayInitData): { 1022 uint32_t typeIndex, dataIndex; 1023 if (!iter.readArrayInitData(&typeIndex, &dataIndex, ¬hing, 1024 ¬hing, ¬hing, ¬hing)) { 1025 return false; 1026 } 1027 dumper.dumpTypeIndex(typeIndex); 1028 dumper.dumpDataIndex(dataIndex); 1029 break; 1030 } 1031 case uint32_t(GcOp::ArrayInitElem): { 1032 uint32_t typeIndex, elemIndex; 1033 if (!iter.readArrayInitElem(&typeIndex, &elemIndex, ¬hing, 1034 ¬hing, ¬hing, ¬hing)) { 1035 return false; 1036 } 1037 dumper.dumpTypeIndex(typeIndex); 1038 dumper.dumpElemIndex(elemIndex); 1039 break; 1040 } 1041 case uint32_t(GcOp::ArrayGet): { 1042 uint32_t typeIndex; 1043 if (!iter.readArrayGet(&typeIndex, FieldWideningOp::None, ¬hing, 1044 ¬hing)) { 1045 return false; 1046 } 1047 dumper.dumpTypeIndex(typeIndex); 1048 break; 1049 } 1050 case uint32_t(GcOp::ArrayGetS): { 1051 uint32_t typeIndex; 1052 if (!iter.readArrayGet(&typeIndex, FieldWideningOp::Signed, 1053 ¬hing, ¬hing)) { 1054 return false; 1055 } 1056 dumper.dumpTypeIndex(typeIndex); 1057 break; 1058 } 1059 case uint32_t(GcOp::ArrayGetU): { 1060 uint32_t typeIndex; 1061 if (!iter.readArrayGet(&typeIndex, FieldWideningOp::Unsigned, 1062 ¬hing, ¬hing)) { 1063 return false; 1064 } 1065 dumper.dumpTypeIndex(typeIndex); 1066 break; 1067 } 1068 case uint32_t(GcOp::ArraySet): { 1069 uint32_t typeIndex; 1070 if (!iter.readArraySet(&typeIndex, ¬hing, ¬hing, ¬hing)) { 1071 return false; 1072 } 1073 dumper.dumpTypeIndex(typeIndex); 1074 break; 1075 } 1076 case uint32_t(GcOp::ArrayLen): { 1077 if (!iter.readArrayLen(¬hing)) { 1078 return false; 1079 } 1080 break; 1081 } 1082 case uint32_t(GcOp::ArrayCopy): { 1083 uint32_t dstArrayTypeIndex; 1084 uint32_t srcArrayTypeIndex; 1085 if (!iter.readArrayCopy(&dstArrayTypeIndex, &srcArrayTypeIndex, 1086 ¬hing, ¬hing, ¬hing, ¬hing, 1087 ¬hing)) { 1088 return 
false; 1089 } 1090 dumper.dumpTypeIndex(dstArrayTypeIndex); 1091 dumper.dumpTypeIndex(srcArrayTypeIndex); 1092 break; 1093 } 1094 case uint32_t(GcOp::ArrayFill): { 1095 uint32_t typeIndex; 1096 if (!iter.readArrayFill(&typeIndex, ¬hing, ¬hing, ¬hing, 1097 ¬hing)) { 1098 return false; 1099 } 1100 dumper.dumpTypeIndex(typeIndex); 1101 break; 1102 } 1103 case uint32_t(GcOp::RefI31): { 1104 if (!iter.readConversion(ValType::I32, 1105 ValType(RefType::i31().asNonNullable()), 1106 ¬hing)) { 1107 return false; 1108 } 1109 break; 1110 } 1111 case uint32_t(GcOp::I31GetS): { 1112 if (!iter.readConversion(ValType(RefType::i31()), ValType::I32, 1113 ¬hing)) { 1114 return false; 1115 } 1116 break; 1117 } 1118 case uint32_t(GcOp::I31GetU): { 1119 if (!iter.readConversion(ValType(RefType::i31()), ValType::I32, 1120 ¬hing)) { 1121 return false; 1122 } 1123 break; 1124 } 1125 case uint16_t(GcOp::RefTest): { 1126 RefType srcType; 1127 RefType destType; 1128 if (!iter.readRefTest(false, &srcType, &destType, ¬hing)) { 1129 return false; 1130 } 1131 dumper.dumpRefType(destType); 1132 break; 1133 } 1134 case uint16_t(GcOp::RefTestNull): { 1135 RefType srcType; 1136 RefType destType; 1137 if (!iter.readRefTest(true, &srcType, &destType, ¬hing)) { 1138 return false; 1139 } 1140 dumper.dumpRefType(srcType); 1141 dumper.dumpRefType(destType); 1142 break; 1143 } 1144 case uint16_t(GcOp::RefCast): { 1145 RefType srcType; 1146 RefType destType; 1147 if (!iter.readRefCast(false, &srcType, &destType, ¬hing)) { 1148 return false; 1149 } 1150 dumper.dumpRefType(destType); 1151 break; 1152 } 1153 case uint16_t(GcOp::RefCastNull): { 1154 RefType srcType; 1155 RefType destType; 1156 if (!iter.readRefCast(true, &srcType, &destType, ¬hing)) { 1157 return false; 1158 } 1159 dumper.dumpRefType(destType); 1160 break; 1161 } 1162 case uint16_t(GcOp::BrOnCast): { 1163 uint32_t relativeDepth; 1164 RefType srcType; 1165 RefType destType; 1166 if (!iter.readBrOnCast(true, &relativeDepth, &srcType, &destType, 
1167 &resultType, ¬hings)) { 1168 return false; 1169 } 1170 dumper.dumpBlockDepth(relativeDepth); 1171 dumper.dumpRefType(srcType); 1172 dumper.dumpRefType(destType); 1173 break; 1174 } 1175 case uint16_t(GcOp::BrOnCastFail): { 1176 uint32_t relativeDepth; 1177 RefType srcType; 1178 RefType destType; 1179 if (!iter.readBrOnCast(false, &relativeDepth, &srcType, &destType, 1180 &resultType, ¬hings)) { 1181 return false; 1182 } 1183 dumper.dumpBlockDepth(relativeDepth); 1184 dumper.dumpRefType(srcType); 1185 dumper.dumpRefType(destType); 1186 break; 1187 } 1188 case uint16_t(GcOp::AnyConvertExtern): { 1189 if (!iter.readRefConversion(RefType::extern_(), RefType::any(), 1190 ¬hing)) { 1191 return false; 1192 } 1193 break; 1194 } 1195 case uint16_t(GcOp::ExternConvertAny): { 1196 if (!iter.readRefConversion(RefType::any(), RefType::extern_(), 1197 ¬hing)) { 1198 return false; 1199 } 1200 break; 1201 } 1202 default: 1203 return iter.unrecognizedOpcode(&op); 1204 } 1205 break; 1206 } 1207 1208 #ifdef ENABLE_WASM_SIMD 1209 case uint16_t(Op::SimdPrefix): { 1210 if (!codeMeta.simdAvailable()) { 1211 return iter.unrecognizedOpcode(&op); 1212 } 1213 uint32_t laneIndex; 1214 switch (op.b1) { 1215 case uint32_t(SimdOp::I8x16ExtractLaneS): 1216 case uint32_t(SimdOp::I8x16ExtractLaneU): { 1217 if (!iter.readExtractLane(ValType::I32, 16, &laneIndex, ¬hing)) { 1218 return false; 1219 } 1220 dumper.dumpLaneIndex(laneIndex); 1221 break; 1222 } 1223 case uint32_t(SimdOp::I16x8ExtractLaneS): 1224 case uint32_t(SimdOp::I16x8ExtractLaneU): { 1225 if (!iter.readExtractLane(ValType::I32, 8, &laneIndex, ¬hing)) { 1226 return false; 1227 } 1228 dumper.dumpLaneIndex(laneIndex); 1229 break; 1230 } 1231 case uint32_t(SimdOp::I32x4ExtractLane): { 1232 if (!iter.readExtractLane(ValType::I32, 4, &laneIndex, ¬hing)) { 1233 return false; 1234 } 1235 dumper.dumpLaneIndex(laneIndex); 1236 break; 1237 } 1238 case uint32_t(SimdOp::I64x2ExtractLane): { 1239 if (!iter.readExtractLane(ValType::I64, 2, 
&laneIndex, ¬hing)) { 1240 return false; 1241 } 1242 dumper.dumpLaneIndex(laneIndex); 1243 break; 1244 } 1245 case uint32_t(SimdOp::F32x4ExtractLane): { 1246 if (!iter.readExtractLane(ValType::F32, 4, &laneIndex, ¬hing)) { 1247 return false; 1248 } 1249 dumper.dumpLaneIndex(laneIndex); 1250 break; 1251 } 1252 case uint32_t(SimdOp::F64x2ExtractLane): { 1253 if (!iter.readExtractLane(ValType::F64, 2, &laneIndex, ¬hing)) { 1254 return false; 1255 } 1256 dumper.dumpLaneIndex(laneIndex); 1257 break; 1258 } 1259 1260 case uint32_t(SimdOp::I8x16Splat): 1261 case uint32_t(SimdOp::I16x8Splat): 1262 case uint32_t(SimdOp::I32x4Splat): { 1263 if (!iter.readConversion(ValType::I32, ValType::V128, ¬hing)) { 1264 return false; 1265 } 1266 break; 1267 } 1268 case uint32_t(SimdOp::I64x2Splat): { 1269 if (!iter.readConversion(ValType::I64, ValType::V128, ¬hing)) { 1270 return false; 1271 } 1272 break; 1273 } 1274 case uint32_t(SimdOp::F32x4Splat): { 1275 if (!iter.readConversion(ValType::F32, ValType::V128, ¬hing)) { 1276 return false; 1277 } 1278 break; 1279 } 1280 case uint32_t(SimdOp::F64x2Splat): { 1281 if (!iter.readConversion(ValType::F64, ValType::V128, ¬hing)) { 1282 return false; 1283 } 1284 break; 1285 } 1286 1287 case uint32_t(SimdOp::V128AnyTrue): 1288 case uint32_t(SimdOp::I8x16AllTrue): 1289 case uint32_t(SimdOp::I16x8AllTrue): 1290 case uint32_t(SimdOp::I32x4AllTrue): 1291 case uint32_t(SimdOp::I64x2AllTrue): 1292 case uint32_t(SimdOp::I8x16Bitmask): 1293 case uint32_t(SimdOp::I16x8Bitmask): 1294 case uint32_t(SimdOp::I32x4Bitmask): 1295 case uint32_t(SimdOp::I64x2Bitmask): { 1296 if (!iter.readConversion(ValType::V128, ValType::I32, ¬hing)) { 1297 return false; 1298 } 1299 break; 1300 } 1301 1302 case uint32_t(SimdOp::I8x16ReplaceLane): { 1303 if (!iter.readReplaceLane(ValType::I32, 16, &laneIndex, ¬hing, 1304 ¬hing)) { 1305 return false; 1306 } 1307 dumper.dumpLaneIndex(laneIndex); 1308 break; 1309 } 1310 case uint32_t(SimdOp::I16x8ReplaceLane): { 1311 if 
(!iter.readReplaceLane(ValType::I32, 8, &laneIndex, ¬hing, 1312 ¬hing)) { 1313 return false; 1314 } 1315 dumper.dumpLaneIndex(laneIndex); 1316 break; 1317 } 1318 case uint32_t(SimdOp::I32x4ReplaceLane): { 1319 if (!iter.readReplaceLane(ValType::I32, 4, &laneIndex, ¬hing, 1320 ¬hing)) { 1321 return false; 1322 } 1323 dumper.dumpLaneIndex(laneIndex); 1324 break; 1325 } 1326 case uint32_t(SimdOp::I64x2ReplaceLane): { 1327 if (!iter.readReplaceLane(ValType::I64, 2, &laneIndex, ¬hing, 1328 ¬hing)) { 1329 return false; 1330 } 1331 dumper.dumpLaneIndex(laneIndex); 1332 break; 1333 } 1334 case uint32_t(SimdOp::F32x4ReplaceLane): { 1335 if (!iter.readReplaceLane(ValType::F32, 4, &laneIndex, ¬hing, 1336 ¬hing)) { 1337 return false; 1338 } 1339 dumper.dumpLaneIndex(laneIndex); 1340 break; 1341 } 1342 case uint32_t(SimdOp::F64x2ReplaceLane): { 1343 if (!iter.readReplaceLane(ValType::F64, 2, &laneIndex, ¬hing, 1344 ¬hing)) { 1345 return false; 1346 } 1347 dumper.dumpLaneIndex(laneIndex); 1348 break; 1349 } 1350 1351 case uint32_t(SimdOp::I8x16Eq): 1352 case uint32_t(SimdOp::I8x16Ne): 1353 case uint32_t(SimdOp::I8x16LtS): 1354 case uint32_t(SimdOp::I8x16LtU): 1355 case uint32_t(SimdOp::I8x16GtS): 1356 case uint32_t(SimdOp::I8x16GtU): 1357 case uint32_t(SimdOp::I8x16LeS): 1358 case uint32_t(SimdOp::I8x16LeU): 1359 case uint32_t(SimdOp::I8x16GeS): 1360 case uint32_t(SimdOp::I8x16GeU): 1361 case uint32_t(SimdOp::I16x8Eq): 1362 case uint32_t(SimdOp::I16x8Ne): 1363 case uint32_t(SimdOp::I16x8LtS): 1364 case uint32_t(SimdOp::I16x8LtU): 1365 case uint32_t(SimdOp::I16x8GtS): 1366 case uint32_t(SimdOp::I16x8GtU): 1367 case uint32_t(SimdOp::I16x8LeS): 1368 case uint32_t(SimdOp::I16x8LeU): 1369 case uint32_t(SimdOp::I16x8GeS): 1370 case uint32_t(SimdOp::I16x8GeU): 1371 case uint32_t(SimdOp::I32x4Eq): 1372 case uint32_t(SimdOp::I32x4Ne): 1373 case uint32_t(SimdOp::I32x4LtS): 1374 case uint32_t(SimdOp::I32x4LtU): 1375 case uint32_t(SimdOp::I32x4GtS): 1376 case uint32_t(SimdOp::I32x4GtU): 
1377 case uint32_t(SimdOp::I32x4LeS): 1378 case uint32_t(SimdOp::I32x4LeU): 1379 case uint32_t(SimdOp::I32x4GeS): 1380 case uint32_t(SimdOp::I32x4GeU): 1381 case uint32_t(SimdOp::I64x2Eq): 1382 case uint32_t(SimdOp::I64x2Ne): 1383 case uint32_t(SimdOp::I64x2LtS): 1384 case uint32_t(SimdOp::I64x2GtS): 1385 case uint32_t(SimdOp::I64x2LeS): 1386 case uint32_t(SimdOp::I64x2GeS): 1387 case uint32_t(SimdOp::F32x4Eq): 1388 case uint32_t(SimdOp::F32x4Ne): 1389 case uint32_t(SimdOp::F32x4Lt): 1390 case uint32_t(SimdOp::F32x4Gt): 1391 case uint32_t(SimdOp::F32x4Le): 1392 case uint32_t(SimdOp::F32x4Ge): 1393 case uint32_t(SimdOp::F64x2Eq): 1394 case uint32_t(SimdOp::F64x2Ne): 1395 case uint32_t(SimdOp::F64x2Lt): 1396 case uint32_t(SimdOp::F64x2Gt): 1397 case uint32_t(SimdOp::F64x2Le): 1398 case uint32_t(SimdOp::F64x2Ge): 1399 case uint32_t(SimdOp::V128And): 1400 case uint32_t(SimdOp::V128Or): 1401 case uint32_t(SimdOp::V128Xor): 1402 case uint32_t(SimdOp::V128AndNot): 1403 case uint32_t(SimdOp::I8x16AvgrU): 1404 case uint32_t(SimdOp::I16x8AvgrU): 1405 case uint32_t(SimdOp::I8x16Add): 1406 case uint32_t(SimdOp::I8x16AddSatS): 1407 case uint32_t(SimdOp::I8x16AddSatU): 1408 case uint32_t(SimdOp::I8x16Sub): 1409 case uint32_t(SimdOp::I8x16SubSatS): 1410 case uint32_t(SimdOp::I8x16SubSatU): 1411 case uint32_t(SimdOp::I8x16MinS): 1412 case uint32_t(SimdOp::I8x16MinU): 1413 case uint32_t(SimdOp::I8x16MaxS): 1414 case uint32_t(SimdOp::I8x16MaxU): 1415 case uint32_t(SimdOp::I16x8Add): 1416 case uint32_t(SimdOp::I16x8AddSatS): 1417 case uint32_t(SimdOp::I16x8AddSatU): 1418 case uint32_t(SimdOp::I16x8Sub): 1419 case uint32_t(SimdOp::I16x8SubSatS): 1420 case uint32_t(SimdOp::I16x8SubSatU): 1421 case uint32_t(SimdOp::I16x8Mul): 1422 case uint32_t(SimdOp::I16x8MinS): 1423 case uint32_t(SimdOp::I16x8MinU): 1424 case uint32_t(SimdOp::I16x8MaxS): 1425 case uint32_t(SimdOp::I16x8MaxU): 1426 case uint32_t(SimdOp::I32x4Add): 1427 case uint32_t(SimdOp::I32x4Sub): 1428 case 
uint32_t(SimdOp::I32x4Mul): 1429 case uint32_t(SimdOp::I32x4MinS): 1430 case uint32_t(SimdOp::I32x4MinU): 1431 case uint32_t(SimdOp::I32x4MaxS): 1432 case uint32_t(SimdOp::I32x4MaxU): 1433 case uint32_t(SimdOp::I64x2Add): 1434 case uint32_t(SimdOp::I64x2Sub): 1435 case uint32_t(SimdOp::I64x2Mul): 1436 case uint32_t(SimdOp::F32x4Add): 1437 case uint32_t(SimdOp::F32x4Sub): 1438 case uint32_t(SimdOp::F32x4Mul): 1439 case uint32_t(SimdOp::F32x4Div): 1440 case uint32_t(SimdOp::F32x4Min): 1441 case uint32_t(SimdOp::F32x4Max): 1442 case uint32_t(SimdOp::F64x2Add): 1443 case uint32_t(SimdOp::F64x2Sub): 1444 case uint32_t(SimdOp::F64x2Mul): 1445 case uint32_t(SimdOp::F64x2Div): 1446 case uint32_t(SimdOp::F64x2Min): 1447 case uint32_t(SimdOp::F64x2Max): 1448 case uint32_t(SimdOp::I8x16NarrowI16x8S): 1449 case uint32_t(SimdOp::I8x16NarrowI16x8U): 1450 case uint32_t(SimdOp::I16x8NarrowI32x4S): 1451 case uint32_t(SimdOp::I16x8NarrowI32x4U): 1452 case uint32_t(SimdOp::I8x16Swizzle): 1453 case uint32_t(SimdOp::F32x4PMax): 1454 case uint32_t(SimdOp::F32x4PMin): 1455 case uint32_t(SimdOp::F64x2PMax): 1456 case uint32_t(SimdOp::F64x2PMin): 1457 case uint32_t(SimdOp::I32x4DotI16x8S): 1458 case uint32_t(SimdOp::I16x8ExtmulLowI8x16S): 1459 case uint32_t(SimdOp::I16x8ExtmulHighI8x16S): 1460 case uint32_t(SimdOp::I16x8ExtmulLowI8x16U): 1461 case uint32_t(SimdOp::I16x8ExtmulHighI8x16U): 1462 case uint32_t(SimdOp::I32x4ExtmulLowI16x8S): 1463 case uint32_t(SimdOp::I32x4ExtmulHighI16x8S): 1464 case uint32_t(SimdOp::I32x4ExtmulLowI16x8U): 1465 case uint32_t(SimdOp::I32x4ExtmulHighI16x8U): 1466 case uint32_t(SimdOp::I64x2ExtmulLowI32x4S): 1467 case uint32_t(SimdOp::I64x2ExtmulHighI32x4S): 1468 case uint32_t(SimdOp::I64x2ExtmulLowI32x4U): 1469 case uint32_t(SimdOp::I64x2ExtmulHighI32x4U): 1470 case uint32_t(SimdOp::I16x8Q15MulrSatS): { 1471 if (!iter.readBinary(ValType::V128, ¬hing, ¬hing)) { 1472 return false; 1473 } 1474 break; 1475 } 1476 1477 case uint32_t(SimdOp::I8x16Neg): 1478 case 
uint32_t(SimdOp::I16x8Neg): 1479 case uint32_t(SimdOp::I16x8ExtendLowI8x16S): 1480 case uint32_t(SimdOp::I16x8ExtendHighI8x16S): 1481 case uint32_t(SimdOp::I16x8ExtendLowI8x16U): 1482 case uint32_t(SimdOp::I16x8ExtendHighI8x16U): 1483 case uint32_t(SimdOp::I32x4Neg): 1484 case uint32_t(SimdOp::I32x4ExtendLowI16x8S): 1485 case uint32_t(SimdOp::I32x4ExtendHighI16x8S): 1486 case uint32_t(SimdOp::I32x4ExtendLowI16x8U): 1487 case uint32_t(SimdOp::I32x4ExtendHighI16x8U): 1488 case uint32_t(SimdOp::I32x4TruncSatF32x4S): 1489 case uint32_t(SimdOp::I32x4TruncSatF32x4U): 1490 case uint32_t(SimdOp::I64x2Neg): 1491 case uint32_t(SimdOp::I64x2ExtendLowI32x4S): 1492 case uint32_t(SimdOp::I64x2ExtendHighI32x4S): 1493 case uint32_t(SimdOp::I64x2ExtendLowI32x4U): 1494 case uint32_t(SimdOp::I64x2ExtendHighI32x4U): 1495 case uint32_t(SimdOp::F32x4Abs): 1496 case uint32_t(SimdOp::F32x4Neg): 1497 case uint32_t(SimdOp::F32x4Sqrt): 1498 case uint32_t(SimdOp::F32x4ConvertI32x4S): 1499 case uint32_t(SimdOp::F32x4ConvertI32x4U): 1500 case uint32_t(SimdOp::F64x2Abs): 1501 case uint32_t(SimdOp::F64x2Neg): 1502 case uint32_t(SimdOp::F64x2Sqrt): 1503 case uint32_t(SimdOp::V128Not): 1504 case uint32_t(SimdOp::I8x16Popcnt): 1505 case uint32_t(SimdOp::I8x16Abs): 1506 case uint32_t(SimdOp::I16x8Abs): 1507 case uint32_t(SimdOp::I32x4Abs): 1508 case uint32_t(SimdOp::I64x2Abs): 1509 case uint32_t(SimdOp::F32x4Ceil): 1510 case uint32_t(SimdOp::F32x4Floor): 1511 case uint32_t(SimdOp::F32x4Trunc): 1512 case uint32_t(SimdOp::F32x4Nearest): 1513 case uint32_t(SimdOp::F64x2Ceil): 1514 case uint32_t(SimdOp::F64x2Floor): 1515 case uint32_t(SimdOp::F64x2Trunc): 1516 case uint32_t(SimdOp::F64x2Nearest): 1517 case uint32_t(SimdOp::F32x4DemoteF64x2Zero): 1518 case uint32_t(SimdOp::F64x2PromoteLowF32x4): 1519 case uint32_t(SimdOp::F64x2ConvertLowI32x4S): 1520 case uint32_t(SimdOp::F64x2ConvertLowI32x4U): 1521 case uint32_t(SimdOp::I32x4TruncSatF64x2SZero): 1522 case uint32_t(SimdOp::I32x4TruncSatF64x2UZero): 1523 
case uint32_t(SimdOp::I16x8ExtaddPairwiseI8x16S): 1524 case uint32_t(SimdOp::I16x8ExtaddPairwiseI8x16U): 1525 case uint32_t(SimdOp::I32x4ExtaddPairwiseI16x8S): 1526 case uint32_t(SimdOp::I32x4ExtaddPairwiseI16x8U): { 1527 if (!iter.readUnary(ValType::V128, ¬hing)) { 1528 return false; 1529 } 1530 break; 1531 } 1532 1533 case uint32_t(SimdOp::I8x16Shl): 1534 case uint32_t(SimdOp::I8x16ShrS): 1535 case uint32_t(SimdOp::I8x16ShrU): 1536 case uint32_t(SimdOp::I16x8Shl): 1537 case uint32_t(SimdOp::I16x8ShrS): 1538 case uint32_t(SimdOp::I16x8ShrU): 1539 case uint32_t(SimdOp::I32x4Shl): 1540 case uint32_t(SimdOp::I32x4ShrS): 1541 case uint32_t(SimdOp::I32x4ShrU): 1542 case uint32_t(SimdOp::I64x2Shl): 1543 case uint32_t(SimdOp::I64x2ShrS): 1544 case uint32_t(SimdOp::I64x2ShrU): { 1545 if (!iter.readVectorShift(¬hing, ¬hing)) { 1546 return false; 1547 } 1548 break; 1549 } 1550 1551 case uint32_t(SimdOp::V128Bitselect): { 1552 if (!iter.readTernary(ValType::V128, ¬hing, ¬hing, 1553 ¬hing)) { 1554 return false; 1555 } 1556 break; 1557 } 1558 1559 case uint32_t(SimdOp::I8x16Shuffle): { 1560 V128 mask; 1561 if (!iter.readVectorShuffle(¬hing, ¬hing, &mask)) { 1562 return false; 1563 } 1564 dumper.dumpVectorMask(mask); 1565 break; 1566 } 1567 1568 case uint32_t(SimdOp::V128Const): { 1569 V128 constant; 1570 if (!iter.readV128Const(&constant)) { 1571 return false; 1572 } 1573 dumper.dumpV128Const(constant); 1574 break; 1575 } 1576 1577 case uint32_t(SimdOp::V128Load): { 1578 LinearMemoryAddress<Nothing> addr; 1579 if (!iter.readLoad(ValType::V128, 16, &addr)) { 1580 return false; 1581 } 1582 dumper.dumpLinearMemoryAddress(addr); 1583 break; 1584 } 1585 1586 case uint32_t(SimdOp::V128Load8Splat): { 1587 LinearMemoryAddress<Nothing> addr; 1588 if (!iter.readLoadSplat(1, &addr)) { 1589 return false; 1590 } 1591 dumper.dumpLinearMemoryAddress(addr); 1592 break; 1593 } 1594 1595 case uint32_t(SimdOp::V128Load16Splat): { 1596 LinearMemoryAddress<Nothing> addr; 1597 if 
(!iter.readLoadSplat(2, &addr)) { 1598 return false; 1599 } 1600 dumper.dumpLinearMemoryAddress(addr); 1601 break; 1602 } 1603 1604 case uint32_t(SimdOp::V128Load32Splat): { 1605 LinearMemoryAddress<Nothing> addr; 1606 if (!iter.readLoadSplat(4, &addr)) { 1607 return false; 1608 } 1609 dumper.dumpLinearMemoryAddress(addr); 1610 break; 1611 } 1612 1613 case uint32_t(SimdOp::V128Load64Splat): { 1614 LinearMemoryAddress<Nothing> addr; 1615 if (!iter.readLoadSplat(8, &addr)) { 1616 return false; 1617 } 1618 dumper.dumpLinearMemoryAddress(addr); 1619 break; 1620 } 1621 1622 case uint32_t(SimdOp::V128Load8x8S): 1623 case uint32_t(SimdOp::V128Load8x8U): { 1624 LinearMemoryAddress<Nothing> addr; 1625 if (!iter.readLoadExtend(&addr)) { 1626 return false; 1627 } 1628 dumper.dumpLinearMemoryAddress(addr); 1629 break; 1630 } 1631 1632 case uint32_t(SimdOp::V128Load16x4S): 1633 case uint32_t(SimdOp::V128Load16x4U): { 1634 LinearMemoryAddress<Nothing> addr; 1635 if (!iter.readLoadExtend(&addr)) { 1636 return false; 1637 } 1638 dumper.dumpLinearMemoryAddress(addr); 1639 break; 1640 } 1641 1642 case uint32_t(SimdOp::V128Load32x2S): 1643 case uint32_t(SimdOp::V128Load32x2U): { 1644 LinearMemoryAddress<Nothing> addr; 1645 if (!iter.readLoadExtend(&addr)) { 1646 return false; 1647 } 1648 dumper.dumpLinearMemoryAddress(addr); 1649 break; 1650 } 1651 1652 case uint32_t(SimdOp::V128Store): { 1653 LinearMemoryAddress<Nothing> addr; 1654 if (!iter.readStore(ValType::V128, 16, &addr, ¬hing)) { 1655 return false; 1656 } 1657 dumper.dumpLinearMemoryAddress(addr); 1658 break; 1659 } 1660 1661 case uint32_t(SimdOp::V128Load32Zero): { 1662 LinearMemoryAddress<Nothing> addr; 1663 if (!iter.readLoadSplat(4, &addr)) { 1664 return false; 1665 } 1666 dumper.dumpLinearMemoryAddress(addr); 1667 break; 1668 } 1669 1670 case uint32_t(SimdOp::V128Load64Zero): { 1671 LinearMemoryAddress<Nothing> addr; 1672 if (!iter.readLoadSplat(8, &addr)) { 1673 return false; 1674 } 1675 
dumper.dumpLinearMemoryAddress(addr); 1676 break; 1677 } 1678 1679 case uint32_t(SimdOp::V128Load8Lane): { 1680 LinearMemoryAddress<Nothing> addr; 1681 if (!iter.readLoadLane(1, &addr, &laneIndex, ¬hing)) { 1682 return false; 1683 } 1684 dumper.dumpLinearMemoryAddress(addr); 1685 dumper.dumpLaneIndex(laneIndex); 1686 break; 1687 } 1688 1689 case uint32_t(SimdOp::V128Load16Lane): { 1690 LinearMemoryAddress<Nothing> addr; 1691 if (!iter.readLoadLane(2, &addr, &laneIndex, ¬hing)) { 1692 return false; 1693 } 1694 dumper.dumpLinearMemoryAddress(addr); 1695 dumper.dumpLaneIndex(laneIndex); 1696 break; 1697 } 1698 1699 case uint32_t(SimdOp::V128Load32Lane): { 1700 LinearMemoryAddress<Nothing> addr; 1701 if (!iter.readLoadLane(4, &addr, &laneIndex, ¬hing)) { 1702 return false; 1703 } 1704 dumper.dumpLinearMemoryAddress(addr); 1705 dumper.dumpLaneIndex(laneIndex); 1706 break; 1707 } 1708 1709 case uint32_t(SimdOp::V128Load64Lane): { 1710 LinearMemoryAddress<Nothing> addr; 1711 if (!iter.readLoadLane(8, &addr, &laneIndex, ¬hing)) { 1712 return false; 1713 } 1714 dumper.dumpLinearMemoryAddress(addr); 1715 dumper.dumpLaneIndex(laneIndex); 1716 break; 1717 } 1718 1719 case uint32_t(SimdOp::V128Store8Lane): { 1720 LinearMemoryAddress<Nothing> addr; 1721 if (!iter.readStoreLane(1, &addr, &laneIndex, ¬hing)) { 1722 return false; 1723 } 1724 dumper.dumpLinearMemoryAddress(addr); 1725 dumper.dumpLaneIndex(laneIndex); 1726 break; 1727 } 1728 1729 case uint32_t(SimdOp::V128Store16Lane): { 1730 LinearMemoryAddress<Nothing> addr; 1731 if (!iter.readStoreLane(2, &addr, &laneIndex, ¬hing)) { 1732 return false; 1733 } 1734 dumper.dumpLinearMemoryAddress(addr); 1735 dumper.dumpLaneIndex(laneIndex); 1736 break; 1737 } 1738 1739 case uint32_t(SimdOp::V128Store32Lane): { 1740 LinearMemoryAddress<Nothing> addr; 1741 if (!iter.readStoreLane(4, &addr, &laneIndex, ¬hing)) { 1742 return false; 1743 } 1744 dumper.dumpLinearMemoryAddress(addr); 1745 dumper.dumpLaneIndex(laneIndex); 1746 break; 1747 } 
1748 1749 case uint32_t(SimdOp::V128Store64Lane): { 1750 LinearMemoryAddress<Nothing> addr; 1751 if (!iter.readStoreLane(8, &addr, &laneIndex, ¬hing)) { 1752 return false; 1753 } 1754 dumper.dumpLinearMemoryAddress(addr); 1755 dumper.dumpLaneIndex(laneIndex); 1756 break; 1757 } 1758 1759 # ifdef ENABLE_WASM_RELAXED_SIMD 1760 case uint32_t(SimdOp::F32x4RelaxedMadd): 1761 case uint32_t(SimdOp::F32x4RelaxedNmadd): 1762 case uint32_t(SimdOp::F64x2RelaxedMadd): 1763 case uint32_t(SimdOp::F64x2RelaxedNmadd): 1764 case uint32_t(SimdOp::I8x16RelaxedLaneSelect): 1765 case uint32_t(SimdOp::I16x8RelaxedLaneSelect): 1766 case uint32_t(SimdOp::I32x4RelaxedLaneSelect): 1767 case uint32_t(SimdOp::I64x2RelaxedLaneSelect): 1768 case uint32_t(SimdOp::I32x4RelaxedDotI8x16I7x16AddS): { 1769 if (!codeMeta.v128RelaxedEnabled()) { 1770 return iter.unrecognizedOpcode(&op); 1771 } 1772 if (!iter.readTernary(ValType::V128, ¬hing, ¬hing, 1773 ¬hing)) { 1774 return false; 1775 } 1776 break; 1777 } 1778 case uint32_t(SimdOp::F32x4RelaxedMin): 1779 case uint32_t(SimdOp::F32x4RelaxedMax): 1780 case uint32_t(SimdOp::F64x2RelaxedMin): 1781 case uint32_t(SimdOp::F64x2RelaxedMax): 1782 case uint32_t(SimdOp::I16x8RelaxedQ15MulrS): 1783 case uint32_t(SimdOp::I16x8RelaxedDotI8x16I7x16S): { 1784 if (!codeMeta.v128RelaxedEnabled()) { 1785 return iter.unrecognizedOpcode(&op); 1786 } 1787 if (!iter.readBinary(ValType::V128, ¬hing, ¬hing)) { 1788 return false; 1789 } 1790 break; 1791 } 1792 case uint32_t(SimdOp::I32x4RelaxedTruncF32x4S): 1793 case uint32_t(SimdOp::I32x4RelaxedTruncF32x4U): 1794 case uint32_t(SimdOp::I32x4RelaxedTruncF64x2SZero): 1795 case uint32_t(SimdOp::I32x4RelaxedTruncF64x2UZero): { 1796 if (!codeMeta.v128RelaxedEnabled()) { 1797 return iter.unrecognizedOpcode(&op); 1798 } 1799 if (!iter.readUnary(ValType::V128, ¬hing)) { 1800 return false; 1801 } 1802 break; 1803 } 1804 case uint32_t(SimdOp::I8x16RelaxedSwizzle): { 1805 if (!codeMeta.v128RelaxedEnabled()) { 1806 return 
iter.unrecognizedOpcode(&op); 1807 } 1808 if (!iter.readBinary(ValType::V128, ¬hing, ¬hing)) { 1809 return false; 1810 } 1811 break; 1812 } 1813 # endif 1814 1815 default: 1816 return iter.unrecognizedOpcode(&op); 1817 } 1818 break; 1819 } 1820 #endif // ENABLE_WASM_SIMD 1821 1822 case uint16_t(Op::MiscPrefix): { 1823 switch (op.b1) { 1824 case uint32_t(MiscOp::I32TruncSatF32S): 1825 case uint32_t(MiscOp::I32TruncSatF32U): { 1826 if (!iter.readConversion(ValType::F32, ValType::I32, ¬hing)) { 1827 return false; 1828 } 1829 break; 1830 } 1831 case uint32_t(MiscOp::I32TruncSatF64S): 1832 case uint32_t(MiscOp::I32TruncSatF64U): { 1833 if (!iter.readConversion(ValType::F64, ValType::I32, ¬hing)) { 1834 return false; 1835 } 1836 break; 1837 } 1838 case uint32_t(MiscOp::I64TruncSatF32S): 1839 case uint32_t(MiscOp::I64TruncSatF32U): { 1840 if (!iter.readConversion(ValType::F32, ValType::I64, ¬hing)) { 1841 return false; 1842 } 1843 break; 1844 } 1845 case uint32_t(MiscOp::I64TruncSatF64S): 1846 case uint32_t(MiscOp::I64TruncSatF64U): { 1847 if (!iter.readConversion(ValType::F64, ValType::I64, ¬hing)) { 1848 return false; 1849 } 1850 break; 1851 } 1852 case uint32_t(MiscOp::MemoryCopy): { 1853 uint32_t destMemIndex; 1854 uint32_t srcMemIndex; 1855 if (!iter.readMemOrTableCopy(/*isMem=*/true, &destMemIndex, 1856 ¬hing, &srcMemIndex, ¬hing, 1857 ¬hing)) { 1858 return false; 1859 } 1860 dumper.dumpMemoryIndex(destMemIndex); 1861 dumper.dumpMemoryIndex(srcMemIndex); 1862 break; 1863 } 1864 case uint32_t(MiscOp::DataDrop): { 1865 uint32_t dataIndex; 1866 if (!iter.readDataOrElemDrop(/*isData=*/true, &dataIndex)) { 1867 return false; 1868 } 1869 dumper.dumpDataIndex(dataIndex); 1870 break; 1871 } 1872 case uint32_t(MiscOp::MemoryFill): { 1873 uint32_t memoryIndex; 1874 if (!iter.readMemFill(&memoryIndex, ¬hing, ¬hing, ¬hing)) { 1875 return false; 1876 } 1877 dumper.dumpMemoryIndex(memoryIndex); 1878 break; 1879 } 1880 case uint32_t(MiscOp::MemoryInit): { 1881 uint32_t dataIndex; 
1882 uint32_t memoryIndex; 1883 if (!iter.readMemOrTableInit(/*isMem=*/true, &dataIndex, 1884 &memoryIndex, ¬hing, ¬hing, 1885 ¬hing)) { 1886 return false; 1887 } 1888 dumper.dumpMemoryIndex(memoryIndex); 1889 dumper.dumpDataIndex(dataIndex); 1890 break; 1891 } 1892 case uint32_t(MiscOp::TableCopy): { 1893 uint32_t destTableIndex; 1894 uint32_t srcTableIndex; 1895 if (!iter.readMemOrTableCopy( 1896 /*isMem=*/false, &destTableIndex, ¬hing, &srcTableIndex, 1897 ¬hing, ¬hing)) { 1898 return false; 1899 } 1900 dumper.dumpTableIndex(destTableIndex); 1901 dumper.dumpTableIndex(srcTableIndex); 1902 break; 1903 } 1904 case uint32_t(MiscOp::ElemDrop): { 1905 uint32_t elemIndex; 1906 if (!iter.readDataOrElemDrop(/*isData=*/false, &elemIndex)) { 1907 return false; 1908 } 1909 dumper.dumpElemIndex(elemIndex); 1910 break; 1911 } 1912 case uint32_t(MiscOp::TableInit): { 1913 uint32_t elemIndex; 1914 uint32_t tableIndex; 1915 if (!iter.readMemOrTableInit(/*isMem=*/false, &elemIndex, 1916 &tableIndex, ¬hing, ¬hing, 1917 ¬hing)) { 1918 return false; 1919 } 1920 dumper.dumpTableIndex(tableIndex); 1921 dumper.dumpElemIndex(elemIndex); 1922 break; 1923 } 1924 case uint32_t(MiscOp::TableFill): { 1925 uint32_t tableIndex; 1926 if (!iter.readTableFill(&tableIndex, ¬hing, ¬hing, 1927 ¬hing)) { 1928 return false; 1929 } 1930 dumper.dumpTableIndex(tableIndex); 1931 break; 1932 } 1933 #ifdef ENABLE_WASM_MEMORY_CONTROL 1934 case uint32_t(MiscOp::MemoryDiscard): { 1935 if (!codeMeta.memoryControlEnabled()) { 1936 return iter.unrecognizedOpcode(&op); 1937 } 1938 uint32_t memoryIndex; 1939 if (!iter.readMemDiscard(&memoryIndex, ¬hing, ¬hing)) { 1940 return false; 1941 } 1942 dumper.dumpMemoryIndex(memoryIndex); 1943 break; 1944 } 1945 #endif 1946 case uint32_t(MiscOp::TableGrow): { 1947 uint32_t tableIndex; 1948 if (!iter.readTableGrow(&tableIndex, ¬hing, ¬hing)) { 1949 return false; 1950 } 1951 dumper.dumpTableIndex(tableIndex); 1952 break; 1953 } 1954 case uint32_t(MiscOp::TableSize): { 1955 
uint32_t tableIndex; 1956 if (!iter.readTableSize(&tableIndex)) { 1957 return false; 1958 } 1959 dumper.dumpTableIndex(tableIndex); 1960 break; 1961 } 1962 default: 1963 return iter.unrecognizedOpcode(&op); 1964 } 1965 break; 1966 } 1967 case uint16_t(Op::RefAsNonNull): { 1968 if (!iter.readRefAsNonNull(¬hing)) { 1969 return false; 1970 } 1971 break; 1972 } 1973 case uint16_t(Op::BrOnNull): { 1974 uint32_t depth; 1975 if (!iter.readBrOnNull(&depth, &resultType, ¬hings, ¬hing)) { 1976 return false; 1977 } 1978 dumper.dumpBlockDepth(depth); 1979 break; 1980 } 1981 case uint16_t(Op::BrOnNonNull): { 1982 uint32_t depth; 1983 if (!iter.readBrOnNonNull(&depth, &resultType, ¬hings, ¬hing)) { 1984 return false; 1985 } 1986 dumper.dumpBlockDepth(depth); 1987 break; 1988 } 1989 case uint16_t(Op::RefEq): { 1990 if (!iter.readComparison(RefType::eq(), ¬hing, ¬hing)) { 1991 return false; 1992 } 1993 break; 1994 } 1995 case uint16_t(Op::RefFunc): { 1996 uint32_t funcIndex; 1997 if (!iter.readRefFunc(&funcIndex)) { 1998 return false; 1999 } 2000 dumper.dumpFuncIndex(funcIndex); 2001 break; 2002 } 2003 case uint16_t(Op::RefNull): { 2004 RefType type; 2005 if (!iter.readRefNull(&type)) { 2006 return false; 2007 } 2008 dumper.dumpHeapType(type); 2009 break; 2010 } 2011 case uint16_t(Op::RefIsNull): { 2012 Nothing nothing; 2013 if (!iter.readRefIsNull(¬hing)) { 2014 return false; 2015 } 2016 break; 2017 } 2018 case uint16_t(Op::Try): { 2019 if (!iter.readTry(&blockType)) { 2020 return false; 2021 } 2022 dumper.dumpBlockType(blockType); 2023 break; 2024 } 2025 case uint16_t(Op::Catch): { 2026 LabelKind unusedKind; 2027 uint32_t tagIndex; 2028 if (!iter.readCatch(&unusedKind, &tagIndex, &resultType, &resultType, 2029 ¬hings)) { 2030 return false; 2031 } 2032 dumper.dumpTagIndex(tagIndex); 2033 break; 2034 } 2035 case uint16_t(Op::CatchAll): { 2036 LabelKind unusedKind; 2037 if (!iter.readCatchAll(&unusedKind, &resultType, &resultType, 2038 ¬hings)) { 2039 return false; 2040 } 2041 
break; 2042 } 2043 case uint16_t(Op::Delegate): { 2044 uint32_t depth; 2045 if (!iter.readDelegate(&depth, &resultType, ¬hings)) { 2046 return false; 2047 } 2048 iter.popDelegate(); 2049 dumper.dumpBlockDepth(depth); 2050 break; 2051 } 2052 case uint16_t(Op::Throw): { 2053 uint32_t tagIndex; 2054 if (!iter.readThrow(&tagIndex, ¬hings)) { 2055 return false; 2056 } 2057 dumper.dumpTagIndex(tagIndex); 2058 break; 2059 } 2060 case uint16_t(Op::Rethrow): { 2061 uint32_t depth; 2062 if (!iter.readRethrow(&depth)) { 2063 return false; 2064 } 2065 dumper.dumpBlockDepth(depth); 2066 break; 2067 } 2068 case uint16_t(Op::ThrowRef): { 2069 if (!iter.readThrowRef(¬hing)) { 2070 return false; 2071 } 2072 break; 2073 } 2074 case uint16_t(Op::TryTable): { 2075 TryTableCatchVector catches; 2076 if (!iter.readTryTable(&blockType, &catches)) { 2077 return false; 2078 } 2079 dumper.dumpTryTableCatches(catches); 2080 break; 2081 } 2082 case uint16_t(Op::ThreadPrefix): { 2083 // Though thread ops can be used on nonshared memories, we make them 2084 // unavailable if shared memory has been disabled in the prefs, for 2085 // maximum predictability and safety and consistency with JS. 
2086 if (codeMeta.sharedMemoryEnabled() == Shareable::False) { 2087 return iter.unrecognizedOpcode(&op); 2088 } 2089 switch (op.b1) { 2090 case uint32_t(ThreadOp::Notify): { 2091 LinearMemoryAddress<Nothing> addr; 2092 if (!iter.readNotify(&addr, ¬hing)) { 2093 return false; 2094 } 2095 dumper.dumpLinearMemoryAddress(addr); 2096 break; 2097 } 2098 case uint32_t(ThreadOp::I32Wait): { 2099 LinearMemoryAddress<Nothing> addr; 2100 if (!iter.readWait(&addr, ValType::I32, 4, ¬hing, ¬hing)) { 2101 return false; 2102 } 2103 dumper.dumpLinearMemoryAddress(addr); 2104 break; 2105 } 2106 case uint32_t(ThreadOp::I64Wait): { 2107 LinearMemoryAddress<Nothing> addr; 2108 if (!iter.readWait(&addr, ValType::I64, 8, ¬hing, ¬hing)) { 2109 return false; 2110 } 2111 dumper.dumpLinearMemoryAddress(addr); 2112 break; 2113 } 2114 case uint32_t(ThreadOp::Fence): { 2115 if (!iter.readFence()) { 2116 return false; 2117 } 2118 break; 2119 } 2120 case uint32_t(ThreadOp::I32AtomicLoad): { 2121 LinearMemoryAddress<Nothing> addr; 2122 if (!iter.readAtomicLoad(&addr, ValType::I32, 4)) { 2123 return false; 2124 } 2125 dumper.dumpLinearMemoryAddress(addr); 2126 break; 2127 } 2128 case uint32_t(ThreadOp::I64AtomicLoad): { 2129 LinearMemoryAddress<Nothing> addr; 2130 if (!iter.readAtomicLoad(&addr, ValType::I64, 8)) { 2131 return false; 2132 } 2133 dumper.dumpLinearMemoryAddress(addr); 2134 break; 2135 } 2136 case uint32_t(ThreadOp::I32AtomicLoad8U): { 2137 LinearMemoryAddress<Nothing> addr; 2138 if (!iter.readAtomicLoad(&addr, ValType::I32, 1)) { 2139 return false; 2140 } 2141 dumper.dumpLinearMemoryAddress(addr); 2142 break; 2143 } 2144 case uint32_t(ThreadOp::I32AtomicLoad16U): { 2145 LinearMemoryAddress<Nothing> addr; 2146 if (!iter.readAtomicLoad(&addr, ValType::I32, 2)) { 2147 return false; 2148 } 2149 dumper.dumpLinearMemoryAddress(addr); 2150 break; 2151 } 2152 case uint32_t(ThreadOp::I64AtomicLoad8U): { 2153 LinearMemoryAddress<Nothing> addr; 2154 if (!iter.readAtomicLoad(&addr, ValType::I64, 
1)) { 2155 return false; 2156 } 2157 dumper.dumpLinearMemoryAddress(addr); 2158 break; 2159 } 2160 case uint32_t(ThreadOp::I64AtomicLoad16U): { 2161 LinearMemoryAddress<Nothing> addr; 2162 if (!iter.readAtomicLoad(&addr, ValType::I64, 2)) { 2163 return false; 2164 } 2165 dumper.dumpLinearMemoryAddress(addr); 2166 break; 2167 } 2168 case uint32_t(ThreadOp::I64AtomicLoad32U): { 2169 LinearMemoryAddress<Nothing> addr; 2170 if (!iter.readAtomicLoad(&addr, ValType::I64, 4)) { 2171 return false; 2172 } 2173 dumper.dumpLinearMemoryAddress(addr); 2174 break; 2175 } 2176 case uint32_t(ThreadOp::I32AtomicStore): { 2177 LinearMemoryAddress<Nothing> addr; 2178 if (!iter.readAtomicStore(&addr, ValType::I32, 4, ¬hing)) { 2179 return false; 2180 } 2181 dumper.dumpLinearMemoryAddress(addr); 2182 break; 2183 } 2184 case uint32_t(ThreadOp::I64AtomicStore): { 2185 LinearMemoryAddress<Nothing> addr; 2186 if (!iter.readAtomicStore(&addr, ValType::I64, 8, ¬hing)) { 2187 return false; 2188 } 2189 dumper.dumpLinearMemoryAddress(addr); 2190 break; 2191 } 2192 case uint32_t(ThreadOp::I32AtomicStore8U): { 2193 LinearMemoryAddress<Nothing> addr; 2194 if (!iter.readAtomicStore(&addr, ValType::I32, 1, ¬hing)) { 2195 return false; 2196 } 2197 dumper.dumpLinearMemoryAddress(addr); 2198 break; 2199 } 2200 case uint32_t(ThreadOp::I32AtomicStore16U): { 2201 LinearMemoryAddress<Nothing> addr; 2202 if (!iter.readAtomicStore(&addr, ValType::I32, 2, ¬hing)) { 2203 return false; 2204 } 2205 dumper.dumpLinearMemoryAddress(addr); 2206 break; 2207 } 2208 case uint32_t(ThreadOp::I64AtomicStore8U): { 2209 LinearMemoryAddress<Nothing> addr; 2210 if (!iter.readAtomicStore(&addr, ValType::I64, 1, ¬hing)) { 2211 return false; 2212 } 2213 dumper.dumpLinearMemoryAddress(addr); 2214 break; 2215 } 2216 case uint32_t(ThreadOp::I64AtomicStore16U): { 2217 LinearMemoryAddress<Nothing> addr; 2218 if (!iter.readAtomicStore(&addr, ValType::I64, 2, ¬hing)) { 2219 return false; 2220 } 2221 
dumper.dumpLinearMemoryAddress(addr); 2222 break; 2223 } 2224 case uint32_t(ThreadOp::I64AtomicStore32U): { 2225 LinearMemoryAddress<Nothing> addr; 2226 if (!iter.readAtomicStore(&addr, ValType::I64, 4, ¬hing)) { 2227 return false; 2228 } 2229 dumper.dumpLinearMemoryAddress(addr); 2230 break; 2231 } 2232 case uint32_t(ThreadOp::I32AtomicAdd): 2233 case uint32_t(ThreadOp::I32AtomicSub): 2234 case uint32_t(ThreadOp::I32AtomicAnd): 2235 case uint32_t(ThreadOp::I32AtomicOr): 2236 case uint32_t(ThreadOp::I32AtomicXor): 2237 case uint32_t(ThreadOp::I32AtomicXchg): { 2238 LinearMemoryAddress<Nothing> addr; 2239 if (!iter.readAtomicRMW(&addr, ValType::I32, 4, ¬hing)) { 2240 return false; 2241 } 2242 dumper.dumpLinearMemoryAddress(addr); 2243 break; 2244 } 2245 case uint32_t(ThreadOp::I64AtomicAdd): 2246 case uint32_t(ThreadOp::I64AtomicSub): 2247 case uint32_t(ThreadOp::I64AtomicAnd): 2248 case uint32_t(ThreadOp::I64AtomicOr): 2249 case uint32_t(ThreadOp::I64AtomicXor): 2250 case uint32_t(ThreadOp::I64AtomicXchg): { 2251 LinearMemoryAddress<Nothing> addr; 2252 if (!iter.readAtomicRMW(&addr, ValType::I64, 8, ¬hing)) { 2253 return false; 2254 } 2255 dumper.dumpLinearMemoryAddress(addr); 2256 break; 2257 } 2258 case uint32_t(ThreadOp::I32AtomicAdd8U): 2259 case uint32_t(ThreadOp::I32AtomicSub8U): 2260 case uint32_t(ThreadOp::I32AtomicAnd8U): 2261 case uint32_t(ThreadOp::I32AtomicOr8U): 2262 case uint32_t(ThreadOp::I32AtomicXor8U): 2263 case uint32_t(ThreadOp::I32AtomicXchg8U): { 2264 LinearMemoryAddress<Nothing> addr; 2265 if (!iter.readAtomicRMW(&addr, ValType::I32, 1, ¬hing)) { 2266 return false; 2267 } 2268 dumper.dumpLinearMemoryAddress(addr); 2269 break; 2270 } 2271 case uint32_t(ThreadOp::I32AtomicAdd16U): 2272 case uint32_t(ThreadOp::I32AtomicSub16U): 2273 case uint32_t(ThreadOp::I32AtomicAnd16U): 2274 case uint32_t(ThreadOp::I32AtomicOr16U): 2275 case uint32_t(ThreadOp::I32AtomicXor16U): 2276 case uint32_t(ThreadOp::I32AtomicXchg16U): { 2277 
LinearMemoryAddress<Nothing> addr; 2278 if (!iter.readAtomicRMW(&addr, ValType::I32, 2, ¬hing)) { 2279 return false; 2280 } 2281 dumper.dumpLinearMemoryAddress(addr); 2282 break; 2283 } 2284 case uint32_t(ThreadOp::I64AtomicAdd8U): 2285 case uint32_t(ThreadOp::I64AtomicSub8U): 2286 case uint32_t(ThreadOp::I64AtomicAnd8U): 2287 case uint32_t(ThreadOp::I64AtomicOr8U): 2288 case uint32_t(ThreadOp::I64AtomicXor8U): 2289 case uint32_t(ThreadOp::I64AtomicXchg8U): { 2290 LinearMemoryAddress<Nothing> addr; 2291 if (!iter.readAtomicRMW(&addr, ValType::I64, 1, ¬hing)) { 2292 return false; 2293 } 2294 dumper.dumpLinearMemoryAddress(addr); 2295 break; 2296 } 2297 case uint32_t(ThreadOp::I64AtomicAdd16U): 2298 case uint32_t(ThreadOp::I64AtomicSub16U): 2299 case uint32_t(ThreadOp::I64AtomicAnd16U): 2300 case uint32_t(ThreadOp::I64AtomicOr16U): 2301 case uint32_t(ThreadOp::I64AtomicXor16U): 2302 case uint32_t(ThreadOp::I64AtomicXchg16U): { 2303 LinearMemoryAddress<Nothing> addr; 2304 if (!iter.readAtomicRMW(&addr, ValType::I64, 2, ¬hing)) { 2305 return false; 2306 } 2307 dumper.dumpLinearMemoryAddress(addr); 2308 break; 2309 } 2310 case uint32_t(ThreadOp::I64AtomicAdd32U): 2311 case uint32_t(ThreadOp::I64AtomicSub32U): 2312 case uint32_t(ThreadOp::I64AtomicAnd32U): 2313 case uint32_t(ThreadOp::I64AtomicOr32U): 2314 case uint32_t(ThreadOp::I64AtomicXor32U): 2315 case uint32_t(ThreadOp::I64AtomicXchg32U): { 2316 LinearMemoryAddress<Nothing> addr; 2317 if (!iter.readAtomicRMW(&addr, ValType::I64, 4, ¬hing)) { 2318 return false; 2319 } 2320 dumper.dumpLinearMemoryAddress(addr); 2321 break; 2322 } 2323 case uint32_t(ThreadOp::I32AtomicCmpXchg): { 2324 LinearMemoryAddress<Nothing> addr; 2325 if (!iter.readAtomicCmpXchg(&addr, ValType::I32, 4, ¬hing, 2326 ¬hing)) { 2327 return false; 2328 } 2329 dumper.dumpLinearMemoryAddress(addr); 2330 break; 2331 } 2332 case uint32_t(ThreadOp::I64AtomicCmpXchg): { 2333 LinearMemoryAddress<Nothing> addr; 2334 if (!iter.readAtomicCmpXchg(&addr, 
ValType::I64, 8, ¬hing, 2335 ¬hing)) { 2336 return false; 2337 } 2338 dumper.dumpLinearMemoryAddress(addr); 2339 break; 2340 } 2341 case uint32_t(ThreadOp::I32AtomicCmpXchg8U): { 2342 LinearMemoryAddress<Nothing> addr; 2343 if (!iter.readAtomicCmpXchg(&addr, ValType::I32, 1, ¬hing, 2344 ¬hing)) { 2345 return false; 2346 } 2347 dumper.dumpLinearMemoryAddress(addr); 2348 break; 2349 } 2350 case uint32_t(ThreadOp::I32AtomicCmpXchg16U): { 2351 LinearMemoryAddress<Nothing> addr; 2352 if (!iter.readAtomicCmpXchg(&addr, ValType::I32, 2, ¬hing, 2353 ¬hing)) { 2354 return false; 2355 } 2356 dumper.dumpLinearMemoryAddress(addr); 2357 break; 2358 } 2359 case uint32_t(ThreadOp::I64AtomicCmpXchg8U): { 2360 LinearMemoryAddress<Nothing> addr; 2361 if (!iter.readAtomicCmpXchg(&addr, ValType::I64, 1, ¬hing, 2362 ¬hing)) { 2363 return false; 2364 } 2365 dumper.dumpLinearMemoryAddress(addr); 2366 break; 2367 } 2368 case uint32_t(ThreadOp::I64AtomicCmpXchg16U): { 2369 LinearMemoryAddress<Nothing> addr; 2370 if (!iter.readAtomicCmpXchg(&addr, ValType::I64, 2, ¬hing, 2371 ¬hing)) { 2372 return false; 2373 } 2374 dumper.dumpLinearMemoryAddress(addr); 2375 break; 2376 } 2377 case uint32_t(ThreadOp::I64AtomicCmpXchg32U): { 2378 LinearMemoryAddress<Nothing> addr; 2379 if (!iter.readAtomicCmpXchg(&addr, ValType::I64, 4, ¬hing, 2380 ¬hing)) { 2381 return false; 2382 } 2383 dumper.dumpLinearMemoryAddress(addr); 2384 break; 2385 } 2386 default: 2387 return iter.unrecognizedOpcode(&op); 2388 } 2389 break; 2390 } 2391 case uint16_t(Op::MozPrefix): 2392 return iter.unrecognizedOpcode(&op); 2393 default: 2394 return iter.unrecognizedOpcode(&op); 2395 } 2396 2397 dumper.dumpOpEnd(); 2398 } 2399 2400 MOZ_CRASH("unreachable"); 2401 } 2402 2403 template bool wasm::ValidateOps<NopOpDumper>(ValidatingOpIter& iter, 2404 NopOpDumper& dumper, 2405 const CodeMetadata& codeMeta); 2406 template bool wasm::ValidateOps<OpDumper>(ValidatingOpIter& iter, 2407 OpDumper& dumper, 2408 const CodeMetadata& codeMeta); 

// Validates the body of a single function: decodes its local declarations,
// then validates every opcode against the module's metadata using a
// validating op iterator driven with a no-op dumper (no output is produced).
bool wasm::ValidateFunctionBody(const CodeMetadata& codeMeta,
                                uint32_t funcIndex, uint32_t bodySize,
                                Decoder& d) {
  const uint8_t* bodyBegin = d.currentPosition();
  const uint8_t* bodyEnd = bodyBegin + bodySize;

  // The locals vector is the function's parameters followed by its declared
  // locals (see DecodeLocalEntriesWithParams).
  ValTypeVector locals;
  if (!DecodeLocalEntriesWithParams(d, codeMeta, funcIndex, &locals)) {
    return false;
  }

  ValidatingOpIter iter(codeMeta, d, locals);
  NopOpDumper visitor;

  if (!iter.startFunction(funcIndex)) {
    return false;
  }

  if (!ValidateOps(iter, visitor, codeMeta)) {
    return false;
  }

  // endFunction is handed bodyEnd, presumably to check that decoding
  // consumed exactly bodySize bytes — see WasmOpIter.h to confirm.
  return iter.endFunction(bodyEnd);
}

// Section macros.

// Checks the 8-byte module preamble: enforces the implementation-defined
// module size limit, then matches the magic number and the binary encoding
// version.
static bool DecodePreamble(Decoder& d) {
  if (d.bytesRemain() > MaxModuleBytes) {
    return d.fail("module too big");
  }

  uint32_t u32;
  if (!d.readFixedU32(&u32) || u32 != MagicNumber) {
    return d.fail("failed to match magic number");
  }

  if (!d.readFixedU32(&u32) || u32 != EncodingVersion) {
    return d.failf("binary version 0x%" PRIx32
                   " does not match expected version 0x%" PRIx32,
                   u32, EncodingVersion);
  }

  return true;
}

// Reads `count` value types into `valTypes`, validating each one against the
// module's type context and enabled features.
static bool DecodeValTypeVector(Decoder& d, CodeMetadata* codeMeta,
                                uint32_t count, ValTypeVector* valTypes) {
  if (!valTypes->resize(count)) {
    return false;
  }

  for (uint32_t i = 0; i < count; i++) {
    if (!d.readValType(*codeMeta->types, codeMeta->features(),
                       &(*valTypes)[i])) {
      return false;
    }
  }
  return true;
}

// Decodes a function signature: a parameter count and types (bounded by
// MaxParams), then a result count and types (bounded by MaxResults).
static bool DecodeFuncType(Decoder& d, CodeMetadata* codeMeta,
                           FuncType* funcType) {
  uint32_t numArgs;
  if (!d.readVarU32(&numArgs)) {
    return d.fail("bad number of function args");
  }
  if (numArgs > MaxParams) {
    return d.fail("too many arguments in signature");
  }
  ValTypeVector args;
  if (!DecodeValTypeVector(d, codeMeta, numArgs, &args)) {
    return false;
  }

  uint32_t numResults;
  if (!d.readVarU32(&numResults)) {
    return d.fail("bad number of function returns");
  }
  if (numResults > MaxResults) {
    return d.fail("too many returns in signature");
  }
  ValTypeVector results;
  if (!DecodeValTypeVector(d, codeMeta, numResults, &results)) {
    return false;
  }

  *funcType = FuncType(std::move(args), std::move(results));
  return true;
}

// Decodes a struct type definition: a field count (bounded by
// MaxStructFields) followed by each field's storage type and mutability flag
// byte.
static bool DecodeStructType(Decoder& d, CodeMetadata* codeMeta,
                             StructType* structType) {
  uint32_t numFields;
  if (!d.readVarU32(&numFields)) {
    return d.fail("Bad number of fields");
  }

  if (numFields > MaxStructFields) {
    return d.fail("too many fields in struct");
  }

  FieldTypeVector fields;
  if (!fields.resize(numFields)) {
    return false;
  }

  for (uint32_t i = 0; i < numFields; i++) {
    if (!d.readStorageType(*codeMeta->types, codeMeta->features(),
                           &fields[i].type)) {
      return false;
    }

    uint8_t flags;
    if (!d.readFixedU8(&flags)) {
      return d.fail("expected flag");
    }
    // Reject any flag bits outside the allowed mask.
    if ((flags & ~uint8_t(FieldFlags::AllowedMask)) != 0) {
      return d.fail("garbage flag bits");
    }
    fields[i].isMutable = flags & uint8_t(FieldFlags::Mutable);
  }

  *structType = StructType(std::move(fields));

  // Compute the struct layout, and fail if the struct is too large
  if (!structType->init()) {
    return d.fail("too many fields in struct");
  }
  return true;
}

// Decodes an array type definition: an element storage type plus a
// mutability flag byte.
static bool DecodeArrayType(Decoder& d, CodeMetadata* codeMeta,
                            ArrayType* arrayType) {
  StorageType elementType;
  if (!d.readStorageType(*codeMeta->types, codeMeta->features(),
                         &elementType)) {
    return false;
  }

  uint8_t flags;
  if (!d.readFixedU8(&flags)) {
    return d.fail("expected flag");
  }
  if ((flags & ~uint8_t(FieldFlags::AllowedMask)) != 0) {
    return d.fail("garbage flag bits");
  }
  bool isMutable = flags & uint8_t(FieldFlags::Mutable);

  *arrayType = ArrayType(elementType, isMutable);
  return true;
}

// Decodes the type section: a sequence of recursion groups, each containing
// one or more func/struct/array type definitions with optional sub/final
// supertype declarations.
static bool DecodeTypeSection(Decoder& d, CodeMetadata* codeMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Type, codeMeta, &range, "type")) {
    return false;
  }
  // An absent section is valid and leaves the type context empty.
  if (!range) {
    return true;
  }

  uint32_t numRecGroups;
  if (!d.readVarU32(&numRecGroups)) {
    return d.fail("expected number of types");
  }

  // Check if we've reached our implementation defined limit of recursion
  // groups.
  if (numRecGroups > MaxRecGroups) {
    return d.fail("too many types");
  }

  for (uint32_t recGroupIndex = 0; recGroupIndex < numRecGroups;
       recGroupIndex++) {
    // A type not wrapped in an explicit `rec` prefix is treated as a
    // recursion group of length 1.
    uint32_t recGroupLength = 1;

    uint8_t firstTypeCode;
    if (!d.peekByte(&firstTypeCode)) {
      return d.fail("expected type form");
    }

    if (firstTypeCode == (uint8_t)TypeCode::RecGroup) {
      // Skip over the prefix byte that was peeked.
      d.uncheckedReadFixedU8();

      // Read the number of types in this recursion group
      if (!d.readVarU32(&recGroupLength)) {
        return d.fail("expected recursion group length");
      }
    }

    // Check if we've reached our implementation defined limit of type
    // definitions.
    mozilla::CheckedUint32 newNumTypes(codeMeta->types->length());
    newNumTypes += recGroupLength;
    if (!newNumTypes.isValid() || newNumTypes.value() > MaxTypes) {
      return d.fail("too many types");
    }

    // Start a recursion group. This will extend the type context with empty
    // type definitions to be filled.
    MutableRecGroup recGroup = codeMeta->types->startRecGroup(recGroupLength);
    if (!recGroup) {
      return false;
    }

    // First, iterate over the types, validate them and set super types.
    // Subtyping relationship will be checked in a second iteration.
    for (uint32_t recGroupTypeIndex = 0; recGroupTypeIndex < recGroupLength;
         recGroupTypeIndex++) {
      // Absolute index of this type: the group's slots were already appended
      // to the type context by startRecGroup.
      uint32_t typeIndex =
          codeMeta->types->length() - recGroupLength + recGroupTypeIndex;

      // This is ensured by above
      MOZ_ASSERT(typeIndex < MaxTypes);

      uint8_t form;
      const TypeDef* superTypeDef = nullptr;

      // By default, all types are final unless the sub keyword is specified.
      bool finalTypeFlag = true;

      // Decode an optional declared super type index.
      if (d.peekByte(&form) && (form == (uint8_t)TypeCode::SubNoFinalType ||
                                form == (uint8_t)TypeCode::SubFinalType)) {
        if (form == (uint8_t)TypeCode::SubNoFinalType) {
          finalTypeFlag = false;
        }

        // Skip over the `sub` or `final` prefix byte we peeked.
        d.uncheckedReadFixedU8();

        // Decode the number of super types, which is currently limited to at
        // most one.
        uint32_t numSuperTypes;
        if (!d.readVarU32(&numSuperTypes)) {
          return d.fail("expected number of super types");
        }
        if (numSuperTypes > 1) {
          return d.fail("too many super types");
        }

        // Decode the super type, if any.
        if (numSuperTypes == 1) {
          uint32_t superTypeDefIndex;
          if (!d.readVarU32(&superTypeDefIndex)) {
            return d.fail("expected super type index");
          }

          // A super type index must be strictly less than the current type
          // index in order to avoid cycles.
          if (superTypeDefIndex >= typeIndex) {
            return d.fail("invalid super type index");
          }

          superTypeDef = &codeMeta->types->type(superTypeDefIndex);
        }
      }

      // Decode the kind of type definition
      if (!d.readFixedU8(&form)) {
        return d.fail("expected type form");
      }

      // Fill in the pre-allocated slot in the recursion group with the
      // decoded definition.
      TypeDef* typeDef = &recGroup->type(recGroupTypeIndex);
      switch (form) {
        case uint8_t(TypeCode::Func): {
          FuncType funcType;
          if (!DecodeFuncType(d, codeMeta, &funcType)) {
            return false;
          }
          *typeDef = std::move(funcType);
          break;
        }
        case uint8_t(TypeCode::Struct): {
          StructType structType;
          if (!DecodeStructType(d, codeMeta, &structType)) {
            return false;
          }
          *typeDef = std::move(structType);
          break;
        }
        case uint8_t(TypeCode::Array): {
          ArrayType arrayType;
          if (!DecodeArrayType(d, codeMeta, &arrayType)) {
            return false;
          }
          *typeDef = std::move(arrayType);
          break;
        }
        default:
          return d.fail("expected type form");
      }

      typeDef->setFinal(finalTypeFlag);
      if (superTypeDef) {
        // Check that we aren't creating too deep of a subtyping chain
        if (superTypeDef->subTypingDepth() >= MaxSubTypingDepth) {
          return d.fail("type is too deep");
        }

        typeDef->setSuperTypeDef(superTypeDef);
      }

      if (typeDef->isFuncType()) {
        typeDef->funcType().initImmediateTypeId(typeDef->isFinal(),
                                                superTypeDef, recGroupLength);
      }
    }

    // Check the super types to make sure they are compatible with their
    // subtypes. This is done in a second iteration to avoid dealing with not
    // yet loaded types.
    for (uint32_t recGroupTypeIndex = 0; recGroupTypeIndex < recGroupLength;
         recGroupTypeIndex++) {
      TypeDef* typeDef = &recGroup->type(recGroupTypeIndex);
      if (typeDef->superTypeDef()) {
        // Check that the super type is compatible with this type
        if (!TypeDef::canBeSubTypeOf(typeDef, typeDef->superTypeDef())) {
          return d.fail("incompatible super type");
        }
      }
    }

    // Finish the recursion group, which will canonicalize the types.
    if (!codeMeta->types->endRecGroup()) {
      return false;
    }
  }

  return d.finishSection(*range, "type");
}

// Decodes a length-prefixed name and checks that it is valid UTF-8. On
// failure no error message is set; callers supply context-specific messages.
[[nodiscard]] static bool DecodeName(Decoder& d, CacheableName* name) {
  uint32_t numBytes;
  if (!d.readVarU32(&numBytes)) {
    return false;
  }

  const uint8_t* bytes;
  if (!d.readBytes(numBytes, &bytes)) {
    return false;
  }

  if (!IsUtf8(AsChars(Span(bytes, numBytes)))) {
    return false;
  }

  // Copy the bytes out of the module bytecode into an owned buffer.
  UTF8Bytes utf8Bytes;
  if (!utf8Bytes.resizeUninitialized(numBytes)) {
    return false;
  }
  memcpy(utf8Bytes.begin(), bytes, numBytes);

  *name = CacheableName(std::move(utf8Bytes));
  return true;
}

// Reads a type index and checks that it is in range and refers to a function
// type.
static bool DecodeFuncTypeIndex(Decoder& d, const SharedTypeContext& types,
                                uint32_t* funcTypeIndex) {
  if (!d.readVarU32(funcTypeIndex)) {
    return d.fail("expected signature index");
  }

  if (*funcTypeIndex >= types->length()) {
    return d.fail("signature index out of range");
  }

  const TypeDef& def = (*types)[*funcTypeIndex];

  if (!def.isFuncType()) {
    return d.fail("signature index references non-signature");
  }

  return true;
}

// Reads a single limit bound: a 64-bit LEB for 64-bit address types, and a
// 32-bit LEB otherwise.
static bool DecodeLimitBound(Decoder& d, AddressType addressType,
                             uint64_t* bound) {
  if (addressType == AddressType::I64) {
    return d.readVarU64(bound);
  }

  // Spec tests assert that we only decode a LEB32 when address type is I32.
  uint32_t bound32;
  if (!d.readVarU32(&bound32)) {
    return false;
  }
  *bound = bound32;
  return true;
}

// Decodes a limits structure for a memory or table: a flags byte, an initial
// bound, and an optional maximum bound. The flags also carry the shared
// attribute (memories only), the address type, and (for memories, when
// compiled in) a custom page size indicator.
static bool DecodeLimits(Decoder& d, CodeMetadata* codeMeta, LimitsKind kind,
                         Limits* limits) {
  uint8_t flags;
  if (!d.readFixedU8(&flags)) {
    return d.fail("expected flags");
  }

  // Memories and tables permit different sets of flag bits.
  uint8_t mask = kind == LimitsKind::Memory ? uint8_t(LimitsMask::Memory)
                                            : uint8_t(LimitsMask::Table);

  if (flags & ~uint8_t(mask)) {
    return d.failf("unexpected bits set in flags: %" PRIu32,
                   uint32_t(flags & ~uint8_t(mask)));
  }

  // Memory limits may be shared
  if (kind == LimitsKind::Memory) {
    // A shared memory must declare a maximum size.
    if ((flags & uint8_t(LimitsFlags::IsShared)) &&
        !(flags & uint8_t(LimitsFlags::HasMaximum))) {
      return d.fail("maximum length required for shared memory");
    }

    limits->shared = (flags & uint8_t(LimitsFlags::IsShared))
                         ? Shareable::True
                         : Shareable::False;
  } else {
    limits->shared = Shareable::False;
  }

  limits->addressType = (flags & uint8_t(LimitsFlags::IsI64))
                            ? AddressType::I64
                            : AddressType::I32;

  uint64_t initial;
  if (!DecodeLimitBound(d, limits->addressType, &initial)) {
    return d.fail("expected initial length");
  }
  limits->initial = initial;

  if (flags & uint8_t(LimitsFlags::HasMaximum)) {
    uint64_t maximum;
    if (!DecodeLimitBound(d, limits->addressType, &maximum)) {
      return d.fail("expected maximum length");
    }

    if (limits->initial > maximum) {
      return d.failf(
          "%s size minimum must not be greater than maximum; "
          "maximum length %" PRIu64 " is less than initial length %" PRIu64,
          kind == LimitsKind::Memory ? "memory" : "table", maximum,
          limits->initial);
    }

    limits->maximum.emplace(maximum);
  }

  if (kind == LimitsKind::Memory) {
    limits->pageSize = PageSize::Standard;
#ifdef ENABLE_WASM_CUSTOM_PAGE_SIZES
    if (flags & uint8_t(LimitsFlags::HasCustomPageSize)) {
      if (!codeMeta->customPageSizesEnabled()) {
        return d.fail("custom page sizes are disabled");
      }

      uint32_t customPageSize;
      if (!d.readVarU32(&customPageSize)) {
        return d.fail("failed to decode custom page size");
      }

      // Only two page sizes are accepted: Tiny and the default Standard.
      if (customPageSize == static_cast<uint32_t>(PageSize::Tiny)) {
        limits->pageSize = PageSize::Tiny;
      } else if (customPageSize != static_cast<uint32_t>(PageSize::Standard)) {
        return d.fail("bad custom page size");
      }
    }
#endif
  }

  return true;
}

// Combined decoding for both table types and the augmented form of table types
// that can include init expressions:
//
// https://wasm-dsl.github.io/spectec/core/binary/types.html#table-types
// https://wasm-dsl.github.io/spectec/core/binary/modules.html#table-section
//
// Only defined tables are therefore allowed to have init expressions, not
// imported tables.
static bool DecodeTableType(Decoder& d, CodeMetadata* codeMeta, bool isImport) {
  // A defined (non-imported) table may be prefixed with a TableHasInitExpr
  // code and a reserved zero byte to signal that an explicit initializer
  // expression follows the table type.
  bool initExprPresent = false;
  uint8_t typeCode;
  if (!d.peekByte(&typeCode)) {
    return d.fail("expected type code");
  }
  if (typeCode == (uint8_t)TypeCode::TableHasInitExpr) {
    if (isImport) {
      return d.fail("imported tables cannot have initializer expressions");
    }
    d.uncheckedReadFixedU8();
    uint8_t flags;
    if (!d.readFixedU8(&flags) || flags != 0) {
      return d.fail("expected reserved byte to be 0");
    }
    initExprPresent = true;
  }

  RefType tableElemType;
  if (!d.readRefType(*codeMeta->types, codeMeta->features(), &tableElemType)) {
    return false;
  }

  Limits limits;
  if (!DecodeLimits(d, codeMeta, LimitsKind::Table, &limits)) {
    return false;
  }

  // If there's a maximum, check it is in range. The check to exclude
  // initial > maximum is carried out by the DecodeLimits call above, so
  // we don't repeat it here.
  if (limits.initial > MaxTableElemsValidation(limits.addressType) ||
      ((limits.maximum.isSome() &&
        limits.maximum.value() >
            MaxTableElemsValidation(limits.addressType)))) {
    return d.fail("too many table elements");
  }

  if (codeMeta->tables.length() >= MaxTables) {
    return d.fail("too many tables");
  }

  Maybe<InitExpr> initExpr;
  if (initExprPresent) {
    InitExpr initializer;
    if (!InitExpr::decodeAndValidate(d, codeMeta, tableElemType,
                                     &initializer)) {
      return false;
    }
    initExpr = Some(std::move(initializer));
  } else {
    // Without an explicit initializer there is no valid default value for a
    // non-nullable element type, so defined tables must provide one.
    if (!tableElemType.isNullable() && !isImport) {
      return d.fail("table with non-nullable references requires initializer");
    }
  }

  return codeMeta->tables.emplaceBack(limits, tableElemType,
                                      std::move(initExpr),
                                      /* isAsmJS */ false, isImport);
}

// Decodes a global's value type and mutability flag byte.
static bool DecodeGlobalType(Decoder& d, const SharedTypeContext& types,
                             const FeatureArgs& features, ValType* type,
                             bool* isMutable) {
  if (!d.readValType(*types, features, type)) {
    return d.fail("expected global type");
  }

  uint8_t flags;
  if (!d.readFixedU8(&flags)) {
    return d.fail("expected global flags");
  }

  if (flags & ~uint8_t(GlobalTypeImmediate::AllowedMask)) {
    return d.fail("unexpected bits set in global flags");
  }

  *isMutable = flags & uint8_t(GlobalTypeImmediate::IsMutable);
  return true;
}

// Decodes one memory's limits, checks them against the per-address-type page
// count maximum, and appends the result to `memories`.
static bool DecodeMemoryTypeAndLimits(Decoder& d, CodeMetadata* codeMeta,
                                      MemoryDescVector* memories) {
  if (codeMeta->numMemories() >= MaxMemories) {
    return d.fail("too many memories");
  }

  Limits limits;
  if (!DecodeLimits(d, codeMeta, LimitsKind::Memory, &limits)) {
    return false;
  }

  // Maximum page count depends on both the address type and the (possibly
  // custom) page size.
  uint64_t maxField =
      MaxMemoryPagesValidation(limits.addressType, limits.pageSize);

  if (limits.initial > maxField) {
    return d.fail("initial memory size too big");
  }

  if (limits.maximum && *limits.maximum > maxField) {
    return d.fail("maximum memory size too big");
  }

  if (limits.shared == Shareable::True &&
      codeMeta->sharedMemoryEnabled() == Shareable::False) {
    return d.fail("shared memory is disabled");
  }

  return memories->emplaceBack(MemoryDesc(limits));
}

// Decodes a tag definition: a tag kind (only exception tags are supported)
// and a function type index whose signature must have no results.
static bool DecodeTag(Decoder& d, CodeMetadata* codeMeta, TagKind* tagKind,
                      uint32_t* funcTypeIndex) {
  uint32_t tagCode;
  if (!d.readVarU32(&tagCode)) {
    return d.fail("expected tag kind");
  }

  if (TagKind(tagCode) != TagKind::Exception) {
    return d.fail("illegal tag kind");
  }
  *tagKind = TagKind(tagCode);

  if (!d.readVarU32(funcTypeIndex)) {
    return d.fail("expected function index in tag");
  }
  if (*funcTypeIndex >= codeMeta->numTypes()) {
    return d.fail("function type index in tag out of bounds");
  }
  if (!(*codeMeta->types)[*funcTypeIndex].isFuncType()) {
    return d.fail("function type index must index a function type");
  }
  if ((*codeMeta->types)[*funcTypeIndex].funcType().results().length() != 0) {
    return d.fail("tag function types must not return anything");
  }
  return true;
}

// Decodes a single import: module name, field name, kind byte, and the
// kind-specific descriptor. Registers the imported entity in codeMeta and
// appends the import record to moduleMeta.
static bool DecodeImport(Decoder& d, CodeMetadata* codeMeta,
                         ModuleMetadata* moduleMeta) {
  CacheableName moduleName;
  if (!DecodeName(d, &moduleName)) {
    return d.fail("expected valid import module name");
  }

  CacheableName fieldName;
  if (!DecodeName(d, &fieldName)) {
    return d.fail("expected valid import field name");
  }

  uint8_t rawImportKind;
  if (!d.readFixedU8(&rawImportKind)) {
    return d.fail("failed to read import kind");
  }

  DefinitionKind importKind = DefinitionKind(rawImportKind);

  switch (importKind) {
    case DefinitionKind::Function: {
      uint32_t funcTypeIndex;
      if (!DecodeFuncTypeIndex(d, codeMeta->types, &funcTypeIndex)) {
        return false;
      }
      if (!codeMeta->funcs.append(FuncDesc(funcTypeIndex))) {
        return false;
      }
      if (codeMeta->funcs.length() > MaxFuncs) {
        return d.fail("too many functions");
      }
      break;
    }
    case DefinitionKind::Table: {
      if (!DecodeTableType(d, codeMeta, /*isImport=*/true)) {
        return false;
      }
      break;
    }
    case DefinitionKind::Memory: {
      if (!DecodeMemoryTypeAndLimits(d, codeMeta, &codeMeta->memories)) {
        return false;
      }
      // Record which import this memory came from.
      codeMeta->memories.back().importIndex =
          Some(moduleMeta->imports.length());
      break;
    }
    case DefinitionKind::Global: {
      ValType type;
      bool isMutable;
      if (!DecodeGlobalType(d, codeMeta->types, codeMeta->features(), &type,
                            &isMutable)) {
        return false;
      }
      if (!codeMeta->globals.append(
              GlobalDesc(type, isMutable, codeMeta->globals.length()))) {
        return false;
      }
      if (codeMeta->globals.length() > MaxGlobals) {
        return d.fail("too many globals");
      }
      break;
    }
    case DefinitionKind::Tag: {
      TagKind tagKind;
      uint32_t funcTypeIndex;
      if (!DecodeTag(d, codeMeta, &tagKind, &funcTypeIndex)) {
        return false;
      }
      MutableTagType tagType = js_new<TagType>();
      if (!tagType ||
          !tagType->initialize(&(*codeMeta->types)[funcTypeIndex])) {
        return false;
      }
      if (!codeMeta->tags.emplaceBack(tagKind, tagType)) {
        return false;
      }
      if (codeMeta->tags.length() > MaxTags) {
        return d.fail("too many tags");
      }
      break;
    }
    default:
      return d.fail("unsupported import kind");
  }

  return moduleMeta->imports.emplaceBack(std::move(moduleName),
                                         std::move(fieldName), importKind);
}

// Checks every import against the set of enabled builtin modules: recognized
// builtin functions are recorded in knownFuncImports and their signatures
// verified; builtin globals are checked against the expected immutable
// `(ref extern)` type.
static bool CheckImportsAgainstBuiltinModules(Decoder& d,
                                              CodeMetadata* codeMeta,
                                              ModuleMetadata* moduleMeta) {
  const BuiltinModuleIds& builtinModules = codeMeta->features().builtinModules;

  // Skip this pass if there are no builtin modules enabled
  if (builtinModules.hasNone()) {
    return true;
  }

  uint32_t importFuncIndex = 0;
  uint32_t importGlobalIndex = 0;
  for (auto& import : moduleMeta->imports) {
    Maybe<BuiltinModuleId> builtinModule =
        ImportMatchesBuiltinModule(import.module.utf8Bytes(), builtinModules);

    switch (import.kind) {
      case DefinitionKind::Function: {
        const FuncDesc& func = codeMeta->funcs[importFuncIndex];
        uint32_t funcIndex = importFuncIndex;
        importFuncIndex += 1;

        // Skip this import if it doesn't refer to a builtin module. We do have
        // to increment the import function index regardless though.
        if (!builtinModule) {
          continue;
        }

        // Check if this import refers to a builtin module function
        const BuiltinModuleFunc* builtinFunc = nullptr;
        BuiltinModuleFuncId builtinFuncId;
        if (!ImportMatchesBuiltinModuleFunc(import.field.utf8Bytes(),
                                            *builtinModule, &builtinFunc,
                                            &builtinFuncId)) {
          return d.fail("unrecognized builtin module field");
        }

        // The builtin's declared type must be usable where the import's
        // declared type is expected.
        const TypeDef& importTypeDef = (*codeMeta->types)[func.typeIndex];
        if (!TypeDef::isSubTypeOf(builtinFunc->typeDef(), &importTypeDef)) {
          return d.failf("type mismatch in %s", builtinFunc->exportName());
        }

        codeMeta->knownFuncImports[funcIndex] = builtinFuncId;
        break;
      }
      case DefinitionKind::Global: {
        const GlobalDesc& global = codeMeta->globals[importGlobalIndex];
        importGlobalIndex += 1;

        // Skip this import if it doesn't refer to a builtin module. We do have
        // to increment the import global index regardless though.
        if (!builtinModule) {
          continue;
        }

        // Only the imported string constants module has globals defined.
        if (*builtinModule != BuiltinModuleId::JSStringConstants) {
          return d.fail("unrecognized builtin module field");
        }

        // All imported globals must match a provided global type of
        // `(global (ref extern))`.
        if (global.isMutable() ||
            !ValType::isSubTypeOf(ValType(RefType::extern_().asNonNullable()),
                                  global.type())) {
          return d.failf("type mismatch");
        }

        break;
      }
      default: {
        if (!builtinModule) {
          continue;
        }
        // Builtin modules only provide functions and globals.
        return d.fail("unrecognized builtin import");
      }
    }
  }

  return true;
}

// Decodes the import section, then records how many functions and globals
// were imported (everything appended to those vectors so far is an import).
static bool DecodeImportSection(Decoder& d, CodeMetadata* codeMeta,
                                ModuleMetadata* moduleMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Import, codeMeta, &range, "import")) {
    return false;
  }
  if (!range) {
    return true;
  }

  uint32_t numImports;
  if (!d.readVarU32(&numImports)) {
    return d.fail("failed to read number of imports");
  }

  if (numImports > MaxImports) {
    return d.fail("too many imports");
  }

  for (uint32_t i = 0; i < numImports; i++) {
    if (!DecodeImport(d, codeMeta, moduleMeta)) {
      return false;
    }
  }

  if (!d.finishSection(*range, "import")) {
    return false;
  }

  codeMeta->numFuncImports = codeMeta->funcs.length();
  if (!codeMeta->knownFuncImports.resize(codeMeta->numFuncImports)) {
    return false;
  }
  codeMeta->numGlobalImports = codeMeta->globals.length();
  return true;
}

// Decodes the function section: a function type index for every defined
// function, appended after the imported functions.
static bool DecodeFunctionSection(Decoder& d, CodeMetadata* codeMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Function, codeMeta, &range, "function")) {
    return false;
  }
  if (!range) {
    return true;
  }

  uint32_t numDefs;
  if (!d.readVarU32(&numDefs)) {
    return d.fail("expected number of function definitions");
  }

  // Guard against overflow of imports + definitions.
  CheckedInt<uint32_t> numFuncs = codeMeta->funcs.length();
  numFuncs += numDefs;
  if (!numFuncs.isValid() || numFuncs.value() > MaxFuncs) {
    return d.fail("too many functions");
  }

  if (!codeMeta->funcs.reserve(numFuncs.value())) {
    return false;
  }

  for (uint32_t i = 0; i < numDefs; i++) {
    uint32_t funcTypeIndex;
    if (!DecodeFuncTypeIndex(d, codeMeta->types, &funcTypeIndex)) {
      return false;
    }
    codeMeta->funcs.infallibleAppend(funcTypeIndex);
  }

  return d.finishSection(*range, "function");
}

// Decodes the table section: a table type for every defined table.
static bool DecodeTableSection(Decoder& d, CodeMetadata* codeMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Table, codeMeta, &range, "table")) {
    return false;
  }
  if (!range) {
    return true;
  }

  uint32_t numTables;
  if (!d.readVarU32(&numTables)) {
    return d.fail("failed to read number of tables");
  }

  // The per-module table limit is enforced inside DecodeTableType.
  for (uint32_t i = 0; i < numTables; ++i) {
    if (!DecodeTableType(d, codeMeta, /*isImport=*/false)) {
      return false;
    }
  }

  return d.finishSection(*range, "table");
}

// Decodes the memory section: limits for every defined memory.
static bool DecodeMemorySection(Decoder& d, CodeMetadata* codeMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Memory, codeMeta, &range, "memory")) {
    return false;
  }
  if (!range) {
    return true;
  }

  uint32_t numMemories;
  if (!d.readVarU32(&numMemories)) {
    return d.fail("failed to read number of memories");
  }

  for (uint32_t i = 0; i < numMemories; ++i) {
    if (!DecodeMemoryTypeAndLimits(d, codeMeta, &codeMeta->memories)) {
      return false;
    }
  }

  return d.finishSection(*range, "memory");
}

// Decodes the global section: a global type plus a validated initializer
// expression for every defined global.
static bool DecodeGlobalSection(Decoder& d, CodeMetadata* codeMeta) {
  MaybeBytecodeRange range;
  if (!d.startSection(SectionId::Global, codeMeta, &range, "global")) {
    return false;
  }
  if (!range) {
return true; 3327 } 3328 3329 uint32_t numDefs; 3330 if (!d.readVarU32(&numDefs)) { 3331 return d.fail("expected number of globals"); 3332 } 3333 3334 CheckedInt<uint32_t> numGlobals = codeMeta->globals.length(); 3335 numGlobals += numDefs; 3336 if (!numGlobals.isValid() || numGlobals.value() > MaxGlobals) { 3337 return d.fail("too many globals"); 3338 } 3339 3340 if (!codeMeta->globals.reserve(numGlobals.value())) { 3341 return false; 3342 } 3343 3344 for (uint32_t i = 0; i < numDefs; i++) { 3345 ValType type; 3346 bool isMutable; 3347 if (!DecodeGlobalType(d, codeMeta->types, codeMeta->features(), &type, 3348 &isMutable)) { 3349 return false; 3350 } 3351 3352 InitExpr initializer; 3353 if (!InitExpr::decodeAndValidate(d, codeMeta, type, &initializer)) { 3354 return false; 3355 } 3356 3357 codeMeta->globals.infallibleAppend( 3358 GlobalDesc(std::move(initializer), isMutable)); 3359 } 3360 3361 return d.finishSection(*range, "global"); 3362 } 3363 3364 static bool DecodeTagSection(Decoder& d, CodeMetadata* codeMeta) { 3365 MaybeBytecodeRange range; 3366 if (!d.startSection(SectionId::Tag, codeMeta, &range, "tag")) { 3367 return false; 3368 } 3369 if (!range) { 3370 return true; 3371 } 3372 3373 uint32_t numDefs; 3374 if (!d.readVarU32(&numDefs)) { 3375 return d.fail("expected number of tags"); 3376 } 3377 3378 CheckedInt<uint32_t> numTags = codeMeta->tags.length(); 3379 numTags += numDefs; 3380 if (!numTags.isValid() || numTags.value() > MaxTags) { 3381 return d.fail("too many tags"); 3382 } 3383 3384 if (!codeMeta->tags.reserve(numTags.value())) { 3385 return false; 3386 } 3387 3388 for (uint32_t i = 0; i < numDefs; i++) { 3389 TagKind tagKind; 3390 uint32_t funcTypeIndex; 3391 if (!DecodeTag(d, codeMeta, &tagKind, &funcTypeIndex)) { 3392 return false; 3393 } 3394 MutableTagType tagType = js_new<TagType>(); 3395 if (!tagType || !tagType->initialize(&(*codeMeta->types)[funcTypeIndex])) { 3396 return false; 3397 } 3398 codeMeta->tags.infallibleEmplaceBack(tagKind, 
tagType); 3399 } 3400 3401 return d.finishSection(*range, "tag"); 3402 } 3403 3404 using NameSet = HashSet<Span<char>, NameHasher, SystemAllocPolicy>; 3405 3406 [[nodiscard]] static bool DecodeExportName(Decoder& d, NameSet* dupSet, 3407 CacheableName* exportName) { 3408 if (!DecodeName(d, exportName)) { 3409 d.fail("expected valid export name"); 3410 return false; 3411 } 3412 3413 NameSet::AddPtr p = dupSet->lookupForAdd(exportName->utf8Bytes()); 3414 if (p) { 3415 d.fail("duplicate export"); 3416 return false; 3417 } 3418 3419 return dupSet->add(p, exportName->utf8Bytes()); 3420 } 3421 3422 static bool DecodeExport(Decoder& d, CodeMetadata* codeMeta, 3423 ModuleMetadata* moduleMeta, NameSet* dupSet) { 3424 CacheableName fieldName; 3425 if (!DecodeExportName(d, dupSet, &fieldName)) { 3426 return false; 3427 } 3428 3429 uint8_t exportKind; 3430 if (!d.readFixedU8(&exportKind)) { 3431 return d.fail("failed to read export kind"); 3432 } 3433 3434 switch (DefinitionKind(exportKind)) { 3435 case DefinitionKind::Function: { 3436 uint32_t funcIndex; 3437 if (!d.readVarU32(&funcIndex)) { 3438 return d.fail("expected function index"); 3439 } 3440 3441 if (funcIndex >= codeMeta->numFuncs()) { 3442 return d.fail("exported function index out of bounds"); 3443 } 3444 3445 codeMeta->funcs[funcIndex].declareFuncExported(/* eager */ true, 3446 /* canRefFunc */ true); 3447 return moduleMeta->exports.emplaceBack(std::move(fieldName), funcIndex, 3448 DefinitionKind::Function); 3449 } 3450 case DefinitionKind::Table: { 3451 uint32_t tableIndex; 3452 if (!d.readVarU32(&tableIndex)) { 3453 return d.fail("expected table index"); 3454 } 3455 3456 if (tableIndex >= codeMeta->tables.length()) { 3457 return d.fail("exported table index out of bounds"); 3458 } 3459 codeMeta->tables[tableIndex].isExported = true; 3460 return moduleMeta->exports.emplaceBack(std::move(fieldName), tableIndex, 3461 DefinitionKind::Table); 3462 } 3463 case DefinitionKind::Memory: { 3464 uint32_t memoryIndex; 3465 
if (!d.readVarU32(&memoryIndex)) { 3466 return d.fail("expected memory index"); 3467 } 3468 3469 if (memoryIndex >= codeMeta->numMemories()) { 3470 return d.fail("exported memory index out of bounds"); 3471 } 3472 3473 return moduleMeta->exports.emplaceBack(std::move(fieldName), memoryIndex, 3474 DefinitionKind::Memory); 3475 } 3476 case DefinitionKind::Global: { 3477 uint32_t globalIndex; 3478 if (!d.readVarU32(&globalIndex)) { 3479 return d.fail("expected global index"); 3480 } 3481 3482 if (globalIndex >= codeMeta->globals.length()) { 3483 return d.fail("exported global index out of bounds"); 3484 } 3485 3486 GlobalDesc* global = &codeMeta->globals[globalIndex]; 3487 global->setIsExport(); 3488 3489 return moduleMeta->exports.emplaceBack(std::move(fieldName), globalIndex, 3490 DefinitionKind::Global); 3491 } 3492 case DefinitionKind::Tag: { 3493 uint32_t tagIndex; 3494 if (!d.readVarU32(&tagIndex)) { 3495 return d.fail("expected tag index"); 3496 } 3497 if (tagIndex >= codeMeta->tags.length()) { 3498 return d.fail("exported tag index out of bounds"); 3499 } 3500 3501 codeMeta->tags[tagIndex].isExport = true; 3502 return moduleMeta->exports.emplaceBack(std::move(fieldName), tagIndex, 3503 DefinitionKind::Tag); 3504 } 3505 default: 3506 return d.fail("unexpected export kind"); 3507 } 3508 3509 MOZ_CRASH("unreachable"); 3510 } 3511 3512 static bool DecodeExportSection(Decoder& d, CodeMetadata* codeMeta, 3513 ModuleMetadata* moduleMeta) { 3514 MaybeBytecodeRange range; 3515 if (!d.startSection(SectionId::Export, codeMeta, &range, "export")) { 3516 return false; 3517 } 3518 if (!range) { 3519 return true; 3520 } 3521 3522 NameSet dupSet; 3523 3524 uint32_t numExports; 3525 if (!d.readVarU32(&numExports)) { 3526 return d.fail("failed to read number of exports"); 3527 } 3528 3529 if (numExports > MaxExports) { 3530 return d.fail("too many exports"); 3531 } 3532 3533 for (uint32_t i = 0; i < numExports; i++) { 3534 if (!DecodeExport(d, codeMeta, moduleMeta, &dupSet)) { 
3535 return false; 3536 } 3537 } 3538 3539 return d.finishSection(*range, "export"); 3540 } 3541 3542 static bool DecodeStartSection(Decoder& d, CodeMetadata* codeMeta, 3543 ModuleMetadata* moduleMeta) { 3544 MaybeBytecodeRange range; 3545 if (!d.startSection(SectionId::Start, codeMeta, &range, "start")) { 3546 return false; 3547 } 3548 if (!range) { 3549 return true; 3550 } 3551 3552 uint32_t funcIndex; 3553 if (!d.readVarU32(&funcIndex)) { 3554 return d.fail("failed to read start func index"); 3555 } 3556 3557 if (funcIndex >= codeMeta->numFuncs()) { 3558 return d.fail("unknown start function"); 3559 } 3560 3561 const FuncType& funcType = codeMeta->getFuncType(funcIndex); 3562 if (funcType.results().length() > 0) { 3563 return d.fail("start function must not return anything"); 3564 } 3565 3566 if (funcType.args().length()) { 3567 return d.fail("start function must be nullary"); 3568 } 3569 3570 codeMeta->funcs[funcIndex].declareFuncExported(/* eager */ true, 3571 /* canFuncRef */ false); 3572 codeMeta->startFuncIndex = Some(funcIndex); 3573 3574 return d.finishSection(*range, "start"); 3575 } 3576 3577 static inline ModuleElemSegment::Kind NormalizeElemSegmentKind( 3578 ElemSegmentKind decodedKind) { 3579 switch (decodedKind) { 3580 case ElemSegmentKind::Active: 3581 case ElemSegmentKind::ActiveWithTableIndex: { 3582 return ModuleElemSegment::Kind::Active; 3583 } 3584 case ElemSegmentKind::Passive: { 3585 return ModuleElemSegment::Kind::Passive; 3586 } 3587 case ElemSegmentKind::Declared: { 3588 return ModuleElemSegment::Kind::Declared; 3589 } 3590 } 3591 MOZ_CRASH("unexpected elem segment kind"); 3592 } 3593 3594 static bool DecodeElemSegment(Decoder& d, CodeMetadata* codeMeta, 3595 ModuleMetadata* moduleMeta) { 3596 uint32_t segmentFlags; 3597 if (!d.readVarU32(&segmentFlags)) { 3598 return d.fail("expected elem segment flags field"); 3599 } 3600 3601 Maybe<ElemSegmentFlags> flags = ElemSegmentFlags::construct(segmentFlags); 3602 if (!flags) { 3603 return 
d.fail("invalid elem segment flags field"); 3604 } 3605 3606 ModuleElemSegment seg = ModuleElemSegment(); 3607 3608 ElemSegmentKind segmentKind = flags->kind(); 3609 seg.kind = NormalizeElemSegmentKind(segmentKind); 3610 3611 if (segmentKind == ElemSegmentKind::Active || 3612 segmentKind == ElemSegmentKind::ActiveWithTableIndex) { 3613 if (codeMeta->tables.length() == 0) { 3614 return d.fail("active elem segment requires a table"); 3615 } 3616 3617 uint32_t tableIndex = 0; 3618 if (segmentKind == ElemSegmentKind::ActiveWithTableIndex && 3619 !d.readVarU32(&tableIndex)) { 3620 return d.fail("expected table index"); 3621 } 3622 if (tableIndex >= codeMeta->tables.length()) { 3623 return d.fail("table index out of range for element segment"); 3624 } 3625 seg.tableIndex = tableIndex; 3626 3627 InitExpr offset; 3628 if (!InitExpr::decodeAndValidate( 3629 d, codeMeta, ToValType(codeMeta->tables[tableIndex].addressType()), 3630 &offset)) { 3631 return false; 3632 } 3633 seg.offsetIfActive.emplace(std::move(offset)); 3634 } else { 3635 // Too many bugs result from keeping this value zero. For passive 3636 // or declared segments, there really is no table index, and we should 3637 // never touch the field. 3638 MOZ_ASSERT(segmentKind == ElemSegmentKind::Passive || 3639 segmentKind == ElemSegmentKind::Declared); 3640 seg.tableIndex = (uint32_t)-1; 3641 } 3642 3643 ElemSegmentPayload payload = flags->payload(); 3644 RefType elemType; 3645 3646 // `ActiveWithTableIndex`, `Declared`, and `Passive` element segments encode 3647 // the type or definition kind of the payload. `Active` element segments are 3648 // restricted to MVP behavior, which assumes only function indices. 3649 if (segmentKind == ElemSegmentKind::Active) { 3650 // Bizarrely, the spec prescribes that the default type is (ref func) when 3651 // encoding function indices, and (ref null func) when encoding expressions. 3652 elemType = payload == ElemSegmentPayload::Expressions 3653 ? 
RefType::func() 3654 : RefType::func().asNonNullable(); 3655 } else { 3656 switch (payload) { 3657 case ElemSegmentPayload::Expressions: { 3658 if (!d.readRefType(*codeMeta->types, codeMeta->features(), &elemType)) { 3659 return false; 3660 } 3661 } break; 3662 case ElemSegmentPayload::Indices: { 3663 uint8_t elemKind; 3664 if (!d.readFixedU8(&elemKind)) { 3665 return d.fail("expected element kind"); 3666 } 3667 3668 if (elemKind != uint8_t(DefinitionKind::Function)) { 3669 return d.fail("invalid element kind"); 3670 } 3671 elemType = RefType::func().asNonNullable(); 3672 } break; 3673 } 3674 } 3675 3676 // For active segments, check if the element type is compatible with the 3677 // destination table type. 3678 if (seg.active()) { 3679 RefType tblElemType = codeMeta->tables[seg.tableIndex].elemType; 3680 if (!CheckIsSubtypeOf(d, *codeMeta, d.currentOffset(), 3681 ValType(elemType).storageType(), 3682 ValType(tblElemType).storageType())) { 3683 return false; 3684 } 3685 } 3686 seg.elemType = elemType; 3687 3688 uint32_t numElems; 3689 if (!d.readVarU32(&numElems)) { 3690 return d.fail("expected element segment size"); 3691 } 3692 3693 if (numElems > MaxElemSegmentLength) { 3694 return d.fail("too many elements in element segment"); 3695 } 3696 3697 bool isAsmJS = seg.active() && codeMeta->tables[seg.tableIndex].isAsmJS; 3698 3699 switch (payload) { 3700 case ElemSegmentPayload::Indices: { 3701 seg.encoding = ModuleElemSegment::Encoding::Indices; 3702 if (!seg.elemIndices.reserve(numElems)) { 3703 return false; 3704 } 3705 3706 for (uint32_t i = 0; i < numElems; i++) { 3707 uint32_t elemIndex; 3708 if (!d.readVarU32(&elemIndex)) { 3709 return d.fail("failed to read element index"); 3710 } 3711 // The only valid type of index right now is a function index. 
3712 if (elemIndex >= codeMeta->numFuncs()) { 3713 return d.fail("element index out of range"); 3714 } 3715 3716 seg.elemIndices.infallibleAppend(elemIndex); 3717 if (!isAsmJS) { 3718 codeMeta->funcs[elemIndex].declareFuncExported(/*eager=*/false, 3719 /*canRefFunc=*/true); 3720 } 3721 } 3722 } break; 3723 case ElemSegmentPayload::Expressions: { 3724 seg.encoding = ModuleElemSegment::Encoding::Expressions; 3725 const uint8_t* exprsStart = d.currentPosition(); 3726 seg.elemExpressions.count = numElems; 3727 for (uint32_t i = 0; i < numElems; i++) { 3728 Maybe<LitVal> unusedLiteral; 3729 if (!DecodeConstantExpression(d, codeMeta, elemType, &unusedLiteral)) { 3730 return false; 3731 } 3732 } 3733 const uint8_t* exprsEnd = d.currentPosition(); 3734 if (!seg.elemExpressions.exprBytes.append(exprsStart, exprsEnd)) { 3735 return false; 3736 } 3737 } break; 3738 } 3739 3740 codeMeta->elemSegmentTypes.infallibleAppend(seg.elemType); 3741 moduleMeta->elemSegments.infallibleAppend(std::move(seg)); 3742 3743 return true; 3744 } 3745 3746 static bool DecodeElemSection(Decoder& d, CodeMetadata* codeMeta, 3747 ModuleMetadata* moduleMeta) { 3748 MaybeBytecodeRange range; 3749 if (!d.startSection(SectionId::Elem, codeMeta, &range, "elem")) { 3750 return false; 3751 } 3752 if (!range) { 3753 return true; 3754 } 3755 3756 uint32_t numSegments; 3757 if (!d.readVarU32(&numSegments)) { 3758 return d.fail("failed to read number of elem segments"); 3759 } 3760 3761 if (numSegments > MaxElemSegments) { 3762 return d.fail("too many elem segments"); 3763 } 3764 3765 if (!moduleMeta->elemSegments.reserve(numSegments) || 3766 !codeMeta->elemSegmentTypes.reserve(numSegments)) { 3767 return false; 3768 } 3769 3770 for (uint32_t i = 0; i < numSegments; i++) { 3771 if (!DecodeElemSegment(d, codeMeta, moduleMeta)) { 3772 return false; 3773 } 3774 } 3775 3776 return d.finishSection(*range, "elem"); 3777 } 3778 3779 static bool DecodeDataCountSection(Decoder& d, CodeMetadata* codeMeta) { 3780 
MaybeBytecodeRange range; 3781 if (!d.startSection(SectionId::DataCount, codeMeta, &range, "datacount")) { 3782 return false; 3783 } 3784 if (!range) { 3785 return true; 3786 } 3787 3788 uint32_t dataCount; 3789 if (!d.readVarU32(&dataCount)) { 3790 return d.fail("expected data segment count"); 3791 } 3792 3793 codeMeta->dataCount.emplace(dataCount); 3794 3795 return d.finishSection(*range, "datacount"); 3796 } 3797 3798 bool wasm::StartsCodeSection(const uint8_t* begin, const uint8_t* end, 3799 BytecodeRange* codeSection) { 3800 UniqueChars unused; 3801 Decoder d(begin, end, 0, &unused); 3802 3803 if (!DecodePreamble(d)) { 3804 return false; 3805 } 3806 3807 while (!d.done()) { 3808 uint8_t id; 3809 BytecodeRange range; 3810 if (!d.readSectionHeader(&id, &range)) { 3811 return false; 3812 } 3813 3814 if (id == uint8_t(SectionId::Code)) { 3815 if (range.size() > MaxCodeSectionBytes) { 3816 return false; 3817 } 3818 3819 *codeSection = range; 3820 return true; 3821 } 3822 3823 if (!d.readBytes(range.size())) { 3824 return false; 3825 } 3826 } 3827 3828 return false; 3829 } 3830 3831 #ifdef ENABLE_WASM_BRANCH_HINTING 3832 static bool ParseBranchHintingSection(Decoder& d, CodeMetadata* codeMeta) { 3833 uint32_t functionCount; 3834 if (!d.readVarU32(&functionCount)) { 3835 return d.fail("failed to read function count"); 3836 } 3837 3838 for (uint32_t i = 0; i < functionCount; i++) { 3839 uint32_t functionIndex; 3840 if (!d.readVarU32(&functionIndex)) { 3841 return d.fail("failed to read function index"); 3842 } 3843 3844 // Disallow branch hints on imported functions. 
3845 if ((functionIndex >= codeMeta->funcs.length()) || 3846 (functionIndex < codeMeta->numFuncImports)) { 3847 return d.fail("invalid function index in branch hint"); 3848 } 3849 3850 uint32_t hintCount; 3851 if (!d.readVarU32(&hintCount)) { 3852 return d.fail("failed to read hint count"); 3853 } 3854 3855 BranchHintVector hintVector; 3856 if (!hintVector.reserve(hintCount)) { 3857 return false; 3858 } 3859 3860 // Branch hint offsets must appear in increasing byte offset order, at most 3861 // once for each offset. 3862 uint32_t prevOffsetPlus1 = 0; 3863 for (uint32_t hintIndex = 0; hintIndex < hintCount; hintIndex++) { 3864 uint32_t branchOffset; 3865 if (!d.readVarU32(&branchOffset)) { 3866 return d.fail("failed to read branch offset"); 3867 } 3868 if (branchOffset <= prevOffsetPlus1) { 3869 return d.fail("Invalid offset in code hint"); 3870 } 3871 3872 uint32_t reserved; 3873 if (!d.readVarU32(&reserved) || (reserved != 1)) { 3874 return d.fail("Invalid reserved value for code hint"); 3875 } 3876 3877 uint32_t branchHintValue; 3878 if (!d.readVarU32(&branchHintValue) || 3879 (branchHintValue >= MaxBranchHintValue)) { 3880 return d.fail("Invalid branch hint value"); 3881 } 3882 3883 BranchHint branchHint = static_cast<BranchHint>(branchHintValue); 3884 BranchHintEntry entry(branchOffset, branchHint); 3885 hintVector.infallibleAppend(entry); 3886 3887 prevOffsetPlus1 = branchOffset; 3888 } 3889 3890 // Save this collection in the module 3891 if (!codeMeta->branchHints.addHintsForFunc(functionIndex, 3892 std::move(hintVector))) { 3893 return false; 3894 } 3895 } 3896 3897 return true; 3898 } 3899 3900 static bool DecodeBranchHintingSection(Decoder& d, CodeMetadata* codeMeta) { 3901 MaybeBytecodeRange range; 3902 if (!d.startCustomSection(BranchHintingSectionName, codeMeta, &range)) { 3903 return false; 3904 } 3905 if (!range) { 3906 return true; 3907 } 3908 3909 // Skip this custom section if errors are encountered during parsing. 
3910 if (!ParseBranchHintingSection(d, codeMeta)) { 3911 codeMeta->branchHints.setFailedAndClear(); 3912 } 3913 3914 if (!d.finishCustomSection(BranchHintingSectionName, *range)) { 3915 codeMeta->branchHints.setFailedAndClear(); 3916 } 3917 return true; 3918 } 3919 #endif 3920 3921 bool wasm::DecodeModuleEnvironment(Decoder& d, CodeMetadata* codeMeta, 3922 ModuleMetadata* moduleMeta) { 3923 if (!DecodePreamble(d)) { 3924 return false; 3925 } 3926 3927 if (!DecodeTypeSection(d, codeMeta)) { 3928 return false; 3929 } 3930 3931 if (!DecodeImportSection(d, codeMeta, moduleMeta)) { 3932 return false; 3933 } 3934 3935 // Eagerly check imports for future link errors against any known builtin 3936 // module. 3937 if (!CheckImportsAgainstBuiltinModules(d, codeMeta, moduleMeta)) { 3938 return false; 3939 } 3940 3941 if (!DecodeFunctionSection(d, codeMeta)) { 3942 return false; 3943 } 3944 3945 if (!DecodeTableSection(d, codeMeta)) { 3946 return false; 3947 } 3948 3949 if (!DecodeMemorySection(d, codeMeta)) { 3950 return false; 3951 } 3952 3953 if (!DecodeTagSection(d, codeMeta)) { 3954 return false; 3955 } 3956 3957 if (!DecodeGlobalSection(d, codeMeta)) { 3958 return false; 3959 } 3960 3961 if (!DecodeExportSection(d, codeMeta, moduleMeta)) { 3962 return false; 3963 } 3964 3965 if (!DecodeStartSection(d, codeMeta, moduleMeta)) { 3966 return false; 3967 } 3968 3969 if (!DecodeElemSection(d, codeMeta, moduleMeta)) { 3970 return false; 3971 } 3972 3973 if (!DecodeDataCountSection(d, codeMeta)) { 3974 return false; 3975 } 3976 3977 #ifdef ENABLE_WASM_BRANCH_HINTING 3978 if (codeMeta->branchHintingEnabled() && 3979 !DecodeBranchHintingSection(d, codeMeta)) { 3980 return false; 3981 } 3982 #endif 3983 3984 if (!d.startSection(SectionId::Code, codeMeta, &codeMeta->codeSectionRange, 3985 "code")) { 3986 return false; 3987 } 3988 3989 if (codeMeta->codeSectionRange && 3990 codeMeta->codeSectionRange->size() > MaxCodeSectionBytes) { 3991 return d.fail("code section too big"); 3992 } 
3993 3994 return true; 3995 } 3996 3997 static bool DecodeFunctionBody(Decoder& d, const CodeMetadata& codeMeta, 3998 uint32_t funcIndex) { 3999 uint32_t bodySize; 4000 if (!d.readVarU32(&bodySize)) { 4001 return d.fail("expected number of function body bytes"); 4002 } 4003 4004 if (bodySize > MaxFunctionBytes) { 4005 return d.fail("function body too big"); 4006 } 4007 4008 if (d.bytesRemain() < bodySize) { 4009 return d.fail("function body length too big"); 4010 } 4011 4012 return ValidateFunctionBody(codeMeta, funcIndex, bodySize, d); 4013 } 4014 4015 static bool DecodeCodeSection(Decoder& d, CodeMetadata* codeMeta) { 4016 if (!codeMeta->codeSectionRange) { 4017 if (codeMeta->numFuncDefs() != 0) { 4018 return d.fail("expected code section"); 4019 } 4020 return true; 4021 } 4022 4023 uint32_t numFuncDefs; 4024 if (!d.readVarU32(&numFuncDefs)) { 4025 return d.fail("expected function body count"); 4026 } 4027 4028 if (numFuncDefs != codeMeta->numFuncDefs()) { 4029 return d.fail( 4030 "function body count does not match function signature count"); 4031 } 4032 4033 for (uint32_t funcDefIndex = 0; funcDefIndex < numFuncDefs; funcDefIndex++) { 4034 if (!DecodeFunctionBody(d, *codeMeta, 4035 codeMeta->numFuncImports + funcDefIndex)) { 4036 return false; 4037 } 4038 } 4039 4040 return d.finishSection(*codeMeta->codeSectionRange, "code"); 4041 } 4042 4043 static bool DecodeDataSection(Decoder& d, CodeMetadata* codeMeta, 4044 ModuleMetadata* moduleMeta) { 4045 MaybeBytecodeRange range; 4046 if (!d.startSection(SectionId::Data, codeMeta, &range, "data")) { 4047 return false; 4048 } 4049 if (!range) { 4050 if (codeMeta->dataCount.isSome() && *codeMeta->dataCount > 0) { 4051 return d.fail("number of data segments does not match declared count"); 4052 } 4053 return true; 4054 } 4055 4056 uint32_t numSegments; 4057 if (!d.readVarU32(&numSegments)) { 4058 return d.fail("failed to read number of data segments"); 4059 } 4060 4061 if (numSegments > MaxDataSegments) { 4062 return 
d.fail("too many data segments"); 4063 } 4064 4065 if (codeMeta->dataCount.isSome() && numSegments != *codeMeta->dataCount) { 4066 return d.fail("number of data segments does not match declared count"); 4067 } 4068 4069 for (uint32_t i = 0; i < numSegments; i++) { 4070 uint32_t initializerKindVal; 4071 if (!d.readVarU32(&initializerKindVal)) { 4072 return d.fail("expected data initializer-kind field"); 4073 } 4074 4075 switch (initializerKindVal) { 4076 case uint32_t(DataSegmentKind::Active): 4077 case uint32_t(DataSegmentKind::Passive): 4078 case uint32_t(DataSegmentKind::ActiveWithMemoryIndex): 4079 break; 4080 default: 4081 return d.fail("invalid data initializer-kind field"); 4082 } 4083 4084 DataSegmentKind initializerKind = DataSegmentKind(initializerKindVal); 4085 4086 if (initializerKind != DataSegmentKind::Passive && 4087 codeMeta->numMemories() == 0) { 4088 return d.fail("active data segment requires a memory section"); 4089 } 4090 4091 DataSegmentRange segRange; 4092 if (initializerKind == DataSegmentKind::ActiveWithMemoryIndex) { 4093 if (!d.readVarU32(&segRange.memoryIndex)) { 4094 return d.fail("expected memory index"); 4095 } 4096 } else if (initializerKind == DataSegmentKind::Active) { 4097 segRange.memoryIndex = 0; 4098 } else { 4099 segRange.memoryIndex = InvalidMemoryIndex; 4100 } 4101 4102 if (initializerKind == DataSegmentKind::Active || 4103 initializerKind == DataSegmentKind::ActiveWithMemoryIndex) { 4104 if (segRange.memoryIndex >= codeMeta->numMemories()) { 4105 return d.fail("invalid memory index"); 4106 } 4107 4108 InitExpr segOffset; 4109 ValType exprType = 4110 ToValType(codeMeta->memories[segRange.memoryIndex].addressType()); 4111 if (!InitExpr::decodeAndValidate(d, codeMeta, exprType, &segOffset)) { 4112 return false; 4113 } 4114 segRange.offsetIfActive.emplace(std::move(segOffset)); 4115 } 4116 4117 if (!d.readVarU32(&segRange.length)) { 4118 return d.fail("expected segment size"); 4119 } 4120 4121 if (segRange.length > 
MaxDataSegmentLengthPages * StandardPageSizeBytes) { 4122 return d.fail("segment size too big"); 4123 } 4124 4125 segRange.bytecodeOffset = d.currentOffset(); 4126 4127 if (!d.readBytes(segRange.length)) { 4128 return d.fail("data segment shorter than declared"); 4129 } 4130 4131 if (!moduleMeta->dataSegmentRanges.append(std::move(segRange))) { 4132 return false; 4133 } 4134 } 4135 4136 return d.finishSection(*range, "data"); 4137 } 4138 4139 static bool DecodeModuleNameSubsection(Decoder& d, 4140 const CustomSectionRange& nameSection, 4141 CodeMetadata* codeMeta, 4142 ModuleMetadata* moduleMeta) { 4143 Maybe<uint32_t> endOffset; 4144 if (!d.startNameSubsection(NameType::Module, &endOffset)) { 4145 return false; 4146 } 4147 if (!endOffset) { 4148 return true; 4149 } 4150 4151 Name moduleName; 4152 if (!d.readVarU32(&moduleName.length)) { 4153 return d.fail("failed to read module name length"); 4154 } 4155 4156 MOZ_ASSERT(d.currentOffset() >= nameSection.payload.start); 4157 moduleName.offsetInNamePayload = 4158 d.currentOffset() - nameSection.payload.start; 4159 4160 const uint8_t* bytes; 4161 if (!d.readBytes(moduleName.length, &bytes)) { 4162 return d.fail("failed to read module name bytes"); 4163 } 4164 4165 if (!d.finishNameSubsection(*endOffset)) { 4166 return false; 4167 } 4168 4169 // Only save the module name if the whole subsection validates. 
4170 codeMeta->nameSection->moduleName = moduleName; 4171 return true; 4172 } 4173 4174 static bool DecodeFunctionNameSubsection(Decoder& d, 4175 const CustomSectionRange& nameSection, 4176 CodeMetadata* codeMeta, 4177 ModuleMetadata* moduleMeta) { 4178 Maybe<uint32_t> endOffset; 4179 if (!d.startNameSubsection(NameType::Function, &endOffset)) { 4180 return false; 4181 } 4182 if (!endOffset) { 4183 return true; 4184 } 4185 4186 uint32_t nameCount = 0; 4187 if (!d.readVarU32(&nameCount) || nameCount > MaxFuncs) { 4188 return d.fail("bad function name count"); 4189 } 4190 4191 NameVector funcNames; 4192 4193 for (uint32_t i = 0; i < nameCount; ++i) { 4194 uint32_t funcIndex = 0; 4195 if (!d.readVarU32(&funcIndex)) { 4196 return d.fail("unable to read function index"); 4197 } 4198 4199 // Names must refer to real functions and be given in ascending order. 4200 if (funcIndex >= codeMeta->numFuncs() || funcIndex < funcNames.length()) { 4201 return d.fail("invalid function index"); 4202 } 4203 4204 Name funcName; 4205 if (!d.readVarU32(&funcName.length) || 4206 funcName.length > JS::MaxStringLength) { 4207 return d.fail("unable to read function name length"); 4208 } 4209 4210 if (!funcName.length) { 4211 continue; 4212 } 4213 4214 if (!funcNames.resize(funcIndex + 1)) { 4215 return false; 4216 } 4217 4218 MOZ_ASSERT(d.currentOffset() >= nameSection.payload.start); 4219 funcName.offsetInNamePayload = 4220 d.currentOffset() - nameSection.payload.start; 4221 4222 if (!d.readBytes(funcName.length)) { 4223 return d.fail("unable to read function name bytes"); 4224 } 4225 4226 funcNames[funcIndex] = funcName; 4227 } 4228 4229 if (!d.finishNameSubsection(*endOffset)) { 4230 return false; 4231 } 4232 4233 // Only save names if the entire subsection decoded correctly. 
4234 codeMeta->nameSection->funcNames = std::move(funcNames); 4235 return true; 4236 } 4237 4238 static bool DecodeNameSection(Decoder& d, CodeMetadata* codeMeta, 4239 ModuleMetadata* moduleMeta) { 4240 MaybeBytecodeRange range; 4241 if (!d.startCustomSection(NameSectionName, codeMeta, &range)) { 4242 return false; 4243 } 4244 if (!range) { 4245 return true; 4246 } 4247 4248 codeMeta->nameSection.emplace((NameSection){ 4249 .customSectionIndex = 4250 uint32_t(codeMeta->customSectionRanges.length() - 1), 4251 }); 4252 const CustomSectionRange& nameSection = codeMeta->customSectionRanges.back(); 4253 4254 // Once started, custom sections do not report validation errors. 4255 4256 if (!DecodeModuleNameSubsection(d, nameSection, codeMeta, moduleMeta)) { 4257 goto finish; 4258 } 4259 4260 if (!DecodeFunctionNameSubsection(d, nameSection, codeMeta, moduleMeta)) { 4261 goto finish; 4262 } 4263 4264 while (d.currentOffset() < range->end) { 4265 if (!d.skipNameSubsection()) { 4266 goto finish; 4267 } 4268 } 4269 4270 finish: 4271 if (!d.finishCustomSection(NameSectionName, *range)) { 4272 codeMeta->nameSection = mozilla::Nothing(); 4273 } 4274 return true; 4275 } 4276 4277 bool wasm::DecodeModuleTail(Decoder& d, CodeMetadata* codeMeta, 4278 ModuleMetadata* moduleMeta) { 4279 if (!DecodeDataSection(d, codeMeta, moduleMeta)) { 4280 return false; 4281 } 4282 4283 if (!DecodeNameSection(d, codeMeta, moduleMeta)) { 4284 return false; 4285 } 4286 4287 while (!d.done()) { 4288 if (!d.skipCustomSection(codeMeta)) { 4289 return false; 4290 } 4291 } 4292 4293 return true; 4294 } 4295 4296 // Validate algorithm. 
4297 4298 bool wasm::Validate(JSContext* cx, const BytecodeSource& bytecode, 4299 const FeatureOptions& options, UniqueChars* error) { 4300 FeatureArgs features = FeatureArgs::build(cx, options); 4301 SharedCompileArgs compileArgs = CompileArgs::buildForValidation(features); 4302 if (!compileArgs) { 4303 return false; 4304 } 4305 MutableModuleMetadata moduleMeta = js_new<ModuleMetadata>(); 4306 if (!moduleMeta || !moduleMeta->init(*compileArgs)) { 4307 return false; 4308 } 4309 MutableCodeMetadata codeMeta = moduleMeta->codeMeta; 4310 4311 Decoder envDecoder(bytecode.envSpan(), bytecode.envRange().start, error); 4312 if (!DecodeModuleEnvironment(envDecoder, codeMeta, moduleMeta)) { 4313 return false; 4314 } 4315 4316 if (bytecode.hasCodeSection()) { 4317 // DecodeModuleEnvironment will stop and return true if there is an unknown 4318 // section before the code section. We must check this and return an error. 4319 if (!moduleMeta->codeMeta->codeSectionRange) { 4320 envDecoder.fail("unknown section before code section"); 4321 return false; 4322 } 4323 4324 // Our pre-parse that split the module should ensure that after we've 4325 // parsed the environment there are no bytes left. 4326 MOZ_RELEASE_ASSERT(envDecoder.done()); 4327 4328 Decoder codeDecoder(bytecode.codeSpan(), bytecode.codeRange().start, error); 4329 if (!DecodeCodeSection(codeDecoder, codeMeta)) { 4330 return false; 4331 } 4332 // Our pre-parse that split the module should ensure that after we've 4333 // parsed the code section there are no bytes left. 4334 MOZ_RELEASE_ASSERT(codeDecoder.done()); 4335 4336 Decoder tailDecoder(bytecode.tailSpan(), bytecode.tailRange().start, error); 4337 if (!DecodeModuleTail(tailDecoder, codeMeta, moduleMeta)) { 4338 return false; 4339 } 4340 // Decoding the module tail should consume all remaining bytes. 
4341 MOZ_RELEASE_ASSERT(tailDecoder.done()); 4342 } else { 4343 if (!DecodeCodeSection(envDecoder, codeMeta)) { 4344 return false; 4345 } 4346 if (!DecodeModuleTail(envDecoder, codeMeta, moduleMeta)) { 4347 return false; 4348 } 4349 // Decoding the module tail should consume all remaining bytes. 4350 MOZ_RELEASE_ASSERT(envDecoder.done()); 4351 } 4352 4353 MOZ_ASSERT(!*error, "unreported error in decoding"); 4354 return true; 4355 }