commit 74a5eb2166f033f2f4bd95a7799b0b0eee89ab42
parent a192b4adaca5dce8d96331066ed88390554f7167
Author: Julien Pages <jpages@mozilla.com>
Date: Mon, 15 Dec 2025 14:38:27 +0000
Bug 1998895 - wasm: Avoid an out of memory situation when creating lazy stubs. r=rhunt
Differential Revision: https://phabricator.services.mozilla.com/D275455
Diffstat:
2 files changed, 50 insertions(+), 9 deletions(-)
diff --git a/js/src/jit-test/tests/wasm/gc/bug1998895.js b/js/src/jit-test/tests/wasm/gc/bug1998895.js
@@ -0,0 +1,13 @@
+oomTest(function() {
+ var x = new WebAssembly.Instance(
+ new WebAssembly.Module(
+ wasmTextToBinary(
+ '(module (func $g (result f32) f32.const 1)(table (export "table") 1 funcref)(elem (i32.const 0) $g))',
+ ),
+ ),
+ ).exports.table.get(0);
+ try {
+ x.apply();
+ } catch (e) {}
+ x.call();
+});
diff --git a/js/src/wasm/WasmCode.cpp b/js/src/wasm/WasmCode.cpp
@@ -483,6 +483,16 @@ bool Code::createManyLazyEntryStubs(const WriteGuard& guard,
*stubBlockIndex = guard->blocks.length();
+ if (!guard->lazyExports.reserve(guard->lazyExports.length() +
+ funcExportIndices.length()) ||
+ !addCodeBlock(guard, std::move(stubCodeBlock), nullptr)) {
+ return false;
+ }
+
+ // Everything after this point must be guaranteed to succeed. A failure after
+ // this point can leave things in an inconsistent state, and be observed if we
+ // retry creating a lazy stub.
+
uint32_t codeRangeIndex = 0;
for (uint32_t funcExportIndex : funcExportIndices) {
const FuncExport& fe = funcExports[funcExportIndex];
@@ -511,17 +521,17 @@ bool Code::createManyLazyEntryStubs(const WriteGuard& guard,
MOZ_ASSERT(oldKind == CodeBlockKind::SharedStubs ||
oldKind == CodeBlockKind::BaselineTier);
guard->lazyExports[exportIndex] = std::move(lazyExport);
- } else if (!guard->lazyExports.insert(
- guard->lazyExports.begin() + exportIndex,
- std::move(lazyExport))) {
- return false;
+ } else {
+ // We reserved memory earlier, so this should not fail.
+ MOZ_RELEASE_ASSERT(guard->lazyExports.insert(
+ guard->lazyExports.begin() + exportIndex, std::move(lazyExport)));
}
}
stubCodeBlock->sendToProfiler(*codeMeta_, *codeTailMeta_, codeMetaForAsmJS_,
FuncIonPerfSpewerSpan(),
FuncBaselinePerfSpewerSpan());
- return addCodeBlock(guard, std::move(stubCodeBlock), nullptr);
+ return true;
}
bool Code::createOneLazyEntryStub(const WriteGuard& guard,
@@ -787,10 +797,28 @@ bool Code::addCodeBlock(const WriteGuard& guard, UniqueCodeBlock block,
CodeBlock* blockPtr = block.get();
size_t codeBlockIndex = guard->blocks.length();
- return guard->blocks.append(std::move(block)) &&
- guard->blocksLinkData.append(std::move(maybeLinkData)) &&
- blockMap_.insert(blockPtr) &&
- blockPtr->initialize(*this, codeBlockIndex);
+
+ if (!guard->blocks.reserve(guard->blocks.length() + 1) ||
+ !guard->blocksLinkData.reserve(guard->blocksLinkData.length() + 1)) {
+ return false;
+ }
+
+ // If anything fails here, be careful to roll our state back so that we are
+ // not left in an inconsistent state.
+ if (!blockPtr->initialize(*this, codeBlockIndex)) {
+ return false;
+ }
+
+ if (!blockMap_.insert(blockPtr)) {
+ // We don't need to deinitialize the blockPtr, because that will be
+ // automatically handled by its destructor.
+ return false;
+ }
+
+ guard->blocks.infallibleAppend(std::move(block));
+ guard->blocksLinkData.infallibleAppend(std::move(maybeLinkData));
+
+ return true;
}
SharedCodeSegment Code::createFuncCodeSegmentFromPool(