VMFunctions.cpp (107708B)
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- 2 * vim: set ts=8 sts=2 et sw=2 tw=80: 3 * This Source Code Form is subject to the terms of the Mozilla Public 4 * License, v. 2.0. If a copy of the MPL was not distributed with this 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 6 7 #include "jit/VMFunctions.h" 8 9 #include "mozilla/FloatingPoint.h" 10 11 #include "builtin/MapObject.h" 12 #include "builtin/String.h" 13 #include "gc/Cell.h" 14 #include "gc/GC.h" 15 #include "jit/arm/Simulator-arm.h" 16 #include "jit/AtomicOperations.h" 17 #include "jit/BaselineIC.h" 18 #include "jit/CalleeToken.h" 19 #include "jit/JitFrames.h" 20 #include "jit/JitRuntime.h" 21 #include "jit/mips64/Simulator-mips64.h" 22 #include "jit/Simulator.h" 23 #include "js/experimental/JitInfo.h" 24 #include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_* 25 #include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit 26 #include "js/friend/WindowProxy.h" // js::IsWindow 27 #include "js/Printf.h" 28 #include "js/TraceKind.h" 29 #include "proxy/ScriptedProxyHandler.h" 30 #include "util/Unicode.h" 31 #include "vm/ArrayObject.h" 32 #include "vm/Compartment.h" 33 #include "vm/DateObject.h" 34 #include "vm/Float16.h" 35 #include "vm/Interpreter.h" 36 #include "vm/JSAtomUtils.h" // AtomizeString 37 #include "vm/PlainObject.h" // js::PlainObject 38 #include "vm/SelfHosting.h" 39 #include "vm/StaticStrings.h" 40 #include "vm/TypedArrayObject.h" 41 #include "vm/TypeofEqOperand.h" // TypeofEqOperand 42 #include "vm/Watchtower.h" 43 #include "vm/WrapperObject.h" 44 #include "wasm/WasmGcObject.h" 45 46 #include "debugger/DebugAPI-inl.h" 47 #include "gc/StoreBuffer-inl.h" 48 #include "jit/BaselineFrame-inl.h" 49 #include "jit/VMFunctionList-inl.h" 50 #include "vm/Interpreter-inl.h" 51 #include "vm/JSAtomUtils-inl.h" // TypeName 52 #include "vm/JSContext-inl.h" 53 #include "vm/JSScript-inl.h" 54 #include "vm/NativeObject-inl.h" 55 #include "vm/PlainObject-inl.h" // js::CreateThis 56 #include "vm/StringObject-inl.h" 57 58 using namespace js; 59 using namespace js::jit; 60 61 namespace js { 62 63 class ArgumentsObject; 64 class NamedLambdaObject; 65 class AsyncFunctionGeneratorObject; 66 class RegExpObject; 67 68 namespace jit { 69 70 struct IonOsrTempData; 71 72 struct PopValues { 73 uint8_t numValues; 74 explicit constexpr PopValues(uint8_t numValues = 0) : numValues(numValues) {} 75 }; 76 77 template <class> 78 struct ReturnTypeToDataType { /* Unexpected return type for a VMFunction. */ 79 }; 80 template <> 81 struct ReturnTypeToDataType<void> { 82 static const DataType result = Type_Void; 83 }; 84 template <> 85 struct ReturnTypeToDataType<bool> { 86 static const DataType result = Type_Bool; 87 }; 88 template <class T> 89 struct ReturnTypeToDataType<T*> { 90 // Assume by default that any pointer return types are cells. 91 static_assert(std::is_base_of_v<gc::Cell, T>); 92 93 static const DataType result = Type_Cell; 94 }; 95 96 // Convert argument types to properties of the argument known by the jit. 97 template <class T> 98 struct TypeToArgProperties { 99 static const uint32_t result = 100 (sizeof(T) <= sizeof(void*) ? VMFunctionData::Word 101 : VMFunctionData::Double); 102 }; 103 template <> 104 struct TypeToArgProperties<const Value&> { 105 static const uint32_t result = 106 TypeToArgProperties<Value>::result | VMFunctionData::ByRef; 107 }; 108 template <> 109 struct TypeToArgProperties<HandleValue> { 110 static const uint32_t result = 111 TypeToArgProperties<Value>::result | VMFunctionData::ByRef; 112 }; 113 template <> 114 struct TypeToArgProperties<MutableHandleValue> { 115 static const uint32_t result = 116 TypeToArgProperties<Value>::result | VMFunctionData::ByRef; 117 }; 118 template <> 119 struct TypeToArgProperties<HandleId> { 120 static const uint32_t result = 121 TypeToArgProperties<jsid>::result | VMFunctionData::ByRef; 122 }; 123 template <class T> 124 struct TypeToArgProperties<Handle<T*>> { 125 // Assume by default that any pointer handle types are cells. 126 static_assert(std::is_base_of_v<gc::Cell, T>); 127 128 static const uint32_t result = 129 TypeToArgProperties<T*>::result | VMFunctionData::ByRef; 130 }; 131 template <class T> 132 struct TypeToArgProperties<Handle<T>> { 133 // Fail for Handle types that aren't specialized above. 134 }; 135 136 // Convert argument type to whether or not it should be passed in a float 137 // register on platforms that have them, like x64. 138 template <class T> 139 struct TypeToPassInFloatReg { 140 static const uint32_t result = 0; 141 }; 142 template <> 143 struct TypeToPassInFloatReg<double> { 144 static const uint32_t result = 1; 145 }; 146 147 // Convert argument types to root types used by the gc, see TraceJitExitFrame. 148 template <class T> 149 struct TypeToRootType { 150 static const uint32_t result = VMFunctionData::RootNone; 151 }; 152 template <> 153 struct TypeToRootType<HandleValue> { 154 static const uint32_t result = VMFunctionData::RootValue; 155 }; 156 template <> 157 struct TypeToRootType<MutableHandleValue> { 158 static const uint32_t result = VMFunctionData::RootValue; 159 }; 160 template <> 161 struct TypeToRootType<HandleId> { 162 static const uint32_t result = VMFunctionData::RootId; 163 }; 164 template <class T> 165 struct TypeToRootType<Handle<T*>> { 166 // Assume by default that any pointer types are cells. 167 static_assert(std::is_base_of_v<gc::Cell, T>); 168 169 static constexpr uint32_t rootType() { 170 using JS::TraceKind; 171 172 switch (JS::MapTypeToTraceKind<T>::kind) { 173 case TraceKind::Object: 174 return VMFunctionData::RootObject; 175 case TraceKind::BigInt: 176 return VMFunctionData::RootBigInt; 177 case TraceKind::String: 178 return VMFunctionData::RootString; 179 case TraceKind::Shape: 180 case TraceKind::Script: 181 case TraceKind::Scope: 182 return VMFunctionData::RootCell; 183 case TraceKind::Symbol: 184 case TraceKind::BaseShape: 185 case TraceKind::Null: 186 case TraceKind::JitCode: 187 case TraceKind::RegExpShared: 188 case TraceKind::GetterSetter: 189 case TraceKind::PropMap: 190 MOZ_CRASH("Unexpected trace kind"); 191 } 192 } 193 194 static constexpr uint32_t result = rootType(); 195 }; 196 template <class T> 197 struct TypeToRootType<Handle<T>> { 198 // Fail for Handle types that aren't specialized above. 199 }; 200 201 template <class> 202 struct OutParamToDataType { 203 static const DataType result = Type_Void; 204 }; 205 template <class T> 206 struct OutParamToDataType<const T*> { 207 // Const pointers can't be output parameters. 208 static const DataType result = Type_Void; 209 }; 210 template <> 211 struct OutParamToDataType<uint64_t*> { 212 // Already used as an input type, so it can't be used as an output param. 213 static const DataType result = Type_Void; 214 }; 215 template <> 216 struct OutParamToDataType<JSObject*> { 217 // Already used as an input type, so it can't be used as an output param. 218 static const DataType result = Type_Void; 219 }; 220 template <> 221 struct OutParamToDataType<JSString*> { 222 // Already used as an input type, so it can't be used as an output param. 223 static const DataType result = Type_Void; 224 }; 225 template <> 226 struct OutParamToDataType<BaselineFrame*> { 227 // Already used as an input type, so it can't be used as an output param. 228 static const DataType result = Type_Void; 229 }; 230 template <> 231 struct OutParamToDataType<gc::AllocSite*> { 232 // Already used as an input type, so it can't be used as an output param. 233 static const DataType result = Type_Void; 234 }; 235 template <> 236 struct OutParamToDataType<Value*> { 237 static const DataType result = Type_Value; 238 }; 239 template <> 240 struct OutParamToDataType<int*> { 241 static const DataType result = Type_Int32; 242 }; 243 template <> 244 struct OutParamToDataType<uint32_t*> { 245 static const DataType result = Type_Int32; 246 }; 247 template <> 248 struct OutParamToDataType<bool*> { 249 static const DataType result = Type_Bool; 250 }; 251 template <> 252 struct OutParamToDataType<double*> { 253 static const DataType result = Type_Double; 254 }; 255 template <class T> 256 struct OutParamToDataType<T*> { 257 // Fail for pointer types that aren't specialized above. 258 }; 259 template <class T> 260 struct OutParamToDataType<T**> { 261 static const DataType result = Type_Pointer; 262 }; 263 template <class T> 264 struct OutParamToDataType<MutableHandle<T>> { 265 static const DataType result = Type_Handle; 266 }; 267 268 template <class> 269 struct OutParamToRootType { 270 static const VMFunctionData::RootType result = VMFunctionData::RootNone; 271 }; 272 template <> 273 struct OutParamToRootType<MutableHandleValue> { 274 static const VMFunctionData::RootType result = VMFunctionData::RootValue; 275 }; 276 template <> 277 struct OutParamToRootType<MutableHandleObject> { 278 static const VMFunctionData::RootType result = VMFunctionData::RootObject; 279 }; 280 template <> 281 struct OutParamToRootType<MutableHandleString> { 282 static const VMFunctionData::RootType result = VMFunctionData::RootString; 283 }; 284 template <> 285 struct OutParamToRootType<MutableHandleBigInt> { 286 static const VMFunctionData::RootType result = VMFunctionData::RootBigInt; 287 }; 288 289 // Construct a bit mask from a list of types. The mask is constructed as an OR 290 // of the mask produced for each argument. The result of each argument is 291 // shifted by its index, such that the result of the first argument is on the 292 // low bits of the mask, and the result of the last argument in part of the 293 // high bits of the mask. 294 template <template <typename> class Each, typename ResultType, size_t Shift, 295 typename... Args> 296 struct BitMask; 297 298 template <template <typename> class Each, typename ResultType, size_t Shift> 299 struct BitMask<Each, ResultType, Shift> { 300 static constexpr ResultType result = ResultType(); 301 }; 302 303 template <template <typename> class Each, typename ResultType, size_t Shift, 304 typename HeadType, typename... TailTypes> 305 struct BitMask<Each, ResultType, Shift, HeadType, TailTypes...> { 306 static_assert(ResultType(Each<HeadType>::result) < (1 << Shift), 307 "not enough bits reserved by the shift for individual results"); 308 static_assert(sizeof...(TailTypes) < (8 * sizeof(ResultType) / Shift), 309 "not enough bits in the result type to store all bit masks"); 310 311 static constexpr ResultType result = 312 ResultType(Each<HeadType>::result) | 313 (BitMask<Each, ResultType, Shift, TailTypes...>::result << Shift); 314 }; 315 316 // Helper template to build the VMFunctionData for a function. 317 template <typename... Args> 318 struct VMFunctionDataHelper; 319 320 template <class R, typename... Args> 321 struct VMFunctionDataHelper<R (*)(JSContext*, Args...)> 322 : public VMFunctionData { 323 using Fun = R (*)(JSContext*, Args...); 324 325 static constexpr DataType returnType() { 326 return ReturnTypeToDataType<R>::result; 327 } 328 static constexpr DataType outParam() { 329 return OutParamToDataType<typename LastArg<Args...>::Type>::result; 330 } 331 static constexpr RootType outParamRootType() { 332 return OutParamToRootType<typename LastArg<Args...>::Type>::result; 333 } 334 static constexpr size_t NbArgs() { return sizeof...(Args); } 335 static constexpr size_t explicitArgs() { 336 return NbArgs() - (outParam() != Type_Void ? 1 : 0); 337 } 338 static constexpr uint32_t argumentProperties() { 339 return BitMask<TypeToArgProperties, uint32_t, 2, Args...>::result; 340 } 341 static constexpr uint32_t argumentPassedInFloatRegs() { 342 return BitMask<TypeToPassInFloatReg, uint32_t, 2, Args...>::result; 343 } 344 static constexpr uint64_t argumentRootTypes() { 345 return BitMask<TypeToRootType, uint64_t, 3, Args...>::result; 346 } 347 constexpr explicit VMFunctionDataHelper(const char* name) 348 : VMFunctionData(name, explicitArgs(), argumentProperties(), 349 argumentPassedInFloatRegs(), argumentRootTypes(), 350 outParam(), outParamRootType(), returnType(), 351 /* extraValuesToPop = */ 0) {} 352 constexpr explicit VMFunctionDataHelper(const char* name, 353 PopValues extraValuesToPop) 354 : VMFunctionData(name, explicitArgs(), argumentProperties(), 355 argumentPassedInFloatRegs(), argumentRootTypes(), 356 outParam(), outParamRootType(), returnType(), 357 extraValuesToPop.numValues) {} 358 }; 359 360 // GCC warns when the signature does not have matching attributes (for example 361 // [[nodiscard]]). Squelch this warning to avoid a GCC-only footgun. 362 #if MOZ_IS_GCC 363 # pragma GCC diagnostic push 364 # pragma GCC diagnostic ignored "-Wignored-attributes" 365 #endif 366 367 // Generate VMFunctionData array. 368 static constexpr VMFunctionData vmFunctions[] = { 369 #define DEF_VMFUNCTION(name, fp, valuesToPop...) \ 370 VMFunctionDataHelper<decltype(&fp)>(#name, PopValues(valuesToPop)), 371 VMFUNCTION_LIST(DEF_VMFUNCTION) 372 #undef DEF_VMFUNCTION 373 }; 374 375 #if MOZ_IS_GCC 376 # pragma GCC diagnostic pop 377 #endif 378 379 // Generate arrays storing C++ function pointers. These pointers are not stored 380 // in VMFunctionData because there's no good way to cast them to void* in 381 // constexpr code. Compilers are smart enough to treat the const array below as 382 // constexpr. 383 #define DEF_VMFUNCTION(name, fp, ...) (void*)(fp), 384 static void* const vmFunctionTargets[] = {VMFUNCTION_LIST(DEF_VMFUNCTION)}; 385 #undef DEF_VMFUNCTION 386 387 const VMFunctionData& GetVMFunction(VMFunctionId id) { 388 return vmFunctions[size_t(id)]; 389 } 390 391 static DynFn GetVMFunctionTarget(VMFunctionId id) { 392 return DynFn{vmFunctionTargets[size_t(id)]}; 393 } 394 395 size_t NumVMFunctions() { return size_t(VMFunctionId::Count); } 396 397 size_t VMFunctionData::sizeOfOutParamStackSlot() const { 398 switch (outParam) { 399 case Type_Value: 400 return sizeof(Value); 401 402 case Type_Pointer: 403 case Type_Int32: 404 case Type_Bool: 405 return sizeof(uintptr_t); 406 407 case Type_Double: 408 return sizeof(double); 409 410 case Type_Handle: 411 switch (outParamRootType) { 412 case RootNone: 413 MOZ_CRASH("Handle must have root type"); 414 case RootObject: 415 case RootString: 416 case RootCell: 417 case RootBigInt: 418 case RootId: 419 return sizeof(uintptr_t); 420 case RootValue: 421 return sizeof(Value); 422 } 423 MOZ_CRASH("Invalid type"); 424 425 case Type_Void: 426 return 0; 427 428 case Type_Cell: 429 MOZ_CRASH("Unexpected outparam type"); 430 } 431 432 MOZ_CRASH("Invalid type"); 433 } 434 435 bool JitRuntime::generateVMWrappers(JSContext* cx, MacroAssembler& masm, 436 PerfSpewerRangeRecorder& rangeRecorder) { 437 // Generate all VM function wrappers. 438 439 static constexpr size_t NumVMFunctions = size_t(VMFunctionId::Count); 440 441 if (!functionWrapperOffsets_.reserve(NumVMFunctions)) { 442 return false; 443 } 444 445 #ifdef DEBUG 446 const char* lastName = nullptr; 447 #endif 448 449 for (size_t i = 0; i < NumVMFunctions; i++) { 450 VMFunctionId id = VMFunctionId(i); 451 const VMFunctionData& fun = GetVMFunction(id); 452 453 #ifdef DEBUG 454 // Assert the list is sorted by name. 455 if (lastName) { 456 MOZ_ASSERT(strcmp(lastName, fun.name()) < 0, 457 "VM function list must be sorted by name"); 458 } 459 lastName = fun.name(); 460 #endif 461 462 JitSpew(JitSpew_Codegen, "# VM function wrapper (%s)", fun.name()); 463 464 uint32_t offset; 465 if (!generateVMWrapper(cx, masm, id, fun, GetVMFunctionTarget(id), 466 &offset)) { 467 return false; 468 } 469 #if defined(JS_ION_PERF) 470 rangeRecorder.recordVMWrapperOffset(fun.name()); 471 #else 472 rangeRecorder.recordOffset("Trampoline: VMWrapper"); 473 #endif 474 475 MOZ_ASSERT(functionWrapperOffsets_.length() == size_t(id)); 476 functionWrapperOffsets_.infallibleAppend(offset); 477 } 478 479 return true; 480 }; 481 482 bool InvokeFunction(JSContext* cx, HandleObject obj, bool constructing, 483 bool ignoresReturnValue, uint32_t argc, Value* argv, 484 MutableHandleValue rval) { 485 RootedExternalValueArray argvRoot(cx, argc + 1 + constructing, argv); 486 487 // Data in the argument vector is arranged for a JIT -> JIT call. 488 RootedValue thisv(cx, argv[0]); 489 Value* argvWithoutThis = argv + 1; 490 491 RootedValue fval(cx, ObjectValue(*obj)); 492 if (constructing) { 493 if (!IsConstructor(fval)) { 494 ReportValueError(cx, JSMSG_NOT_CONSTRUCTOR, JSDVG_IGNORE_STACK, fval, 495 nullptr); 496 return false; 497 } 498 499 ConstructArgs cargs(cx); 500 if (!cargs.init(cx, argc)) { 501 return false; 502 } 503 504 for (uint32_t i = 0; i < argc; i++) { 505 cargs[i].set(argvWithoutThis[i]); 506 } 507 508 RootedValue newTarget(cx, argvWithoutThis[argc]); 509 510 // The JIT ABI expects at least callee->nargs() arguments, with undefined 511 // values passed for missing formal arguments. These undefined values are 512 // passed before newTarget. We don't normally insert undefined values when 513 // calling native functions like this one, but by detecting and supporting 514 // that case here, it is easier for jit code to fall back to InvokeFunction 515 // as a slow path. 516 if (newTarget.isUndefined()) { 517 MOZ_RELEASE_ASSERT(obj->is<JSFunction>()); 518 JSFunction* callee = &obj->as<JSFunction>(); 519 #ifdef DEBUG 520 MOZ_ASSERT(callee->nargs() > argc); 521 for (uint32_t i = argc; i < callee->nargs(); i++) { 522 MOZ_ASSERT(argvWithoutThis[i].isUndefined()); 523 } 524 #endif 525 newTarget = argvWithoutThis[callee->nargs()]; 526 MOZ_ASSERT(newTarget.isObject()); 527 } 528 529 // See CreateThisFromIon for why this can be NullValue. 530 if (thisv.isNull()) { 531 thisv.setMagic(JS_IS_CONSTRUCTING); 532 } 533 534 // If |this| hasn't been created, or is JS_UNINITIALIZED_LEXICAL, 535 // we can use normal construction code without creating an extraneous 536 // object. 537 if (thisv.isMagic()) { 538 MOZ_ASSERT(thisv.whyMagic() == JS_IS_CONSTRUCTING || 539 thisv.whyMagic() == JS_UNINITIALIZED_LEXICAL); 540 541 RootedObject obj(cx); 542 if (!Construct(cx, fval, cargs, newTarget, &obj)) { 543 return false; 544 } 545 546 rval.setObject(*obj); 547 return true; 548 } 549 550 // Otherwise the default |this| has already been created. We could 551 // almost perform a *call* at this point, but we'd break |new.target| 552 // in the function. So in this one weird case we call a one-off 553 // construction path that *won't* set |this| to JS_IS_CONSTRUCTING. 554 return InternalConstructWithProvidedThis(cx, fval, thisv, cargs, newTarget, 555 rval); 556 } 557 558 InvokeArgsMaybeIgnoresReturnValue args(cx); 559 if (!args.init(cx, argc, ignoresReturnValue)) { 560 return false; 561 } 562 563 for (size_t i = 0; i < argc; i++) { 564 args[i].set(argvWithoutThis[i]); 565 } 566 567 return Call(cx, fval, thisv, args, rval); 568 } 569 570 void* GetContextSensitiveInterpreterStub() { 571 return TlsContext.get()->runtime()->jitRuntime()->interpreterStub().value; 572 } 573 574 bool InvokeFromInterpreterStub(JSContext* cx, 575 InterpreterStubExitFrameLayout* frame) { 576 JitFrameLayout* jsFrame = frame->jsFrame(); 577 CalleeToken token = jsFrame->calleeToken(); 578 579 Value* argv = jsFrame->thisAndActualArgs(); 580 uint32_t numActualArgs = jsFrame->numActualArgs(); 581 bool constructing = CalleeTokenIsConstructing(token); 582 RootedFunction fun(cx, CalleeTokenToFunction(token)); 583 584 // Ensure new.target immediately follows the actual arguments (the JIT 585 // ABI passes `undefined` for missing formals). 586 if (constructing && numActualArgs < fun->nargs()) { 587 argv[1 + numActualArgs] = argv[1 + fun->nargs()]; 588 } 589 590 RootedValue rval(cx); 591 if (!InvokeFunction(cx, fun, constructing, 592 /* ignoresReturnValue = */ false, numActualArgs, argv, 593 &rval)) { 594 return false; 595 } 596 597 // Overwrite |this| with the return value. 598 argv[0] = rval; 599 return true; 600 } 601 602 static bool CheckOverRecursedImpl(JSContext* cx, size_t extra) { 603 // We just failed the jitStackLimit check. There are two possible reasons: 604 // 1) jitStackLimit was the real stack limit and we're over-recursed 605 // 2) jitStackLimit was set to JS::NativeStackLimitMin by 606 // JSContext::requestInterrupt and we need to call 607 // JSContext::handleInterrupt. 608 609 // This handles 1). 610 #ifdef JS_SIMULATOR 611 if (cx->simulator()->overRecursedWithExtra(extra)) { 612 ReportOverRecursed(cx); 613 return false; 614 } 615 #else 616 AutoCheckRecursionLimit recursion(cx); 617 if (!recursion.checkWithExtra(cx, extra)) { 618 return false; 619 } 620 #endif 621 622 // This handles 2). 623 gc::MaybeVerifyBarriers(cx); 624 return cx->handleInterrupt(); 625 } 626 627 bool CheckOverRecursed(JSContext* cx) { return CheckOverRecursedImpl(cx, 0); } 628 629 bool CheckOverRecursedBaseline(JSContext* cx, BaselineFrame* frame) { 630 // The stack check in Baseline happens before pushing locals so we have to 631 // account for that by including script->nslots() in the C++ recursion check. 632 size_t extra = frame->script()->nslots() * sizeof(Value); 633 return CheckOverRecursedImpl(cx, extra); 634 } 635 636 bool MutatePrototype(JSContext* cx, Handle<PlainObject*> obj, 637 HandleValue value) { 638 if (!value.isObjectOrNull()) { 639 return true; 640 } 641 642 RootedObject newProto(cx, value.toObjectOrNull()); 643 return SetPrototype(cx, obj, newProto); 644 } 645 646 template <EqualityKind Kind> 647 bool StringsEqual(JSContext* cx, HandleString lhs, HandleString rhs, 648 bool* res) { 649 JSLinearString* linearLhs = lhs->ensureLinear(cx); 650 if (!linearLhs) { 651 return false; 652 } 653 JSLinearString* linearRhs = rhs->ensureLinear(cx); 654 if (!linearRhs) { 655 return false; 656 } 657 658 *res = EqualChars(linearLhs, linearRhs); 659 660 if constexpr (Kind == EqualityKind::NotEqual) { 661 *res = !*res; 662 } 663 return true; 664 } 665 666 template bool StringsEqual<EqualityKind::Equal>(JSContext* cx, HandleString lhs, 667 HandleString rhs, bool* res); 668 template bool StringsEqual<EqualityKind::NotEqual>(JSContext* cx, 669 HandleString lhs, 670 HandleString rhs, bool* res); 671 672 template <ComparisonKind Kind> 673 bool StringsCompare(JSContext* cx, HandleString lhs, HandleString rhs, 674 bool* res) { 675 int32_t result; 676 if (!js::CompareStrings(cx, lhs, rhs, &result)) { 677 return false; 678 } 679 if (Kind == ComparisonKind::LessThan) { 680 *res = result < 0; 681 } else { 682 *res = result >= 0; 683 } 684 return true; 685 } 686 687 template bool StringsCompare<ComparisonKind::LessThan>(JSContext* cx, 688 HandleString lhs, 689 HandleString rhs, 690 bool* res); 691 template bool StringsCompare<ComparisonKind::GreaterThanOrEqual>( 692 JSContext* cx, HandleString lhs, HandleString rhs, bool* res); 693 694 JSString* ArrayJoin(JSContext* cx, HandleObject array, HandleString sep) { 695 JS::RootedValueArray<3> argv(cx); 696 argv[0].setUndefined(); 697 argv[1].setObject(*array); 698 argv[2].setString(sep); 699 if (!js::array_join(cx, 1, argv.begin())) { 700 return nullptr; 701 } 702 return argv[0].toString(); 703 } 704 705 bool SetArrayLength(JSContext* cx, HandleObject obj, HandleValue value, 706 bool strict) { 707 Handle<ArrayObject*> array = obj.as<ArrayObject>(); 708 709 RootedId id(cx, NameToId(cx->names().length)); 710 ObjectOpResult result; 711 712 // SetArrayLength is called by IC stubs for SetProp and SetElem on arrays' 713 // "length" property. 714 // 715 // ArraySetLength below coerces |value| before checking for length being 716 // writable, and in the case of illegal values, will throw RangeError even 717 // when "length" is not writable. This is incorrect observable behavior, 718 // as a regular [[Set]] operation will check for "length" being 719 // writable before attempting any assignment. 720 // 721 // So, perform ArraySetLength if and only if "length" is writable. 722 if (array->lengthIsWritable()) { 723 Rooted<PropertyDescriptor> desc( 724 cx, PropertyDescriptor::Data(value, JS::PropertyAttribute::Writable)); 725 if (!ArraySetLength(cx, array, id, desc, result)) { 726 return false; 727 } 728 } else { 729 MOZ_ALWAYS_TRUE(result.fail(JSMSG_READ_ONLY)); 730 } 731 732 return result.checkStrictModeError(cx, obj, id, strict); 733 } 734 735 bool CharCodeAt(JSContext* cx, HandleString str, int32_t index, 736 uint32_t* code) { 737 char16_t c; 738 if (!str->getChar(cx, index, &c)) { 739 return false; 740 } 741 *code = c; 742 return true; 743 } 744 745 bool CodePointAt(JSContext* cx, HandleString str, int32_t index, 746 uint32_t* code) { 747 char32_t codePoint; 748 if (!str->getCodePoint(cx, size_t(index), &codePoint)) { 749 return false; 750 } 751 *code = codePoint; 752 return true; 753 } 754 755 JSLinearString* StringFromCharCodeNoGC(JSContext* cx, int32_t code) { 756 AutoUnsafeCallWithABI unsafe; 757 758 char16_t c = char16_t(code); 759 760 if (StaticStrings::hasUnit(c)) { 761 return cx->staticStrings().getUnit(c); 762 } 763 764 return NewInlineString<NoGC>(cx, {c}, 1); 765 } 766 767 JSLinearString* LinearizeForCharAccessPure(JSString* str) { 768 AutoUnsafeCallWithABI unsafe; 769 770 // Should only be called on ropes. 771 MOZ_ASSERT(str->isRope()); 772 773 // ensureLinear is intentionally called with a nullptr to avoid OOM reporting. 774 return str->ensureLinear(nullptr); 775 } 776 777 JSLinearString* LinearizeForCharAccess(JSContext* cx, JSString* str) { 778 // Should only be called on ropes. 779 MOZ_ASSERT(str->isRope()); 780 781 return str->ensureLinear(cx); 782 } 783 784 template <typename CharT> 785 static size_t StringTrimStartIndex(mozilla::Range<CharT> chars) { 786 size_t begin = 0; 787 while (begin < chars.length() && unicode::IsSpace(chars[begin])) { 788 ++begin; 789 } 790 return begin; 791 } 792 793 template <typename CharT> 794 static size_t StringTrimEndIndex(mozilla::Range<CharT> chars, size_t begin) { 795 size_t end = chars.length(); 796 while (end > begin && unicode::IsSpace(chars[end - 1])) { 797 --end; 798 } 799 return end; 800 } 801 802 int32_t StringTrimStartIndex(const JSString* str) { 803 AutoUnsafeCallWithABI unsafe; 804 805 MOZ_ASSERT(str->isLinear()); 806 807 const auto* linear = &str->asLinear(); 808 809 size_t begin; 810 if (linear->hasLatin1Chars()) { 811 JS::AutoCheckCannotGC nogc; 812 begin = StringTrimStartIndex(linear->latin1Range(nogc)); 813 } else { 814 JS::AutoCheckCannotGC nogc; 815 begin = StringTrimStartIndex(linear->twoByteRange(nogc)); 816 } 817 return int32_t(begin); 818 } 819 820 int32_t StringTrimEndIndex(const JSString* str, int32_t start) { 821 AutoUnsafeCallWithABI unsafe; 822 823 MOZ_ASSERT(str->isLinear()); 824 MOZ_ASSERT(start >= 0 && size_t(start) <= str->length()); 825 826 const auto* linear = &str->asLinear(); 827 828 size_t end; 829 if (linear->hasLatin1Chars()) { 830 JS::AutoCheckCannotGC nogc; 831 end = StringTrimEndIndex(linear->latin1Range(nogc), size_t(start)); 832 } else { 833 JS::AutoCheckCannotGC nogc; 834 end = StringTrimEndIndex(linear->twoByteRange(nogc), size_t(start)); 835 } 836 return int32_t(end); 837 } 838 839 JSString* CharCodeToLowerCase(JSContext* cx, int32_t code) { 840 JSString* str = StringFromCharCode(cx, code); 841 if (!str) { 842 return nullptr; 843 } 844 return js::StringToLowerCase(cx, str); 845 } 846 847 JSString* CharCodeToUpperCase(JSContext* cx, int32_t code) { 848 JSString* str = StringFromCharCode(cx, code); 849 if (!str) { 850 return nullptr; 851 } 852 return js::StringToUpperCase(cx, str); 853 } 854 855 bool SetProperty(JSContext* cx, HandleObject obj, Handle<PropertyName*> name, 856 HandleValue value, bool strict, jsbytecode* pc) { 857 RootedId id(cx, NameToId(name)); 858 859 RootedValue receiver(cx, ObjectValue(*obj)); 860 ObjectOpResult result; 861 if (MOZ_LIKELY(!obj->getOpsSetProperty())) { 862 JSOp op = JSOp(*pc); 863 if (op == JSOp::SetName || op == JSOp::StrictSetName || 864 op == JSOp::SetGName || op == JSOp::StrictSetGName) { 865 if (!NativeSetProperty<Unqualified>(cx, obj.as<NativeObject>(), id, value, 866 receiver, result)) { 867 return false; 868 } 869 } else { 870 if (!NativeSetProperty<Qualified>(cx, obj.as<NativeObject>(), id, value, 871 receiver, result)) { 872 return false; 873 } 874 } 875 } else { 876 if (!SetProperty(cx, obj, id, value, receiver, result)) { 877 return false; 878 } 879 } 880 return result.checkStrictModeError(cx, obj, id, strict); 881 } 882 883 bool InterruptCheck(JSContext* cx) { 884 gc::MaybeVerifyBarriers(cx); 885 886 return CheckForInterrupt(cx); 887 } 888 889 JSObject* NewStringObject(JSContext* cx, HandleString str) { 890 return StringObject::create(cx, str); 891 } 892 893 bool OperatorIn(JSContext* cx, HandleValue key, HandleObject obj, bool* out) { 894 RootedId id(cx); 895 return ToPropertyKey(cx, key, &id) && HasProperty(cx, obj, id, out); 896 } 897 898 bool GetIntrinsicValue(JSContext* cx, Handle<PropertyName*> name, 899 MutableHandleValue rval) { 900 return GlobalObject::getIntrinsicValue(cx, cx->global(), name, rval); 901 } 902 903 bool CreateThisFromIC(JSContext* cx, HandleObject callee, 904 HandleObject newTarget, MutableHandleValue rval) { 905 HandleFunction fun = callee.as<JSFunction>(); 906 MOZ_ASSERT(fun->isInterpreted()); 907 MOZ_ASSERT(fun->isConstructor()); 908 MOZ_ASSERT(cx->realm() == fun->realm(), 909 "Realm switching happens before creating this"); 910 911 // CreateThis expects rval to be this magic value. 912 rval.set(MagicValue(JS_IS_CONSTRUCTING)); 913 914 if (!js::CreateThis(cx, fun, newTarget, GenericObject, rval)) { 915 return false; 916 } 917 918 MOZ_ASSERT_IF(rval.isObject(), fun->realm() == rval.toObject().nonCCWRealm()); 919 return true; 920 } 921 922 bool CreateThisFromIon(JSContext* cx, HandleObject callee, 923 HandleObject newTarget, MutableHandleValue rval) { 924 // Return JS_IS_CONSTRUCTING for cases not supported by the inline call path. 925 rval.set(MagicValue(JS_IS_CONSTRUCTING)); 926 927 if (!callee->is<JSFunction>()) { 928 return true; 929 } 930 931 HandleFunction fun = callee.as<JSFunction>(); 932 if (!fun->isInterpreted() || !fun->isConstructor()) { 933 return true; 934 } 935 936 // If newTarget is not a function or is a function with a possibly-getter 937 // .prototype property, return NullValue to signal to LCallGeneric that it has 938 // to take the slow path. Note that we return NullValue instead of a 939 // MagicValue only because it's easier and faster to check for in JIT code 940 // (if we returned a MagicValue, JIT code would have to check both the type 941 // tag and the JSWhyMagic payload). 942 if (!fun->constructorNeedsUninitializedThis()) { 943 if (!newTarget->is<JSFunction>()) { 944 rval.setNull(); 945 return true; 946 } 947 JSFunction* newTargetFun = &newTarget->as<JSFunction>(); 948 if (!newTargetFun->hasNonConfigurablePrototypeDataProperty()) { 949 rval.setNull(); 950 return true; 951 } 952 } 953 954 AutoRealm ar(cx, fun); 955 if (!js::CreateThis(cx, fun, newTarget, GenericObject, rval)) { 956 return false; 957 } 958 959 MOZ_ASSERT_IF(rval.isObject(), fun->realm() == rval.toObject().nonCCWRealm()); 960 return true; 961 } 962 963 void PostWriteBarrier(JSRuntime* rt, js::gc::Cell* cell) { 964 AutoUnsafeCallWithABI unsafe; 965 rt->gc.storeBuffer().putWholeCellDontCheckLast(cell); 966 } 967 968 static const size_t MAX_WHOLE_CELL_BUFFER_SIZE = 256; 969 970 void PostWriteElementBarrier(JSRuntime* rt, JSObject* obj, int32_t index) { 971 AutoUnsafeCallWithABI unsafe; 972 973 MOZ_ASSERT(!IsInsideNursery(obj)); 974 975 NativeObject* nobj = &obj->as<NativeObject>(); 976 977 MOZ_ASSERT(index >= 0); 978 MOZ_ASSERT(uint32_t(index) < nobj->getDenseInitializedLength()); 979 980 if (gc::StoreBuffer::isInWholeCellBuffer(nobj)) { 981 return; 982 } 983 984 gc::StoreBuffer* sb = &rt->gc.storeBuffer(); 985 if (nobj->getDenseInitializedLength() > MAX_WHOLE_CELL_BUFFER_SIZE || 986 rt->hasZealMode(gc::ZealMode::ElementsBarrier)) { 987 sb->putSlot(nobj, HeapSlot::Element, nobj->unshiftedIndex(index), 1); 988 return; 989 } 990 991 sb->putWholeCell(obj); 992 } 993 994 void PostGlobalWriteBarrier(JSRuntime* rt, GlobalObject* obj) { 995 MOZ_ASSERT(obj->JSObject::is<GlobalObject>()); 996 997 if (!obj->realm()->globalWriteBarriered) { 998 AutoUnsafeCallWithABI unsafe; 999 rt->gc.storeBuffer().putWholeCell(obj); 1000 obj->realm()->globalWriteBarriered = 1; 1001 } 1002 } 1003 1004 bool GetInt32FromStringPure(JSContext* cx, JSString* str, int32_t* result) { 1005 // We shouldn't GC here as this is called directly from IC code. 1006 AutoUnsafeCallWithABI unsafe; 1007 1008 double d; 1009 if (!StringToNumberPure(cx, str, &d)) { 1010 return false; 1011 } 1012 1013 return mozilla::NumberIsInt32(d, result); 1014 } 1015 1016 int32_t GetIndexFromString(JSString* str) { 1017 // We shouldn't GC here as this is called directly from IC code. 1018 AutoUnsafeCallWithABI unsafe; 1019 1020 if (!str->isLinear()) { 1021 return -1; 1022 } 1023 1024 uint32_t index = UINT32_MAX; // Initialize this to appease Valgrind. 1025 if (!str->asLinear().isIndex(&index) || index > INT32_MAX) { 1026 return -1; 1027 } 1028 1029 return int32_t(index); 1030 } 1031 1032 JSObject* WrapObjectPure(JSContext* cx, JSObject* obj) { 1033 // IC code calls this directly so we shouldn't GC. 1034 AutoUnsafeCallWithABI unsafe; 1035 1036 MOZ_ASSERT(obj); 1037 MOZ_ASSERT(cx->compartment() != obj->compartment()); 1038 1039 // From: Compartment::getNonWrapperObjectForCurrentCompartment 1040 // Note that if the object is same-compartment, but has been wrapped into a 1041 // different compartment, we need to unwrap it and return the bare same- 1042 // compartment object. Note again that windows are always wrapped by a 1043 // WindowProxy even when same-compartment so take care not to strip this 1044 // particular wrapper. 1045 obj = UncheckedUnwrap(obj, /* stopAtWindowProxy = */ true); 1046 if (cx->compartment() == obj->compartment()) { 1047 MOZ_ASSERT(!IsWindow(obj)); 1048 JS::ExposeObjectToActiveJS(obj); 1049 return obj; 1050 } 1051 1052 // Try to Lookup an existing wrapper for this object. We assume that 1053 // if we can find such a wrapper, not calling preWrap is correct. 1054 if (ObjectWrapperMap::Ptr p = cx->compartment()->lookupWrapper(obj)) { 1055 JSObject* wrapped = p->value().get(); 1056 1057 // Ensure the wrapper is still exposed. 1058 JS::ExposeObjectToActiveJS(wrapped); 1059 return wrapped; 1060 } 1061 1062 return nullptr; 1063 } 1064 1065 bool DebugPrologue(JSContext* cx, BaselineFrame* frame) { 1066 return DebugAPI::onEnterFrame(cx, frame); 1067 } 1068 1069 bool DebugEpilogueOnBaselineReturn(JSContext* cx, BaselineFrame* frame, 1070 const jsbytecode* pc) { 1071 if (!DebugEpilogue(cx, frame, pc, true)) { 1072 return false; 1073 } 1074 1075 return true; 1076 } 1077 1078 bool DebugEpilogue(JSContext* cx, BaselineFrame* frame, const jsbytecode* pc, 1079 bool ok) { 1080 // If DebugAPI::onLeaveFrame returns |true| we have to return the frame's 1081 // return value. If it returns |false|, the debugger threw an exception. 1082 // In both cases we have to pop debug scopes. 1083 ok = DebugAPI::onLeaveFrame(cx, frame, pc, ok); 1084 1085 // Unwind to the outermost environment. 1086 EnvironmentIter ei(cx, frame, pc); 1087 UnwindAllEnvironmentsInFrame(cx, ei); 1088 1089 if (!ok) { 1090 // Pop this frame by updating packedExitFP, so that the exception 1091 // handling code will start at the previous frame. 1092 JitFrameLayout* prefix = frame->framePrefix(); 1093 EnsureUnwoundJitExitFrame(cx->activation()->asJit(), prefix); 1094 return false; 1095 } 1096 1097 return true; 1098 } 1099 1100 void FrameIsDebuggeeCheck(BaselineFrame* frame) { 1101 AutoUnsafeCallWithABI unsafe; 1102 if (frame->script()->isDebuggee()) { 1103 frame->setIsDebuggee(); 1104 } 1105 } 1106 1107 JSObject* CreateGeneratorFromFrame(JSContext* cx, BaselineFrame* frame) { 1108 return AbstractGeneratorObject::createFromFrame(cx, frame); 1109 } 1110 1111 JSObject* CreateGenerator(JSContext* cx, HandleFunction callee, 1112 HandleScript script, HandleObject environmentChain, 1113 HandleObject args) { 1114 Rooted<ArgumentsObject*> argsObj( 1115 cx, args ? &args->as<ArgumentsObject>() : nullptr); 1116 return AbstractGeneratorObject::create(cx, callee, script, environmentChain, 1117 argsObj); 1118 } 1119 1120 bool NormalSuspend(JSContext* cx, HandleObject obj, BaselineFrame* frame, 1121 uint32_t frameSize, const jsbytecode* pc) { 1122 MOZ_ASSERT(JSOp(*pc) == JSOp::InitialYield || JSOp(*pc) == JSOp::Yield || 1123 JSOp(*pc) == JSOp::Await); 1124 1125 // Minus one because we don't want to include the return value. 1126 uint32_t numSlots = frame->numValueSlots(frameSize) - 1; 1127 MOZ_ASSERT(numSlots >= frame->script()->nfixed()); 1128 return AbstractGeneratorObject::suspend(cx, obj, frame, pc, numSlots); 1129 } 1130 1131 bool FinalSuspend(JSContext* cx, HandleObject obj, const jsbytecode* pc) { 1132 MOZ_ASSERT(JSOp(*pc) == JSOp::FinalYieldRval); 1133 AbstractGeneratorObject::finalSuspend(cx, obj); 1134 return true; 1135 } 1136 1137 bool InterpretResume(JSContext* cx, HandleObject obj, Value* stackValues, 1138 MutableHandleValue rval) { 1139 MOZ_ASSERT(obj->is<AbstractGeneratorObject>()); 1140 1141 // The |stackValues| argument points to the JSOp::Resume operands on the 1142 // native stack. Because the stack grows down, these values are: 1143 // 1144 // [resumeKind, argument, generator, ..] 1145 1146 MOZ_ASSERT(stackValues[2].toObject() == *obj); 1147 1148 GeneratorResumeKind resumeKind = IntToResumeKind(stackValues[0].toInt32()); 1149 JSAtom* kind = ResumeKindToAtom(cx, resumeKind); 1150 1151 FixedInvokeArgs<3> args(cx); 1152 1153 args[0].setObject(*obj); 1154 args[1].set(stackValues[1]); 1155 args[2].setString(kind); 1156 1157 return CallSelfHostedFunction(cx, cx->names().InterpretGeneratorResume, 1158 UndefinedHandleValue, args, rval); 1159 } 1160 1161 bool DebugAfterYield(JSContext* cx, BaselineFrame* frame) { 1162 // The BaselineFrame has just been constructed by JSOp::Resume in the 1163 // caller. We need to set its debuggee flag as necessary. 1164 // 1165 // If a breakpoint is set on JSOp::AfterYield, or stepping is enabled, 1166 // we may already have done this work. Don't fire onEnterFrame again. 1167 if (frame->script()->isDebuggee() && !frame->isDebuggee()) { 1168 frame->setIsDebuggee(); 1169 return DebugAPI::onResumeFrame(cx, frame); 1170 } 1171 1172 return true; 1173 } 1174 1175 bool GeneratorThrowOrReturn(JSContext* cx, BaselineFrame* frame, 1176 Handle<AbstractGeneratorObject*> genObj, 1177 HandleValue arg, int32_t resumeKindArg) { 1178 GeneratorResumeKind resumeKind = IntToResumeKind(resumeKindArg); 1179 MOZ_ALWAYS_FALSE( 1180 js::GeneratorThrowOrReturn(cx, frame, genObj, arg, resumeKind)); 1181 return false; 1182 } 1183 1184 bool GlobalDeclInstantiationFromIon(JSContext* cx, HandleScript script, 1185 const jsbytecode* pc) { 1186 MOZ_ASSERT(!script->hasNonSyntacticScope()); 1187 1188 RootedObject envChain(cx, &cx->global()->lexicalEnvironment()); 1189 GCThingIndex lastFun = GET_GCTHING_INDEX(pc); 1190 1191 return GlobalOrEvalDeclInstantiation(cx, envChain, script, lastFun); 1192 } 1193 1194 bool InitFunctionEnvironmentObjects(JSContext* cx, BaselineFrame* frame) { 1195 return frame->initFunctionEnvironmentObjects(cx); 1196 } 1197 1198 bool NewArgumentsObject(JSContext* cx, BaselineFrame* frame, 1199 MutableHandleValue res) { 1200 ArgumentsObject* obj = ArgumentsObject::createExpected(cx, frame); 1201 if (!obj) { 1202 return false; 1203 } 1204 res.setObject(*obj); 1205 return true; 1206 } 1207 1208 ArrayObject* NewArrayObjectEnsureDenseInitLength(JSContext* cx, int32_t count) { 1209 MOZ_ASSERT(count >= 0); 1210 1211 auto* array = NewDenseFullyAllocatedArray(cx, count); 1212 if (!array) { 1213 return nullptr; 1214 } 1215 array->ensureDenseInitializedLength(0, count); 1216 1217 return array; 1218 } 1219 1220 ArrayObject* InitRestParameter(JSContext* cx, uint32_t length, Value* rest, 1221 Handle<ArrayObject*> arrRes) { 1222 if (arrRes) { 1223 // Fast path: we managed to allocate the array inline; initialize the 1224 // elements. 1225 MOZ_ASSERT(arrRes->getDenseInitializedLength() == 0); 1226 1227 // We don't call this function if we can initialize the elements in JIT 1228 // code. 1229 MOZ_ASSERT(length > arrRes->getDenseCapacity()); 1230 1231 if (!arrRes->growElements(cx, length)) { 1232 return nullptr; 1233 } 1234 arrRes->initDenseElements(rest, length); 1235 arrRes->setLengthToInitializedLength(); 1236 return arrRes; 1237 } 1238 1239 return NewDenseCopiedArray(cx, length, rest); 1240 } 1241 1242 bool HandleDebugTrap(JSContext* cx, BaselineFrame* frame, 1243 const uint8_t* retAddr) { 1244 RootedScript script(cx, frame->script()); 1245 jsbytecode* pc; 1246 if (frame->runningInInterpreter()) { 1247 pc = frame->interpreterPC(); 1248 } else { 1249 BaselineScript* blScript = script->baselineScript(); 1250 pc = blScript->retAddrEntryFromReturnAddress(retAddr).pc(script); 1251 } 1252 1253 // The Baseline Interpreter calls HandleDebugTrap for every op when the script 1254 // is in step mode or has breakpoints. The Baseline Compiler can toggle 1255 // breakpoints more granularly for specific bytecode PCs. 1256 if (frame->runningInInterpreter()) { 1257 MOZ_ASSERT(DebugAPI::hasAnyBreakpointsOrStepMode(script)); 1258 } else { 1259 MOZ_ASSERT(DebugAPI::stepModeEnabled(script) || 1260 DebugAPI::hasBreakpointsAt(script, pc)); 1261 } 1262 1263 if (JSOp(*pc) == JSOp::AfterYield) { 1264 // JSOp::AfterYield will set the frame's debuggee flag and call the 1265 // onEnterFrame handler, but if we set a breakpoint there we have to do 1266 // it now. 1267 MOZ_ASSERT(!frame->isDebuggee()); 1268 1269 if (!DebugAfterYield(cx, frame)) { 1270 return false; 1271 } 1272 1273 // If the frame is not a debuggee we're done. This can happen, for instance, 1274 // if the onEnterFrame hook called removeDebuggee. 1275 if (!frame->isDebuggee()) { 1276 return true; 1277 } 1278 } 1279 1280 MOZ_ASSERT(frame->isDebuggee()); 1281 1282 if (DebugAPI::stepModeEnabled(script) && !DebugAPI::onSingleStep(cx)) { 1283 return false; 1284 } 1285 1286 if (DebugAPI::hasBreakpointsAt(script, pc) && !DebugAPI::onTrap(cx)) { 1287 return false; 1288 } 1289 1290 return true; 1291 } 1292 1293 bool OnDebuggerStatement(JSContext* cx, BaselineFrame* frame) { 1294 return DebugAPI::onDebuggerStatement(cx, frame); 1295 } 1296 1297 bool GlobalHasLiveOnDebuggerStatement(JSContext* cx) { 1298 AutoUnsafeCallWithABI unsafe; 1299 return cx->realm()->isDebuggee() && 1300 DebugAPI::hasDebuggerStatementHook(cx->global()); 1301 } 1302 1303 bool PushLexicalEnv(JSContext* cx, BaselineFrame* frame, 1304 Handle<LexicalScope*> scope) { 1305 return frame->pushLexicalEnvironment(cx, scope); 1306 } 1307 1308 bool DebugLeaveThenPopLexicalEnv(JSContext* cx, BaselineFrame* frame, 1309 const jsbytecode* pc) { 1310 MOZ_ALWAYS_TRUE(DebugLeaveLexicalEnv(cx, frame, pc)); 1311 frame->popOffEnvironmentChain<ScopedLexicalEnvironmentObject>(); 1312 return true; 1313 } 1314 1315 bool FreshenLexicalEnv(JSContext* cx, BaselineFrame* frame) { 1316 return frame->freshenLexicalEnvironment<false>(cx); 1317 } 1318 1319 bool DebuggeeFreshenLexicalEnv(JSContext* cx, BaselineFrame* frame, 1320 const jsbytecode* pc) { 1321 return frame->freshenLexicalEnvironment<true>(cx, pc); 1322 } 1323 1324 bool RecreateLexicalEnv(JSContext* cx, BaselineFrame* frame) { 1325 return frame->recreateLexicalEnvironment<false>(cx); 1326 } 1327 1328 bool DebuggeeRecreateLexicalEnv(JSContext* cx, BaselineFrame* frame, 1329 const jsbytecode* pc) { 1330 return frame->recreateLexicalEnvironment<true>(cx, pc); 1331 } 1332 1333 bool DebugLeaveLexicalEnv(JSContext* cx, BaselineFrame* frame, 1334 const jsbytecode* pc) { 1335 MOZ_ASSERT_IF(!frame->runningInInterpreter(), 1336 frame->script()->baselineScript()->hasDebugInstrumentation()); 1337 if (cx->realm()->isDebuggee()) { 1338 DebugEnvironments::onPopLexical(cx, frame, pc); 1339 } 1340 return true; 1341 } 1342 1343 bool PushClassBodyEnv(JSContext* cx, BaselineFrame* frame, 1344 Handle<ClassBodyScope*> scope) { 1345 return frame->pushClassBodyEnvironment(cx, scope); 1346 } 1347 1348 bool PushVarEnv(JSContext* cx, BaselineFrame* frame, Handle<Scope*> scope) { 1349 return frame->pushVarEnvironment(cx, scope); 1350 } 1351 1352 bool EnterWith(JSContext* cx, BaselineFrame* frame, HandleValue val, 1353 Handle<WithScope*> templ) { 1354 return EnterWithOperation(cx, frame, val, templ); 1355 } 1356 1357 bool LeaveWith(JSContext* cx, BaselineFrame* frame) { 1358 if (MOZ_UNLIKELY(frame->isDebuggee())) { 1359 DebugEnvironments::onPopWith(frame); 1360 } 1361 frame->popOffEnvironmentChain<WithEnvironmentObject>(); 1362 return true; 1363 } 1364 1365 bool InitBaselineFrameForOsr(BaselineFrame* frame, 1366 InterpreterFrame* interpFrame, 1367 uint32_t numStackValues) { 1368 return frame->initForOsr(interpFrame, numStackValues); 1369 } 1370 1371 JSString* StringReplace(JSContext* cx, HandleString string, 1372 HandleString pattern, HandleString repl) { 1373 MOZ_ASSERT(string); 1374 MOZ_ASSERT(pattern); 1375 MOZ_ASSERT(repl); 1376 1377 return str_replace_string_raw(cx, string, pattern, repl); 1378 } 1379 1380 void AssertValidBigIntPtr(JSContext* cx, JS::BigInt* bi) { 1381 AutoUnsafeCallWithABI unsafe; 1382 // FIXME: check runtime? 1383 MOZ_ASSERT(cx->zone() == bi->zone()); 1384 MOZ_ASSERT(bi->isAligned()); 1385 MOZ_ASSERT(bi->getAllocKind() == gc::AllocKind::BIGINT); 1386 } 1387 1388 void AssertValidObjectPtr(JSContext* cx, JSObject* obj) { 1389 AutoUnsafeCallWithABI unsafe; 1390 #ifdef DEBUG 1391 // Check what we can, so that we'll hopefully assert/crash if we get a 1392 // bogus object (pointer). 1393 MOZ_ASSERT(obj->compartment() == cx->compartment()); 1394 MOZ_ASSERT(obj->zoneFromAnyThread() == cx->zone()); 1395 MOZ_ASSERT(obj->runtimeFromMainThread() == cx->runtime()); 1396 1397 if (obj->isTenured()) { 1398 MOZ_ASSERT(obj->isAligned()); 1399 gc::AllocKind kind = obj->asTenured().getAllocKind(); 1400 MOZ_ASSERT(gc::IsObjectAllocKind(kind)); 1401 } 1402 #endif 1403 } 1404 1405 void AssertValidStringPtr(JSContext* cx, JSString* str) { 1406 AutoUnsafeCallWithABI unsafe; 1407 #ifdef DEBUG 1408 // We can't closely inspect strings from another runtime. 1409 if (str->runtimeFromAnyThread() != cx->runtime()) { 1410 MOZ_ASSERT(str->isPermanentAtom()); 1411 return; 1412 } 1413 1414 if (str->isAtom()) { 1415 MOZ_ASSERT(str->zone()->isAtomsZone()); 1416 } else { 1417 MOZ_ASSERT(str->zone() == cx->zone()); 1418 } 1419 1420 MOZ_ASSERT(str->isAligned()); 1421 MOZ_ASSERT(str->length() <= JSString::MAX_LENGTH); 1422 1423 gc::AllocKind kind = str->getAllocKind(); 1424 if (str->isFatInline()) { 1425 if (str->isAtom()) { 1426 MOZ_ASSERT(kind == gc::AllocKind::FAT_INLINE_ATOM); 1427 } else { 1428 MOZ_ASSERT(kind == gc::AllocKind::FAT_INLINE_STRING); 1429 } 1430 } else if (str->isExternal()) { 1431 MOZ_ASSERT(kind == gc::AllocKind::EXTERNAL_STRING); 1432 } else if (str->isAtom()) { 1433 MOZ_ASSERT(kind == gc::AllocKind::ATOM); 1434 } else if (str->isLinear()) { 1435 MOZ_ASSERT(kind == gc::AllocKind::STRING || 1436 kind == gc::AllocKind::FAT_INLINE_STRING); 1437 } else { 1438 MOZ_ASSERT(kind == gc::AllocKind::STRING); 1439 } 1440 #endif 1441 } 1442 1443 void AssertValidSymbolPtr(JSContext* cx, JS::Symbol* sym) { 1444 AutoUnsafeCallWithABI unsafe; 1445 1446 // We can't closely inspect symbols from another runtime. 1447 if (sym->runtimeFromAnyThread() != cx->runtime()) { 1448 MOZ_ASSERT(sym->isWellKnownSymbol()); 1449 return; 1450 } 1451 1452 MOZ_ASSERT(sym->zone()->isAtomsZone()); 1453 MOZ_ASSERT(sym->isAligned()); 1454 if (JSAtom* desc = sym->description()) { 1455 AssertValidStringPtr(cx, desc); 1456 } 1457 1458 MOZ_ASSERT(sym->getAllocKind() == gc::AllocKind::SYMBOL); 1459 } 1460 1461 void AssertValidValue(JSContext* cx, Value* v) { 1462 AutoUnsafeCallWithABI unsafe; 1463 if (v->isObject()) { 1464 AssertValidObjectPtr(cx, &v->toObject()); 1465 } else if (v->isString()) { 1466 AssertValidStringPtr(cx, v->toString()); 1467 } else if (v->isSymbol()) { 1468 AssertValidSymbolPtr(cx, v->toSymbol()); 1469 } else if (v->isBigInt()) { 1470 AssertValidBigIntPtr(cx, v->toBigInt()); 1471 } 1472 } 1473 1474 bool ObjectIsCallable(JSObject* obj) { 1475 AutoUnsafeCallWithABI unsafe; 1476 return obj->isCallable(); 1477 } 1478 1479 bool ObjectIsConstructor(JSObject* obj) { 1480 AutoUnsafeCallWithABI unsafe; 1481 return obj->isConstructor(); 1482 } 1483 1484 JSObject* ObjectKeys(JSContext* cx, HandleObject obj) { 1485 JS::RootedValueArray<3> argv(cx); 1486 argv[0].setUndefined(); // rval 1487 argv[1].setUndefined(); // this 1488 argv[2].setObject(*obj); // arg0 1489 if (!js::obj_keys(cx, 1, argv.begin())) { 1490 return nullptr; 1491 } 1492 return argv[0].toObjectOrNull(); 1493 } 1494 1495 JSObject* ObjectKeysFromIterator(JSContext* cx, HandleObject iterObj) { 1496 MOZ_RELEASE_ASSERT(iterObj->is<PropertyIteratorObject>()); 1497 NativeIterator* iter = 1498 iterObj->as<PropertyIteratorObject>().getNativeIterator(); 1499 1500 size_t length = iter->ownPropertyCount(); 1501 Rooted<ArrayObject*> array(cx, NewDenseFullyAllocatedArray(cx, length)); 1502 if (!array) { 1503 return nullptr; 1504 } 1505 1506 array->ensureDenseInitializedLength(0, length); 1507 1508 for (size_t i = 0; i < length; ++i) { 1509 array->initDenseElement( 1510 i, StringValue((iter->propertiesBegin() + i)->asString())); 1511 } 1512 1513 return array; 1514 } 1515 1516 bool ObjectKeysLength(JSContext* cx, HandleObject obj, int32_t* length) { 1517 MOZ_ASSERT(!obj->is<ProxyObject>()); 1518 return js::obj_keys_length(cx, obj, *length); 1519 } 1520 1521 void JitValuePreWriteBarrier(JSRuntime* rt, Value* vp) { 1522 AutoUnsafeCallWithABI unsafe; 1523 MOZ_ASSERT(vp->isGCThing()); 1524 MOZ_ASSERT(!vp->toGCThing()->isMarkedBlack()); 1525 gc::ValuePreWriteBarrier(*vp); 1526 } 1527 1528 void JitStringPreWriteBarrier(JSRuntime* rt, JSString** stringp) { 1529 AutoUnsafeCallWithABI unsafe; 1530 MOZ_ASSERT(*stringp); 1531 MOZ_ASSERT(!(*stringp)->isMarkedBlack()); 1532 gc::PreWriteBarrier(*stringp); 1533 } 1534 1535 void JitObjectPreWriteBarrier(JSRuntime* rt, JSObject** objp) { 1536 AutoUnsafeCallWithABI unsafe; 1537 MOZ_ASSERT(*objp); 1538 MOZ_ASSERT(!(*objp)->isMarkedBlack()); 1539 gc::PreWriteBarrier(*objp); 1540 } 1541 1542 void JitShapePreWriteBarrier(JSRuntime* rt, Shape** shapep) { 1543 AutoUnsafeCallWithABI unsafe; 1544 MOZ_ASSERT(!(*shapep)->isMarkedBlack()); 1545 gc::PreWriteBarrier(*shapep); 1546 } 1547 1548 void JitWasmAnyRefPreWriteBarrier(JSRuntime* rt, wasm::AnyRef* refp) { 1549 AutoUnsafeCallWithABI unsafe; 1550 MOZ_ASSERT(refp->isGCThing()); 1551 MOZ_ASSERT(!(*refp).toGCThing()->isMarkedBlack()); 1552 gc::WasmAnyRefPreWriteBarrier(*refp); 1553 } 1554 1555 bool ThrowRuntimeLexicalError(JSContext* cx, unsigned errorNumber) { 1556 ScriptFrameIter iter(cx); 1557 RootedScript script(cx, iter.script()); 1558 ReportRuntimeLexicalError(cx, errorNumber, script, iter.pc()); 1559 return false; 1560 } 1561 1562 bool ThrowBadDerivedReturnOrUninitializedThis(JSContext* cx, HandleValue v) { 1563 MOZ_ASSERT(!v.isObject()); 1564 if (v.isUndefined()) { 1565 return js::ThrowUninitializedThis(cx); 1566 } 1567 1568 ReportValueError(cx, JSMSG_BAD_DERIVED_RETURN, JSDVG_IGNORE_STACK, v, 1569 nullptr); 1570 return false; 1571 } 1572 1573 bool BaselineGetFunctionThis(JSContext* cx, BaselineFrame* frame, 1574 MutableHandleValue res) { 1575 return GetFunctionThis(cx, frame, res); 1576 } 1577 1578 bool CallNativeGetter(JSContext* cx, HandleFunction callee, 1579 HandleValue receiver, MutableHandleValue result) { 1580 AutoRealm ar(cx, callee); 1581 1582 MOZ_ASSERT(callee->isNativeFun()); 1583 JSNative natfun = callee->native(); 1584 1585 JS::RootedValueArray<2> vp(cx); 1586 vp[0].setObject(*callee.get()); 1587 vp[1].set(receiver); 1588 1589 if (!natfun(cx, 0, vp.begin())) { 1590 return false; 1591 } 1592 1593 result.set(vp[0]); 1594 return true; 1595 } 1596 1597 bool CallDOMGetter(JSContext* cx, const JSJitInfo* info, HandleObject obj, 1598 MutableHandleValue result) { 1599 MOZ_ASSERT(info->type() == JSJitInfo::Getter); 1600 MOZ_ASSERT(obj->is<NativeObject>()); 1601 MOZ_ASSERT(obj->getClass()->isDOMClass()); 1602 MOZ_ASSERT(obj->as<NativeObject>().numFixedSlots() > 0); 1603 1604 #ifdef DEBUG 1605 DOMInstanceClassHasProtoAtDepth instanceChecker = 1606 cx->runtime()->DOMcallbacks->instanceClassMatchesProto; 1607 MOZ_ASSERT(instanceChecker(obj->getClass(), info->protoID, info->depth)); 1608 #endif 1609 1610 // Loading DOM_OBJECT_SLOT, which must be the first slot. 1611 JS::Value val = JS::GetReservedSlot(obj, 0); 1612 JSJitGetterOp getter = info->getter; 1613 return getter(cx, obj, val.toPrivate(), JSJitGetterCallArgs(result)); 1614 } 1615 1616 bool CallNativeSetter(JSContext* cx, HandleFunction callee, HandleObject obj, 1617 HandleValue rhs) { 1618 AutoRealm ar(cx, callee); 1619 1620 MOZ_ASSERT(callee->isNativeFun()); 1621 JSNative natfun = callee->native(); 1622 1623 JS::RootedValueArray<3> vp(cx); 1624 vp[0].setObject(*callee.get()); 1625 vp[1].setObject(*obj.get()); 1626 vp[2].set(rhs); 1627 1628 return natfun(cx, 1, vp.begin()); 1629 } 1630 1631 bool CallDOMSetter(JSContext* cx, const JSJitInfo* info, HandleObject obj, 1632 HandleValue value) { 1633 MOZ_ASSERT(info->type() == JSJitInfo::Setter); 1634 MOZ_ASSERT(obj->is<NativeObject>()); 1635 MOZ_ASSERT(obj->getClass()->isDOMClass()); 1636 MOZ_ASSERT(obj->as<NativeObject>().numFixedSlots() > 0); 1637 1638 #ifdef DEBUG 1639 DOMInstanceClassHasProtoAtDepth instanceChecker = 1640 cx->runtime()->DOMcallbacks->instanceClassMatchesProto; 1641 MOZ_ASSERT(instanceChecker(obj->getClass(), info->protoID, info->depth)); 1642 #endif 1643 1644 // Loading DOM_OBJECT_SLOT, which must be the first slot. 1645 JS::Value val = JS::GetReservedSlot(obj, 0); 1646 JSJitSetterOp setter = info->setter; 1647 1648 RootedValue v(cx, value); 1649 return setter(cx, obj, val.toPrivate(), JSJitSetterCallArgs(&v)); 1650 } 1651 1652 bool EqualStringsHelperPure(JSString* str1, JSString* str2) { 1653 // IC code calls this directly so we shouldn't GC. 1654 AutoUnsafeCallWithABI unsafe; 1655 1656 MOZ_ASSERT(str1->isAtom()); 1657 MOZ_ASSERT(!str2->isAtom()); 1658 MOZ_ASSERT(str1->length() == str2->length()); 1659 1660 // ensureLinear is intentionally called with a nullptr to avoid OOM 1661 // reporting; if it fails, we will continue to the next stub. 1662 JSLinearString* str2Linear = str2->ensureLinear(nullptr); 1663 if (!str2Linear) { 1664 return false; 1665 } 1666 1667 return EqualChars(&str1->asLinear(), str2Linear); 1668 } 1669 1670 static bool MaybeTypedArrayIndexString(PropertyKey key) { 1671 MOZ_ASSERT(key.isAtom() || key.isSymbol()); 1672 1673 if (MOZ_LIKELY(key.isAtom())) { 1674 JSAtom* str = key.toAtom(); 1675 if (str->length() > 0) { 1676 // Only check the first character because we want this function to be 1677 // fast. 1678 return CanStartTypedArrayIndex(str->latin1OrTwoByteChar(0)); 1679 } 1680 } 1681 return false; 1682 } 1683 1684 static void VerifyCacheEntry(JSContext* cx, NativeObject* obj, PropertyKey key, 1685 const MegamorphicCacheEntry& entry) { 1686 #ifdef DEBUG 1687 if (entry.isMissingProperty()) { 1688 NativeObject* pobj; 1689 PropertyResult prop; 1690 MOZ_ASSERT(LookupPropertyPure(cx, obj, key, &pobj, &prop)); 1691 MOZ_ASSERT(prop.isNotFound()); 1692 return; 1693 } 1694 if (entry.isMissingOwnProperty()) { 1695 MOZ_ASSERT(!obj->containsPure(key)); 1696 return; 1697 } 1698 MOZ_ASSERT(entry.isDataProperty() || entry.isAccessorProperty()); 1699 for (size_t i = 0, numHops = entry.numHops(); i < numHops; i++) { 1700 MOZ_ASSERT(!obj->containsPure(key)); 1701 obj = &obj->staticPrototype()->as<NativeObject>(); 1702 } 1703 mozilla::Maybe<PropertyInfo> prop = obj->lookupPure(key); 1704 MOZ_ASSERT(prop.isSome()); 1705 MOZ_ASSERT_IF(entry.isDataProperty(), prop->isDataProperty()); 1706 MOZ_ASSERT_IF(!entry.isDataProperty(), prop->isAccessorProperty()); 1707 MOZ_ASSERT(obj->getTaggedSlotOffset(prop->slot()) == entry.slotOffset()); 1708 #endif 1709 } 1710 1711 template <AllowGC allowGC> 1712 static MOZ_ALWAYS_INLINE bool MaybeGetNativePropertyAndWriteToCache( 1713 JSContext* cx, JSObject* obj, PropertyKey key, MegamorphicCacheEntry* entry, 1714 Value* vp) { 1715 MOZ_ASSERT(obj->is<NativeObject>()); 1716 NativeObject* nobj = &obj->as<NativeObject>(); 1717 Shape* receiverShape = obj->shape(); 1718 MegamorphicCache& cache = cx->caches().megamorphicCache; 1719 1720 MOZ_ASSERT(entry); 1721 1722 size_t numHops = 0; 1723 while (true) { 1724 MOZ_ASSERT(!nobj->getOpsLookupProperty()); 1725 1726 uint32_t index; 1727 if (PropMap* map = nobj->shape()->lookup(cx, key, &index)) { 1728 PropertyInfo prop = map->getPropertyInfo(index); 1729 if (prop.isDataProperty()) { 1730 TaggedSlotOffset offset = nobj->getTaggedSlotOffset(prop.slot()); 1731 cache.initEntryForDataProperty(entry, receiverShape, key, numHops, 1732 offset); 1733 *vp = nobj->getSlot(prop.slot()); 1734 return true; 1735 } 1736 if constexpr (allowGC) { 1737 // There's nothing fundamentally blocking us from supporting these, 1738 // it's just not a priority 1739 if (prop.isCustomDataProperty()) { 1740 return false; 1741 } 1742 1743 TaggedSlotOffset offset = nobj->getTaggedSlotOffset(prop.slot()); 1744 MOZ_ASSERT(prop.isAccessorProperty()); 1745 cache.initEntryForAccessorProperty(entry, receiverShape, key, numHops, 1746 offset); 1747 vp->setUndefined(); 1748 1749 if (!nobj->hasGetter(prop)) { 1750 return true; 1751 } 1752 1753 RootedValue getter(cx, nobj->getGetterValue(prop)); 1754 RootedValue receiver(cx, ObjectValue(*obj)); 1755 RootedValue rootedValue(cx); 1756 if (js::CallGetter(cx, receiver, getter, &rootedValue)) { 1757 *vp = rootedValue; 1758 return true; 1759 } 1760 return false; 1761 } else { 1762 return false; 1763 } 1764 } 1765 1766 // Property not found. Watch out for Class hooks and TypedArrays. 1767 if (MOZ_UNLIKELY(!nobj->is<PlainObject>())) { 1768 if (ClassMayResolveId(cx->names(), nobj->getClass(), key, nobj)) { 1769 return false; 1770 } 1771 1772 // Don't skip past TypedArrayObjects if the key can be a TypedArray index. 1773 if (nobj->is<TypedArrayObject>()) { 1774 if (MaybeTypedArrayIndexString(key)) { 1775 return false; 1776 } 1777 } 1778 } 1779 1780 JSObject* proto = nobj->staticPrototype(); 1781 if (!proto) { 1782 cache.initEntryForMissingProperty(entry, receiverShape, key); 1783 vp->setUndefined(); 1784 return true; 1785 } 1786 1787 if (!proto->is<NativeObject>()) { 1788 return false; 1789 } 1790 nobj = &proto->as<NativeObject>(); 1791 numHops++; 1792 } 1793 } 1794 1795 bool GetNativeDataPropertyPureWithCacheLookup(JSContext* cx, JSObject* obj, 1796 PropertyKey key, 1797 MegamorphicCacheEntry* entry, 1798 Value* vp) { 1799 AutoUnsafeCallWithABI unsafe; 1800 1801 // If we're on x86, we didn't have enough registers to populate this 1802 // directly in Baseline JITted code, so we do the lookup here. 1803 Shape* receiverShape = obj->shape(); 1804 MegamorphicCache& cache = cx->caches().megamorphicCache; 1805 1806 if (cache.lookup(receiverShape, key, &entry)) { 1807 NativeObject* nobj = &obj->as<NativeObject>(); 1808 VerifyCacheEntry(cx, nobj, key, *entry); 1809 if (entry->isDataProperty()) { 1810 for (size_t i = 0, numHops = entry->numHops(); i < numHops; i++) { 1811 nobj = &nobj->staticPrototype()->as<NativeObject>(); 1812 } 1813 uint32_t offset = entry->slotOffset().offset(); 1814 if (entry->slotOffset().isFixedSlot()) { 1815 size_t index = NativeObject::getFixedSlotIndexFromOffset(offset); 1816 *vp = nobj->getFixedSlot(index); 1817 } else { 1818 size_t index = NativeObject::getDynamicSlotIndexFromOffset(offset); 1819 *vp = nobj->getDynamicSlot(index); 1820 } 1821 return true; 1822 } 1823 if (entry->isMissingProperty()) { 1824 vp->setUndefined(); 1825 return true; 1826 } 1827 if (entry->isAccessorProperty()) { 1828 return false; 1829 } 1830 MOZ_ASSERT(entry->isMissingOwnProperty()); 1831 } 1832 1833 return MaybeGetNativePropertyAndWriteToCache<NoGC>(cx, obj, key, entry, vp); 1834 } 1835 1836 bool CheckProxyGetByValueResult(JSContext* cx, HandleObject obj, 1837 HandleValue idVal, HandleValue value, 1838 MutableHandleValue result) { 1839 MOZ_ASSERT(idVal.isString() || idVal.isSymbol()); 1840 RootedId rootedId(cx); 1841 if (!PrimitiveValueToId<CanGC>(cx, idVal, &rootedId)) { 1842 return false; 1843 } 1844 1845 auto validation = 1846 ScriptedProxyHandler::checkGetTrapResult(cx, obj, rootedId, value); 1847 if (validation != ScriptedProxyHandler::GetTrapValidationResult::OK) { 1848 ScriptedProxyHandler::reportGetTrapValidationError(cx, rootedId, 1849 validation); 1850 return false; 1851 } 1852 result.set(value); 1853 return true; 1854 } 1855 1856 bool GetNativeDataPropertyPure(JSContext* cx, JSObject* obj, PropertyKey id, 1857 MegamorphicCacheEntry* entry, Value* vp) { 1858 AutoUnsafeCallWithABI unsafe; 1859 return MaybeGetNativePropertyAndWriteToCache<NoGC>(cx, obj, id, entry, vp); 1860 } 1861 1862 // Non-inlined implementation of ValueToAtomOrSymbolPure for less common types. 1863 static bool ValueToAtomOrSymbolSlow(JSContext* cx, const Value& keyVal, 1864 PropertyKey* key) { 1865 MOZ_ASSERT(!keyVal.isString()); 1866 MOZ_ASSERT(!keyVal.isSymbol()); 1867 1868 if (keyVal.isNull()) { 1869 *key = NameToId(cx->names().null); 1870 return true; 1871 } 1872 if (keyVal.isUndefined()) { 1873 *key = NameToId(cx->names().undefined); 1874 return true; 1875 } 1876 if (keyVal.isBoolean()) { 1877 *key = 1878 NameToId(keyVal.toBoolean() ? cx->names().true_ : cx->names().false_); 1879 return true; 1880 } 1881 if (keyVal.isNumber() && !IsNumberIndex(keyVal)) { 1882 JSAtom* atom = NumberToAtom(cx, keyVal.toNumber()); 1883 if (!atom) { 1884 cx->recoverFromOutOfMemory(); 1885 return false; 1886 } 1887 *key = PropertyKey::NonIntAtom(atom); 1888 return true; 1889 } 1890 1891 return false; 1892 } 1893 1894 static MOZ_ALWAYS_INLINE bool ValueToAtomOrSymbolPure(JSContext* cx, 1895 const Value& keyVal, 1896 PropertyKey* key) { 1897 if (MOZ_LIKELY(keyVal.isString())) { 1898 JSAtom* atom = AtomizeString(cx, keyVal.toString()); 1899 if (!atom) { 1900 cx->recoverFromOutOfMemory(); 1901 return false; 1902 } 1903 1904 // Watch out for integer ids because they may be stored in dense elements. 1905 static_assert(PropertyKey::IntMin == 0); 1906 static_assert(NativeObject::MAX_DENSE_ELEMENTS_COUNT < PropertyKey::IntMax, 1907 "All dense elements must have integer jsids"); 1908 uint32_t index; 1909 if (MOZ_UNLIKELY(atom->isIndex(&index) && index <= PropertyKey::IntMax)) { 1910 return false; 1911 } 1912 1913 *key = PropertyKey::NonIntAtom(atom); 1914 return true; 1915 } 1916 1917 if (keyVal.isSymbol()) { 1918 *key = PropertyKey::Symbol(keyVal.toSymbol()); 1919 return true; 1920 } 1921 1922 return ValueToAtomOrSymbolSlow(cx, keyVal, key); 1923 } 1924 1925 bool GetNativeDataPropertyByValuePure(JSContext* cx, JSObject* obj, 1926 MegamorphicCacheEntry* entry, Value* vp) { 1927 AutoUnsafeCallWithABI unsafe; 1928 1929 // vp[0] contains the key, result will be stored in vp[1]. 1930 Value keyVal = vp[0]; 1931 PropertyKey key; 1932 if (!ValueToAtomOrSymbolPure(cx, keyVal, &key)) { 1933 return false; 1934 } 1935 1936 Shape* receiverShape = obj->shape(); 1937 MegamorphicCache& cache = cx->caches().megamorphicCache; 1938 if (!entry) { 1939 cache.lookup(receiverShape, key, &entry); 1940 } 1941 1942 Value* res = vp + 1; 1943 return MaybeGetNativePropertyAndWriteToCache<NoGC>(cx, obj, key, entry, res); 1944 } 1945 1946 bool GetPropertyCached(JSContext* cx, HandleObject obj, HandleId id, 1947 MegamorphicCacheEntry* entry, 1948 MutableHandleValue result) { 1949 if (entry->isMissingProperty()) { 1950 result.setUndefined(); 1951 return true; 1952 } 1953 1954 MOZ_ASSERT(entry->isDataProperty() || entry->isAccessorProperty()); 1955 1956 NativeObject* nobj = &obj->as<NativeObject>(); 1957 for (size_t i = 0, numHops = entry->numHops(); i < numHops; i++) { 1958 nobj = &nobj->staticPrototype()->as<NativeObject>(); 1959 } 1960 1961 uint32_t offset = entry->slotOffset().offset(); 1962 if (entry->slotOffset().isFixedSlot()) { 1963 size_t index = NativeObject::getFixedSlotIndexFromOffset(offset); 1964 result.set(nobj->getFixedSlot(index)); 1965 } else { 1966 size_t index = NativeObject::getDynamicSlotIndexFromOffset(offset); 1967 result.set(nobj->getDynamicSlot(index)); 1968 } 1969 1970 // If it's a data property, we're done - otherwise we need to try to call the 1971 // getter 1972 if (entry->isDataProperty()) { 1973 return true; 1974 } 1975 1976 JSObject* getter = result.toGCThing()->as<GetterSetter>()->getter(); 1977 if (getter) { 1978 RootedValue getterValue(cx, ObjectValue(*getter)); 1979 RootedValue receiver(cx, ObjectValue(*obj)); 1980 return js::CallGetter(cx, receiver, getterValue, result); 1981 } 1982 result.setUndefined(); 1983 return true; 1984 } 1985 1986 bool GetPropMaybeCached(JSContext* cx, HandleObject obj, HandleId id, 1987 MegamorphicCacheEntry* entry, 1988 MutableHandleValue result) { 1989 if (obj->is<NativeObject>()) { 1990 // Look up the entry in the cache if we don't have it 1991 Shape* receiverShape = obj->shape(); 1992 MegamorphicCache& cache = cx->caches().megamorphicCache; 1993 if (!entry) { 1994 cache.lookup(receiverShape, id, &entry); 1995 } 1996 1997 // If we hit it, load it from the cache. We can't though if it was a 1998 // MissingOwnProperty entry (added by the HasOwn handler), because we 1999 // need to look it up again to know if it's somewhere on the prototype 2000 // chain 2001 if (cache.isValidForLookup(*entry, receiverShape, id) && 2002 !entry->isMissingOwnProperty()) { 2003 return GetPropertyCached(cx, obj, id, entry, result); 2004 } 2005 2006 if (MaybeGetNativePropertyAndWriteToCache<CanGC>(cx, obj.get(), id.get(), 2007 entry, &result.get())) { 2008 return true; 2009 } 2010 2011 // The getter call in MaybeGetNativePropertyAndWriteToCache can throw, so 2012 // we need to check for that specifically 2013 // XXX: I know this is unusual, but I'm not sure on the best approach here - 2014 // is this alright? 2015 if (JS_IsExceptionPending(cx)) { 2016 return false; 2017 } 2018 } 2019 2020 return GetProperty(cx, obj, obj, id, result); 2021 } 2022 2023 bool GetElemMaybeCached(JSContext* cx, HandleObject obj, HandleValue keyVal, 2024 MegamorphicCacheEntry* entry, 2025 MutableHandleValue result) { 2026 PropertyKey key; 2027 if (obj->is<NativeObject>() && 2028 ValueToAtomOrSymbolPure(cx, keyVal.get(), &key)) { 2029 Shape* receiverShape = obj->shape(); 2030 MegamorphicCache& cache = cx->caches().megamorphicCache; 2031 if (!entry) { 2032 cache.lookup(receiverShape, key, &entry); 2033 } 2034 2035 if (cache.isValidForLookup(*entry, receiverShape, key) && 2036 !entry->isMissingOwnProperty()) { 2037 Rooted<PropertyKey> rkey(cx, key); 2038 return GetPropertyCached(cx, obj, rkey, entry, result); 2039 } 2040 2041 if (MaybeGetNativePropertyAndWriteToCache<CanGC>(cx, obj.get(), key, entry, 2042 &result.get())) { 2043 return true; 2044 } 2045 2046 // The getter call in MaybeGetNativePropertyAndWriteToCache can throw, so 2047 // we need to check for that specifically 2048 if (JS_IsExceptionPending(cx)) { 2049 return false; 2050 } 2051 } 2052 2053 RootedValue objVal(cx, ObjectValue(*obj)); 2054 return GetObjectElementOperation(cx, JSOp::GetElem, obj, objVal, keyVal, 2055 result); 2056 } 2057 2058 bool ObjectHasGetterSetterPure(JSContext* cx, JSObject* objArg, jsid id, 2059 GetterSetter* getterSetter) { 2060 AutoUnsafeCallWithABI unsafe; 2061 2062 // Window objects may require outerizing (passing the WindowProxy to the 2063 // getter/setter), so we don't support them here. 2064 if (MOZ_UNLIKELY(!objArg->is<NativeObject>() || IsWindow(objArg))) { 2065 return false; 2066 } 2067 2068 NativeObject* nobj = &objArg->as<NativeObject>(); 2069 2070 while (true) { 2071 uint32_t index; 2072 if (PropMap* map = nobj->shape()->lookup(cx, id, &index)) { 2073 PropertyInfo prop = map->getPropertyInfo(index); 2074 if (!prop.isAccessorProperty()) { 2075 return false; 2076 } 2077 GetterSetter* actualGetterSetter = nobj->getGetterSetter(prop); 2078 if (actualGetterSetter == getterSetter) { 2079 return true; 2080 } 2081 return (actualGetterSetter->getter() == getterSetter->getter() && 2082 actualGetterSetter->setter() == getterSetter->setter()); 2083 } 2084 2085 // Property not found. Watch out for Class hooks. 2086 if (!nobj->is<PlainObject>()) { 2087 if (ClassMayResolveId(cx->names(), nobj->getClass(), id, nobj)) { 2088 return false; 2089 } 2090 } 2091 2092 JSObject* proto = nobj->staticPrototype(); 2093 if (!proto) { 2094 return false; 2095 } 2096 2097 if (!proto->is<NativeObject>()) { 2098 return false; 2099 } 2100 nobj = &proto->as<NativeObject>(); 2101 } 2102 } 2103 2104 template <bool HasOwn> 2105 bool HasNativeDataPropertyPure(JSContext* cx, JSObject* obj, 2106 MegamorphicCacheEntry* entry, Value* vp) { 2107 AutoUnsafeCallWithABI unsafe; 2108 2109 // vp[0] contains the key, result will be stored in vp[1]. 2110 Value keyVal = vp[0]; 2111 PropertyKey key; 2112 if (!ValueToAtomOrSymbolPure(cx, keyVal, &key)) { 2113 return false; 2114 } 2115 2116 MegamorphicCache& cache = cx->caches().megamorphicCache; 2117 Shape* receiverShape = obj->shape(); 2118 if (!entry) { 2119 if (cache.lookup(receiverShape, key, &entry)) { 2120 VerifyCacheEntry(cx, &obj->as<NativeObject>(), key, *entry); 2121 } 2122 } 2123 2124 size_t numHops = 0; 2125 do { 2126 if (MOZ_UNLIKELY(!obj->is<NativeObject>())) { 2127 return false; 2128 } 2129 2130 MOZ_ASSERT(!obj->getOpsLookupProperty()); 2131 2132 NativeObject* nobj = &obj->as<NativeObject>(); 2133 uint32_t index; 2134 if (PropMap* map = nobj->shape()->lookup(cx, key, &index)) { 2135 PropertyInfo prop = map->getPropertyInfo(index); 2136 if (prop.isDataProperty()) { 2137 TaggedSlotOffset offset = nobj->getTaggedSlotOffset(prop.slot()); 2138 cache.initEntryForDataProperty(entry, receiverShape, key, numHops, 2139 offset); 2140 } 2141 vp[1].setBoolean(true); 2142 return true; 2143 } 2144 2145 // Property not found. Watch out for Class hooks and TypedArrays. 2146 if (MOZ_UNLIKELY(!obj->is<PlainObject>())) { 2147 // Fail if there's a resolve hook, unless the mayResolve hook tells us 2148 // the resolve hook won't define a property with this key. 2149 if (ClassMayResolveId(cx->names(), obj->getClass(), key, obj)) { 2150 return false; 2151 } 2152 2153 // Don't skip past TypedArrayObjects if the key can be a TypedArray 2154 // index. 2155 if (obj->is<TypedArrayObject>()) { 2156 if (MaybeTypedArrayIndexString(key)) { 2157 return false; 2158 } 2159 } 2160 } 2161 2162 // If implementing Object.hasOwnProperty, don't follow protochain. 2163 if constexpr (HasOwn) { 2164 break; 2165 } 2166 2167 // Get prototype. Objects that may allow dynamic prototypes are already 2168 // filtered out above. 2169 obj = obj->staticPrototype(); 2170 numHops++; 2171 } while (obj); 2172 2173 // Missing property. 2174 if (entry) { 2175 if constexpr (HasOwn) { 2176 cache.initEntryForMissingOwnProperty(entry, receiverShape, key); 2177 } else { 2178 cache.initEntryForMissingProperty(entry, receiverShape, key); 2179 } 2180 } 2181 vp[1].setBoolean(false); 2182 return true; 2183 } 2184 2185 template bool HasNativeDataPropertyPure<true>(JSContext* cx, JSObject* obj, 2186 MegamorphicCacheEntry* entry, 2187 Value* vp); 2188 2189 template bool HasNativeDataPropertyPure<false>(JSContext* cx, JSObject* obj, 2190 MegamorphicCacheEntry* entry, 2191 Value* vp); 2192 2193 bool HasNativeElementPure(JSContext* cx, NativeObject* obj, int32_t index, 2194 Value* vp) { 2195 AutoUnsafeCallWithABI unsafe; 2196 2197 MOZ_ASSERT(obj->is<NativeObject>()); 2198 MOZ_ASSERT(!obj->getOpsHasProperty()); 2199 MOZ_ASSERT(!obj->getOpsLookupProperty()); 2200 MOZ_ASSERT(!obj->getOpsGetOwnPropertyDescriptor()); 2201 2202 if (MOZ_UNLIKELY(index < 0)) { 2203 return false; 2204 } 2205 2206 if (obj->containsDenseElement(index)) { 2207 vp[0].setBoolean(true); 2208 return true; 2209 } 2210 2211 PropertyKey key = PropertyKey::Int(index); 2212 uint32_t unused; 2213 if (obj->shape()->lookup(cx, key, &unused)) { 2214 vp[0].setBoolean(true); 2215 return true; 2216 } 2217 2218 // Fail if there's a resolve hook, unless the mayResolve hook tells 2219 // us the resolve hook won't define a property with this key. 2220 if (MOZ_UNLIKELY(ClassMayResolveId(cx->names(), obj->getClass(), key, obj))) { 2221 return false; 2222 } 2223 // TypedArrayObject are also native and contain indexed properties. 2224 if (MOZ_UNLIKELY(obj->is<TypedArrayObject>())) { 2225 size_t length = obj->as<TypedArrayObject>().length().valueOr(0); 2226 vp[0].setBoolean(uint32_t(index) < length); 2227 return true; 2228 } 2229 2230 vp[0].setBoolean(false); 2231 return true; 2232 } 2233 2234 // Fast path for setting/adding a plain object property. This is the common case 2235 // for megamorphic SetProp/SetElem. 2236 template <bool UseCache> 2237 static bool TryAddOrSetPlainObjectProperty(JSContext* cx, 2238 Handle<PlainObject*> obj, 2239 PropertyKey key, HandleValue value, 2240 bool* optimized) { 2241 MOZ_ASSERT(!*optimized); 2242 2243 Shape* receiverShape = obj->shape(); 2244 MegamorphicSetPropCache& cache = *cx->caches().megamorphicSetPropCache; 2245 2246 #ifdef DEBUG 2247 if constexpr (UseCache) { 2248 MegamorphicSetPropCache::Entry* entry; 2249 if (cache.lookup(receiverShape, key, &entry)) { 2250 if (entry->afterShape() != nullptr) { // AddProp 2251 NativeObject* holder = nullptr; 2252 PropertyResult prop; 2253 MOZ_ASSERT(LookupPropertyPure(cx, obj, key, &holder, &prop)); 2254 MOZ_ASSERT(obj != holder); 2255 MOZ_ASSERT_IF(prop.isFound(), 2256 prop.isNativeProperty() && 2257 prop.propertyInfo().isDataProperty() && 2258 prop.propertyInfo().writable()); 2259 } else { // SetProp 2260 mozilla::Maybe<PropertyInfo> prop = obj->lookupPure(key); 2261 MOZ_ASSERT(prop.isSome()); 2262 MOZ_ASSERT(prop->isDataProperty()); 2263 MOZ_ASSERT(obj->getTaggedSlotOffset(prop->slot()) == 2264 entry->slotOffset()); 2265 } 2266 } 2267 } 2268 #endif 2269 2270 // Fast path for changing a data property. 2271 uint32_t index; 2272 if (PropMap* map = obj->shape()->lookup(cx, key, &index)) { 2273 PropertyInfo prop = map->getPropertyInfo(index); 2274 if (!prop.isDataProperty() || !prop.writable()) { 2275 return true; 2276 } 2277 bool watchesPropValue = Watchtower::watchesPropertyValueChange(obj); 2278 if (MOZ_UNLIKELY(watchesPropValue)) { 2279 Watchtower::watchPropertyValueChange<AllowGC::NoGC>(cx, obj, key, value, 2280 prop); 2281 } 2282 obj->setSlot(prop.slot(), value); 2283 *optimized = true; 2284 2285 if constexpr (UseCache) { 2286 // Don't add an entry to the MegamorphicSetPropCache if we need to invoke 2287 // the Watchtower hook for property value changes. The cache is used 2288 // directly from JIT code and we can't easily call into Watchtower from 2289 // there. 2290 if (!watchesPropValue) { 2291 TaggedSlotOffset offset = obj->getTaggedSlotOffset(prop.slot()); 2292 cache.set(receiverShape, nullptr, key, offset, 0); 2293 } 2294 } 2295 return true; 2296 } 2297 2298 // Don't support "__proto__". This lets us take advantage of the 2299 // hasNonWritableOrAccessorPropExclProto optimization below. 2300 if (MOZ_UNLIKELY(!obj->isExtensible() || key.isAtom(cx->names().proto_))) { 2301 return true; 2302 } 2303 2304 // Ensure the proto chain contains only plain objects. Deoptimize for accessor 2305 // properties and non-writable data properties (we can't shadow non-writable 2306 // properties). 2307 JSObject* proto = obj->staticPrototype(); 2308 while (proto) { 2309 if (!proto->is<PlainObject>()) { 2310 return true; 2311 } 2312 PlainObject* plainProto = &proto->as<PlainObject>(); 2313 if (plainProto->hasNonWritableOrAccessorPropExclProto()) { 2314 uint32_t index; 2315 if (PropMap* map = plainProto->shape()->lookup(cx, key, &index)) { 2316 PropertyInfo prop = map->getPropertyInfo(index); 2317 if (!prop.isDataProperty() || !prop.writable()) { 2318 return true; 2319 } 2320 break; 2321 } 2322 } 2323 proto = plainProto->staticPrototype(); 2324 } 2325 2326 #ifdef DEBUG 2327 // At this point either the property is missing or it's a writable data 2328 // property on the proto chain that we can shadow. 2329 { 2330 NativeObject* holder = nullptr; 2331 PropertyResult prop; 2332 MOZ_ASSERT(LookupPropertyPure(cx, obj, key, &holder, &prop)); 2333 MOZ_ASSERT(obj != holder); 2334 MOZ_ASSERT_IF(prop.isFound(), prop.isNativeProperty() && 2335 prop.propertyInfo().isDataProperty() && 2336 prop.propertyInfo().writable()); 2337 } 2338 #endif 2339 2340 *optimized = true; 2341 Rooted<PropertyKey> keyRoot(cx, key); 2342 Rooted<Shape*> receiverShapeRoot(cx, receiverShape); 2343 uint32_t resultSlot = 0; 2344 size_t numDynamic = obj->numDynamicSlots(); 2345 bool res = AddDataPropertyToPlainObject(cx, obj, keyRoot, value, &resultSlot); 2346 2347 if constexpr (UseCache) { 2348 if (res && obj->shape()->isShared() && 2349 resultSlot < SharedPropMap::MaxPropsForNonDictionary && 2350 !Watchtower::watchesPropertyAdd(obj)) { 2351 TaggedSlotOffset offset = obj->getTaggedSlotOffset(resultSlot); 2352 uint32_t newCapacity = 0; 2353 if (!(resultSlot < obj->numFixedSlots() || 2354 (resultSlot - obj->numFixedSlots()) < numDynamic)) { 2355 newCapacity = obj->numDynamicSlots(); 2356 } 2357 cache.set(receiverShapeRoot, obj->shape(), keyRoot, offset, newCapacity); 2358 } 2359 } 2360 2361 return res; 2362 } 2363 2364 template <bool Cached> 2365 bool SetElementMegamorphic(JSContext* cx, HandleObject obj, HandleValue index, 2366 HandleValue value, bool strict) { 2367 if (obj->is<PlainObject>()) { 2368 PropertyKey key; 2369 if (ValueToAtomOrSymbolPure(cx, index, &key)) { 2370 bool optimized = false; 2371 if (!TryAddOrSetPlainObjectProperty<Cached>(cx, obj.as<PlainObject>(), 2372 key, value, &optimized)) { 2373 return false; 2374 } 2375 if (optimized) { 2376 return true; 2377 } 2378 } 2379 } 2380 Rooted<Value> receiver(cx, ObjectValue(*obj)); 2381 return SetObjectElementWithReceiver(cx, obj, index, value, receiver, strict); 2382 } 2383 2384 template bool SetElementMegamorphic<false>(JSContext* cx, HandleObject obj, 2385 HandleValue index, HandleValue value, 2386 bool strict); 2387 template bool SetElementMegamorphic<true>(JSContext* cx, HandleObject obj, 2388 HandleValue index, HandleValue value, 2389 bool strict); 2390 2391 template <bool Cached> 2392 bool SetPropertyMegamorphic(JSContext* cx, HandleObject obj, HandleId id, 2393 HandleValue value, bool strict) { 2394 if (obj->is<PlainObject>()) { 2395 bool optimized = false; 2396 if (!TryAddOrSetPlainObjectProperty<Cached>(cx, obj.as<PlainObject>(), id, 2397 value, &optimized)) { 2398 return false; 2399 } 2400 if (optimized) { 2401 return true; 2402 } 2403 } 2404 Rooted<Value> receiver(cx, ObjectValue(*obj)); 2405 ObjectOpResult result; 2406 return SetProperty(cx, obj, id, value, receiver, result) && 2407 result.checkStrictModeError(cx, obj, id, strict); 2408 } 2409 2410 template bool SetPropertyMegamorphic<false>(JSContext* cx, HandleObject obj, 2411 HandleId id, HandleValue value, 2412 bool strict); 2413 template bool SetPropertyMegamorphic<true>(JSContext* cx, HandleObject obj, 2414 HandleId id, HandleValue value, 2415 bool strict); 2416 2417 void HandleCodeCoverageAtPC(BaselineFrame* frame, jsbytecode* pc) { 2418 AutoUnsafeCallWithABI unsafe(UnsafeABIStrictness::AllowPendingExceptions); 2419 2420 MOZ_ASSERT(frame->runningInInterpreter()); 2421 2422 JSScript* script = frame->script(); 2423 MOZ_ASSERT(pc == script->main() || BytecodeIsJumpTarget(JSOp(*pc))); 2424 2425 if (!script->hasScriptCounts()) { 2426 if (!script->realm()->collectCoverageForDebug()) { 2427 return; 2428 } 2429 JSContext* cx = script->runtimeFromMainThread()->mainContextFromOwnThread(); 2430 AutoEnterOOMUnsafeRegion oomUnsafe; 2431 if (!script->initScriptCounts(cx)) { 2432 oomUnsafe.crash("initScriptCounts"); 2433 } 2434 } 2435 2436 PCCounts* counts = script->maybeGetPCCounts(pc); 2437 MOZ_ASSERT(counts); 2438 counts->numExec()++; 2439 } 2440 2441 void HandleCodeCoverageAtPrologue(BaselineFrame* frame) { 2442 AutoUnsafeCallWithABI unsafe; 2443 2444 MOZ_ASSERT(frame->runningInInterpreter()); 2445 2446 JSScript* script = frame->script(); 2447 jsbytecode* main = script->main(); 2448 if (!BytecodeIsJumpTarget(JSOp(*main))) { 2449 HandleCodeCoverageAtPC(frame, main); 2450 } 2451 } 2452 2453 JSString* TypeOfNameObject(JSObject* obj, JSRuntime* rt) { 2454 AutoUnsafeCallWithABI unsafe; 2455 JSType type = js::TypeOfObject(obj); 2456 return TypeName(type, *rt->commonNames); 2457 } 2458 2459 bool TypeOfEqObject(JSObject* obj, TypeofEqOperand operand) { 2460 AutoUnsafeCallWithABI unsafe; 2461 bool result = js::TypeOfObject(obj) == operand.type(); 2462 if (operand.compareOp() == JSOp::Ne) { 2463 result = !result; 2464 } 2465 return result; 2466 } 2467 2468 bool GetPrototypeOf(JSContext* cx, HandleObject target, 2469 MutableHandleValue rval) { 2470 MOZ_ASSERT(target->hasDynamicPrototype()); 2471 2472 RootedObject proto(cx); 2473 if (!GetPrototype(cx, target, &proto)) { 2474 return false; 2475 } 2476 rval.setObjectOrNull(proto); 2477 return true; 2478 } 2479 2480 static JSString* ConvertObjectToStringForConcat(JSContext* cx, 2481 HandleValue obj) { 2482 MOZ_ASSERT(obj.isObject()); 2483 RootedValue rootedObj(cx, obj); 2484 if (!ToPrimitive(cx, &rootedObj)) { 2485 return nullptr; 2486 } 2487 return ToString<CanGC>(cx, rootedObj); 2488 } 2489 2490 bool DoConcatStringObject(JSContext* cx, HandleValue lhs, HandleValue rhs, 2491 MutableHandleValue res) { 2492 JSString* lstr = nullptr; 2493 JSString* rstr = nullptr; 2494 2495 if (lhs.isString()) { 2496 // Convert rhs first. 2497 MOZ_ASSERT(lhs.isString() && rhs.isObject()); 2498 rstr = ConvertObjectToStringForConcat(cx, rhs); 2499 if (!rstr) { 2500 return false; 2501 } 2502 2503 // lhs is already string. 2504 lstr = lhs.toString(); 2505 } else { 2506 MOZ_ASSERT(rhs.isString() && lhs.isObject()); 2507 // Convert lhs first. 2508 lstr = ConvertObjectToStringForConcat(cx, lhs); 2509 if (!lstr) { 2510 return false; 2511 } 2512 2513 // rhs is already string. 2514 rstr = rhs.toString(); 2515 } 2516 2517 JSString* str = ConcatStrings<NoGC>(cx, lstr, rstr); 2518 if (!str) { 2519 RootedString nlstr(cx, lstr), nrstr(cx, rstr); 2520 str = ConcatStrings<CanGC>(cx, nlstr, nrstr); 2521 if (!str) { 2522 return false; 2523 } 2524 } 2525 2526 res.setString(str); 2527 return true; 2528 } 2529 2530 bool IsPossiblyWrappedTypedArray(JSContext* cx, JSObject* obj, bool* result) { 2531 MOZ_ASSERT(obj->is<WrapperObject>(), "non-wrappers are handled in JIT code"); 2532 2533 JSObject* unwrapped = CheckedUnwrapDynamic(obj, cx); 2534 if (!unwrapped) { 2535 ReportAccessDenied(cx); 2536 return false; 2537 } 2538 2539 *result = unwrapped->is<TypedArrayObject>(); 2540 return true; 2541 } 2542 2543 // Called from CreateDependentString::generateFallback. 2544 void* AllocateDependentString(JSContext* cx) { 2545 AutoUnsafeCallWithABI unsafe; 2546 return cx->newCell<JSDependentString, NoGC>(js::gc::Heap::Default); 2547 } 2548 void* AllocateFatInlineString(JSContext* cx) { 2549 AutoUnsafeCallWithABI unsafe; 2550 return cx->newCell<JSFatInlineString, NoGC>(js::gc::Heap::Default); 2551 } 2552 2553 // Called to allocate a BigInt if inline allocation failed. 2554 void* AllocateBigIntNoGC(JSContext* cx, bool requestMinorGC) { 2555 AutoUnsafeCallWithABI unsafe; 2556 2557 if (requestMinorGC && cx->nursery().isEnabled()) { 2558 cx->nursery().requestMinorGC(JS::GCReason::OUT_OF_NURSERY); 2559 } 2560 2561 return cx->newCell<JS::BigInt, NoGC>(js::gc::Heap::Tenured); 2562 } 2563 2564 void AllocateAndInitTypedArrayBuffer(JSContext* cx, 2565 FixedLengthTypedArrayObject* obj, 2566 int32_t count, size_t inlineCapacity) { 2567 AutoUnsafeCallWithABI unsafe; 2568 2569 // Inline implementation of the last steps in 2570 // `FixedLengthTypedArrayObjectTemplate::makeTypedArrayWithTemplate`. 2571 // 2572 // 1. Perform FixedLengthTypedArrayObjectTemplate::initTypedArraySlots: 2573 // - Initialize BUFFER_SLOT, LENGTH_SLOT, and BYTEOFFSET_SLOT. 2574 // - Mark zero-length typed arrays with `ZeroLengthArrayData`. 2575 // 2. Perform FixedLengthTypedArrayObjectTemplate::initTypedArrayData: 2576 // - Initialize the DATA_SLOT. 2577 2578 // The data slot is initialized to UndefinedValue when copying slots from the 2579 // template object. If the slot isn't overwritten below, this value is used as 2580 // a signal to our JIT caller that the allocation failed. 2581 MOZ_RELEASE_ASSERT( 2582 obj->getFixedSlot(TypedArrayObject::DATA_SLOT).isUndefined(), 2583 "DATA_SLOT initialized to UndefinedValue in JIT code"); 2584 2585 // The buffer and byte-offset slots are initialized to their default values. 2586 MOZ_ASSERT(obj->getFixedSlot(TypedArrayObject::BUFFER_SLOT).isFalse(), 2587 "BUFFER_SLOT initialized to FalseValue in JIT code"); 2588 MOZ_ASSERT(obj->getFixedSlot(TypedArrayObject::BYTEOFFSET_SLOT) == 2589 PrivateValue(size_t(0)), 2590 "BUFFER_SLOT initialized to PrivateValue(0) in JIT code"); 2591 2592 // Negative numbers will bail out to the slow path, which in turn will raise 2593 // an invalid argument exception. 2594 constexpr size_t byteLengthLimit = TypedArrayObject::ByteLengthLimit; 2595 size_t bytesPerElement = obj->bytesPerElement(); 2596 if (count < 0 || size_t(count) > byteLengthLimit / bytesPerElement) { 2597 obj->setFixedSlot(TypedArrayObject::LENGTH_SLOT, PrivateValue(size_t(0))); 2598 return; 2599 } 2600 2601 size_t nbytes = size_t(count) * bytesPerElement; 2602 MOZ_ASSERT(nbytes <= byteLengthLimit); 2603 2604 // Overwrite the slot with the length of the newly allocated typed array. 2605 obj->setFixedSlot(TypedArrayObject::LENGTH_SLOT, PrivateValue(count)); 2606 2607 // If possible try to use the available inline space allocated through the 2608 // template object's alloc-kind. 2609 if (inlineCapacity > 0 && nbytes <= inlineCapacity) { 2610 uint8_t* data = 2611 obj->fixedData(FixedLengthTypedArrayObject::FIXED_DATA_START); 2612 std::memset(data, 0, nbytes); 2613 2614 #ifdef DEBUG 2615 if (count == 0) { 2616 data[0] = TypedArrayObject::ZeroLengthArrayData; 2617 } 2618 #endif 2619 2620 obj->initFixedSlot(TypedArrayObject::DATA_SLOT, PrivateValue(data)); 2621 return; 2622 } 2623 2624 // Zero-length typed arrays have to be tagged with |ZeroLengthArrayData|, but 2625 // there's not enough space when exceeding the inline buffer limit. Fall back 2626 // to the slow path. 2627 if (count == 0) { 2628 MOZ_ASSERT(inlineCapacity == 0); 2629 return; 2630 } 2631 2632 nbytes = RoundUp(nbytes, sizeof(Value)); 2633 2634 MOZ_ASSERT(!obj->isTenured()); 2635 void* buf = cx->nursery().allocateZeroedBuffer(obj, nbytes, 2636 js::ArrayBufferContentsArena); 2637 if (buf) { 2638 InitReservedSlot(obj, TypedArrayObject::DATA_SLOT, buf, nbytes, 2639 MemoryUse::TypedArrayElements); 2640 } 2641 } 2642 2643 #ifdef JS_GC_PROBES 2644 void TraceCreateObject(JSObject* obj) { 2645 AutoUnsafeCallWithABI unsafe; 2646 js::gc::gcprobes::CreateObject(obj); 2647 } 2648 #endif 2649 2650 BigInt* CreateBigIntFromInt32(JSContext* cx, int32_t i32) { 2651 return js::BigInt::createFromInt64(cx, int64_t(i32)); 2652 } 2653 2654 #if JS_BITS_PER_WORD == 32 2655 BigInt* CreateBigIntFromInt64(JSContext* cx, uint32_t low, uint32_t high) { 2656 uint64_t n = (static_cast<uint64_t>(high) << 32) + low; 2657 return js::BigInt::createFromInt64(cx, n); 2658 } 2659 2660 BigInt* CreateBigIntFromUint64(JSContext* cx, uint32_t low, uint32_t high) { 2661 uint64_t n = (static_cast<uint64_t>(high) << 32) + low; 2662 return js::BigInt::createFromUint64(cx, n); 2663 } 2664 #else 2665 BigInt* CreateBigIntFromInt64(JSContext* cx, uint64_t i64) { 2666 return js::BigInt::createFromInt64(cx, i64); 2667 } 2668 2669 BigInt* CreateBigIntFromUint64(JSContext* cx, uint64_t i64) { 2670 return js::BigInt::createFromUint64(cx, i64); 2671 } 2672 #endif 2673 2674 bool DoStringToInt64(JSContext* cx, HandleString str, uint64_t* res) { 2675 BigInt* bi; 2676 JS_TRY_VAR_OR_RETURN_FALSE(cx, bi, js::StringToBigInt(cx, str)); 2677 2678 if (!bi) { 2679 JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, 2680 JSMSG_BIGINT_INVALID_SYNTAX); 2681 return false; 2682 } 2683 2684 *res = js::BigInt::toUint64(bi); 2685 return true; 2686 } 2687 2688 bool PreserveWrapper(JSContext* cx, JSObject* obj) { 2689 AutoUnsafeCallWithABI unsafe; 2690 return cx->zone()->preserveWrapper(obj); 2691 } 2692 2693 template <EqualityKind Kind> 2694 bool BigIntEqual(BigInt* x, BigInt* y) { 2695 AutoUnsafeCallWithABI unsafe; 2696 bool res = BigInt::equal(x, y); 2697 if (Kind != EqualityKind::Equal) { 2698 res = !res; 2699 } 2700 return res; 2701 } 2702 2703 template bool BigIntEqual<EqualityKind::Equal>(BigInt* x, BigInt* y); 2704 template bool BigIntEqual<EqualityKind::NotEqual>(BigInt* x, BigInt* y); 2705 2706 template <ComparisonKind Kind> 2707 bool BigIntCompare(BigInt* x, BigInt* y) { 2708 AutoUnsafeCallWithABI unsafe; 2709 bool res = BigInt::lessThan(x, y); 2710 if (Kind != ComparisonKind::LessThan) { 2711 res = !res; 2712 } 2713 return res; 2714 } 2715 2716 template bool BigIntCompare<ComparisonKind::LessThan>(BigInt* x, BigInt* y); 2717 template bool BigIntCompare<ComparisonKind::GreaterThanOrEqual>(BigInt* x, 2718 BigInt* y); 2719 2720 template <EqualityKind Kind> 2721 bool BigIntNumberEqual(BigInt* x, double y) { 2722 AutoUnsafeCallWithABI unsafe; 2723 bool res = BigInt::equal(x, y); 2724 if (Kind != EqualityKind::Equal) { 2725 res = !res; 2726 } 2727 return res; 2728 } 2729 2730 template bool BigIntNumberEqual<EqualityKind::Equal>(BigInt* x, double y); 2731 template bool BigIntNumberEqual<EqualityKind::NotEqual>(BigInt* x, double y); 2732 2733 template <ComparisonKind Kind> 2734 bool BigIntNumberCompare(BigInt* x, double y) { 2735 AutoUnsafeCallWithABI unsafe; 2736 mozilla::Maybe<bool> res = BigInt::lessThan(x, y); 2737 if (Kind == ComparisonKind::LessThan) { 2738 return res.valueOr(false); 2739 } 2740 return !res.valueOr(true); 2741 } 2742 2743 template bool BigIntNumberCompare<ComparisonKind::LessThan>(BigInt* x, 2744 double y); 2745 template bool BigIntNumberCompare<ComparisonKind::GreaterThanOrEqual>(BigInt* x, 2746 double y); 2747 2748 template <ComparisonKind Kind> 2749 bool NumberBigIntCompare(double x, BigInt* y) { 2750 AutoUnsafeCallWithABI unsafe; 2751 mozilla::Maybe<bool> res = BigInt::lessThan(x, y); 2752 if (Kind == ComparisonKind::LessThan) { 2753 return res.valueOr(false); 2754 } 2755 return !res.valueOr(true); 2756 } 2757 2758 template bool NumberBigIntCompare<ComparisonKind::LessThan>(double x, 2759 BigInt* y); 2760 template bool NumberBigIntCompare<ComparisonKind::GreaterThanOrEqual>( 2761 double x, BigInt* y); 2762 2763 template <EqualityKind Kind> 2764 bool BigIntStringEqual(JSContext* cx, HandleBigInt x, HandleString y, 2765 bool* res) { 2766 JS_TRY_VAR_OR_RETURN_FALSE(cx, *res, BigInt::equal(cx, x, y)); 2767 if (Kind != EqualityKind::Equal) { 2768 *res = !*res; 2769 } 2770 return true; 2771 } 2772 2773 template bool BigIntStringEqual<EqualityKind::Equal>(JSContext* cx, 2774 HandleBigInt x, 2775 HandleString y, bool* res); 2776 template bool BigIntStringEqual<EqualityKind::NotEqual>(JSContext* cx, 2777 HandleBigInt x, 2778 HandleString y, 2779 bool* res); 2780 2781 template <ComparisonKind Kind> 2782 bool BigIntStringCompare(JSContext* cx, HandleBigInt x, HandleString y, 2783 bool* res) { 2784 mozilla::Maybe<bool> result; 2785 if (!BigInt::lessThan(cx, x, y, result)) { 2786 return false; 2787 } 2788 if (Kind == ComparisonKind::LessThan) { 2789 *res = result.valueOr(false); 2790 } else { 2791 *res = !result.valueOr(true); 2792 } 2793 return true; 2794 } 2795 2796 template bool BigIntStringCompare<ComparisonKind::LessThan>(JSContext* cx, 2797 HandleBigInt x, 2798 HandleString y, 2799 bool* res); 2800 template bool BigIntStringCompare<ComparisonKind::GreaterThanOrEqual>( 2801 JSContext* cx, HandleBigInt x, HandleString y, bool* res); 2802 2803 template <ComparisonKind Kind> 2804 bool StringBigIntCompare(JSContext* cx, HandleString x, HandleBigInt y, 2805 bool* res) { 2806 mozilla::Maybe<bool> result; 2807 if (!BigInt::lessThan(cx, x, y, result)) { 2808 return false; 2809 } 2810 if (Kind == ComparisonKind::LessThan) { 2811 *res = result.valueOr(false); 2812 } else { 2813 *res = !result.valueOr(true); 2814 } 2815 return true; 2816 } 2817 2818 template bool StringBigIntCompare<ComparisonKind::LessThan>(JSContext* cx, 2819 HandleString x, 2820 HandleBigInt y, 2821 bool* res); 2822 template bool StringBigIntCompare<ComparisonKind::GreaterThanOrEqual>( 2823 JSContext* cx, HandleString x, HandleBigInt y, bool* res); 2824 2825 BigInt* BigIntAsIntN(JSContext* cx, HandleBigInt x, int32_t bits) { 2826 MOZ_ASSERT(bits >= 0); 2827 return BigInt::asIntN(cx, x, uint64_t(bits)); 2828 } 2829 2830 BigInt* BigIntAsUintN(JSContext* cx, HandleBigInt x, int32_t bits) { 2831 MOZ_ASSERT(bits >= 0); 2832 return BigInt::asUintN(cx, x, uint64_t(bits)); 2833 } 2834 2835 template <typename T> 2836 static int32_t AtomicsCompareExchange(TypedArrayObject* typedArray, 2837 size_t index, int32_t expected, 2838 int32_t replacement) { 2839 AutoUnsafeCallWithABI unsafe; 2840 2841 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 2842 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 2843 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 2844 2845 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 2846 return jit::AtomicOperations::compareExchangeSeqCst(addr + index, T(expected), 2847 T(replacement)); 2848 } 2849 2850 AtomicsCompareExchangeFn AtomicsCompareExchange(Scalar::Type elementType) { 2851 switch (elementType) { 2852 case Scalar::Int8: 2853 return AtomicsCompareExchange<int8_t>; 2854 case Scalar::Uint8: 2855 return AtomicsCompareExchange<uint8_t>; 2856 case Scalar::Int16: 2857 return AtomicsCompareExchange<int16_t>; 2858 case Scalar::Uint16: 2859 return AtomicsCompareExchange<uint16_t>; 2860 case Scalar::Int32: 2861 return AtomicsCompareExchange<int32_t>; 2862 case Scalar::Uint32: 2863 return AtomicsCompareExchange<uint32_t>; 2864 default: 2865 MOZ_CRASH("Unexpected TypedArray type"); 2866 } 2867 } 2868 2869 template <typename T> 2870 static int32_t AtomicsExchange(TypedArrayObject* typedArray, size_t index, 2871 int32_t value) { 2872 AutoUnsafeCallWithABI unsafe; 2873 2874 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 2875 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 2876 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 2877 2878 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 2879 return jit::AtomicOperations::exchangeSeqCst(addr + index, T(value)); 2880 } 2881 2882 AtomicsReadWriteModifyFn AtomicsExchange(Scalar::Type elementType) { 2883 switch (elementType) { 2884 case Scalar::Int8: 2885 return AtomicsExchange<int8_t>; 2886 case Scalar::Uint8: 2887 return AtomicsExchange<uint8_t>; 2888 case Scalar::Int16: 2889 return AtomicsExchange<int16_t>; 2890 case Scalar::Uint16: 2891 return AtomicsExchange<uint16_t>; 2892 case Scalar::Int32: 2893 return AtomicsExchange<int32_t>; 2894 case Scalar::Uint32: 2895 return AtomicsExchange<uint32_t>; 2896 default: 2897 MOZ_CRASH("Unexpected TypedArray type"); 2898 } 2899 } 2900 2901 template <typename T> 2902 static int32_t AtomicsAdd(TypedArrayObject* typedArray, size_t index, 2903 int32_t value) { 2904 AutoUnsafeCallWithABI unsafe; 2905 2906 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 2907 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 2908 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 2909 2910 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 2911 return jit::AtomicOperations::fetchAddSeqCst(addr + index, T(value)); 2912 } 2913 2914 AtomicsReadWriteModifyFn AtomicsAdd(Scalar::Type elementType) { 2915 switch (elementType) { 2916 case Scalar::Int8: 2917 return AtomicsAdd<int8_t>; 2918 case Scalar::Uint8: 2919 return AtomicsAdd<uint8_t>; 2920 case Scalar::Int16: 2921 return AtomicsAdd<int16_t>; 2922 case Scalar::Uint16: 2923 return AtomicsAdd<uint16_t>; 2924 case Scalar::Int32: 2925 return AtomicsAdd<int32_t>; 2926 case Scalar::Uint32: 2927 return AtomicsAdd<uint32_t>; 2928 default: 2929 MOZ_CRASH("Unexpected TypedArray type"); 2930 } 2931 } 2932 2933 template <typename T> 2934 static int32_t AtomicsSub(TypedArrayObject* typedArray, size_t index, 2935 int32_t value) { 2936 AutoUnsafeCallWithABI unsafe; 2937 2938 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 2939 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 2940 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 2941 2942 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 2943 return jit::AtomicOperations::fetchSubSeqCst(addr + index, T(value)); 2944 } 2945 2946 AtomicsReadWriteModifyFn AtomicsSub(Scalar::Type elementType) { 2947 switch (elementType) { 2948 case Scalar::Int8: 2949 return AtomicsSub<int8_t>; 2950 case Scalar::Uint8: 2951 return AtomicsSub<uint8_t>; 2952 case Scalar::Int16: 2953 return AtomicsSub<int16_t>; 2954 case Scalar::Uint16: 2955 return AtomicsSub<uint16_t>; 2956 case Scalar::Int32: 2957 return AtomicsSub<int32_t>; 2958 case Scalar::Uint32: 2959 return AtomicsSub<uint32_t>; 2960 default: 2961 MOZ_CRASH("Unexpected TypedArray type"); 2962 } 2963 } 2964 2965 template <typename T> 2966 static int32_t AtomicsAnd(TypedArrayObject* typedArray, size_t index, 2967 int32_t value) { 2968 AutoUnsafeCallWithABI unsafe; 2969 2970 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 2971 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 2972 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 2973 2974 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 2975 return jit::AtomicOperations::fetchAndSeqCst(addr + index, T(value)); 2976 } 2977 2978 AtomicsReadWriteModifyFn AtomicsAnd(Scalar::Type elementType) { 2979 switch (elementType) { 2980 case Scalar::Int8: 2981 return AtomicsAnd<int8_t>; 2982 case Scalar::Uint8: 2983 return AtomicsAnd<uint8_t>; 2984 case Scalar::Int16: 2985 return AtomicsAnd<int16_t>; 2986 case Scalar::Uint16: 2987 return AtomicsAnd<uint16_t>; 2988 case Scalar::Int32: 2989 return AtomicsAnd<int32_t>; 2990 case Scalar::Uint32: 2991 return AtomicsAnd<uint32_t>; 2992 default: 2993 MOZ_CRASH("Unexpected TypedArray type"); 2994 } 2995 } 2996 2997 template <typename T> 2998 static int32_t AtomicsOr(TypedArrayObject* typedArray, size_t index, 2999 int32_t value) { 3000 AutoUnsafeCallWithABI unsafe; 3001 3002 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 3003 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 3004 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 3005 3006 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 3007 return jit::AtomicOperations::fetchOrSeqCst(addr + index, T(value)); 3008 } 3009 3010 AtomicsReadWriteModifyFn AtomicsOr(Scalar::Type elementType) { 3011 switch (elementType) { 3012 case Scalar::Int8: 3013 return AtomicsOr<int8_t>; 3014 case Scalar::Uint8: 3015 return AtomicsOr<uint8_t>; 3016 case Scalar::Int16: 3017 return AtomicsOr<int16_t>; 3018 case Scalar::Uint16: 3019 return AtomicsOr<uint16_t>; 3020 case Scalar::Int32: 3021 return AtomicsOr<int32_t>; 3022 case Scalar::Uint32: 3023 return AtomicsOr<uint32_t>; 3024 default: 3025 MOZ_CRASH("Unexpected TypedArray type"); 3026 } 3027 } 3028 3029 template <typename T> 3030 static int32_t AtomicsXor(TypedArrayObject* typedArray, size_t index, 3031 int32_t value) { 3032 AutoUnsafeCallWithABI unsafe; 3033 3034 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 3035 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 3036 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 3037 3038 SharedMem<T*> addr = typedArray->dataPointerEither().cast<T*>(); 3039 return jit::AtomicOperations::fetchXorSeqCst(addr + index, T(value)); 3040 } 3041 3042 AtomicsReadWriteModifyFn AtomicsXor(Scalar::Type elementType) { 3043 switch (elementType) { 3044 case Scalar::Int8: 3045 return AtomicsXor<int8_t>; 3046 case Scalar::Uint8: 3047 return AtomicsXor<uint8_t>; 3048 case Scalar::Int16: 3049 return AtomicsXor<int16_t>; 3050 case Scalar::Uint16: 3051 return AtomicsXor<uint16_t>; 3052 case Scalar::Int32: 3053 return AtomicsXor<int32_t>; 3054 case Scalar::Uint32: 3055 return AtomicsXor<uint32_t>; 3056 default: 3057 MOZ_CRASH("Unexpected TypedArray type"); 3058 } 3059 } 3060 3061 template <typename AtomicOp, typename... Args> 3062 static BigInt* AtomicAccess64(JSContext* cx, TypedArrayObject* typedArray, 3063 size_t index, AtomicOp op, Args... args) { 3064 MOZ_ASSERT(Scalar::isBigIntType(typedArray->type())); 3065 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 3066 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 3067 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 3068 3069 if (typedArray->type() == Scalar::BigInt64) { 3070 SharedMem<int64_t*> addr = typedArray->dataPointerEither().cast<int64_t*>(); 3071 int64_t v = op(addr + index, BigInt::toInt64(args)...); 3072 return BigInt::createFromInt64(cx, v); 3073 } 3074 3075 SharedMem<uint64_t*> addr = typedArray->dataPointerEither().cast<uint64_t*>(); 3076 uint64_t v = op(addr + index, BigInt::toUint64(args)...); 3077 return BigInt::createFromUint64(cx, v); 3078 } 3079 3080 template <typename AtomicOp, typename... Args> 3081 static auto AtomicAccess64(TypedArrayObject* typedArray, size_t index, 3082 AtomicOp op, Args... args) { 3083 MOZ_ASSERT(Scalar::isBigIntType(typedArray->type())); 3084 MOZ_ASSERT(!typedArray->hasDetachedBuffer()); 3085 MOZ_ASSERT_IF(typedArray->hasResizableBuffer(), !typedArray->isOutOfBounds()); 3086 MOZ_ASSERT(index < typedArray->length().valueOr(0)); 3087 3088 if (typedArray->type() == Scalar::BigInt64) { 3089 SharedMem<int64_t*> addr = typedArray->dataPointerEither().cast<int64_t*>(); 3090 return op(addr + index, BigInt::toInt64(args)...); 3091 } 3092 3093 SharedMem<uint64_t*> addr = typedArray->dataPointerEither().cast<uint64_t*>(); 3094 return op(addr + index, BigInt::toUint64(args)...); 3095 } 3096 3097 BigInt* AtomicsLoad64(JSContext* cx, TypedArrayObject* typedArray, 3098 size_t index) { 3099 return AtomicAccess64(cx, typedArray, index, [](auto addr) { 3100 return jit::AtomicOperations::loadSeqCst(addr); 3101 }); 3102 } 3103 3104 void AtomicsStore64(TypedArrayObject* typedArray, size_t index, 3105 const BigInt* value) { 3106 AutoUnsafeCallWithABI unsafe; 3107 3108 AtomicAccess64( 3109 typedArray, index, 3110 [](auto addr, auto val) { 3111 jit::AtomicOperations::storeSeqCst(addr, val); 3112 }, 3113 value); 3114 } 3115 3116 BigInt* AtomicsCompareExchange64(JSContext* cx, TypedArrayObject* typedArray, 3117 size_t index, const BigInt* expected, 3118 const BigInt* replacement) { 3119 return AtomicAccess64( 3120 cx, typedArray, index, 3121 [](auto addr, auto oldval, auto newval) { 3122 return jit::AtomicOperations::compareExchangeSeqCst(addr, oldval, 3123 newval); 3124 }, 3125 expected, replacement); 3126 } 3127 3128 BigInt* AtomicsExchange64(JSContext* cx, TypedArrayObject* typedArray, 3129 size_t index, const BigInt* value) { 3130 return AtomicAccess64( 3131 cx, typedArray, index, 3132 [](auto addr, auto val) { 3133 return jit::AtomicOperations::exchangeSeqCst(addr, val); 3134 }, 3135 value); 3136 } 3137 3138 BigInt* AtomicsAdd64(JSContext* cx, TypedArrayObject* typedArray, size_t index, 3139 const BigInt* value) { 3140 return AtomicAccess64( 3141 cx, typedArray, index, 3142 [](auto addr, auto val) { 3143 return jit::AtomicOperations::fetchAddSeqCst(addr, val); 3144 }, 3145 value); 3146 } 3147 3148 BigInt* AtomicsAnd64(JSContext* cx, TypedArrayObject* typedArray, size_t index, 3149 const BigInt* value) { 3150 return AtomicAccess64( 3151 cx, typedArray, index, 3152 [](auto addr, auto val) { 3153 return jit::AtomicOperations::fetchAndSeqCst(addr, val); 3154 }, 3155 value); 3156 } 3157 3158 BigInt* AtomicsOr64(JSContext* cx, TypedArrayObject* typedArray, size_t index, 3159 const BigInt* value) { 3160 return AtomicAccess64( 3161 cx, typedArray, index, 3162 [](auto addr, auto val) { 3163 return jit::AtomicOperations::fetchOrSeqCst(addr, val); 3164 }, 3165 value); 3166 } 3167 3168 BigInt* AtomicsSub64(JSContext* cx, TypedArrayObject* typedArray, size_t index, 3169 const BigInt* value) { 3170 return AtomicAccess64( 3171 cx, typedArray, index, 3172 [](auto addr, auto val) { 3173 return jit::AtomicOperations::fetchSubSeqCst(addr, val); 3174 }, 3175 value); 3176 } 3177 3178 BigInt* AtomicsXor64(JSContext* cx, TypedArrayObject* typedArray, size_t index, 3179 const BigInt* value) { 3180 return AtomicAccess64( 3181 cx, typedArray, index, 3182 [](auto addr, auto val) { 3183 return jit::AtomicOperations::fetchXorSeqCst(addr, val); 3184 }, 3185 value); 3186 } 3187 3188 float RoundFloat16ToFloat32(int32_t d) { 3189 AutoUnsafeCallWithABI unsafe; 3190 return static_cast<float>(js::float16{d}); 3191 } 3192 3193 float RoundFloat16ToFloat32(float d) { 3194 AutoUnsafeCallWithABI unsafe; 3195 return static_cast<float>(js::float16{d}); 3196 } 3197 3198 float RoundFloat16ToFloat32(double d) { 3199 AutoUnsafeCallWithABI unsafe; 3200 return static_cast<float>(js::float16{d}); 3201 } 3202 3203 float Float16ToFloat32(int32_t value) { 3204 AutoUnsafeCallWithABI unsafe; 3205 return static_cast<float>(js::float16::fromRawBits(value)); 3206 } 3207 3208 int32_t Float32ToFloat16(float value) { 3209 AutoUnsafeCallWithABI unsafe; 3210 return static_cast<int32_t>(js::float16{value}.toRawBits()); 3211 } 3212 3213 void DateFillLocalTimeSlots(DateObject* dateObj) { 3214 AutoUnsafeCallWithABI unsafe; 3215 dateObj->fillLocalTimeSlots(); 3216 } 3217 3218 JSAtom* AtomizeStringNoGC(JSContext* cx, JSString* str) { 3219 // IC code calls this directly so we shouldn't GC. 3220 AutoUnsafeCallWithABI unsafe; 3221 3222 JSAtom* atom = AtomizeString(cx, str); 3223 if (!atom) { 3224 cx->recoverFromOutOfMemory(); 3225 return nullptr; 3226 } 3227 3228 return atom; 3229 } 3230 3231 bool SetObjectHas(JSContext* cx, Handle<SetObject*> obj, HandleValue key, 3232 bool* rval) { 3233 return obj->has(cx, key, rval); 3234 } 3235 3236 bool SetObjectDelete(JSContext* cx, Handle<SetObject*> obj, HandleValue key, 3237 bool* rval) { 3238 return obj->delete_(cx, key, rval); 3239 } 3240 3241 bool SetObjectAdd(JSContext* cx, Handle<SetObject*> obj, HandleValue key) { 3242 return obj->add(cx, key); 3243 } 3244 3245 bool SetObjectAddFromIC(JSContext* cx, Handle<SetObject*> obj, HandleValue key, 3246 MutableHandleValue rval) { 3247 if (!SetObjectAdd(cx, obj, key)) { 3248 return false; 3249 } 3250 rval.setObject(*obj); 3251 return true; 3252 } 3253 3254 bool MapObjectHas(JSContext* cx, Handle<MapObject*> obj, HandleValue key, 3255 bool* rval) { 3256 return obj->has(cx, key, rval); 3257 } 3258 3259 bool MapObjectGet(JSContext* cx, Handle<MapObject*> obj, HandleValue key, 3260 MutableHandleValue rval) { 3261 return obj->get(cx, key, rval); 3262 } 3263 3264 bool MapObjectDelete(JSContext* cx, Handle<MapObject*> obj, HandleValue key, 3265 bool* rval) { 3266 return obj->delete_(cx, key, rval); 3267 } 3268 3269 bool MapObjectSet(JSContext* cx, Handle<MapObject*> obj, HandleValue key, 3270 HandleValue val) { 3271 return obj->set(cx, key, val); 3272 } 3273 3274 bool MapObjectSetFromIC(JSContext* cx, Handle<MapObject*> obj, HandleValue key, 3275 HandleValue val, MutableHandleValue rval) { 3276 if (!MapObjectSet(cx, obj, key, val)) { 3277 return false; 3278 } 3279 rval.setObject(*obj); 3280 return true; 3281 } 3282 3283 #ifdef DEBUG 3284 template <class T> 3285 static mozilla::HashNumber HashValue(JSContext* cx, T* obj, 3286 const Value* value) { 3287 MOZ_ASSERT(obj->size() > 0); 3288 3289 HashableValue hashable; 3290 MOZ_ALWAYS_TRUE(hashable.setValue(cx, *value)); 3291 3292 using Table = typename T::Table; 3293 return *Table(obj).hash(hashable); 3294 } 3295 #endif 3296 3297 void AssertSetObjectHash(JSContext* cx, SetObject* obj, const Value* value, 3298 mozilla::HashNumber actualHash) { 3299 AutoUnsafeCallWithABI unsafe; 3300 3301 MOZ_ASSERT(actualHash == HashValue(cx, obj, value)); 3302 } 3303 3304 void AssertMapObjectHash(JSContext* cx, MapObject* obj, const Value* value, 3305 mozilla::HashNumber actualHash) { 3306 AutoUnsafeCallWithABI unsafe; 3307 3308 MOZ_ASSERT(actualHash == HashValue(cx, obj, value)); 3309 } 3310 3311 void AssertPropertyLookup(NativeObject* obj, PropertyKey id, uint32_t slot) { 3312 AutoUnsafeCallWithABI unsafe; 3313 #ifdef DEBUG 3314 mozilla::Maybe<PropertyInfo> prop = obj->lookupPure(id); 3315 MOZ_ASSERT(prop.isSome()); 3316 MOZ_ASSERT(prop->slot() == slot); 3317 #else 3318 MOZ_CRASH("This should only be called in debug builds."); 3319 #endif 3320 } 3321 3322 // This is a specialized version of ExposeJSThingToActiveJS 3323 void ReadBarrier(gc::Cell* cell) { 3324 AutoUnsafeCallWithABI unsafe; 3325 3326 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting()); 3327 MOZ_ASSERT(!gc::IsInsideNursery(cell)); 3328 3329 gc::TenuredCell* tenured = &cell->asTenured(); 3330 MOZ_ASSERT(!gc::detail::TenuredCellIsMarkedBlack(tenured)); 3331 3332 Zone* zone = tenured->zone(); 3333 if (zone->needsIncrementalBarrier()) { 3334 gc::PerformIncrementalReadBarrier(tenured); 3335 } else if (!zone->isGCPreparing() && 3336 gc::detail::NonBlackCellIsMarkedGray(tenured)) { 3337 gc::UnmarkGrayGCThingRecursively(tenured); 3338 } 3339 MOZ_ASSERT_IF(!zone->isGCPreparing(), 3340 !gc::detail::TenuredCellIsMarkedGray(tenured)); 3341 } 3342 3343 void AssumeUnreachable(const char* output) { 3344 MOZ_ReportAssertionFailure(output, __FILE__, __LINE__); 3345 } 3346 3347 void Printf0(const char* output) { 3348 AutoUnsafeCallWithABI unsafe; 3349 3350 // Use stderr instead of stdout because this is only used for debug 3351 // output. stderr is less likely to interfere with the program's normal 3352 // output, and it's always unbuffered. 3353 fprintf(stderr, "%s", output); 3354 } 3355 3356 void Printf1(const char* output, uintptr_t value) { 3357 AutoUnsafeCallWithABI unsafe; 3358 AutoEnterOOMUnsafeRegion oomUnsafe; 3359 js::UniqueChars line = JS_sprintf_append(nullptr, output, value); 3360 if (!line) { 3361 oomUnsafe.crash("OOM at masm.printf"); 3362 } 3363 fprintf(stderr, "%s", line.get()); 3364 } 3365 3366 } // namespace jit 3367 } // namespace js