Source code

Revision control

Copy as Markdown

Other Tools

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "jit/VMFunctions.h"
#include "mozilla/FloatingPoint.h"
#include "builtin/MapObject.h"
#include "builtin/String.h"
#include "ds/OrderedHashTable.h"
#include "gc/Cell.h"
#include "gc/GC.h"
#include "jit/arm/Simulator-arm.h"
#include "jit/AtomicOperations.h"
#include "jit/BaselineIC.h"
#include "jit/CalleeToken.h"
#include "jit/JitFrames.h"
#include "jit/JitRuntime.h"
#include "jit/mips32/Simulator-mips32.h"
#include "jit/mips64/Simulator-mips64.h"
#include "jit/Simulator.h"
#include "js/experimental/JitInfo.h"
#include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
#include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit
#include "js/friend/WindowProxy.h" // js::IsWindow
#include "js/Printf.h"
#include "js/TraceKind.h"
#include "proxy/ScriptedProxyHandler.h"
#include "util/Unicode.h"
#include "vm/ArrayObject.h"
#include "vm/Compartment.h"
#include "vm/Float16.h"
#include "vm/Interpreter.h"
#include "vm/JSAtomUtils.h" // AtomizeString
#include "vm/PlainObject.h" // js::PlainObject
#include "vm/SelfHosting.h"
#include "vm/StaticStrings.h"
#include "vm/TypedArrayObject.h"
#include "vm/TypeofEqOperand.h" // TypeofEqOperand
#include "vm/Watchtower.h"
#include "wasm/WasmGcObject.h"
#include "debugger/DebugAPI-inl.h"
#include "jit/BaselineFrame-inl.h"
#include "jit/VMFunctionList-inl.h"
#include "vm/Interpreter-inl.h"
#include "vm/JSAtomUtils-inl.h" // TypeName
#include "vm/JSContext-inl.h"
#include "vm/JSScript-inl.h"
#include "vm/NativeObject-inl.h"
#include "vm/PlainObject-inl.h" // js::CreateThis
#include "vm/StringObject-inl.h"
using namespace js;
using namespace js::jit;
namespace js {
class ArgumentsObject;
class NamedLambdaObject;
class AsyncFunctionGeneratorObject;
class RegExpObject;
namespace jit {
struct IonOsrTempData;
// Number of extra Values a VM wrapper must pop from the stack when the call
// returns, beyond the explicit arguments. Passed to VMFunctionDataHelper as
// |extraValuesToPop|.
struct PopValues {
  uint8_t numValues;

  explicit constexpr PopValues(uint8_t numValues = 0) : numValues(numValues) {}
};
// Map a C++ return type to the DataType tag recorded in VMFunctionData.
// The unspecialized template is intentionally empty so that an unsupported
// return type fails to compile when |result| is referenced.
template <class>
struct ReturnTypeToDataType { /* Unexpected return type for a VMFunction. */
};
template <>
struct ReturnTypeToDataType<void> {
  static const DataType result = Type_Void;
};
template <>
struct ReturnTypeToDataType<bool> {
  static const DataType result = Type_Bool;
};
template <class T>
struct ReturnTypeToDataType<T*> {
  // Assume by default that any pointer return types are cells.
  static_assert(std::is_base_of_v<gc::Cell, T>);
  static const DataType result = Type_Cell;
};
// Convert argument types to properties of the argument known by the jit.
// Word vs. Double classifies the argument's size/passing; the specializations
// below additionally OR in ByRef for arguments passed by pointer (handles and
// const references).
template <class T>
struct TypeToArgProperties {
  static const uint32_t result =
      (sizeof(T) <= sizeof(void*) ? VMFunctionData::Word
                                  : VMFunctionData::Double);
};
template <>
struct TypeToArgProperties<const Value&> {
  static const uint32_t result =
      TypeToArgProperties<Value>::result | VMFunctionData::ByRef;
};
template <>
struct TypeToArgProperties<HandleValue> {
  static const uint32_t result =
      TypeToArgProperties<Value>::result | VMFunctionData::ByRef;
};
template <>
struct TypeToArgProperties<MutableHandleValue> {
  static const uint32_t result =
      TypeToArgProperties<Value>::result | VMFunctionData::ByRef;
};
template <>
struct TypeToArgProperties<HandleId> {
  static const uint32_t result =
      TypeToArgProperties<jsid>::result | VMFunctionData::ByRef;
};
template <class T>
struct TypeToArgProperties<Handle<T*>> {
  // Assume by default that any pointer handle types are cells.
  static_assert(std::is_base_of_v<gc::Cell, T>);
  static const uint32_t result =
      TypeToArgProperties<T*>::result | VMFunctionData::ByRef;
};
template <class T>
struct TypeToArgProperties<Handle<T>> {
  // Fail for Handle types that aren't specialized above.
};
// Convert argument type to whether or not it should be passed in a float
// register on platforms that have them, like x64.
template <class T>
struct TypeToPassInFloatReg {
  static const uint32_t result = 0;
};
template <>
struct TypeToPassInFloatReg<double> {
  static const uint32_t result = 1;
};
// Convert argument types to root types used by the gc, see TraceJitExitFrame.
// The default is RootNone (argument is not a GC pointer).
template <class T>
struct TypeToRootType {
  static const uint32_t result = VMFunctionData::RootNone;
};
template <>
struct TypeToRootType<HandleValue> {
  static const uint32_t result = VMFunctionData::RootValue;
};
template <>
struct TypeToRootType<MutableHandleValue> {
  static const uint32_t result = VMFunctionData::RootValue;
};
template <>
struct TypeToRootType<HandleId> {
  static const uint32_t result = VMFunctionData::RootId;
};
template <class T>
struct TypeToRootType<Handle<T*>> {
  // Assume by default that any pointer types are cells.
  static_assert(std::is_base_of_v<gc::Cell, T>);
  // Select the root type from the pointee's trace kind. Evaluated at compile
  // time; trace kinds that are never valid VM function arguments crash.
  static constexpr uint32_t rootType() {
    using JS::TraceKind;
    switch (JS::MapTypeToTraceKind<T>::kind) {
      case TraceKind::Object:
        return VMFunctionData::RootObject;
      case TraceKind::BigInt:
        return VMFunctionData::RootBigInt;
      case TraceKind::String:
        return VMFunctionData::RootString;
      case TraceKind::Shape:
      case TraceKind::Script:
      case TraceKind::Scope:
        return VMFunctionData::RootCell;
      case TraceKind::Symbol:
      case TraceKind::BaseShape:
      case TraceKind::Null:
      case TraceKind::JitCode:
      case TraceKind::RegExpShared:
      case TraceKind::GetterSetter:
      case TraceKind::PropMap:
        MOZ_CRASH("Unexpected trace kind");
    }
  }
  static constexpr uint32_t result = rootType();
};
template <class T>
struct TypeToRootType<Handle<T>> {
  // Fail for Handle types that aren't specialized above.
};
// Map the type of a VM function's trailing out-parameter to the DataType the
// wrapper uses to allocate a stack slot and read the result back. Types that
// are also used as *input* types map to Type_Void so they are not mistaken
// for out params.
template <class>
struct OutParamToDataType {
  static const DataType result = Type_Void;
};
template <class T>
struct OutParamToDataType<const T*> {
  // Const pointers can't be output parameters.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<uint64_t*> {
  // Already used as an input type, so it can't be used as an output param.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<JSObject*> {
  // Already used as an input type, so it can't be used as an output param.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<JSString*> {
  // Already used as an input type, so it can't be used as an output param.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<BaselineFrame*> {
  // Already used as an input type, so it can't be used as an output param.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<gc::AllocSite*> {
  // Already used as an input type, so it can't be used as an output param.
  static const DataType result = Type_Void;
};
template <>
struct OutParamToDataType<Value*> {
  static const DataType result = Type_Value;
};
template <>
struct OutParamToDataType<int*> {
  static const DataType result = Type_Int32;
};
template <>
struct OutParamToDataType<uint32_t*> {
  static const DataType result = Type_Int32;
};
template <>
struct OutParamToDataType<bool*> {
  static const DataType result = Type_Bool;
};
template <>
struct OutParamToDataType<double*> {
  static const DataType result = Type_Double;
};
template <class T>
struct OutParamToDataType<T*> {
  // Fail for pointer types that aren't specialized above.
};
template <class T>
struct OutParamToDataType<T**> {
  static const DataType result = Type_Pointer;
};
template <class T>
struct OutParamToDataType<MutableHandle<T>> {
  static const DataType result = Type_Handle;
};
// For Type_Handle out params, record which root type the handle refers to so
// the wrapper can root the slot correctly.
template <class>
struct OutParamToRootType {
  static const VMFunctionData::RootType result = VMFunctionData::RootNone;
};
template <>
struct OutParamToRootType<MutableHandleValue> {
  static const VMFunctionData::RootType result = VMFunctionData::RootValue;
};
template <>
struct OutParamToRootType<MutableHandleObject> {
  static const VMFunctionData::RootType result = VMFunctionData::RootObject;
};
template <>
struct OutParamToRootType<MutableHandleString> {
  static const VMFunctionData::RootType result = VMFunctionData::RootString;
};
template <>
struct OutParamToRootType<MutableHandleBigInt> {
  static const VMFunctionData::RootType result = VMFunctionData::RootBigInt;
};
// Construct a bit mask from a list of types. The mask is constructed as an OR
// of the mask produced for each argument. The result of each argument is
// shifted by its index, such that the result of the first argument is on the
// low bits of the mask, and the result of the last argument in part of the
// high bits of the mask.
template <template <typename> class Each, typename ResultType, size_t Shift,
          typename... Args>
struct BitMask;
// Base case: no arguments left — empty mask.
template <template <typename> class Each, typename ResultType, size_t Shift>
struct BitMask<Each, ResultType, Shift> {
  static constexpr ResultType result = ResultType();
};
// Recursive case: encode the head in the low Shift bits and shift the
// remainder of the list up. Static assertions guarantee each per-argument
// value fits in Shift bits and the full list fits in ResultType.
template <template <typename> class Each, typename ResultType, size_t Shift,
          typename HeadType, typename... TailTypes>
struct BitMask<Each, ResultType, Shift, HeadType, TailTypes...> {
  static_assert(ResultType(Each<HeadType>::result) < (1 << Shift),
                "not enough bits reserved by the shift for individual results");
  static_assert(sizeof...(TailTypes) < (8 * sizeof(ResultType) / Shift),
                "not enough bits in the result type to store all bit masks");
  static constexpr ResultType result =
      ResultType(Each<HeadType>::result) |
      (BitMask<Each, ResultType, Shift, TailTypes...>::result << Shift);
};
// Helper template to build the VMFunctionData for a function.
template <typename... Args>
struct VMFunctionDataHelper;
// Specialization for the supported shape: a C++ function whose first
// parameter is JSContext*. All metadata (return type, out param, argument
// properties, root types) is derived from the signature at compile time.
template <class R, typename... Args>
struct VMFunctionDataHelper<R (*)(JSContext*, Args...)>
    : public VMFunctionData {
  using Fun = R (*)(JSContext*, Args...);
  static constexpr DataType returnType() {
    return ReturnTypeToDataType<R>::result;
  }
  // The out param, if any, is the last argument.
  static constexpr DataType outParam() {
    return OutParamToDataType<typename LastArg<Args...>::Type>::result;
  }
  static constexpr RootType outParamRootType() {
    return OutParamToRootType<typename LastArg<Args...>::Type>::result;
  }
  static constexpr size_t NbArgs() { return sizeof...(Args); }
  // Explicit args exclude the trailing out param (when present).
  static constexpr size_t explicitArgs() {
    return NbArgs() - (outParam() != Type_Void ? 1 : 0);
  }
  // 2 bits per argument (Word/Double/ByRef flags).
  static constexpr uint32_t argumentProperties() {
    return BitMask<TypeToArgProperties, uint32_t, 2, Args...>::result;
  }
  static constexpr uint32_t argumentPassedInFloatRegs() {
    return BitMask<TypeToPassInFloatReg, uint32_t, 2, Args...>::result;
  }
  // 3 bits per argument (RootType enum).
  static constexpr uint64_t argumentRootTypes() {
    return BitMask<TypeToRootType, uint64_t, 3, Args...>::result;
  }
  constexpr explicit VMFunctionDataHelper(const char* name)
      : VMFunctionData(name, explicitArgs(), argumentProperties(),
                       argumentPassedInFloatRegs(), argumentRootTypes(),
                       outParam(), outParamRootType(), returnType(),
                       /* extraValuesToPop = */ 0) {}
  constexpr explicit VMFunctionDataHelper(const char* name,
                                          PopValues extraValuesToPop)
      : VMFunctionData(name, explicitArgs(), argumentProperties(),
                       argumentPassedInFloatRegs(), argumentRootTypes(),
                       outParam(), outParamRootType(), returnType(),
                       extraValuesToPop.numValues) {}
};
// GCC warns when the signature does not have matching attributes (for example
// [[nodiscard]]). Squelch this warning to avoid a GCC-only footgun.
#if MOZ_IS_GCC
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Wignored-attributes"
#endif
// Generate VMFunctionData array. One constexpr entry per function in
// VMFUNCTION_LIST, indexed by VMFunctionId.
#define DEF_VMFUNCTION(name, fp, valuesToPop...) \
  VMFunctionDataHelper<decltype(&(::fp))>(#name, PopValues(valuesToPop)),
static constexpr VMFunctionData vmFunctions[] = {
    VMFUNCTION_LIST(DEF_VMFUNCTION)
#undef DEF_VMFUNCTION
};
#if MOZ_IS_GCC
#  pragma GCC diagnostic pop
#endif
// Generate arrays storing C++ function pointers. These pointers are not stored
// in VMFunctionData because there's no good way to cast them to void* in
// constexpr code. Compilers are smart enough to treat the const array below as
// constexpr.
#define DEF_VMFUNCTION(name, fp, ...) (void*)(::fp),
static void* const vmFunctionTargets[] = {VMFUNCTION_LIST(DEF_VMFUNCTION)};
#undef DEF_VMFUNCTION
// Return the compile-time metadata for the VM function with the given id.
const VMFunctionData& GetVMFunction(VMFunctionId id) {
  return vmFunctions[size_t(id)];
}
// Return the C++ entry point for the VM function with the given id.
static DynFn GetVMFunctionTarget(VMFunctionId id) {
  return DynFn{vmFunctionTargets[size_t(id)]};
}
// Total number of functions in the VM function list.
size_t NumVMFunctions() { return size_t(VMFunctionId::Count); }
// Size in bytes of the stack slot the wrapper reserves for this function's
// out parameter (0 when there is none). Type_Cell is never an out param.
size_t VMFunctionData::sizeOfOutParamStackSlot() const {
  switch (outParam) {
    case Type_Value:
      return sizeof(Value);
    case Type_Pointer:
    case Type_Int32:
    case Type_Bool:
      return sizeof(uintptr_t);
    case Type_Double:
      return sizeof(double);
    case Type_Handle:
      // Handle out params store either a cell pointer or a full Value,
      // depending on the root type.
      switch (outParamRootType) {
        case RootNone:
          MOZ_CRASH("Handle must have root type");
        case RootObject:
        case RootString:
        case RootCell:
        case RootBigInt:
        case RootId:
          return sizeof(uintptr_t);
        case RootValue:
          return sizeof(Value);
      }
      MOZ_CRASH("Invalid type");
    case Type_Void:
      return 0;
    case Type_Cell:
      MOZ_CRASH("Unexpected outparam type");
  }
  MOZ_CRASH("Invalid type");
}
// Generate a wrapper trampoline for every function in the VM function list,
// recording each wrapper's code offset in functionWrapperOffsets_ (indexed by
// VMFunctionId). Returns false on OOM or codegen failure.
bool JitRuntime::generateVMWrappers(JSContext* cx, MacroAssembler& masm,
                                    PerfSpewerRangeRecorder& rangeRecorder) {
  // Generate all VM function wrappers.

  static constexpr size_t NumVMFunctions = size_t(VMFunctionId::Count);

  if (!functionWrapperOffsets_.reserve(NumVMFunctions)) {
    return false;
  }

#ifdef DEBUG
  const char* lastName = nullptr;
#endif

  for (size_t i = 0; i < NumVMFunctions; i++) {
    VMFunctionId id = VMFunctionId(i);
    const VMFunctionData& fun = GetVMFunction(id);

#ifdef DEBUG
    // Assert the list is sorted by name.
    if (lastName) {
      MOZ_ASSERT(strcmp(lastName, fun.name()) < 0,
                 "VM function list must be sorted by name");
    }
    lastName = fun.name();
#endif

    JitSpew(JitSpew_Codegen, "# VM function wrapper (%s)", fun.name());

    uint32_t offset;
    if (!generateVMWrapper(cx, masm, id, fun, GetVMFunctionTarget(id),
                           &offset)) {
      return false;
    }

#if defined(JS_ION_PERF)
    rangeRecorder.recordVMWrapperOffset(fun.name());
#else
    rangeRecorder.recordOffset("Trampoline: VMWrapper");
#endif

    // Wrappers are generated in id order, so the offset vector stays aligned
    // with VMFunctionId.
    MOZ_ASSERT(functionWrapperOffsets_.length() == size_t(id));
    functionWrapperOffsets_.infallibleAppend(offset);
  }

  return true;
}
// Invoke |obj| as a function (or constructor) from JIT code. |argv| points at
// [this, arg0..argN-1, (newTarget if constructing)]; the result is stored in
// |rval|. Returns false on exception.
bool InvokeFunction(JSContext* cx, HandleObject obj, bool constructing,
                    bool ignoresReturnValue, uint32_t argc, Value* argv,
                    MutableHandleValue rval) {
  // Root the raw argument vector (this + args + optional newTarget).
  RootedExternalValueArray argvRoot(cx, argc + 1 + constructing, argv);

  // Data in the argument vector is arranged for a JIT -> JIT call.
  RootedValue thisv(cx, argv[0]);
  Value* argvWithoutThis = argv + 1;

  RootedValue fval(cx, ObjectValue(*obj));
  if (constructing) {
    if (!IsConstructor(fval)) {
      ReportValueError(cx, JSMSG_NOT_CONSTRUCTOR, JSDVG_IGNORE_STACK, fval,
                       nullptr);
      return false;
    }

    ConstructArgs cargs(cx);
    if (!cargs.init(cx, argc)) {
      return false;
    }

    for (uint32_t i = 0; i < argc; i++) {
      cargs[i].set(argvWithoutThis[i]);
    }

    // newTarget sits immediately after the actual arguments.
    RootedValue newTarget(cx, argvWithoutThis[argc]);

    // See CreateThisFromIon for why this can be NullValue.
    if (thisv.isNull()) {
      thisv.setMagic(JS_IS_CONSTRUCTING);
    }

    // If |this| hasn't been created, or is JS_UNINITIALIZED_LEXICAL,
    // we can use normal construction code without creating an extraneous
    // object.
    if (thisv.isMagic()) {
      MOZ_ASSERT(thisv.whyMagic() == JS_IS_CONSTRUCTING ||
                 thisv.whyMagic() == JS_UNINITIALIZED_LEXICAL);

      RootedObject obj(cx);
      if (!Construct(cx, fval, cargs, newTarget, &obj)) {
        return false;
      }

      rval.setObject(*obj);
      return true;
    }

    // Otherwise the default |this| has already been created. We could
    // almost perform a *call* at this point, but we'd break |new.target|
    // in the function. So in this one weird case we call a one-off
    // construction path that *won't* set |this| to JS_IS_CONSTRUCTING.
    return InternalConstructWithProvidedThis(cx, fval, thisv, cargs, newTarget,
                                             rval);
  }

  InvokeArgsMaybeIgnoresReturnValue args(cx);
  if (!args.init(cx, argc, ignoresReturnValue)) {
    return false;
  }

  for (size_t i = 0; i < argc; i++) {
    args[i].set(argvWithoutThis[i]);
  }

  return Call(cx, fval, thisv, args, rval);
}
// Return the current runtime's interpreter stub entry point (raw code
// pointer) for the calling thread.
void* GetContextSensitiveInterpreterStub() {
  return TlsContext.get()->runtime()->jitRuntime()->interpreterStub().value;
}

// Called from the interpreter stub's exit frame: unpack the JIT frame's
// callee/arguments and invoke the function through InvokeFunction. On
// success, the return value is written over |this| in the argument vector.
bool InvokeFromInterpreterStub(JSContext* cx,
                               InterpreterStubExitFrameLayout* frame) {
  JitFrameLayout* jsFrame = frame->jsFrame();
  CalleeToken token = jsFrame->calleeToken();

  Value* argv = jsFrame->thisAndActualArgs();
  uint32_t numActualArgs = jsFrame->numActualArgs();
  bool constructing = CalleeTokenIsConstructing(token);
  RootedFunction fun(cx, CalleeTokenToFunction(token));

  // Ensure new.target immediately follows the actual arguments (the arguments
  // rectifier added padding).
  if (constructing && numActualArgs < fun->nargs()) {
    argv[1 + numActualArgs] = argv[1 + fun->nargs()];
  }

  RootedValue rval(cx);
  if (!InvokeFunction(cx, fun, constructing,
                      /* ignoresReturnValue = */ false, numActualArgs, argv,
                      &rval)) {
    return false;
  }

  // Overwrite |this| with the return value.
  argv[0] = rval;
  return true;
}
// Shared implementation for the over-recursion VM functions. |extra| is extra
// headroom (in bytes) to require below the stack limit.
static bool CheckOverRecursedImpl(JSContext* cx, size_t extra) {
  // We just failed the jitStackLimit check. There are two possible reasons:
  //  1) jitStackLimit was the real stack limit and we're over-recursed
  //  2) jitStackLimit was set to JS::NativeStackLimitMin by
  //     JSContext::requestInterrupt and we need to call
  //     JSContext::handleInterrupt.

  // This handles 1).
#ifdef JS_SIMULATOR
  if (cx->simulator()->overRecursedWithExtra(extra)) {
    ReportOverRecursed(cx);
    return false;
  }
#else
  AutoCheckRecursionLimit recursion(cx);
  if (!recursion.checkWithExtra(cx, extra)) {
    return false;
  }
#endif

  // This handles 2).
  gc::MaybeVerifyBarriers(cx);
  return cx->handleInterrupt();
}

// Plain over-recursion check with no extra headroom.
bool CheckOverRecursed(JSContext* cx) { return CheckOverRecursedImpl(cx, 0); }

bool CheckOverRecursedBaseline(JSContext* cx, BaselineFrame* frame) {
  // The stack check in Baseline happens before pushing locals so we have to
  // account for that by including script->nslots() in the C++ recursion check.
  size_t extra = frame->script()->nslots() * sizeof(Value);
  return CheckOverRecursedImpl(cx, extra);
}
// Set the prototype of a plain object literal. Values that are neither an
// object nor null are silently ignored (the call succeeds without mutating).
bool MutatePrototype(JSContext* cx, Handle<PlainObject*> obj,
                     HandleValue value) {
  if (!value.isObjectOrNull()) {
    return true;
  }

  RootedObject newProto(cx, value.toObjectOrNull());
  return SetPrototype(cx, obj, newProto);
}
// Compare the character sequences of two strings for (in)equality. Linearizes
// both inputs; returns false only on OOM during linearization.
template <EqualityKind Kind>
bool StringsEqual(JSContext* cx, HandleString lhs, HandleString rhs,
                  bool* res) {
  JSLinearString* flatLhs = lhs->ensureLinear(cx);
  if (!flatLhs) {
    return false;
  }
  JSLinearString* flatRhs = rhs->ensureLinear(cx);
  if (!flatRhs) {
    return false;
  }

  bool equal = EqualChars(flatLhs, flatRhs);
  *res = (Kind == EqualityKind::NotEqual) ? !equal : equal;
  return true;
}

template bool StringsEqual<EqualityKind::Equal>(JSContext* cx, HandleString lhs,
                                                HandleString rhs, bool* res);
template bool StringsEqual<EqualityKind::NotEqual>(JSContext* cx,
                                                   HandleString lhs,
                                                   HandleString rhs, bool* res);
// Relational comparison of two strings. |*res| is |lhs < rhs| for LessThan,
// otherwise |lhs >= rhs|. Returns false on exception from CompareStrings.
template <ComparisonKind Kind>
bool StringsCompare(JSContext* cx, HandleString lhs, HandleString rhs,
                    bool* res) {
  int32_t cmp;
  if (!js::CompareStrings(cx, lhs, rhs, &cmp)) {
    return false;
  }
  if constexpr (Kind == ComparisonKind::LessThan) {
    *res = cmp < 0;
  } else {
    *res = cmp >= 0;
  }
  return true;
}

template bool StringsCompare<ComparisonKind::LessThan>(JSContext* cx,
                                                       HandleString lhs,
                                                       HandleString rhs,
                                                       bool* res);
template bool StringsCompare<ComparisonKind::GreaterThanOrEqual>(
    JSContext* cx, HandleString lhs, HandleString rhs, bool* res);
// Join |array|'s elements with separator |sep| by calling the array_join
// native. argv is laid out as a native call vector: argv[0] receives the
// return value, argv[1] is |this| (the array), argv[2] is the separator.
JSString* ArrayJoin(JSContext* cx, HandleObject array, HandleString sep) {
  JS::RootedValueArray<3> argv(cx);
  argv[0].setUndefined();
  argv[1].setObject(*array);
  argv[2].setString(sep);
  if (!js::array_join(cx, 1, argv.begin())) {
    return nullptr;
  }
  return argv[0].toString();
}
// Assign to an array's "length" property on behalf of SetProp/SetElem ICs.
// |strict| selects whether a failed assignment throws or is ignored.
bool SetArrayLength(JSContext* cx, HandleObject obj, HandleValue value,
                    bool strict) {
  Handle<ArrayObject*> array = obj.as<ArrayObject>();

  RootedId id(cx, NameToId(cx->names().length));
  ObjectOpResult result;

  // SetArrayLength is called by IC stubs for SetProp and SetElem on arrays'
  // "length" property.
  //
  // ArraySetLength below coerces |value| before checking for length being
  // writable, and in the case of illegal values, will throw RangeError even
  // when "length" is not writable. This is incorrect observable behavior,
  // as a regular [[Set]] operation will check for "length" being
  // writable before attempting any assignment.
  //
  // So, perform ArraySetLength if and only if "length" is writable.
  if (array->lengthIsWritable()) {
    Rooted<PropertyDescriptor> desc(
        cx, PropertyDescriptor::Data(value, JS::PropertyAttribute::Writable));
    if (!ArraySetLength(cx, array, id, desc, result)) {
      return false;
    }
  } else {
    MOZ_ALWAYS_TRUE(result.fail(JSMSG_READ_ONLY));
  }

  return result.checkStrictModeError(cx, obj, id, strict);
}
// Read the UTF-16 code unit at |index| into |*code|. Returns false if the
// character could not be loaded (e.g. OOM while linearizing a rope).
bool CharCodeAt(JSContext* cx, HandleString str, int32_t index,
                uint32_t* code) {
  char16_t unit = 0;
  bool ok = str->getChar(cx, index, &unit);
  if (ok) {
    *code = unit;
  }
  return ok;
}
// Read the full Unicode code point starting at |index| into |*code|.
// Returns false if the code point could not be loaded.
bool CodePointAt(JSContext* cx, HandleString str, int32_t index,
                 uint32_t* code) {
  char32_t cp = 0;
  bool ok = str->getCodePoint(cx, size_t(index), &cp);
  if (ok) {
    *code = cp;
  }
  return ok;
}
// GC-free variant of StringFromCharCode: returns a static unit string when
// possible, otherwise a new inline string allocated with NoGC (may fail and
// return nullptr without reporting).
JSLinearString* StringFromCharCodeNoGC(JSContext* cx, int32_t code) {
  AutoUnsafeCallWithABI unsafe;

  char16_t c = char16_t(code);

  if (StaticStrings::hasUnit(c)) {
    return cx->staticStrings().getUnit(c);
  }

  return NewInlineString<NoGC>(cx, {c}, 1);
}

// Infallible-looking linearization used from JIT code that cannot GC;
// returns nullptr on failure instead of reporting OOM.
JSLinearString* LinearizeForCharAccessPure(JSString* str) {
  AutoUnsafeCallWithABI unsafe;

  // Should only be called on ropes.
  MOZ_ASSERT(str->isRope());

  // ensureLinear is intentionally called with a nullptr to avoid OOM reporting.
  return str->ensureLinear(nullptr);
}

// Fallible linearization: reports OOM on |cx| on failure.
JSLinearString* LinearizeForCharAccess(JSContext* cx, JSString* str) {
  // Should only be called on ropes.
  MOZ_ASSERT(str->isRope());

  return str->ensureLinear(cx);
}
// Return the index of the first non-whitespace character in |chars|
// (== length when the whole range is whitespace).
template <typename CharT>
static size_t StringTrimStartIndex(mozilla::Range<CharT> chars) {
  const size_t len = chars.length();
  size_t idx = 0;
  while (idx < len && unicode::IsSpace(chars[idx])) {
    idx++;
  }
  return idx;
}
// Return one past the last non-whitespace character in |chars|, scanning
// backwards but never below |begin|.
template <typename CharT>
static size_t StringTrimEndIndex(mozilla::Range<CharT> chars, size_t begin) {
  size_t idx = chars.length();
  for (; idx > begin; idx--) {
    if (!unicode::IsSpace(chars[idx - 1])) {
      break;
    }
  }
  return idx;
}
// JIT entry point: index of the first non-whitespace char in a linear string.
// Dispatches on the string's character width.
int32_t StringTrimStartIndex(const JSString* str) {
  AutoUnsafeCallWithABI unsafe;

  MOZ_ASSERT(str->isLinear());

  const auto* linear = &str->asLinear();

  size_t begin;
  if (linear->hasLatin1Chars()) {
    JS::AutoCheckCannotGC nogc;
    begin = StringTrimStartIndex(linear->latin1Range(nogc));
  } else {
    JS::AutoCheckCannotGC nogc;
    begin = StringTrimStartIndex(linear->twoByteRange(nogc));
  }
  return int32_t(begin);
}

// JIT entry point: one past the last non-whitespace char at or after |start|.
int32_t StringTrimEndIndex(const JSString* str, int32_t start) {
  AutoUnsafeCallWithABI unsafe;

  MOZ_ASSERT(str->isLinear());
  MOZ_ASSERT(start >= 0 && size_t(start) <= str->length());

  const auto* linear = &str->asLinear();

  size_t end;
  if (linear->hasLatin1Chars()) {
    JS::AutoCheckCannotGC nogc;
    end = StringTrimEndIndex(linear->latin1Range(nogc), size_t(start));
  } else {
    JS::AutoCheckCannotGC nogc;
    end = StringTrimEndIndex(linear->twoByteRange(nogc), size_t(start));
  }
  return int32_t(end);
}
// Build a one-character string from |code| and lowercase it.
// Returns nullptr on failure.
JSString* CharCodeToLowerCase(JSContext* cx, int32_t code) {
  RootedString str(cx, StringFromCharCode(cx, code));
  return str ? js::StringToLowerCase(cx, str) : nullptr;
}
// Build a one-character string from |code| and uppercase it.
// Returns nullptr on failure.
JSString* CharCodeToUpperCase(JSContext* cx, int32_t code) {
  RootedString str(cx, StringFromCharCode(cx, code));
  return str ? js::StringToUpperCase(cx, str) : nullptr;
}
// Set |obj[name] = value| on behalf of JIT code. |pc| identifies the setting
// op so that (G)Name sets use the Unqualified path; other sets use the
// Qualified path. Non-native objects fall back to the generic SetProperty.
bool SetProperty(JSContext* cx, HandleObject obj, Handle<PropertyName*> name,
                 HandleValue value, bool strict, jsbytecode* pc) {
  RootedId id(cx, NameToId(name));

  RootedValue receiver(cx, ObjectValue(*obj));
  ObjectOpResult result;
  if (MOZ_LIKELY(!obj->getOpsSetProperty())) {
    JSOp op = JSOp(*pc);
    if (op == JSOp::SetName || op == JSOp::StrictSetName ||
        op == JSOp::SetGName || op == JSOp::StrictSetGName) {
      if (!NativeSetProperty<Unqualified>(cx, obj.as<NativeObject>(), id, value,
                                          receiver, result)) {
        return false;
      }
    } else {
      if (!NativeSetProperty<Qualified>(cx, obj.as<NativeObject>(), id, value,
                                        receiver, result)) {
        return false;
      }
    }
  } else {
    if (!SetProperty(cx, obj, id, value, receiver, result)) {
      return false;
    }
  }
  // Convert a failed result into a thrown error in strict mode.
  return result.checkStrictModeError(cx, obj, id, strict);
}
// Interrupt-check VM function: verify GC barriers (when enabled) and service
// any pending interrupt. Returns false if the interrupt handler throws.
bool InterruptCheck(JSContext* cx) {
  gc::MaybeVerifyBarriers(cx);

  return CheckForInterrupt(cx);
}

// Allocate a String wrapper object for |str|.
JSObject* NewStringObject(JSContext* cx, HandleString str) {
  return StringObject::create(cx, str);
}
// Implement the |in| operator: |*out = (key in obj)|.
// Returns false on exception from key conversion or the lookup.
bool OperatorIn(JSContext* cx, HandleValue key, HandleObject obj, bool* out) {
  RootedId id(cx);
  if (!ToPropertyKey(cx, key, &id)) {
    return false;
  }
  return HasProperty(cx, obj, id, out);
}
// Look up a self-hosting intrinsic by name on the current global.
bool GetIntrinsicValue(JSContext* cx, Handle<PropertyName*> name,
                       MutableHandleValue rval) {
  return GlobalObject::getIntrinsicValue(cx, cx->global(), name, rval);
}

// Create the |this| object for a scripted constructor call from an IC.
// The callee is known to be an interpreted constructor in the current realm.
bool CreateThisFromIC(JSContext* cx, HandleObject callee,
                      HandleObject newTarget, MutableHandleValue rval) {
  HandleFunction fun = callee.as<JSFunction>();
  MOZ_ASSERT(fun->isInterpreted());
  MOZ_ASSERT(fun->isConstructor());
  MOZ_ASSERT(cx->realm() == fun->realm(),
             "Realm switching happens before creating this");

  // CreateThis expects rval to be this magic value.
  rval.set(MagicValue(JS_IS_CONSTRUCTING));
  if (!js::CreateThis(cx, fun, newTarget, GenericObject, rval)) {
    return false;
  }

  MOZ_ASSERT_IF(rval.isObject(), fun->realm() == rval.toObject().nonCCWRealm());
  return true;
}
// Create |this| for a construct call from Ion's generic call path. Leaves a
// magic/null sentinel in |rval| for cases the inline path must handle itself.
bool CreateThisFromIon(JSContext* cx, HandleObject callee,
                       HandleObject newTarget, MutableHandleValue rval) {
  // Return JS_IS_CONSTRUCTING for cases not supported by the inline call path.
  rval.set(MagicValue(JS_IS_CONSTRUCTING));

  if (!callee->is<JSFunction>()) {
    return true;
  }

  HandleFunction fun = callee.as<JSFunction>();
  if (!fun->isInterpreted() || !fun->isConstructor()) {
    return true;
  }

  // If newTarget is not a function or is a function with a possibly-getter
  // .prototype property, return NullValue to signal to LCallGeneric that it has
  // to take the slow path. Note that we return NullValue instead of a
  // MagicValue only because it's easier and faster to check for in JIT code
  // (if we returned a MagicValue, JIT code would have to check both the type
  // tag and the JSWhyMagic payload).
  if (!fun->constructorNeedsUninitializedThis()) {
    if (!newTarget->is<JSFunction>()) {
      rval.setNull();
      return true;
    }
    JSFunction* newTargetFun = &newTarget->as<JSFunction>();
    if (!newTargetFun->hasNonConfigurablePrototypeDataProperty()) {
      rval.setNull();
      return true;
    }
  }

  // Enter the callee's realm before allocating |this| in it.
  AutoRealm ar(cx, fun);
  if (!js::CreateThis(cx, fun, newTarget, GenericObject, rval)) {
    return false;
  }

  MOZ_ASSERT_IF(rval.isObject(), fun->realm() == rval.toObject().nonCCWRealm());
  return true;
}
// GC post-write barrier: record |cell| in the whole-cell store buffer.
void PostWriteBarrier(JSRuntime* rt, js::gc::Cell* cell) {
  AutoUnsafeCallWithABI unsafe;
  rt->gc.storeBuffer().putWholeCellDontCheckLast(cell);
}

// Dense-element count above which we record individual slots instead of the
// whole cell, to bound the cost of tracing a buffered cell.
static const size_t MAX_WHOLE_CELL_BUFFER_SIZE = 4096;

// Post-write barrier for a dense element store at |index| on |obj|.
void PostWriteElementBarrier(JSRuntime* rt, JSObject* obj, int32_t index) {
  AutoUnsafeCallWithABI unsafe;

  MOZ_ASSERT(!IsInsideNursery(obj));

  NativeObject* nobj = &obj->as<NativeObject>();

  MOZ_ASSERT(index >= 0);
  MOZ_ASSERT(uint32_t(index) < nobj->getDenseInitializedLength());

  // Already buffered as a whole cell; nothing more to record.
  if (nobj->isInWholeCellBuffer()) {
    return;
  }

  // Large element arrays (or the ElementsBarrier zeal mode) buffer only the
  // single written slot.
  if (nobj->getDenseInitializedLength() > MAX_WHOLE_CELL_BUFFER_SIZE
#ifdef JS_GC_ZEAL
      || rt->hasZealMode(gc::ZealMode::ElementsBarrier)
#endif
  ) {
    rt->gc.storeBuffer().putSlot(nobj, HeapSlot::Element,
                                 nobj->unshiftedIndex(index), 1);
    return;
  }

  rt->gc.storeBuffer().putWholeCell(obj);
}

// Post-write barrier for stores into the global; buffers the global once per
// realm and latches |globalWriteBarriered| so later stores are free.
void PostGlobalWriteBarrier(JSRuntime* rt, GlobalObject* obj) {
  MOZ_ASSERT(obj->JSObject::is<GlobalObject>());

  if (!obj->realm()->globalWriteBarriered) {
    AutoUnsafeCallWithABI unsafe;
    rt->gc.storeBuffer().putWholeCell(obj);
    obj->realm()->globalWriteBarriered = 1;
  }
}
// Convert |str| to an int32 without GC. Returns false if the string is not a
// pure number or the number is not exactly representable as int32.
bool GetInt32FromStringPure(JSContext* cx, JSString* str, int32_t* result) {
  // We shouldn't GC here as this is called directly from IC code.
  AutoUnsafeCallWithABI unsafe;

  double d;
  if (!StringToNumberPure(cx, str, &d)) {
    return false;
  }

  return mozilla::NumberIsInt32(d, result);
}

// Return the array-index value of |str|, or -1 if it is not a linear string,
// not an index, or exceeds INT32_MAX.
int32_t GetIndexFromString(JSString* str) {
  // We shouldn't GC here as this is called directly from IC code.
  AutoUnsafeCallWithABI unsafe;

  if (!str->isLinear()) {
    return -1;
  }

  uint32_t index = UINT32_MAX;  // Initialize this to appease Valgrind.
  if (!str->asLinear().isIndex(&index) || index > INT32_MAX) {
    return -1;
  }

  return int32_t(index);
}
// GC-free cross-compartment wrap: return the existing wrapper for |obj| in
// the current compartment (or the unwrapped object when same-compartment), or
// nullptr when a new wrapper would have to be created (caller takes slow
// path).
JSObject* WrapObjectPure(JSContext* cx, JSObject* obj) {
  // IC code calls this directly so we shouldn't GC.
  AutoUnsafeCallWithABI unsafe;

  MOZ_ASSERT(obj);
  MOZ_ASSERT(cx->compartment() != obj->compartment());

  // From: Compartment::getNonWrapperObjectForCurrentCompartment
  // Note that if the object is same-compartment, but has been wrapped into a
  // different compartment, we need to unwrap it and return the bare same-
  // compartment object. Note again that windows are always wrapped by a
  // WindowProxy even when same-compartment so take care not to strip this
  // particular wrapper.
  obj = UncheckedUnwrap(obj, /* stopAtWindowProxy = */ true);
  if (cx->compartment() == obj->compartment()) {
    MOZ_ASSERT(!IsWindow(obj));
    JS::ExposeObjectToActiveJS(obj);
    return obj;
  }

  // Try to Lookup an existing wrapper for this object. We assume that
  // if we can find such a wrapper, not calling preWrap is correct.
  if (ObjectWrapperMap::Ptr p = cx->compartment()->lookupWrapper(obj)) {
    JSObject* wrapped = p->value().get();

    // Ensure the wrapper is still exposed.
    JS::ExposeObjectToActiveJS(wrapped);
    return wrapped;
  }

  return nullptr;
}
// Notify the debugger that a baseline frame has been entered.
bool DebugPrologue(JSContext* cx, BaselineFrame* frame) {
  return DebugAPI::onEnterFrame(cx, frame);
}
// Debug epilogue for a normal (non-exceptional) baseline return: forwards to
// DebugEpilogue with ok=true. Returns false if the debugger threw.
bool DebugEpilogueOnBaselineReturn(JSContext* cx, BaselineFrame* frame,
                                   const jsbytecode* pc) {
  return DebugEpilogue(cx, frame, pc, true);
}
// Debugger leave-frame hook plus environment unwinding for a baseline frame.
// |ok| indicates whether the frame is returning normally.
bool DebugEpilogue(JSContext* cx, BaselineFrame* frame, const jsbytecode* pc,
                   bool ok) {
  // If DebugAPI::onLeaveFrame returns |true| we have to return the frame's
  // return value. If it returns |false|, the debugger threw an exception.
  // In both cases we have to pop debug scopes.
  ok = DebugAPI::onLeaveFrame(cx, frame, pc, ok);

  // Unwind to the outermost environment.
  EnvironmentIter ei(cx, frame, pc);
  UnwindAllEnvironmentsInFrame(cx, ei);

  if (!ok) {
    // Pop this frame by updating packedExitFP, so that the exception
    // handling code will start at the previous frame.
    JitFrameLayout* prefix = frame->framePrefix();
    EnsureUnwoundJitExitFrame(cx->activation()->asJit(), prefix);
    return false;
  }

  return true;
}
// Propagate the script's debuggee flag onto the frame.
void FrameIsDebuggeeCheck(BaselineFrame* frame) {
  AutoUnsafeCallWithABI unsafe;
  if (frame->script()->isDebuggee()) {
    frame->setIsDebuggee();
  }
}

// Create a generator object capturing the state of |frame|.
JSObject* CreateGeneratorFromFrame(JSContext* cx, BaselineFrame* frame) {
  return AbstractGeneratorObject::createFromFrame(cx, frame);
}

// Create a generator object from explicit callee/script/environment state.
// |args| is the (optional) arguments object.
JSObject* CreateGenerator(JSContext* cx, HandleFunction callee,
                          HandleScript script, HandleObject environmentChain,
                          HandleObject args) {
  Rooted<ArgumentsObject*> argsObj(
      cx, args ? &args->as<ArgumentsObject>() : nullptr);
  return AbstractGeneratorObject::create(cx, callee, script, environmentChain,
                                         argsObj);
}

// Suspend a generator at a yield/await point, snapshotting the frame's slots.
bool NormalSuspend(JSContext* cx, HandleObject obj, BaselineFrame* frame,
                   uint32_t frameSize, const jsbytecode* pc) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::InitialYield || JSOp(*pc) == JSOp::Yield ||
             JSOp(*pc) == JSOp::Await);

  // Minus one because we don't want to include the return value.
  uint32_t numSlots = frame->numValueSlots(frameSize) - 1;
  MOZ_ASSERT(numSlots >= frame->script()->nfixed());
  return AbstractGeneratorObject::suspend(cx, obj, frame, pc, numSlots);
}

// Mark a generator as finished at its final yield.
bool FinalSuspend(JSContext* cx, HandleObject obj, const jsbytecode* pc) {
  MOZ_ASSERT(JSOp(*pc) == JSOp::FinalYieldRval);
  AbstractGeneratorObject::finalSuspend(cx, obj);
  return true;
}
// Resume the generator |obj| by calling the self-hosted
// InterpretGeneratorResume function with (generator, argument, resumeKind).
// |stackValues| points at the JSOp::Resume operands still on the native
// stack; the layout below documents which slot is which.
bool InterpretResume(JSContext* cx, HandleObject obj, Value* stackValues,
                     MutableHandleValue rval) {
  MOZ_ASSERT(obj->is<AbstractGeneratorObject>());
  // The |stackValues| argument points to the JSOp::Resume operands on the
  // native stack. Because the stack grows down, these values are:
  //
  //   [resumeKind, argument, generator, ..]
  MOZ_ASSERT(stackValues[2].toObject() == *obj);
  GeneratorResumeKind resumeKind = IntToResumeKind(stackValues[0].toInt32());
  // The self-hosted function takes the resume kind as an atom ("next",
  // "throw", "return"), not as the raw integer.
  JSAtom* kind = ResumeKindToAtom(cx, resumeKind);
  FixedInvokeArgs<3> args(cx);
  args[0].setObject(*obj);
  args[1].set(stackValues[1]);
  args[2].setString(kind);
  return CallSelfHostedFunction(cx, cx->names().InterpretGeneratorResume,
                                UndefinedHandleValue, args, rval);
}
// Set the debuggee flag on a freshly resumed generator frame and fire the
// Debugger onResumeFrame hook, unless that already happened for this frame.
bool DebugAfterYield(JSContext* cx, BaselineFrame* frame) {
  // The BaselineFrame has just been constructed by JSOp::Resume in the
  // caller. We need to set its debuggee flag as necessary.
  //
  // If a breakpoint is set on JSOp::AfterYield, or stepping is enabled,
  // we may already have done this work. Don't fire onEnterFrame again.
  if (frame->script()->isDebuggee() && !frame->isDebuggee()) {
    frame->setIsDebuggee();
    return DebugAPI::onResumeFrame(cx, frame);
  }
  return true;
}
// VM stub for resuming a generator with a throw or return completion.
// js::GeneratorThrowOrReturn is expected to always "fail" (it leaves a
// pending exception or return completion on |cx|), hence MOZ_ALWAYS_FALSE
// and the unconditional false return to route the caller into the
// exception-handling path.
bool GeneratorThrowOrReturn(JSContext* cx, BaselineFrame* frame,
                            Handle<AbstractGeneratorObject*> genObj,
                            HandleValue arg, int32_t resumeKindArg) {
  GeneratorResumeKind resumeKind = IntToResumeKind(resumeKindArg);
  MOZ_ALWAYS_FALSE(
      js::GeneratorThrowOrReturn(cx, frame, genObj, arg, resumeKind));
  return false;
}
// Instantiate global declarations (GlobalOrEvalDeclInstantiation) for a
// script running in Ion. Only valid for scripts with a syntactic scope, so
// the environment chain is simply the global lexical environment.
bool GlobalDeclInstantiationFromIon(JSContext* cx, HandleScript script,
                                    const jsbytecode* pc) {
  MOZ_ASSERT(!script->hasNonSyntacticScope());
  RootedObject envChain(cx, &cx->global()->lexicalEnvironment());
  // |pc| encodes the index of the last function GC-thing to instantiate.
  GCThingIndex lastFun = GET_GCTHING_INDEX(pc);
  return GlobalOrEvalDeclInstantiation(cx, envChain, script, lastFun);
}
// VM stub: create the function's call object / named-lambda environment
// objects for |frame|. Returns false on OOM (exception pending on |cx|).
bool InitFunctionEnvironmentObjects(JSContext* cx, BaselineFrame* frame) {
  return frame->initFunctionEnvironmentObjects(cx);
}
// VM stub: allocate the arguments object for |frame| (the "expected"
// variant) and return it boxed as a Value in |res|.
// Returns false on failure with an exception pending on |cx|.
bool NewArgumentsObject(JSContext* cx, BaselineFrame* frame,
                        MutableHandleValue res) {
  if (ArgumentsObject* argsObj = ArgumentsObject::createExpected(cx, frame)) {
    res.setObject(*argsObj);
    return true;
  }
  return false;
}
// VM stub: allocate a dense array of |count| elements with its dense
// initialized length already set to |count|, so JIT code can store elements
// directly without further bookkeeping. Returns nullptr on failure.
ArrayObject* NewArrayObjectEnsureDenseInitLength(JSContext* cx, int32_t count) {
  MOZ_ASSERT(count >= 0);
  ArrayObject* arr = NewDenseFullyAllocatedArray(cx, count);
  if (arr) {
    arr->ensureDenseInitializedLength(0, count);
  }
  return arr;
}
// Build the rest-parameter array from |length| values at |rest|. If the JIT
// already allocated an (empty) array inline, |arrRes| is non-null and we
// just grow and fill it; otherwise allocate a new dense array with a copy
// of the values. Returns nullptr on failure.
ArrayObject* InitRestParameter(JSContext* cx, uint32_t length, Value* rest,
                               Handle<ArrayObject*> arrRes) {
  if (arrRes) {
    // Fast path: we managed to allocate the array inline; initialize the
    // elements.
    MOZ_ASSERT(arrRes->getDenseInitializedLength() == 0);

    // We don't call this function if we can initialize the elements in JIT
    // code.
    MOZ_ASSERT(length > arrRes->getDenseCapacity());

    if (!arrRes->growElements(cx, length)) {
      return nullptr;
    }
    arrRes->initDenseElements(rest, length);
    arrRes->setLength(length);
    return arrRes;
  }
  return NewDenseCopiedArray(cx, length, rest);
}
// Central debugger-trap handler for Baseline: fires single-step and
// breakpoint hooks for the bytecode at the current pc. |retAddr| is the
// native return address, used to recover the pc when running compiled
// Baseline code. Returns false if a hook threw or forced termination.
bool HandleDebugTrap(JSContext* cx, BaselineFrame* frame,
                     const uint8_t* retAddr) {
  RootedScript script(cx, frame->script());
  // Recover the bytecode pc: directly from the interpreter frame, or via the
  // BaselineScript's return-address table for compiled code.
  jsbytecode* pc;
  if (frame->runningInInterpreter()) {
    pc = frame->interpreterPC();
  } else {
    BaselineScript* blScript = script->baselineScript();
    pc = blScript->retAddrEntryFromReturnAddress(retAddr).pc(script);
  }

  // The Baseline Interpreter calls HandleDebugTrap for every op when the script
  // is in step mode or has breakpoints. The Baseline Compiler can toggle
  // breakpoints more granularly for specific bytecode PCs.
  if (frame->runningInInterpreter()) {
    MOZ_ASSERT(DebugAPI::hasAnyBreakpointsOrStepMode(script));
  } else {
    MOZ_ASSERT(DebugAPI::stepModeEnabled(script) ||
               DebugAPI::hasBreakpointsAt(script, pc));
  }

  if (JSOp(*pc) == JSOp::AfterYield) {
    // JSOp::AfterYield will set the frame's debuggee flag and call the
    // onEnterFrame handler, but if we set a breakpoint there we have to do
    // it now.
    MOZ_ASSERT(!frame->isDebuggee());

    if (!DebugAfterYield(cx, frame)) {
      return false;
    }

    // If the frame is not a debuggee we're done. This can happen, for instance,
    // if the onEnterFrame hook called removeDebuggee.
    if (!frame->isDebuggee()) {
      return true;
    }
  }

  MOZ_ASSERT(frame->isDebuggee());

  // Fire the step hook first, then any breakpoint at this pc.
  if (DebugAPI::stepModeEnabled(script) && !DebugAPI::onSingleStep(cx)) {
    return false;
  }

  if (DebugAPI::hasBreakpointsAt(script, pc) && !DebugAPI::onTrap(cx)) {
    return false;
  }

  return true;
}
// VM stub for the |debugger;| statement: dispatch to the Debugger hook.
bool OnDebuggerStatement(JSContext* cx, BaselineFrame* frame) {
  return DebugAPI::onDebuggerStatement(cx, frame);
}
// Fast ABI-called check: does the current realm have a debugger with an
// onDebuggerStatement hook attached to this global? Must not GC.
bool GlobalHasLiveOnDebuggerStatement(JSContext* cx) {
  AutoUnsafeCallWithABI unsafe;
  return cx->realm()->isDebuggee() &&
         DebugAPI::hasDebuggerStatementHook(cx->global());
}
// VM stub: push a new lexical environment for |scope| onto the frame's
// environment chain. Returns false on OOM.
bool PushLexicalEnv(JSContext* cx, BaselineFrame* frame,
                    Handle<LexicalScope*> scope) {
  return frame->pushLexicalEnvironment(cx, scope);
}
// Notify debug environments that a lexical scope is being left, then pop it
// off the frame's environment chain. DebugLeaveLexicalEnv always returns
// true (see below), hence MOZ_ALWAYS_TRUE.
bool DebugLeaveThenPopLexicalEnv(JSContext* cx, BaselineFrame* frame,
                                 const jsbytecode* pc) {
  MOZ_ALWAYS_TRUE(DebugLeaveLexicalEnv(cx, frame, pc));
  frame->popOffEnvironmentChain<ScopedLexicalEnvironmentObject>();
  return true;
}
// Replace the innermost lexical environment with a fresh copy (non-debuggee
// variant; template argument false = no debug-environments bookkeeping).
bool FreshenLexicalEnv(JSContext* cx, BaselineFrame* frame) {
  return frame->freshenLexicalEnvironment<false>(cx);
}
// Debuggee variant of FreshenLexicalEnv: also passes |pc| so debug
// environments can be updated for the scope change.
bool DebuggeeFreshenLexicalEnv(JSContext* cx, BaselineFrame* frame,
                               const jsbytecode* pc) {
  return frame->freshenLexicalEnvironment<true>(cx, pc);
}
// Recreate the innermost lexical environment from its scope (non-debuggee
// variant; template argument false = no debug-environments bookkeeping).
bool RecreateLexicalEnv(JSContext* cx, BaselineFrame* frame) {
  return frame->recreateLexicalEnvironment<false>(cx);
}
// Debuggee variant of RecreateLexicalEnv: also passes |pc| so debug
// environments can be updated for the scope change.
bool DebuggeeRecreateLexicalEnv(JSContext* cx, BaselineFrame* frame,
                                const jsbytecode* pc) {
  return frame->recreateLexicalEnvironment<true>(cx, pc);
}
// Notify DebugEnvironments that the lexical scope at |pc| is being popped,
// if this realm is a debuggee. Always succeeds; the bool return exists so
// it can be used as a VM function (callers rely on the constant true, see
// DebugLeaveThenPopLexicalEnv above).
bool DebugLeaveLexicalEnv(JSContext* cx, BaselineFrame* frame,
                          const jsbytecode* pc) {
  // Compiled Baseline code only calls this when compiled with debug
  // instrumentation.
  MOZ_ASSERT_IF(!frame->runningInInterpreter(),
                frame->script()->baselineScript()->hasDebugInstrumentation());
  if (cx->realm()->isDebuggee()) {
    DebugEnvironments::onPopLexical(cx, frame, pc);
  }
  return true;
}
// VM stub: push a class-body environment for |scope| onto the frame's
// environment chain. Returns false on OOM.
bool PushClassBodyEnv(JSContext* cx, BaselineFrame* frame,
                      Handle<ClassBodyScope*> scope) {
  return frame->pushClassBodyEnvironment(cx, scope);
}
// VM stub: push a var environment for |scope| onto the frame's environment
// chain. Returns false on OOM.
bool PushVarEnv(JSContext* cx, BaselineFrame* frame, Handle<Scope*> scope) {
  return frame->pushVarEnvironment(cx, scope);
}
// VM stub for entering a |with (val)| block: delegates to the interpreter's
// EnterWithOperation, which converts |val| to an object and pushes a
// with-environment described by |templ|.
bool EnterWith(JSContext* cx, BaselineFrame* frame, HandleValue val,
               Handle<WithScope*> templ) {
  return EnterWithOperation(cx, frame, val, templ);
}
// VM stub for leaving a |with| block: notify debug environments if this
// frame is a debuggee, then pop the with-environment. Infallible.
bool LeaveWith(JSContext* cx, BaselineFrame* frame) {
  if (MOZ_UNLIKELY(frame->isDebuggee())) {
    DebugEnvironments::onPopWith(frame);
  }
  frame->popOffEnvironmentChain<WithEnvironmentObject>();
  return true;
}
// Initialize a BaselineFrame from an interpreter frame when performing
// on-stack replacement (OSR). |numStackValues| is the number of expression
// stack values to copy over. Returns false on failure.
bool InitBaselineFrameForOsr(BaselineFrame* frame,
                             InterpreterFrame* interpFrame,
                             uint32_t numStackValues) {
  return frame->initForOsr(interpFrame, numStackValues);
}
// VM stub for String.prototype.replace with a string pattern: replaces the
// first occurrence of |pattern| in |string| with |repl|. All arguments must
// be non-null. Returns nullptr on failure.
JSString* StringReplace(JSContext* cx, HandleString string,
                        HandleString pattern, HandleString repl) {
  MOZ_ASSERT(string);
  MOZ_ASSERT(pattern);
  MOZ_ASSERT(repl);
  return str_replace_string_raw(cx, string, pattern, repl);
}
// Debug helper called from JIT code: sanity-check that |bi| looks like a
// valid BigInt cell (zone, alignment, alloc kind). Must not GC.
void AssertValidBigIntPtr(JSContext* cx, JS::BigInt* bi) {
  AutoUnsafeCallWithABI unsafe;
  // FIXME: check runtime?
  MOZ_ASSERT(cx->zone() == bi->zone());
  MOZ_ASSERT(bi->isAligned());
  MOZ_ASSERT(bi->getAllocKind() == gc::AllocKind::BIGINT);
}
// Debug helper called from JIT code: sanity-check that |obj| looks like a
// valid object pointer for the current compartment/zone/runtime. No-op in
// non-DEBUG builds. Must not GC.
void AssertValidObjectPtr(JSContext* cx, JSObject* obj) {
  AutoUnsafeCallWithABI unsafe;
#ifdef DEBUG
  // Check what we can, so that we'll hopefully assert/crash if we get a
  // bogus object (pointer).
  MOZ_ASSERT(obj->compartment() == cx->compartment());
  MOZ_ASSERT(obj->zoneFromAnyThread() == cx->zone());
  MOZ_ASSERT(obj->runtimeFromMainThread() == cx->runtime());

  // Alignment/alloc-kind checks are only meaningful for tenured cells.
  if (obj->isTenured()) {
    MOZ_ASSERT(obj->isAligned());
    gc::AllocKind kind = obj->asTenured().getAllocKind();
    MOZ_ASSERT(gc::IsObjectAllocKind(kind));
  }
#endif
}
// Debug helper called from JIT code: sanity-check that |str| looks like a
// valid string (zone, alignment, length, and that its GC AllocKind matches
// its string subtype). No-op in non-DEBUG builds. Must not GC.
void AssertValidStringPtr(JSContext* cx, JSString* str) {
  AutoUnsafeCallWithABI unsafe;
#ifdef DEBUG
  // We can't closely inspect strings from another runtime.
  if (str->runtimeFromAnyThread() != cx->runtime()) {
    MOZ_ASSERT(str->isPermanentAtom());
    return;
  }

  // Atoms live in the atoms zone; everything else must be in our zone.
  if (str->isAtom()) {
    MOZ_ASSERT(str->zone()->isAtomsZone());
  } else {
    MOZ_ASSERT(str->zone() == cx->zone());
  }

  MOZ_ASSERT(str->isAligned());
  MOZ_ASSERT(str->length() <= JSString::MAX_LENGTH);

  // Check that the alloc kind is consistent with the string's subtype.
  gc::AllocKind kind = str->getAllocKind();
  if (str->isFatInline()) {
    if (str->isAtom()) {
      MOZ_ASSERT(kind == gc::AllocKind::FAT_INLINE_ATOM);
    } else {
      MOZ_ASSERT(kind == gc::AllocKind::FAT_INLINE_STRING);
    }
  } else if (str->isExternal()) {
    MOZ_ASSERT(kind == gc::AllocKind::EXTERNAL_STRING);
  } else if (str->isAtom()) {
    MOZ_ASSERT(kind == gc::AllocKind::ATOM);
  } else if (str->isLinear()) {
    MOZ_ASSERT(kind == gc::AllocKind::STRING ||
               kind == gc::AllocKind::FAT_INLINE_STRING);
  } else {
    MOZ_ASSERT(kind == gc::AllocKind::STRING);
  }
#endif
}
// Debug helper called from JIT code: sanity-check that |sym| looks like a
// valid Symbol cell, including its description string. Must not GC.
void AssertValidSymbolPtr(JSContext* cx, JS::Symbol* sym) {
  AutoUnsafeCallWithABI unsafe;

  // We can't closely inspect symbols from another runtime.
  if (sym->runtimeFromAnyThread() != cx->runtime()) {
    MOZ_ASSERT(sym->isWellKnownSymbol());
    return;
  }

  // All symbols are allocated in the atoms zone.
  MOZ_ASSERT(sym->zone()->isAtomsZone());
  MOZ_ASSERT(sym->isAligned());
  if (JSAtom* desc = sym->description()) {
    AssertValidStringPtr(cx, desc);
  }

  MOZ_ASSERT(sym->getAllocKind() == gc::AllocKind::SYMBOL);
}
// Debug helper called from JIT code: dispatch on the Value's tag and
// sanity-check the GC-thing pointer it holds, if any. Non-GC-thing values
// (numbers, booleans, etc.) need no check. Must not GC.
void AssertValidValue(JSContext* cx, Value* v) {
  AutoUnsafeCallWithABI unsafe;
  if (v->isObject()) {
    AssertValidObjectPtr(cx, &v->toObject());
    return;
  }
  if (v->isString()) {
    AssertValidStringPtr(cx, v->toString());
    return;
  }
  if (v->isSymbol()) {
    AssertValidSymbolPtr(cx, v->toSymbol());
    return;
  }
  if (v->isBigInt()) {
    AssertValidBigIntPtr(cx, v->toBigInt());
  }
}
// ABI-called predicate: is |obj| callable? Must not GC.
bool ObjectIsCallable(JSObject* obj) {
  AutoUnsafeCallWithABI unsafe;
  return obj->isCallable();
}
// ABI-called predicate: is |obj| a constructor? Must not GC.
bool ObjectIsConstructor(JSObject* obj) {
  AutoUnsafeCallWithABI unsafe;
  return obj->isConstructor();
}
// VM stub for Object.keys(obj): call the js::obj_keys native with a
// hand-built argument vector (native calling convention: slot 0 = rval,
// slot 1 = this, slot 2 = first argument). Returns nullptr on failure.
JSObject* ObjectKeys(JSContext* cx, HandleObject obj) {
  JS::RootedValueArray<3> argv(cx);
  argv[0].setUndefined();  // rval
  argv[1].setUndefined();  // this
  argv[2].setObject(*obj);  // arg0
  if (!js::obj_keys(cx, 1, argv.begin())) {
    return nullptr;
  }
  // obj_keys stores its result (an array) in the rval slot.
  return argv[0].toObjectOrNull();
}
// VM stub: compute the length Object.keys(obj) would have, without
// materializing the array, storing it in |*length|. Not valid for proxies.
bool ObjectKeysLength(JSContext* cx, HandleObject obj, int32_t* length) {
  MOZ_ASSERT(!obj->is<ProxyObject>());
  return js::obj_keys_length(cx, obj, *length);
}
// Pre-write (incremental GC) barrier for a Value slot, called from JIT code.
// The JIT only emits this call for GC-thing values that are not already
// marked black, hence the assertions. Must not GC.
void JitValuePreWriteBarrier(JSRuntime* rt, Value* vp) {
  AutoUnsafeCallWithABI unsafe;
  MOZ_ASSERT(vp->isGCThing());
  MOZ_ASSERT(!vp->toGCThing()->isMarkedBlack());
  gc::ValuePreWriteBarrier(*vp);
}
// Pre-write (incremental GC) barrier for a JSString* slot, called from JIT
// code. The stored string must be non-null and not already marked black.
// Must not GC.
void JitStringPreWriteBarrier(JSRuntime* rt, JSString** stringp) {
  AutoUnsafeCallWithABI unsafe;
  MOZ_ASSERT(*stringp);
  MOZ_ASSERT(!(*stringp)->isMarkedBlack());
  gc::PreWriteBarrier(*stringp);
}
// Pre-write (incremental GC) barrier for a JSObject* slot, called from JIT
// code. The stored object must be non-null and not already marked black.
// Must not GC.
void JitObjectPreWriteBarrier(JSRuntime* rt, JSObject** objp) {
  AutoUnsafeCallWithABI unsafe;
  MOZ_ASSERT(*objp);
  MOZ_ASSERT(!(*objp)->isMarkedBlack());
  gc::PreWriteBarrier(*objp);
}
// Pre-write (incremental GC) barrier for a Shape* slot, called from JIT
// code. Must not GC.
void JitShapePreWriteBarrier(JSRuntime* rt, Shape** shapep) {
  AutoUnsafeCallWithABI unsafe;
  // Consistency with the other Jit*PreWriteBarrier stubs above: the JIT only
  // emits this call for a non-null pointer, and the next assert dereferences
  // it, so make the precondition explicit.
  MOZ_ASSERT(*shapep);
  MOZ_ASSERT(!(*shapep)->isMarkedBlack());
  gc::PreWriteBarrier(*shapep);
}
// Pre-write (incremental GC) barrier for a wasm::AnyRef slot, called from
// JIT code. The ref must hold a GC thing that is not already marked black.
// Must not GC.
void JitWasmAnyRefPreWriteBarrier(JSRuntime* rt, wasm::AnyRef* refp) {
  AutoUnsafeCallWithABI unsafe;
  MOZ_ASSERT(refp->isGCThing());
  MOZ_ASSERT(!(*refp).toGCThing()->isMarkedBlack());
  gc::WasmAnyRefPreWriteBarrier(*refp);
}
// Report a runtime lexical error (e.g. TDZ violation) for the topmost
// script frame, using its script and pc for error location. Always returns
// false so JIT callers take the exception path.
bool ThrowRuntimeLexicalError(JSContext* cx, unsigned errorNumber) {
  ScriptFrameIter iter(cx);
  RootedScript script(cx, iter.script());
  ReportRuntimeLexicalError(cx, errorNumber, script, iter.pc());
  return false;
}
// Report the error for a derived-class constructor returning a non-object:
// returning undefined means |this| was never initialized (throw the
// uninitialized-this error); any other primitive is a bad derived return
// value. |v| must not be an object. Always returns false.
bool ThrowBadDerivedReturnOrUninitializedThis(JSContext* cx, HandleValue v) {
  MOZ_ASSERT(!v.isObject());
  if (!v.isUndefined()) {
    ReportValueError(cx, JSMSG_BAD_DERIVED_RETURN, JSDVG_IGNORE_STACK, v,
                     nullptr);
    return false;
  }
  return js::ThrowUninitializedThis(cx);
}
// VM stub: compute the |this| value for the function in |frame| (boxing a
// primitive this or substituting the global this as required) into |res|.
bool BaselineGetFunctionThis(JSContext* cx, BaselineFrame* frame,
                             MutableHandleValue res) {
  return GetFunctionThis(cx, frame, res);
}
// Invoke the native getter function |callee| with |receiver| as the
// this-value, storing the getter's result in |result|. Enters the callee's
// realm for the duration of the call.
bool CallNativeGetter(JSContext* cx, HandleFunction callee,
                      HandleValue receiver, MutableHandleValue result) {
  AutoRealm ar(cx, callee);

  MOZ_ASSERT(callee->isNativeFun());
  JSNative natfun = callee->native();

  // JSNative calling convention: vp[0] = callee (doubles as the rval slot
  // after the call), vp[1] = this; argc is 0 for a getter.
  JS::RootedValueArray<2> vp(cx);
  vp[0].setObject(*callee.get());
  vp[1].set(receiver);

  if (!natfun(cx, 0, vp.begin())) {
    return false;
  }

  result.set(vp[0]);
  return true;
}
// Invoke a DOM getter described by |info| on the DOM object |obj|, storing
// the result in |result|. The getter receives the private C++ object stored
// in the object's first reserved slot (DOM_OBJECT_SLOT).
bool CallDOMGetter(JSContext* cx, const JSJitInfo* info, HandleObject obj,
                   MutableHandleValue result) {
  MOZ_ASSERT(info->type() == JSJitInfo::Getter);
  MOZ_ASSERT(obj->is<NativeObject>());
  MOZ_ASSERT(obj->getClass()->isDOMClass());
  MOZ_ASSERT(obj->as<NativeObject>().numFixedSlots() > 0);

#ifdef DEBUG
  // Verify |obj| really is an instance of the class this JitInfo is for.
  DOMInstanceClassHasProtoAtDepth instanceChecker =
      cx->runtime()->DOMcallbacks->instanceClassMatchesProto;
  MOZ_ASSERT(instanceChecker(obj->getClass(), info->protoID, info->depth));
#endif

  // Loading DOM_OBJECT_SLOT, which must be the first slot.
  JS::Value val = JS::GetReservedSlot(obj, 0);
  JSJitGetterOp getter = info->getter;
  return getter(cx, obj, val.toPrivate(), JSJitGetterCallArgs(result));
}
// Invoke the native setter function |callee| on |obj| with the new value
// |rhs|. The setter's return value (vp[0]) is ignored. Enters the callee's
// realm for the duration of the call.
bool CallNativeSetter(JSContext* cx, HandleFunction callee, HandleObject obj,
                      HandleValue rhs) {
  AutoRealm ar(cx, callee);

  MOZ_ASSERT(callee->isNativeFun());
  JSNative natfun = callee->native();

  // JSNative calling convention: vp[0] = callee, vp[1] = this,
  // vp[2] = first (and only) argument; argc is 1 for a setter.
  JS::RootedValueArray<3> vp(cx);
  vp[0].setObject(*callee.get());
  vp[1].setObject(*obj.get());
  vp[2].set(rhs);

  return natfun(cx, 1, vp.begin());
}
// Invoke a DOM setter described by |info| on the DOM object |obj| with the
// new value |value|. The setter receives the private C++ object stored in
// the object's first reserved slot (DOM_OBJECT_SLOT).
bool CallDOMSetter(JSContext* cx, const JSJitInfo* info, HandleObject obj,
                   HandleValue value) {
  MOZ_ASSERT(info->type() == JSJitInfo::Setter);
  MOZ_ASSERT(obj->is<NativeObject>());
  MOZ_ASSERT(obj->getClass()->isDOMClass());
  MOZ_ASSERT(obj->as<NativeObject>().numFixedSlots() > 0);

#ifdef DEBUG
  // Verify |obj| really is an instance of the class this JitInfo is for.
  DOMInstanceClassHasProtoAtDepth instanceChecker =
      cx->runtime()->DOMcallbacks->instanceClassMatchesProto;
  MOZ_ASSERT(instanceChecker(obj->getClass(), info->protoID, info->depth));
#endif

  // Loading DOM_OBJECT_SLOT, which must be the first slot.
  JS::Value val = JS::GetReservedSlot(obj, 0);
  JSJitSetterOp setter = info->setter;
  // JSJitSetterCallArgs needs a rooted, mutable copy of the value.
  RootedValue v(cx, value);
  return setter(cx, obj, val.toPrivate(), JSJitSetterCallArgs(&v));
}
// ABI-called string equality check used by ICs: |str1| must be an atom,
// |str2| a non-atom of the same length. Returns false both on "not equal"
// and when str2 cannot be linearized without allocating (callers fall
// through to the next stub in that case). Must not GC.
bool EqualStringsHelperPure(JSString* str1, JSString* str2) {
  // IC code calls this directly so we shouldn't GC.
  AutoUnsafeCallWithABI unsafe;

  MOZ_ASSERT(str1->isAtom());
  MOZ_ASSERT(!str2->isAtom());
  MOZ_ASSERT(str1->length() == str2->length());

  // ensureLinear is intentionally called with a nullptr to avoid OOM
  // reporting; if it fails, we will continue to the next stub.
  JSLinearString* str2Linear = str2->ensureLinear(nullptr);
  if (!str2Linear) {
    return false;
  }

  return EqualChars(&str1->asLinear(), str2Linear);
}
// Cheap over-approximation: could the property key |id| be a typed-array
// index string (e.g. "0", "-0", "1e3")? Only the first character is
// inspected, so this may return true for non-index strings; callers must
// tolerate false positives. Symbols and empty atoms are never indexes.
static bool MaybeTypedArrayIndexString(jsid id) {
  MOZ_ASSERT(id.isAtom() || id.isSymbol());

  if (MOZ_UNLIKELY(!id.isAtom())) {
    return false;
  }

  JSAtom* atom = id.toAtom();
  if (atom->length() == 0) {
    return false;
  }

  // Only check the first character because we want this function to be
  // fast.
  return CanStartTypedArrayIndex(atom->latin1OrTwoByteChar(0));
}
// Debug-only: verify that the megamorphic-cache |entry| for (obj, key) is
// still consistent with a fresh pure property lookup — same hit/miss state,
// same number of proto hops, same property kind and slot offset.
static void VerifyCacheEntry(JSContext* cx, NativeObject* obj, PropertyKey key,
                             const MegamorphicCacheEntry& entry) {
#ifdef DEBUG
  if (entry.isMissingProperty()) {
    // Cache says the property is absent on the whole proto chain.
    NativeObject* pobj;
    PropertyResult prop;
    MOZ_ASSERT(LookupPropertyPure(cx, obj, key, &pobj, &prop));
    MOZ_ASSERT(prop.isNotFound());
    return;
  }
  if (entry.isMissingOwnProperty()) {
    // Cache says the property is absent on |obj| itself (protos unchecked).
    MOZ_ASSERT(!obj->containsPure(key));
    return;
  }
  MOZ_ASSERT(entry.isDataProperty() || entry.isAccessorProperty());

  // Walk up numHops() prototypes to the holder; the key must be absent on
  // every object before it.
  for (size_t i = 0, numHops = entry.numHops(); i < numHops; i++) {
    MOZ_ASSERT(!obj->containsPure(key));
    obj = &obj->staticPrototype()->as<NativeObject>();
  }

  // The holder must have the property with the cached kind and slot offset.
  mozilla::Maybe<PropertyInfo> prop = obj->lookupPure(key);
  MOZ_ASSERT(prop.isSome());
  MOZ_ASSERT_IF(entry.isDataProperty(), prop->isDataProperty());
  MOZ_ASSERT_IF(!entry.isDataProperty(), prop->isAccessorProperty());
  MOZ_ASSERT(obj->getTaggedSlotOffset(prop->slot()) == entry.slotOffset());
#endif
}
template <AllowGC allowGC>
static MOZ_ALWAYS_INLINE bool MaybeGetNativePropertyAndWriteToCache(
JSContext* cx, JSObject* obj, jsid id, MegamorphicCacheEntry* entry,
Value* vp) {
MOZ_ASSERT(obj->is<NativeObject>());
NativeObject* nobj = &obj->as<NativeObject>();
Shape* receiverShape = obj->shape();
MegamorphicCache& cache = cx->caches().megamorphicCache;
MOZ_ASSERT(entry);
size_t numHops = 0;
while (true) {
MOZ_ASSERT(!nobj->getOpsLookupProperty());
uint32_t index;