// (Extraction artifact: the lines "Source code", "Revision control" and
// "Other Tools" were navigation links from the hosted code viewer, not part
// of the original source file.)

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "gc/Marking-inl.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerRange.h"
#include "mozilla/Maybe.h"
#include "mozilla/ReentrancyGuard.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/Unused.h"
#include <algorithm>
#include <initializer_list>
#include <type_traits>
#include "jsfriendapi.h"
#include "builtin/ModuleObject.h"
#include "debugger/DebugAPI.h"
#include "gc/GCInternals.h"
#include "gc/GCProbes.h"
#include "gc/Policy.h"
#include "jit/JitCode.h"
#include "js/GCTypeMacros.h" // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
#include "js/SliceBudget.h"
#include "util/DiagnosticAssertions.h"
#include "util/Memory.h"
#include "util/Poison.h"
#include "vm/ArgumentsObject.h"
#include "vm/ArrayObject.h"
#include "vm/BigIntType.h"
#include "vm/EnvironmentObject.h"
#include "vm/GeneratorObject.h"
#include "vm/RegExpShared.h"
#include "vm/Scope.h"
#include "vm/Shape.h"
#include "vm/SymbolType.h"
#include "vm/TypedArrayObject.h"
#include "wasm/WasmJS.h"
#include "gc/GC-inl.h"
#include "gc/Nursery-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/WeakMap-inl.h"
#include "gc/Zone-inl.h"
#include "vm/GeckoProfiler-inl.h"
#include "vm/NativeObject-inl.h"
#include "vm/PlainObject-inl.h" // js::PlainObject
#include "vm/Realm-inl.h"
#include "vm/StringType-inl.h"
#define MAX_DEDUPLICATABLE_STRING_LENGTH 500
using namespace js;
using namespace js::gc;
using JS::MapTypeToTraceKind;
using mozilla::DebugOnly;
using mozilla::IntegerRange;
using mozilla::PodCopy;
// [SMDOC] GC Tracing
//
// Tracing Overview
// ================
//
// Tracing, in this context, refers to an abstract visitation of some or all of
// the GC-controlled heap. The effect of tracing an edge of the graph depends
// on the subclass of the JSTracer on whose behalf we are tracing.
//
// Marking
// -------
//
// The primary JSTracer is the GCMarker. The marking tracer causes the target
// of each traversed edge to be marked black and the target edge's children to
// be marked either gray (in the gc algorithm sense) or immediately black.
//
// Callback
// --------
//
// The secondary JSTracer is the CallbackTracer. This simply invokes a callback
// on each edge in a child.
//
// The following is a rough outline of the general structure of the tracing
// internals.
//
/* clang-format off */
//
// +----------------------+ ...................
// | | : :
// | v v :
// | TraceRoot TraceEdge TraceRange GCMarker:: :
// | | | | processMarkStackTop +---+---+
// | +-----------+-----------+ | | |
// | | | | Mark |
// | v | | Stack |
// | TraceEdgeInternal | | |
// | | | +---+---+
// | | | ^
// | +--------------+---------------+ +<----------+ :
// | | | | | | :
// | v v v v | :
// | DoCallback TenuringTracer:: DoMarking traverseEdge | :
// | | traverse | | | :
// | | +------+------+ | :
// | | | | :
// | v v | :
// | CallbackTracer:: GCMarker::traverse | :
// | dispatchToOnEdge | | :
// | | | :
// | +-------------------+-----------+------+ | :
// | | | | | :
// | v v v | :
// | markAndTraceChildren markAndPush eagerlyMarkChildren | :
// | | : | | :
// | v : +-----------+ :
// | T::traceChildren : :
// | | : :
// +-------------+ ......................................
//
// Legend:
// ------- Direct calls
// ....... Data flow
//
/* clang-format on */
/*** Tracing Invariants *****************************************************/
#if defined(DEBUG)
template <typename T>
static inline bool IsThingPoisoned(T* thing) {
const uint8_t poisonBytes[] = {
JS_FRESH_NURSERY_PATTERN, JS_SWEPT_NURSERY_PATTERN,
JS_ALLOCATED_NURSERY_PATTERN, JS_FRESH_TENURED_PATTERN,
JS_MOVED_TENURED_PATTERN, JS_SWEPT_TENURED_PATTERN,
JS_ALLOCATED_TENURED_PATTERN, JS_FREED_HEAP_PTR_PATTERN,
JS_FREED_CHUNK_PATTERN, JS_FREED_ARENA_PATTERN,
JS_SWEPT_TI_PATTERN, JS_SWEPT_CODE_PATTERN,
JS_RESET_VALUE_PATTERN, JS_POISONED_JSSCRIPT_DATA_PATTERN,
JS_OOB_PARSE_NODE_PATTERN, JS_LIFO_UNDEFINED_PATTERN,
JS_LIFO_UNINITIALIZED_PATTERN,
};
const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
uint32_t* p =
reinterpret_cast<uint32_t*>(reinterpret_cast<FreeSpan*>(thing) + 1);
// Note: all free patterns are odd to make the common, not-poisoned case a
// single test.
if ((*p & 1) == 0) {
return false;
}
for (int i = 0; i < numPoisonBytes; ++i) {
const uint8_t pb = poisonBytes[i];
const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
if (*p == pw) {
return true;
}
}
return false;
}
bool js::IsTracerKind(JSTracer* trc, JS::CallbackTracer::TracerKind kind) {
  // Only callback tracers carry a TracerKind tag; everything else can never
  // match.
  if (!trc->isCallbackTracer()) {
    return false;
  }
  return trc->asCallbackTracer()->getTracerKind() == kind;
}
#endif
bool ThingIsPermanentAtomOrWellKnownSymbol(JSString* str) {
  // For strings, the only runtime-shared (permanent) things are permanent
  // atoms.
  const bool isPermanent = str->isPermanentAtom();
  return isPermanent;
}
bool ThingIsPermanentAtomOrWellKnownSymbol(JS::Symbol* sym) {
  // For symbols, the only runtime-shared (permanent) things are the
  // well-known symbols.
  const bool isWellKnown = sym->isWellKnownSymbol();
  return isWellKnown;
}
template <typename T>
static inline bool IsOwnedByOtherRuntime(JSRuntime* rt, T thing) {
  // A thing is foreign iff its owning runtime differs from |rt|. Only
  // shared, immutable things (permanent atoms, well-known symbols, and
  // self-hosting zone cells) may legitimately be seen from another runtime.
  if (thing->runtimeFromAnyThread() == rt) {
    return false;
  }
  MOZ_ASSERT(ThingIsPermanentAtomOrWellKnownSymbol(thing) ||
             thing->zoneFromAnyThread()->isSelfHostingZone());
  return true;
}
// Debug-only sanity checks on a cell that is about to be traced: validates
// the tracer/thing pairing, thread access rights, alignment, trace kind, and
// (for marking tracers) zone/compartment invariants. Compiles to nothing in
// release builds.
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T* thing) {
#ifdef DEBUG
  MOZ_ASSERT(trc);
  MOZ_ASSERT(thing);

  if (!trc->checkEdges()) {
    return;
  }

  // Forwarded (already-moved) things are only expected under the moving
  // tracer or the tenuring tracer; follow the forwarding pointer so the
  // checks below see the new location.
  if (IsForwarded(thing)) {
    MOZ_ASSERT(IsTracerKind(trc, JS::CallbackTracer::TracerKind::Moving) ||
               trc->isTenuringTracer());
    thing = Forwarded(thing);
  }

  /* This function uses data that's not available in the nursery. */
  if (IsInsideNursery(thing)) {
    return;
  }

  /*
   * Permanent atoms and things in the self-hosting zone are not associated
   * with this runtime, but will be ignored during marking.
   */
  if (IsOwnedByOtherRuntime(trc->runtime(), thing)) {
    return;
  }

  Zone* zone = thing->zoneFromAnyThread();
  JSRuntime* rt = trc->runtime();
  MOZ_ASSERT(zone->runtimeFromAnyThread() == rt);

  bool isGcMarkingTracer = trc->isMarkingTracer();
  bool isUnmarkGrayTracer =
      IsTracerKind(trc, JS::CallbackTracer::TracerKind::UnmarkGray);
  bool isClearEdgesTracer =
      IsTracerKind(trc, JS::CallbackTracer::TracerKind::ClearEdges);

  if (TlsContext.get()->isMainThreadContext()) {
    // If we're on the main thread we must have access to the runtime and zone.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  } else {
    // Off the main thread, only these tracer kinds may run.
    MOZ_ASSERT(
        isGcMarkingTracer || isUnmarkGrayTracer || isClearEdgesTracer ||
        IsTracerKind(trc, JS::CallbackTracer::TracerKind::Moving) ||
        IsTracerKind(trc, JS::CallbackTracer::TracerKind::GrayBuffering) ||
        IsTracerKind(trc, JS::CallbackTracer::TracerKind::Sweeping));
    MOZ_ASSERT_IF(!isClearEdgesTracer, CurrentThreadIsPerformingGC());
  }

  // It shouldn't be possible to trace into zones used by helper threads, except
  // for use of ClearEdgesTracer by GCManagedDeletePolicy on a helper thread.
  MOZ_ASSERT_IF(!isClearEdgesTracer, !zone->usedByHelperThread());

  MOZ_ASSERT(thing->isAligned());
  MOZ_ASSERT(MapTypeToTraceKind<std::remove_pointer_t<T>>::kind ==
             thing->getTraceKind());

  if (isGcMarkingTracer) {
    GCMarker* gcMarker = GCMarker::fromTracer(trc);
    MOZ_ASSERT(zone->shouldMarkInZone());
    MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                  zone->isCollectingFromAnyThread() || zone->isAtomsZone());
    MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
                  !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());
    MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
                 zone->isGCCompacting()));

    // Check that we don't stray from the current compartment and zone without
    // using TraceCrossCompartmentEdge.
    Compartment* comp = thing->maybeCompartment();
    MOZ_ASSERT_IF(gcMarker->tracingCompartment && comp,
                  gcMarker->tracingCompartment == comp);
    MOZ_ASSERT_IF(gcMarker->tracingZone,
                  gcMarker->tracingZone == zone || zone->isAtomsZone());
  }

  /*
   * Try to assert that the thing is allocated.
   *
   * We would like to assert that the thing is not in the free list, but this
   * check is very slow. Instead we check whether the thing has been poisoned:
   * if it has not then we assume it is allocated, but if it has then it is
   * either free or uninitialized in which case we check the free list.
   *
   * Further complications are that background sweeping may be running and
   * concurrently modifying the free list and that tracing is done off
   * thread during compacting GC and reading the contents of the thing by
   * IsThingPoisoned would be racy in this case.
   */
  MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy() && !zone->isGCSweeping() &&
                    !zone->isGCFinished() && !zone->isGCCompacting(),
                !IsThingPoisoned(thing) ||
                    !InFreeList(thing->asTenured().arena(), thing));
#endif
}
// Tagged-value overload: unpacks a Value/jsid/TaggedProto and checks the
// contained GC thing, if any, against the same tracer.
//
// Bug fix: the lambda previously neither captured |trc| nor passed it along,
// calling a non-existent one-argument CheckTracedThing overload (both
// declared overloads take JSTracer* first). Capture |trc| and forward it.
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T thing) {
  ApplyGCThingTyped(thing, [trc](auto t) { CheckTracedThing(trc, t); });
}
namespace js {
// Explicitly instantiate CheckTracedThing for every base cell layout type
// (JSObject, JSString, JS::Symbol, ...), one per trace kind.
#define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
  template void CheckTracedThing<type>(JSTracer*, type*);
JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
#undef IMPL_CHECK_TRACED_THING
}  // namespace js
// Decides whether a cross-compartment edge from |src| to |dstCell| should be
// marked, given the marker's current color. Also repairs black->gray edge
// violations by eagerly unmarking gray targets in uncollected zones.
static inline bool ShouldMarkCrossCompartment(GCMarker* marker, JSObject* src,
                                              Cell* dstCell) {
  MarkColor color = marker->markColor();

  // Nursery things are never marked here; gray marking implies a tenured
  // target.
  if (!dstCell->isTenured()) {
    MOZ_ASSERT(color == MarkColor::Black);
    return false;
  }
  TenuredCell& dst = dstCell->asTenured();

  JS::Zone* dstZone = dst.zone();
  if (!src->zone()->isGCMarking() && !dstZone->isGCMarking()) {
    return false;
  }

  if (color == MarkColor::Black) {
    // Check our sweep groups are correct: we should never have to
    // mark something in a zone that we have started sweeping.
    MOZ_ASSERT_IF(!dst.isMarkedBlack(), !dstZone->isGCSweeping());

    /*
     * Having black->gray edges violates our promise to the cycle collector so
     * we ensure that gray things we encounter when marking black end up getting
     * marked black.
     *
     * This can happen for two reasons:
     *
     * 1) If we're collecting a compartment and it has an edge to an uncollected
     * compartment it's possible that the source and destination of the
     * cross-compartment edge should be gray, but the source was marked black by
     * the write barrier.
     *
     * 2) If we yield during gray marking and the write barrier marks a gray
     * thing black.
     *
     * We handle the first case before returning whereas the second case happens
     * as part of normal marking.
     */
    if (dst.isMarkedGray() && !dstZone->isGCMarking()) {
      UnmarkGrayGCThingUnchecked(marker->runtime(),
                                 JS::GCCellPtr(&dst, dst.getTraceKind()));
      return false;
    }

    return dstZone->isGCMarking();
  } else {
    // Check our sweep groups are correct as above.
    MOZ_ASSERT_IF(!dst.isMarkedAny(), !dstZone->isGCSweeping());

    if (dstZone->isGCMarkingBlackOnly()) {
      /*
       * The destination compartment is not being marked gray now,
       * but it will be later, so record the cell so it can be marked gray
       * at the appropriate time.
       */
      if (!dst.isMarkedAny()) {
        DelayCrossCompartmentGrayMarking(src);
      }
      return false;
    }

    return dstZone->isGCMarkingBlackAndGray();
  }
}
static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        Cell* dstCell) {
  // Non-marking tracers always follow cross-compartment edges; only the
  // marker applies the color/zone filtering.
  return !trc->isMarkingTracer() ||
         ShouldMarkCrossCompartment(GCMarker::fromTracer(trc), src, dstCell);
}
static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        const Value& val) {
  // Values that hold no GC thing carry no edge to trace.
  if (!val.isGCThing()) {
    return false;
  }
  return ShouldTraceCrossCompartment(trc, src, val.toGCThing());
}
// Debug assertion: a generic tenured cell must live in a zone being marked.
static void AssertShouldMarkInZone(Cell* thing) {
  MOZ_ASSERT(thing->asTenured().zone()->shouldMarkInZone());
}
// String overload: strings may additionally live in the atoms zone.
static void AssertShouldMarkInZone(JSString* str) {
#ifdef DEBUG
  Zone* zone = str->zone();
  MOZ_ASSERT(zone->shouldMarkInZone() || zone->isAtomsZone());
#endif
}
// Symbol overload: symbols may additionally live in the atoms zone.
static void AssertShouldMarkInZone(JS::Symbol* sym) {
#ifdef DEBUG
  Zone* zone = sym->asTenured().zone();
  MOZ_ASSERT(zone->shouldMarkInZone() || zone->isAtomsZone());
#endif
}
#ifdef DEBUG
// Debug assertion: if we are marking, we must be in the root-marking phase
// (or no GC at all).
void js::gc::AssertRootMarkingPhase(JSTracer* trc) {
  MOZ_ASSERT_IF(trc->isMarkingTracer(),
                trc->runtime()->gc.state() == State::NotActive ||
                    trc->runtime()->gc.state() == State::MarkRoots);
}
#endif
/*** Tracing Interface ******************************************************/
// Forward declarations for the per-tracer dispatch targets used by
// TraceEdgeInternal below; definitions appear later in this file.
template <typename T>
bool DoCallback(JS::CallbackTracer* trc, T** thingp, const char* name);
template <typename T>
bool DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name);
template <typename T>
void DoMarking(GCMarker* gcmarker, T* thing);
template <typename T>
void DoMarking(GCMarker* gcmarker, const T& thing);
// Shared implementation behind the public TraceExternalEdge entry points;
// callers must only pass edges that actually hold a markable thing.
template <typename T>
static void TraceExternalEdgeHelper(JSTracer* trc, T* thingp,
                                    const char* name) {
  MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
// Public API: trace a manually-barriered JSObject* edge with no barrier
// bookkeeping ("unsafe" because the caller vouches for barrier correctness).
JS_PUBLIC_API void js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc,
                                                        JSObject** thingp,
                                                        const char* name) {
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
// Shared implementation behind the public UnsafeTraceRoot entry points;
// tolerates a null *thingp via TraceNullableRoot.
template <typename T>
static void UnsafeTraceRootHelper(JSTracer* trc, T* thingp, const char* name) {
  MOZ_ASSERT(thingp);
  js::TraceNullableRoot(trc, thingp, name);
}
namespace js {
class AbstractGeneratorObject;
class SavedFrame;
} // namespace js
// Stamp out a public TraceExternalEdge overload delegating to the helper.
#define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type)                           \
  JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
                                               const char* name) {          \
    TraceExternalEdgeHelper(trc, thingp, name);                             \
  }

// Define TraceExternalEdge for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)

#undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION
// Stamp out a public JS::UnsafeTraceRoot overload delegating to the helper.
#define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type)                       \
  JS_PUBLIC_API void JS::UnsafeTraceRoot(JSTracer* trc, type* thingp, \
                                         const char* name) {          \
    UnsafeTraceRootHelper(trc, thingp, name);                         \
  }

// Define UnsafeTraceRoot for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)

// Also, for the moment, define UnsafeTraceRoot for internal GC pointer types.
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame*)

#undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
namespace js {
namespace gc {

// Explicitly instantiate the internal edge/range tracing templates for every
// base cell pointer type and every public tagged pointer type.
#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type)                      \
  template bool TraceEdgeInternal<type>(JSTracer*, type*, const char*); \
  template void TraceRangeInternal<type>(JSTracer*, size_t len, type*,  \
                                         const char*);

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
  INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS)

#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS

}  // namespace gc
}  // namespace js
// In debug builds, makes a note of the current compartment before calling a
// trace hook or traceChildren() method on a GC thing.
// RAII guard: while alive (debug builds only), records on the GCMarker the
// zone and compartment of the thing whose trace hook is running, so
// CheckTracedThing can verify that traced edges do not silently cross
// compartments/zones. No-op in release builds.
class MOZ_RAII AutoSetTracingSource {
#ifdef DEBUG
  GCMarker* marker = nullptr;
#endif

 public:
  // Record |thing|'s zone/compartment if |trc| is the marking tracer.
  // Asserts no source is already recorded (guards are not nested).
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {
#ifdef DEBUG
    if (trc->isMarkingTracer() && thing) {
      marker = GCMarker::fromTracer(trc);
      MOZ_ASSERT(!marker->tracingZone);
      marker->tracingZone = thing->asTenured().zone();
      MOZ_ASSERT(!marker->tracingCompartment);
      marker->tracingCompartment = thing->maybeCompartment();
    }
#endif
  }

  ~AutoSetTracingSource() {
#ifdef DEBUG
    if (marker) {
      marker->tracingZone = nullptr;
      marker->tracingCompartment = nullptr;
    }
#endif
  }
};
// In debug builds, clear the trace hook compartment. This happens
// after the trace hook has called back into one of our trace APIs and we've
// checked the traced thing.
// RAII guard: temporarily clears the recorded tracing source on the GCMarker
// (debug builds only) and restores the previous values on destruction. Used
// around deliberate cross-compartment edges so AutoSetTracingSource's checks
// do not fire. No-op in release builds.
class MOZ_RAII AutoClearTracingSource {
#ifdef DEBUG
  GCMarker* marker = nullptr;
  JS::Zone* prevZone = nullptr;
  Compartment* prevCompartment = nullptr;
#endif

 public:
  explicit AutoClearTracingSource(JSTracer* trc) {
#ifdef DEBUG
    if (trc->isMarkingTracer()) {
      // Save the current expectations, then clear them.
      marker = GCMarker::fromTracer(trc);
      prevZone = marker->tracingZone;
      marker->tracingZone = nullptr;
      prevCompartment = marker->tracingCompartment;
      marker->tracingCompartment = nullptr;
    }
#endif
  }

  ~AutoClearTracingSource() {
#ifdef DEBUG
    if (marker) {
      marker->tracingZone = prevZone;
      marker->tracingCompartment = prevCompartment;
    }
#endif
  }
};
// Trace an edge that intentionally crosses compartments. The recorded
// tracing source is cleared first so the cross-compartment check in
// CheckTracedThing does not fire.
template <typename T>
void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc,
                                                    JSObject* src, T* dst,
                                                    const char* name) {
  AutoClearTracingSource acts(trc);
  if (!ShouldTraceCrossCompartment(trc, src, *dst)) {
    return;
  }
  TraceEdgeInternal(trc, dst, name);
}
template void js::TraceManuallyBarrieredCrossCompartmentEdge<Value>(
JSTracer*, JSObject*, Value*, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
JSTracer*, JSObject*, JSObject**, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<BaseScript*>(
JSTracer*, JSObject*, BaseScript**, const char*);
// Trace a weak map key edge. Unlike ordinary cross-compartment edges this
// cannot use ShouldTraceCrossCompartment (see comment below), so instead we
// assert the two zones agree on GC state and trace unconditionally.
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T** thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark it.
#ifdef DEBUG
  auto thing = *thingp;
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() == thing->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}
template void js::TraceWeakMapKeyEdgeInternal<JSObject>(JSTracer*, Zone*,
JSObject**,
const char*);
template void js::TraceWeakMapKeyEdgeInternal<BaseScript>(JSTracer*, Zone*,
BaseScript**,
const char*);
// Trace a process-wide root (permanent atom or well-known symbol). These are
// shared across runtimes and are deliberately skipped by normal marking, so
// they need this dedicated entry point.
template <typename T>
void js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name) {
  AssertRootMarkingPhase(trc);
  MOZ_ASSERT(ThingIsPermanentAtomOrWellKnownSymbol(thing));

  // We have to mark permanent atoms and well-known symbols through a special
  // method because the default DoMarking implementation automatically skips
  // them. Fortunately, atoms (permanent and non) cannot refer to other GC
  // things so they do not need to go through the mark stack and may simply
  // be marked directly. Moreover, well-known symbols can refer only to
  // permanent atoms, so likewise require no subsequent marking.
  CheckTracedThing(trc, *ConvertToBase(&thing));
  AutoClearTracingSource acts(trc);
  if (trc->isMarkingTracer()) {
    // Mark directly; no children to push.
    thing->asTenured().markIfUnmarked(gc::MarkColor::Black);
  } else {
    DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
  }
}
template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*,
const char*);
template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*,
const char*);
// Dispatch on |kind| to trace |thing| as its concrete type; returns the
// (possibly moved) cell, or nullptr if the tracer cleared the edge.
static Cell* TraceGenericPointerRootAndType(JSTracer* trc, Cell* thing,
                                            JS::TraceKind kind,
                                            const char* name) {
  return MapGCThingTyped(thing, kind, [trc, name](auto t) -> Cell* {
    TraceRoot(trc, &t, name);
    return t;
  });
}
// Trace a root held as an untyped Cell*, dispatching on the cell's own trace
// kind. Null roots are ignored; a moved cell updates *thingp in place.
void js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp,
                                 const char* name) {
  MOZ_ASSERT(thingp);
  Cell* cell = *thingp;
  if (!cell) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, cell, cell->getTraceKind(), name);
  if (traced != cell) {
    *thingp = traced;
  }
}
// Trace a manually-barriered edge held as an untyped Cell*, dispatching on
// the cell's own trace kind. Updates *thingp if the cell moved.
void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp,
                                                  const char* name) {
  MOZ_ASSERT(thingp);
  Cell* cell = *thingp;
  if (!cell) {
    return;
  }

  auto traced = MapGCThingTyped(cell, cell->getTraceKind(),
                                [trc, name](auto t) -> Cell* {
                                  TraceManuallyBarrieredEdge(trc, &t, name);
                                  return t;
                                });
  if (traced != cell) {
    *thingp = traced;
  }
}
// Trace a root held as a tagged GCCellPtr. A cleared edge resets the pointer
// to the empty GCCellPtr; a moved cell rebuilds it with the same kind.
void js::TraceGCCellPtrRoot(JSTracer* trc, JS::GCCellPtr* thingp,
                            const char* name) {
  Cell* cell = thingp->asCell();
  if (!cell) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, cell, thingp->kind(), name);
  if (!traced) {
    *thingp = JS::GCCellPtr();  // Edge was cleared by the tracer.
  } else if (traced != cell) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());  // Cell moved.
  }
}
// This method is responsible for dynamic dispatch to the real tracer
// implementation. Consider replacing this choke point with virtual dispatch:
// a sufficiently smart C++ compiler may be able to devirtualize some paths.
// Central dispatch point from the generic tracing API to the concrete tracer
// implementation (marking, tenuring, or callback). Returns false only when a
// callback tracer clears the edge.
template <typename T>
bool js::gc::TraceEdgeInternal(JSTracer* trc, T* thingp, const char* name) {
#define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type*, T> ||
  static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
                    std::is_same_v<T, JS::Value> ||
                    std::is_same_v<T, jsid> || std::is_same_v<T, TaggedProto>,
                "Only the base cell layout types are allowed into "
                "marking/tracing internals");
#undef IS_SAME_TYPE_OR

  if (trc->isMarkingTracer()) {
    DoMarking(GCMarker::fromTracer(trc), *thingp);
    return true;
  }
  if (trc->isTenuringTracer()) {
    static_cast<TenuringTracer*>(trc)->traverse(thingp);
    return true;
  }
  MOZ_ASSERT(trc->isCallbackTracer());
  return DoCallback(trc->asCallbackTracer(), thingp, name);
}
// Trace |len| contiguous edges starting at |vec|, advancing the tracing
// index for each slot so diagnostics can report the element position.
// Non-markable slots are skipped but still counted.
template <typename T>
void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec,
                                const char* name) {
  JS::AutoTracingIndex index(trc);
  for (size_t i = 0; i < len; ++i) {
    if (InternalBarrierMethods<T>::isMarkable(vec[i])) {
      TraceEdgeInternal(trc, &vec[i], name);
    }
    ++index;
  }
}
/*** GC Marking Interface ***************************************************/
namespace js {
// Sentinel type meaning "this key type never has implicit (weakmap) edges".
using HasNoImplicitEdgesType = bool;

// Maps a key type to itself when it can appear as a weakmap key with
// implicit edges, and to the sentinel otherwise.
template <typename T>
struct ImplicitEdgeHolderType {
  using Type = HasNoImplicitEdgesType;
};

// For now, we only handle JSObject* and BaseScript* keys, but the linear time
// algorithm can be easily extended by adding in more types here, then making
// GCMarker::traverse<T> call markImplicitEdges.
template <>
struct ImplicitEdgeHolderType<JSObject*> {
  using Type = JSObject*;
};

template <>
struct ImplicitEdgeHolderType<BaseScript*> {
  using Type = BaseScript*;
};
// Mark the values whose liveness depends on |markedCell| now being marked:
// each entry pairs a weakmap with the key that became live.
void GCMarker::markEphemeronValues(gc::Cell* markedCell,
                                   WeakEntryVector& values) {
  DebugOnly<size_t> initialLen = values.length();

  for (const auto& markable : values) {
    markable.weakmap->markKey(this, markedCell, markable.key);
  }

  // The vector should not be appended to during iteration because the key is
  // already marked, and even in cases where we have a multipart key, we
  // should only be inserting entries for the unmarked portions.
  MOZ_ASSERT(values.length() == initialLen);
}
void GCMarker::forgetWeakKey(js::gc::WeakKeyTable& weakKeys, WeakMapBase* map,
                             gc::Cell* keyOrDelegate, gc::Cell* keyToRemove) {
  // Find and remove the exact pair <map,keyToRemove> from the values of the
  // weak keys table.
  //
  // This function is called when 'keyToRemove' is removed from a weakmap
  // 'map'. If 'keyToRemove' has a delegate, then the delegate will be used as
  // the lookup key in gcWeakKeys; otherwise, 'keyToRemove' itself will be. In
  // either case, 'keyToRemove' is what we will be filtering out of the
  // Markable values in the weakKey table.
  auto p = weakKeys.get(keyOrDelegate);

  // Note that this is not guaranteed to find anything. The key will have
  // only been inserted into the weakKeys table if it was unmarked when the
  // map was traced.
  if (p) {
    // Entries should only have been added to weakKeys if the map was marked.
    for (auto r = p->value.all(); !r.empty(); r.popFront()) {
      MOZ_ASSERT(r.front().weakmap->mapColor);
    }

    p->value.eraseIfEqual(WeakMarkable(map, keyToRemove));
  }
}
// Remove every entry referring to |map| from both of |zone|'s weak-key
// tables (nursery and tenured). Called when the whole map goes away.
void GCMarker::forgetWeakMap(WeakMapBase* map, Zone* zone) {
  auto refersToMap = [map](const WeakMarkable& markable) -> bool {
    return markable.weakmap == map;
  };
  for (auto table : {&zone->gcNurseryWeakKeys(), &zone->gcWeakKeys()}) {
    for (auto p = table->all(); !p.empty(); p.popFront()) {
      p.front().value.eraseIf(refersToMap);
    }
  }
}
// 'delegate' is no longer the delegate of 'key'. Remove the delegate-keyed
// entries for 'key' and notify the affected weakmaps so they can re-register
// under 'key' itself.
void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) {
  JS::Zone* zone = delegate->zone();
  auto p = zone->gcWeakKeys(delegate).get(delegate);
  if (p) {
    p->value.eraseIf([this, key](const WeakMarkable& markable) -> bool {
      if (markable.key != key) {
        return false;
      }
      // Tell the weakmap the delegate edge is gone before erasing the entry.
      markable.weakmap->postSeverDelegate(this, key, key->compartment());
      return true;
    });
  }
}
// When weak marking is active, mark the ephemeron values keyed on
// |markedThing| now that it is marked, then clear the entry vector so a
// reused key address does not re-mark stale values.
template <typename T>
void GCMarker::markImplicitEdgesHelper(T markedThing) {
  if (!isWeakMarking()) {
    return;
  }

  Zone* zone = markedThing->asTenured().zone();
  MOZ_ASSERT(zone->isGCMarking());
  MOZ_ASSERT(!zone->isGCSweeping());

  auto p = zone->gcWeakKeys().get(markedThing);
  if (!p) {
    return;
  }
  WeakEntryVector& markables = p->value;

  // markedThing might be a key in a debugger weakmap, which can end up marking
  // values that are in a different compartment.
  AutoClearTracingSource acts(this);

  markEphemeronValues(markedThing, markables);
  markables.clear();  // If key address is reused, it should do nothing
}

// Sentinel specialization: key types with no implicit edges do nothing.
template <>
void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType) {}
// Public entry: dispatches via ImplicitEdgeHolderType so only key types with
// implicit edges (JSObject*, BaseScript*) do any work.
template <typename T>
void GCMarker::markImplicitEdges(T* thing) {
  markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
}

template void GCMarker::markImplicitEdges(JSObject*);
template void GCMarker::markImplicitEdges(BaseScript*);
} // namespace js
// Generic marking precondition: skip things owned by another runtime, and
// things outside the zones being collected in a per-zone GC.
template <typename T>
static inline bool ShouldMark(GCMarker* gcmarker, T thing) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), thing)) {
    return false;
  }
  return thing->zone()->shouldMarkInZone();
}
template <>
bool ShouldMark<JSObject*>(GCMarker* gcmarker, JSObject* obj) {
  // Don't trace things that are owned by another runtime.
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), obj)) {
    return false;
  }

  // We may mark a Nursery thing outside the context of the
  // MinorCollectionTracer because of a pre-barrier. The pre-barrier is not
  // needed in this case because we perform a minor collection before each
  // incremental slice.
  if (IsInsideNursery(obj)) {
    return false;
  }

  // Don't mark things outside a zone if we are in a per-zone GC. Since the
  // object is known to be tenured, checking via asTenured() is cheaper than
  // the generic zone lookup.
  return obj->asTenured().zone()->shouldMarkInZone();
}
// JSStrings can also be in the nursery. See ShouldMark<JSObject*> for
// the rationale behind each check.
template <>
bool ShouldMark<JSString*>(GCMarker* gcmarker, JSString* str) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), str) ||
      IsInsideNursery(str)) {
    return false;
  }
  return str->asTenured().zone()->shouldMarkInZone();
}
// BigInts can also be in the nursery. See ShouldMark<JSObject*> for
// the rationale behind each check.
template <>
bool ShouldMark<JS::BigInt*>(GCMarker* gcmarker, JS::BigInt* bi) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), bi) ||
      IsInsideNursery(bi)) {
    return false;
  }
  return bi->asTenured().zone()->shouldMarkInZone();
}
// Mark |thing| on behalf of |gcmarker| after the per-type ShouldMark
// preconditions pass, then flag the owning compartment as possibly alive.
template <typename T>
void DoMarking(GCMarker* gcmarker, T* thing) {
  // Do per-type marking precondition checks.
  if (!ShouldMark(gcmarker, thing)) {
    // Things skipped here must already be effectively black (e.g. owned by
    // another runtime or outside the collected zones).
    MOZ_ASSERT(gc::detail::GetEffectiveColor(gcmarker->runtime(), thing) ==
               js::gc::CellColor::Black);
    return;
  }

  CheckTracedThing(gcmarker, thing);
  AutoClearTracingSource acts(gcmarker);
  gcmarker->traverse(thing);

  // Mark the compartment as live.
  SetMaybeAliveFlag(thing);
}
// Tagged-value overload: unpack a Value/jsid/TaggedProto and mark the
// contained GC thing, if any.
template <typename T>
void DoMarking(GCMarker* gcmarker, const T& thing) {
  ApplyGCThingTyped(thing, [gcmarker](auto t) { DoMarking(gcmarker, t); });
}
JS_PUBLIC_API void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing) {
  // Optimized marking for read barriers. This is called from
  // ExposeGCThingToActiveJS which has already checked the prerequisites for
  // performing a read barrier. This means we can skip a bunch of checks and
  // call into the tracer directly.
  MOZ_ASSERT(thing);
  MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());

  TenuredCell* cell = &thing.asCell()->asTenured();
  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());

  // Mark the argument, as DoMarking above.
  ApplyGCThingTyped(thing, [gcmarker](auto thing) {
    MOZ_ASSERT(ShouldMark(gcmarker, thing));
    CheckTracedThing(gcmarker, thing);
    AutoClearTracingSource acts(gcmarker);
    gcmarker->traverse(thing);
  });
}
// The simplest traversal calls out to the fully generic traceChildren function
// to visit the child edges. In the absence of other traversal mechanisms, this
// function will rapidly grow the stack past its bounds and crash the process.
// Thus, this generic tracing should only be used in cases where subsequent
// tracing will not recurse.
// Mark |thing| and immediately trace its children via the generic
// T::traceChildren. See the comment above: only safe for types whose child
// tracing does not recurse deeply.
template <typename T>
void js::GCMarker::markAndTraceChildren(T* thing) {
  if (ThingIsPermanentAtomOrWellKnownSymbol(thing)) {
    return;
  }
  if (mark(thing)) {
    // Record the source so cross-compartment checks can fire while the
    // trace hook runs.
    AutoSetTracingSource asts(this, thing);
    thing->traceChildren(this);
  }
}
namespace js {
// These types have shallow, non-recursive child graphs, so they are traced
// immediately via markAndTraceChildren.
template <>
void GCMarker::traverse(BaseShape* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(JS::Symbol* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(JS::BigInt* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(RegExpShared* thing) {
  markAndTraceChildren(thing);
}
}  // namespace js
// Strings, Shapes, and Scopes are extremely common, but have simple patterns of
// recursion. We traverse trees of these edges immediately, with aggressive,
// manual inlining, implemented by eagerlyTraceChildren.
// Mark |thing| and eagerly trace its children inline (see comment above:
// used for strings, shapes and scopes, whose recursion is shallow and
// handled by eagerlyMarkChildren).
template <typename T>
void js::GCMarker::markAndScan(T* thing) {
  if (ThingIsPermanentAtomOrWellKnownSymbol(thing)) {
    return;
  }
  if (mark(thing)) {
    eagerlyMarkChildren(thing);
  }
}
namespace js {
// Strings, Shapes and Scopes take the eager inline-marking path.
template <>
void GCMarker::traverse(JSString* thing) {
  markAndScan(thing);
}
template <>
void GCMarker::traverse(Shape* thing) {
  markAndScan(thing);
}
template <>
void GCMarker::traverse(js::Scope* thing) {
  markAndScan(thing);
}
}  // namespace js
// Object and ObjectGroup are extremely common and can contain arbitrarily
// nested graphs, so are not trivially inlined. In this case we use a mark
// stack to control recursion. JitCode shares none of these properties, but is
// included for historical reasons. JSScript normally cannot recurse, but may
// be used as a weakmap key and thereby recurse into weakmapped values.
// Mark |thing| and, if newly marked, push it so its children are traced
// from the mark stack instead of recursively (see comment above).
template <typename T>
void js::GCMarker::markAndPush(T* thing) {
  if (mark(thing)) {
    pushTaggedPtr(thing);
  }
}
namespace js {
// These types can have arbitrarily deep child graphs (or, for JSScript,
// recurse via weakmaps), so they go through the mark stack.
template <>
void GCMarker::traverse(JSObject* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(ObjectGroup* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(jit::JitCode* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(BaseScript* thing) {
  markAndPush(thing);
}
}  // namespace js
namespace js {
// AccessorShape is never traversed under its own type; it must arrive here
// as a plain Shape.
template <>
void GCMarker::traverse(AccessorShape* thing) {
  MOZ_CRASH("AccessorShape must be marked as a Shape");
}
}  // namespace js
// Debug assertions on an edge from |source| to |target|: zone/compartment
// agreement, atom-bitmap marking, and the permanent-thing rules. Compiles to
// nothing in release builds.
template <typename S, typename T>
static void CheckTraversedEdge(S source, T* target) {
  // Atoms and Symbols do not have or mark their internal pointers,
  // respectively.
  MOZ_ASSERT(!ThingIsPermanentAtomOrWellKnownSymbol(source));

  // The Zones must match, unless the target is an atom.
  MOZ_ASSERT_IF(
      !ThingIsPermanentAtomOrWellKnownSymbol(target),
      target->zone()->isAtomsZone() || target->zone() == source->zone());

  // If we are marking an atom, that atom must be marked in the source zone's
  // atom bitmap.
  MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target) &&
                    target->zone()->isAtomsZone() &&
                    !source->zone()->isAtomsZone(),
                target->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(
                    source->zone(), reinterpret_cast<TenuredCell*>(target)));

  // Atoms and Symbols do not have access to a compartment pointer, or we'd need
  // to adjust the subsequent check to catch that case.
  MOZ_ASSERT_IF(ThingIsPermanentAtomOrWellKnownSymbol(target),
                !target->maybeCompartment());
  MOZ_ASSERT_IF(target->zoneFromAnyThread()->isAtomsZone(),
                !target->maybeCompartment());
  // If we have access to a compartment pointer for both things, they must
  // match.
  MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                source->maybeCompartment() == target->maybeCompartment());
}
// Traverse an edge from |source| to a raw pointer |target|, after validating
// the edge in debug builds.
template <typename S, typename T>
void js::GCMarker::traverseEdge(S source, T* target) {
  CheckTraversedEdge(source, target);
  traverse(target);
}
// Traverse an edge to a tagged GC thing (e.g. a Value or jsid): unpack the
// tag and dispatch to the pointer overload above.
template <typename S, typename T>
void js::GCMarker::traverseEdge(S source, const T& thing) {
  ApplyGCThingTyped(thing,
                    [this, source](auto t) { this->traverseEdge(source, t); });
}
namespace {
// Compile-time table mapping each trace kind's C++ type to whether cells of
// that kind may ever be marked gray. Populated from JS_FOR_EACH_TRACEKIND.
template <typename T>
struct TraceKindCanBeGray {};
#define EXPAND_TRACEKIND_DEF(_, type, canBeGray, _1) \
  template <>                                        \
  struct TraceKindCanBeGray<type> {                  \
    static const bool value = canBeGray;             \
  };
JS_FOR_EACH_TRACEKIND(EXPAND_TRACEKIND_DEF)
#undef EXPAND_TRACEKIND_DEF
} // namespace
// Functor for dynamic dispatch over trace kinds; used by the runtime-value
// query below.
struct TraceKindCanBeGrayFunctor {
  template <typename T>
  bool operator()() {
    return TraceKindCanBeGray<T>::value;
  }
};
// Runtime counterpart of the compile-time TraceKindCanBeGray table.
static bool TraceKindCanBeMarkedGray(JS::TraceKind kind) {
  return DispatchTraceKindTyped(TraceKindCanBeGrayFunctor(), kind);
}
// Mark |thing| with the current mark color. Returns true if this call marked
// it (i.e. it was previously unmarked), false if it was already marked or is
// a nursery thing (nursery cells are handled by the nursery collector, not
// mark bits).
template <typename T>
bool js::GCMarker::mark(T* thing) {
  if (!thing->isTenured()) {
    return false;
  }
  AssertShouldMarkInZone(thing);
  TenuredCell* cell = &thing->asTenured();
  // Kinds that can never be gray are always marked black, regardless of the
  // marker's current color.
  MarkColor color =
      TraceKindCanBeGray<T>::value ? markColor() : MarkColor::Black;
  bool marked = cell->markIfUnmarked(color);
  if (marked) {
    markCount++;
  }
  return marked;
}
/*** Inline, Eager GC Marking ***********************************************/
// Each of the eager, inline marking paths is directly preceded by the
// out-of-line, generic tracing code for comparison. Both paths must end up
// traversing equivalent subgraphs.
// Generic tracing of a BaseScript's outgoing edges: its function/global, its
// source object, warm-up data, per-script data, and any optional debug data.
void BaseScript::traceChildren(JSTracer* trc) {
  TraceEdge(trc, &functionOrGlobal_, "function");
  TraceEdge(trc, &sourceObject_, "sourceObject");
  warmUpData_.trace(trc);
  if (data_) {
    data_->trace(trc);
  }
  // Scripts with bytecode may have optional data stored in per-runtime or
  // per-zone maps. Note that a failed compilation must not have entries since
  // the script itself will not be marked as having bytecode.
  if (hasBytecode()) {
    JSScript* script = this->asJSScript();
    if (hasDebugScript()) {
      DebugAPI::traceDebugScript(trc, script);
    }
  }
  // When marking (as opposed to e.g. moving), also mark edges implied by
  // weakmap entries keyed on this script.
  if (trc->isMarkingTracer()) {
    GCMarker::fromTracer(trc)->markImplicitEdges(this);
  }
}
// Generic tracing of a Shape's outgoing edges: base shape, property id,
// parent chain, dictionary-list back pointer, and accessor objects.
void Shape::traceChildren(JSTracer* trc) {
  TraceCellHeaderEdge(trc, this, "base");
  TraceEdge(trc, &propidRef(), "propid");
  if (parent) {
    TraceEdge(trc, &parent, "parent");
  }
  if (dictNext.isObject()) {
    // The dictNext field holds a tagged pointer, so trace through a local
    // copy and write the (possibly moved) pointer back only if it changed.
    JSObject* obj = dictNext.toObject();
    TraceManuallyBarrieredEdge(trc, &obj, "dictNext object");
    if (obj != dictNext.toObject()) {
      dictNext.setObject(obj);
    }
  }
  if (hasGetterObject()) {
    TraceManuallyBarrieredEdge(trc, &asAccessorShape().getterObj, "getter");
  }
  if (hasSetterObject()) {
    TraceManuallyBarrieredEdge(trc, &asAccessorShape().setterObj, "setter");
  }
}
// Eager (inline, iterative) marking of a shape and its parent chain. Must
// traverse the same subgraph as Shape::traceChildren above.
inline void js::GCMarker::eagerlyMarkChildren(Shape* shape) {
  MOZ_ASSERT(shape->isMarked(markColor()));
  do {
    // Special case: if a base shape has a shape table then all its pointers
    // must point to this shape or an ancestor. Since these pointers will
    // be traced by this loop they do not need to be traced here as well.
    BaseShape* base = shape->base();
    CheckTraversedEdge(shape, base);
    if (mark(base)) {
      MOZ_ASSERT(base->canSkipMarkingShapeCache(shape));
      base->traceChildrenSkipShapeCache(this);
    }
    traverseEdge(shape, shape->propidRef().get());
    // Normally only the last shape in a dictionary list can have a pointer to
    // an object here, but it's possible that we can see this if we trace
    // barriers while removing a shape from a dictionary list.
    if (shape->dictNext.isObject()) {
      traverseEdge(shape, shape->dictNext.toObject());
    }
    // When triggered between slices on behalf of a barrier, these
    // objects may reside in the nursery, so require an extra check.
    // FIXME: Bug 1157967 - remove the isTenured checks.
    if (shape->hasGetterObject() && shape->getterObject()->isTenured()) {
      traverseEdge(shape, shape->getterObject());
    }
    if (shape->hasSetterObject() && shape->setterObject()->isTenured()) {
      traverseEdge(shape, shape->setterObject());
    }
    // Walk up the parent chain iteratively, stopping when we reach a shape
    // that is already marked (mark() returns false).
    shape = shape->previous();
  } while (shape && mark(shape));
}
// Generic tracing of a string's children: a dependent string traces its
// base; a rope traces its two child strings; other strings have no children.
void JSString::traceChildren(JSTracer* trc) {
  if (hasBase()) {
    traceBase(trc);
    return;
  }
  if (isRope()) {
    asRope().traceChildren(trc);
  }
}
// Eager marking of a string: dispatch on its representation. Ropes get the
// iterative tree scan; linear strings get their base chain marked.
inline void GCMarker::eagerlyMarkChildren(JSString* str) {
  if (!str->isLinear()) {
    eagerlyMarkChildren(&str->asRope());
    return;
  }
  eagerlyMarkChildren(&str->asLinear());
}
// Trace the base-string edge of a dependent string.
void JSString::traceBase(JSTracer* trc) {
  MOZ_ASSERT(hasBase());
  TraceManuallyBarrieredEdge(trc, &d.s.u3.base, "base");
}
// Eager marking of a linear string: walk and mark its chain of base strings.
// Stops at permanent atoms (never collected) or at an already-marked base.
inline void js::GCMarker::eagerlyMarkChildren(JSLinearString* linearStr) {
  AssertShouldMarkInZone(linearStr);
  MOZ_ASSERT(linearStr->isMarkedAny());
  MOZ_ASSERT(linearStr->JSString::isLinear());
  // Use iterative marking to avoid blowing out the stack.
  while (linearStr->hasBase()) {
    linearStr = linearStr->base();
    MOZ_ASSERT(linearStr->JSString::isLinear());
    if (linearStr->isPermanentAtom()) {
      break;
    }
    AssertShouldMarkInZone(linearStr);
    if (!mark(static_cast<JSString*>(linearStr))) {
      break;
    }
  }
}
// Generic tracing of a rope's two child-string edges.
void JSRope::traceChildren(JSTracer* trc) {
  js::TraceManuallyBarrieredEdge(trc, &d.s.u2.left, "left child");
  js::TraceManuallyBarrieredEdge(trc, &d.s.u3.right, "right child");
}
inline void js::GCMarker::eagerlyMarkChildren(JSRope* rope) {
  // This function tries to scan the whole rope tree using the marking stack
  // as temporary storage. If that becomes full, the unscanned ropes are
  // added to the delayed marking list. When the function returns, the
  // marking stack is at the same depth as it was on entry. This way we avoid
  // using tags when pushing ropes to the stack as ropes never leak to other
  // users of the stack. This also assumes that a rope can only point to
  // other ropes or linear strings, it cannot refer to GC things of other
  // types.
  gc::MarkStack& stack = currentStack();
  size_t savedPos = stack.position();
  MOZ_DIAGNOSTIC_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
  while (true) {
    MOZ_DIAGNOSTIC_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
    MOZ_DIAGNOSTIC_ASSERT(rope->JSString::isRope());
    AssertShouldMarkInZone(rope);
    MOZ_ASSERT(rope->isMarkedAny());
    // |next| is the rope child (if any) we will descend into next iteration.
    JSRope* next = nullptr;
    JSString* right = rope->rightChild();
    if (!right->isPermanentAtom() && mark(right)) {
      if (right->isLinear()) {
        eagerlyMarkChildren(&right->asLinear());
      } else {
        next = &right->asRope();
      }
    }
    JSString* left = rope->leftChild();
    if (!left->isPermanentAtom() && mark(left)) {
      if (left->isLinear()) {
        eagerlyMarkChildren(&left->asLinear());
      } else {
        // When both children are ropes, set aside the right one to
        // scan it later.
        if (next && !stack.pushTempRope(next)) {
          // Stack full: fall back to the delayed marking list.
          delayMarkingChildren(next);
        }
        next = &left->asRope();
      }
    }
    if (next) {
      rope = next;
    } else if (savedPos != stack.position()) {
      // No direct child to descend into; resume a rope saved on the stack.
      MOZ_ASSERT(savedPos < stack.position());
      rope = stack.popPtr().asTempRope();
    } else {
      break;
    }
  }
  // Invariant: the stack depth is restored to its value on entry.
  MOZ_ASSERT(savedPos == stack.position());
}
// Trace an array of |length| binding names, each of which must be non-null.
// The traced pointer is a local copy; binding names are atoms, which are not
// expected to move here (presumably — the result is deliberately not written
// back, matching TraceNullableBindingNames below).
static inline void TraceBindingNames(JSTracer* trc, BindingName* names,
                                     uint32_t length) {
  for (uint32_t i = 0; i < length; i++) {
    JSAtom* name = names[i].name();
    MOZ_ASSERT(name);
    TraceManuallyBarrieredEdge(trc, &name, "scope name");
  }
}
// Trace an array of |length| binding names, skipping null entries. As with
// TraceBindingNames, the traced pointer is a discarded local copy.
static inline void TraceNullableBindingNames(JSTracer* trc, BindingName* names,
                                             uint32_t length) {
  for (uint32_t i = 0; i < length; i++) {
    if (JSAtom* name = names[i].name()) {
      TraceManuallyBarrieredEdge(trc, &name, "scope name");
    }
  }
}
// Trace a single (possibly null) binding name.
template <>
void AbstractBindingName<JSAtom>::trace(JSTracer* trc) {
  if (JSAtom* atom = name()) {
    TraceManuallyBarrieredEdge(trc, &atom, "binding name");
  }
}
// Trace all names visible to this binding iterator.
void BindingIter::trace(JSTracer* trc) {
  TraceNullableBindingNames(trc, names_, length_);
}
// Per-scope-kind trace methods for scope data. Each traces any extra GC
// pointers the kind carries (canonical function, module, wasm instance) and
// then its trailing binding-name array. These must stay in sync with the
// eager marking switch in GCMarker::eagerlyMarkChildren(Scope*).
template <>
void LexicalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void FunctionScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &canonicalFunction, "scope canonical function");
  // Function scopes may have null names (e.g. destructured parameters).
  TraceNullableBindingNames(trc, trailingNames.start(), length);
}
template <>
void VarScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void GlobalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void EvalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void ModuleScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &module, "scope module");
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void WasmInstanceScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &instance, "wasm instance");
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void WasmFunctionScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
// Generic tracing of a scope: environment shape, enclosing scope, then the
// kind-specific scope data (dispatched by applyScopeDataTyped).
void Scope::traceChildren(JSTracer* trc) {
  TraceNullableEdge(trc, &environmentShape_, "scope env shape");
  TraceNullableEdge(trc, &enclosingScope_, "scope enclosing");
  applyScopeDataTyped([trc](auto data) { data->trace(trc); });
}
// Eager marking of a scope chain. Iterates through enclosing scopes instead
// of recursing, and must traverse the same subgraph as Scope::traceChildren
// plus the per-kind AbstractData<JSAtom>::trace methods above.
inline void js::GCMarker::eagerlyMarkChildren(Scope* scope) {
  do {
    if (scope->environmentShape()) {
      traverseEdge(scope, scope->environmentShape());
    }
    // Collect the binding-name array and any extra kind-specific edges in a
    // single switch, then mark the names below.
    AbstractTrailingNamesArray<JSAtom>* names = nullptr;
    uint32_t length = 0;
    switch (scope->kind()) {
      case ScopeKind::Function: {
        FunctionScope::Data& data = scope->as<FunctionScope>().data();
        if (data.canonicalFunction) {
          traverseObjectEdge(scope, data.canonicalFunction);
        }
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::FunctionBodyVar: {
        VarScope::Data& data = scope->as<VarScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::Lexical:
      case ScopeKind::SimpleCatch:
      case ScopeKind::Catch:
      case ScopeKind::NamedLambda:
      case ScopeKind::StrictNamedLambda:
      case ScopeKind::FunctionLexical: {
        LexicalScope::Data& data = scope->as<LexicalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::Global:
      case ScopeKind::NonSyntactic: {
        GlobalScope::Data& data = scope->as<GlobalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::Eval:
      case ScopeKind::StrictEval: {
        EvalScope::Data& data = scope->as<EvalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::Module: {
        ModuleScope::Data& data = scope->as<ModuleScope>().data();
        traverseObjectEdge(scope, data.module);
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::With:
        // With scopes have no binding names.
        break;
      case ScopeKind::WasmInstance: {
        WasmInstanceScope::Data& data = scope->as<WasmInstanceScope>().data();
        traverseObjectEdge(scope, data.instance);
        names = &data.trailingNames;
        length = data.length;
        break;
      }
      case ScopeKind::WasmFunction: {
        WasmFunctionScope::Data& data = scope->as<WasmFunctionScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
    }
    // Function scopes may contain null binding names; all other kinds have
    // non-null names (mirroring TraceBindingNames vs TraceNullableBindingNames).
    if (scope->kind_ == ScopeKind::Function) {
      for (uint32_t i = 0; i < length; i++) {
        if (JSAtom* name = names->get(i).name()) {
          traverseStringEdge(scope, name);
        }
      }
    } else {
      for (uint32_t i = 0; i < length; i++) {
        traverseStringEdge(scope, names->get(i).name());
      }
    }
    // Continue with the enclosing scope until one is already marked.
    scope = scope->enclosing();
  } while (scope && mark(scope));
}
// Generic tracing of an ObjectGroup: property ids, proto, global, new-script
// and preliminary-object info, type descriptor, and interpreted function.
void js::ObjectGroup::traceChildren(JSTracer* trc) {
  AutoSweepObjectGroup sweep(this);
  if (!trc->canSkipJsids()) {
    unsigned count = getPropertyCount(sweep);
    for (unsigned i = 0; i < count; i++) {
      if (ObjectGroup::Property* prop = getProperty(sweep, i)) {
        TraceEdge(trc, &prop->id, "group_property");
      }
    }
  }
  if (proto().isObject()) {
    TraceEdge(trc, &proto(), "group_proto");
  }
  // Note: the realm's global can be nullptr if we GC while creating the global.
  if (JSObject* global = realm()->unsafeUnbarrieredMaybeGlobal()) {
    TraceManuallyBarrieredEdge(trc, &global, "group_global");
  }
  if (newScript(sweep)) {
    newScript(sweep)->trace(trc);
  }
  if (maybePreliminaryObjects(sweep)) {
    maybePreliminaryObjects(sweep)->trace(trc);
  }
  // These two fields are traced through a local and written back so that a
  // moving tracer's updated pointer is stored.
  if (JSObject* descr = maybeTypeDescr()) {
    TraceManuallyBarrieredEdge(trc, &descr, "group_type_descr");
    setTypeDescr(&descr->as<TypeDescr>());
  }
  if (JSObject* fun = maybeInterpretedFunction()) {
    TraceManuallyBarrieredEdge(trc, &fun, "group_function");
    setInterpretedFunction(&fun->as<JSFunction>());
  }
}
// Marking-path equivalent of ObjectGroup::traceChildren: traverse the same
// edges via the marker (no pointer write-back is needed, since marking does
// not move things).
void js::GCMarker::lazilyMarkChildren(ObjectGroup* group) {
  AutoSweepObjectGroup sweep(group);
  unsigned count = group->getPropertyCount(sweep);
  for (unsigned i = 0; i < count; i++) {
    if (ObjectGroup::Property* prop = group->getProperty(sweep, i)) {
      traverseEdge(group, prop->id.get());
    }
  }
  if (group->proto().isObject()) {
    traverseEdge(group, group->proto().toObject());
  }
  // Note: the realm's global can be nullptr if we GC while creating the global.
  if (GlobalObject* global = group->realm()->unsafeUnbarrieredMaybeGlobal()) {
    traverseEdge(group, static_cast<JSObject*>(global));
  }
  if (group->newScript(sweep)) {
    group->newScript(sweep)->trace(this);
  }
  if (group->maybePreliminaryObjects(sweep)) {
    group->maybePreliminaryObjects(sweep)->trace(this);
  }
  if (TypeDescr* descr = group->maybeTypeDescr()) {
    traverseEdge(group, static_cast<JSObject*>(descr));
  }
  if (JSFunction* fun = group->maybeInterpretedFunction()) {
    traverseEdge(group, static_cast<JSObject*>(fun));
  }
}
// BigInts hold no GC pointers, so there is nothing to trace.
void JS::BigInt::traceChildren(JSTracer* trc) {}
template <typename Functor>
static void VisitTraceList(const Functor& f, const uint32_t* traceList,
uint8_t* memory);
// Call the trace hook set on the object, if present. If further tracing of
// NativeObject fields is required, this will return the native object.
enum class CheckGeneration { DoChecks, NoChecks };
// Invoke the class trace hook for |obj|, if it has one. Returns the object
// as a NativeObject when the caller should continue tracing native fields
// (slots/elements), or nullptr when the hook fully handled the object.
template <typename Functor>
static inline NativeObject* CallTraceHook(Functor&& f, JSTracer* trc,
                                          JSObject* obj,
                                          CheckGeneration check) {
  const JSClass* clasp = obj->getClass();
  MOZ_ASSERT(clasp);
  MOZ_ASSERT(obj->isNative() == clasp->isNative());
  if (!clasp->hasTrace()) {
    return &obj->as<NativeObject>();
  }
  // Fast path for inline typed objects: trace the shape and the typed
  // memory's trace list directly instead of going through the hook.
  if (clasp->isTrace(InlineTypedObject::obj_trace)) {
    Shape** pshape = obj->as<InlineTypedObject>().addressOfShapeFromGC();
    f(pshape);
    InlineTypedObject& tobj = obj->as<InlineTypedObject>();
    if (tobj.typeDescr().hasTraceList()) {
      VisitTraceList(f, tobj.typeDescr().traceList(),
                     tobj.inlineTypedMemForGC());
    }
    return nullptr;
  }
  AutoSetTracingSource asts(trc, obj);
  clasp->doTrace(trc, obj);
  // Non-native classes are fully handled by their hook.
  if (!clasp->isNative()) {
    return nullptr;
  }
  return &obj->as<NativeObject>();
}
template <typename Functor>
static void VisitTraceList(const Functor& f, const uint32_t* traceList,
uint8_t* memory) {
size_t stringCount = *traceList++;
size_t objectCount = *traceList++;
size_t valueCount = *traceList++;
for (size_t i = 0; i < stringCount; i++) {
f(reinterpret_cast<JSString**>(memory + *traceList));
traceList++;
}
for (size_t i = 0; i < objectCount; i++) {
JSObject** objp = reinterpret_cast<JSObject**>(memory + *traceList);
if (*objp) {
f(objp);
}
traceList++;
}
for (size_t i = 0; i < valueCount; i++) {
f(reinterpret_cast<Value*>(memory + *traceList));
traceList++;
}
}
/*** Mark-stack Marking *****************************************************/
// Process the debug-only test mark queue: a list of objects to mark in a
// prescribed order, interleaved with string commands ("yield", "drain",
// "set-color-gray", etc.) that steer the marker. Returns whether the queue
// finished, yielded, or must be suspended until marking can proceed.
GCMarker::MarkQueueProgress GCMarker::processMarkQueue() {
#ifdef DEBUG
  if (markQueue.empty()) {
    return QueueComplete;
  }
  GCRuntime& gcrt = runtime()->gc;
  // Gray marking is only possible during the sweep phase.
  if (queueMarkColor == mozilla::Some(MarkColor::Gray) &&
      gcrt.state() != State::Sweep) {
    return QueueSuspended;
  }
  // If the queue wants to be gray marking, but we've pushed a black object
  // since set-color-gray was processed, then we can't switch to gray and must
  // again wait until gray marking is possible.
  //
  // Remove this code if the restriction against marking gray during black is
  // relaxed.
  if (queueMarkColor == mozilla::Some(MarkColor::Gray) && hasBlackEntries()) {
    return QueueSuspended;
  }
  // If the queue wants to be marking a particular color, switch to that color.
  // In any case, restore the mark color to whatever it was when we entered
  // this function.
  AutoSetMarkColor autoRevertColor(*this, queueMarkColor.valueOr(markColor()));
  // Process the mark queue by taking each object in turn, pushing it onto the
  // mark stack, and processing just the top element with processMarkStackTop
  // without recursing into reachable objects.
  while (queuePos < markQueue.length()) {
    Value val = markQueue[queuePos++].get().unbarrieredGet();
    if (val.isObject()) {
      JSObject* obj = &val.toObject();
      JS::Zone* zone = obj->zone();
      if (!zone->isGCMarking() || obj->isMarkedAtLeast(markColor())) {
        continue;
      }
      // If we have started sweeping, obey sweep group ordering. But note that
      // we will first be called during the initial sweep slice, when the sweep
      // group indexes have not yet been computed. In that case, we can mark
      // freely.
      if (gcrt.state() == State::Sweep && gcrt.initialState != State::Sweep) {
        if (zone->gcSweepGroupIndex < gcrt.getCurrentSweepGroupIndex()) {
          // Too late. This must have been added after we started collecting,
          // and we've already processed its sweep group. Skip it.
          continue;
        }
        if (zone->gcSweepGroupIndex > gcrt.getCurrentSweepGroupIndex()) {
          // Not ready yet. Wait until we reach the object's sweep group.
          // (queuePos is rewound so the entry is retried on resumption.)
          queuePos--;
          return QueueSuspended;
        }
      }
      if (markColor() == MarkColor::Gray && zone->isGCMarkingBlackOnly()) {
        // Have not yet reached the point where we can mark this object, so
        // continue with the GC.
        queuePos--;
        return QueueSuspended;
      }
      // Mark the object and push it onto the stack.
      traverse(obj);
      if (isMarkStackEmpty()) {
        if (obj->asTenured().arena()->onDelayedMarkingList()) {
          AutoEnterOOMUnsafeRegion oomUnsafe;
          oomUnsafe.crash("mark queue OOM");
        }
      }
      // Process just the one object that is now on top of the mark stack,
      // possibly pushing more stuff onto the stack.
      if (isMarkStackEmpty()) {
        MOZ_ASSERT(obj->asTenured().arena()->onDelayedMarkingList());
        // If we overflow the stack here and delay marking, then we won't be
        // testing what we think we're testing.
        AutoEnterOOMUnsafeRegion oomUnsafe;
        oomUnsafe.crash("Overflowed stack while marking test queue");
      }
      SliceBudget unlimited = SliceBudget::unlimited();
      processMarkStackTop(unlimited);
    } else if (val.isString()) {
      // String entries are commands controlling the marker's behavior.
      JSLinearString* str = &val.toString()->asLinear();
      if (js::StringEqualsLiteral(str, "yield") && gcrt.isIncrementalGc()) {
        return QueueYielded;
      } else if (js::StringEqualsLiteral(str, "enter-weak-marking-mode") ||
                 js::StringEqualsLiteral(str, "abort-weak-marking-mode")) {
        if (state == MarkingState::RegularMarking) {
          // We can't enter weak marking mode at just any time, so instead
          // we'll stop processing the queue and continue on with the GC. Once
          // we enter weak marking mode, we can continue to the rest of the
          // queue. Note that we will also suspend for aborting, and then abort
          // the earliest following weak marking mode.
          queuePos--;
          return QueueSuspended;
        }
        if (js::StringEqualsLiteral(str, "abort-weak-marking-mode")) {
          abortLinearWeakMarking();
        }
      } else if (js::StringEqualsLiteral(str, "drain")) {
        auto unlimited = SliceBudget::unlimited();
        MOZ_RELEASE_ASSERT(
            markUntilBudgetExhausted(unlimited, DontReportMarkTime));
      } else if (js::StringEqualsLiteral(str, "set-color-gray")) {
        queueMarkColor = mozilla::Some(MarkColor::Gray);
        if (gcrt.state() != State::Sweep) {
          // Cannot mark gray yet, so continue with the GC.
          queuePos--;
          return QueueSuspended;
        }
        setMarkColor(MarkColor::Gray);
      } else if (js::StringEqualsLiteral(str, "set-color-black")) {
        queueMarkColor = mozilla::Some(MarkColor::Black);
        setMarkColor(MarkColor::Black);
      } else if (js::StringEqualsLiteral(str, "unset-color")) {
        queueMarkColor.reset();
      }
    }
  }
#endif
  return QueueComplete;
}
// Map the currently active sweep-marking phase to its gray-marking variant
// for statistics reporting. Crashes on any other phase.
static gcstats::PhaseKind GrayMarkingPhaseForCurrentPhase(
    const gcstats::Statistics& stats) {
  using namespace gcstats;
  PhaseKind current = stats.currentPhaseKind();
  if (current == PhaseKind::SWEEP_MARK) {
    return PhaseKind::SWEEP_MARK_GRAY;
  }
  if (current == PhaseKind::SWEEP_MARK_WEAK) {
    return PhaseKind::SWEEP_MARK_GRAY_WEAK;
  }
  MOZ_CRASH("Unexpected current phase");
}
// Drain the mark stacks (black first, then gray, then delayed children)
// until everything is marked or |budget| runs out. Returns true when marking
// is complete, false when the budget was exhausted.
bool GCMarker::markUntilBudgetExhausted(SliceBudget& budget,
                                        ShouldReportMarkTime reportTime) {
#ifdef DEBUG
  MOZ_ASSERT(!strictCompartmentChecking);
  strictCompartmentChecking = true;
  auto acc = mozilla::MakeScopeExit([&] { strictCompartmentChecking = false; });
#endif
  if (budget.isOverBudget()) {
    return false;
  }
  // This method leaves the mark color as it found it.
  AutoSetMarkColor autoSetBlack(*this, MarkColor::Black);
  // Change representation of value arrays on the stack while the mutator
  // runs.
  auto svr = mozilla::MakeScopeExit([&] { saveValueRanges(); });
  for (;;) {
    // Drain all black entries first.
    while (hasBlackEntries()) {
      MOZ_ASSERT(markColor() == MarkColor::Black);
      processMarkStackTop(budget);
      if (budget.isOverBudget()) {
        return false;
      }
    }
    // Then drain gray entries, attributing the time to a gray-marking phase
    // when requested.
    if (hasGrayEntries()) {
      mozilla::Maybe<gcstats::AutoPhase> ap;
      if (reportTime) {
        auto& stats = runtime()->gc.stats();
        ap.emplace(stats, GrayMarkingPhaseForCurrentPhase(stats));
      }
      AutoSetMarkColor autoSetGray(*this, MarkColor::Gray);
      do {
        processMarkStackTop(budget);
        if (budget.isOverBudget()) {
          return false;
        }
      } while (hasGrayEntries());
    }
    if (hasBlackEntries()) {
      // We can end up marking black during gray marking in the following case:
      // a WeakMap has a CCW key whose delegate (target) is black, and during
      // gray marking we mark the map (gray). The delegate's color will be
      // propagated to the key. (And we can't avoid this by marking the key
      // gray, because even though the value will end up gray in either case,
      // the WeakMap entry must be preserved because the CCW could get
      // collected and then we could re-wrap the delegate and look it up in the
      // map again, and need to get back the original value.)
      continue;
    }
    if (!hasDelayedChildren()) {
      break;
    }
    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing. Don't do this until we're done with everything
     * else.
     */
    if (!markAllDelayedChildren(budget)) {
      return false;
    }
  }
  return true;
}
// Returns false only when type information proves the object's dense
// elements contain no GC pointers, allowing the marker to skip them.
// Conservatively returns true in all other cases.
inline static bool ObjectDenseElementsMayBeMarkable(NativeObject* nobj) {
  /*
   * For arrays that are large enough it's worth checking the type information
   * to see if the object's elements contain any GC pointers. If not, we
   * don't need to trace them.
   */
  const unsigned MinElementsLength = 32;
  if (nobj->getDenseInitializedLength() < MinElementsLength ||
      nobj->isSingleton()) {
    return true;
  }
  ObjectGroup* group = nobj->group();
  if (group->needsSweep() || group->unknownPropertiesDontCheckGeneration()) {
    return true;
  }
  MOZ_ASSERT(IsTypeInferenceEnabled());
  // This typeset doesn't escape this function so avoid sweeping here.
  HeapTypeSet* typeSet = group->maybeGetPropertyDontCheckGeneration(JSID_VOID);
  if (!typeSet) {
    return true;
  }
  // Elements may be markable if the typeset includes any GC-thing type.
  static const uint32_t flagMask = TYPE_FLAG_STRING | TYPE_FLAG_SYMBOL |
                                   TYPE_FLAG_LAZYARGS | TYPE_FLAG_ANYOBJECT |
                                   TYPE_FLAG_BIGINT;
  bool mayBeMarkable =
      typeSet->hasAnyFlag(flagMask) || typeSet->getObjectCount() != 0;
#ifdef DEBUG
  // Cross-check the type information against the actual element values.
  if (!mayBeMarkable) {
    const Value* elements = nobj->getDenseElementsAllowCopyOnWrite();
    for (unsigned i = 0; i < nobj->getDenseInitializedLength(); i++) {
      MOZ_ASSERT(!elements[i].isGCThing());
    }
  }
#endif
  return mayBeMarkable;
}
// Debug-only check that an object-slot edge does not cross compartments;
// crashes with a diagnostic message if it does. No-op in release builds.
static inline void CheckForCompartmentMismatch(JSObject* obj, JSObject* obj2) {
#ifdef DEBUG
  if (MOZ_LIKELY(obj->compartment() == obj2->compartment())) {
    return;
  }
  fprintf(
      stderr,
      "Compartment mismatch in pointer from %s object slot to %s object\n",
      obj->getClass()->name, obj2->getClass()->name);
  MOZ_CRASH("Compartment mismatch");
#endif
}
inline void GCMarker::processMarkStackTop(SliceBudget& budget) {
/*
* The function uses explicit goto and implements the scanning of the
* object directly. It allows to eliminate the tail recursion and
* significantly improve the marking performance, see bug 641025.
*/
HeapSlot* vp;
HeapSlot* end;
JSObject* obj;
gc::MarkStack& stack = currentStack();
switch (stack.peekTag()) {
case MarkStack::ValueArrayTag: {
auto array = stack.popValueArray();
obj = array.ptr.asValueArrayObject();
vp = array.start;
end = array.end;
goto scan_value_array;
}
case MarkStack::ObjectTag: {
obj = stack.popPtr().as<JSObject>();
AssertShouldMarkInZone(obj);
goto scan_obj;
}
case MarkStack::GroupTag: {
auto group = stack.popPtr().as<ObjectGroup>();
return lazilyMarkChildren(group);
}
case MarkStack::JitCodeTag: {