/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "gc/Marking-inl.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerRange.h"
#include "mozilla/Maybe.h"
#include "mozilla/ReentrancyGuard.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/Unused.h"
#include <algorithm>
#include <initializer_list>
#include <type_traits>
#include "jsfriendapi.h"
#include "builtin/ModuleObject.h"
#include "debugger/DebugAPI.h"
#include "gc/GCInternals.h"
#include "gc/GCProbes.h"
#include "gc/Policy.h"
#include "jit/JitCode.h"
#include "js/friend/DumpFunctions.h" // js::DumpObject
#include "js/GCTypeMacros.h" // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
#include "js/SliceBudget.h"
#include "util/DiagnosticAssertions.h"
#include "util/Memory.h"
#include "util/Poison.h"
#include "vm/ArgumentsObject.h"
#include "vm/ArrayObject.h"
#include "vm/BigIntType.h"
#include "vm/GeneratorObject.h"
#include "vm/RegExpShared.h"
#include "vm/Scope.h"
#include "vm/Shape.h"
#include "vm/SymbolType.h"
#include "vm/TypedArrayObject.h"
#include "wasm/WasmJS.h"
#include "gc/GC-inl.h"
#include "gc/Nursery-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/WeakMap-inl.h"
#include "gc/Zone-inl.h"
#include "vm/GeckoProfiler-inl.h"
#include "vm/NativeObject-inl.h"
#include "vm/PlainObject-inl.h" // js::PlainObject
#include "vm/Realm-inl.h"
#include "vm/StringType-inl.h"
#define MAX_DEDUPLICATABLE_STRING_LENGTH 500
using namespace js;
using namespace js::gc;
using JS::MapTypeToTraceKind;
using mozilla::DebugOnly;
using mozilla::IntegerRange;
using mozilla::PodCopy;
// [SMDOC] GC Tracing
//
// Tracing Overview
// ================
//
// Tracing, in this context, refers to an abstract visitation of some or all of
// the GC-controlled heap. The effect of tracing an edge of the graph depends
// on the subclass of the JSTracer on whose behalf we are tracing.
//
// Marking
// -------
//
// The primary JSTracer is the GCMarker. The marking tracer causes the target
// of each traversed edge to be marked black and the target edge's children to
// be marked either gray (in the gc algorithm sense) or immediately black.
//
// Callback
// --------
//
// The secondary JSTracer is the CallbackTracer. This simply invokes a callback
// on each edge in a child.
//
// The following is a rough outline of the general structure of the tracing
// internals.
//
/* clang-format off */
//
// +----------------------+ ...................
// | | : :
// | v v :
// | TraceRoot TraceEdge TraceRange GCMarker:: :
// | | | | processMarkStackTop +---+---+
// | +-----------+-----------+ | | |
// | | | | Mark |
// | v | | Stack |
// | TraceEdgeInternal | | |
// | | | +---+---+
// | | | ^
// | +--------------+---------------+ +<----------+ :
// | | | | | | :
// | v v v v | :
// | DoCallback TenuringTracer:: DoMarking traverseEdge | :
// | | traverse | | | :
// | | +------+------+ | :
// | | | | :
// | v v | :
// | CallbackTracer:: GCMarker::traverse | :
// | dispatchToOnEdge | | :
// | | | :
// | +-------------------+-----------+------+ | :
// | | | | | :
// | v v v | :
// | markAndTraceChildren markAndPush eagerlyMarkChildren | :
// | | : | | :
// | v : +-----------+ :
// | T::traceChildren : :
// | | : :
// +-------------+ ......................................
//
// Legend:
// ------- Direct calls
// ....... Data flow
//
/* clang-format on */
/*** Tracing Invariants *****************************************************/
#if defined(DEBUG)
// Returns true if the second word of |thing| matches one of the known GC
// poison patterns (the byte replicated across all four bytes of the word).
// Used only to strengthen debug assertions; a false positive merely triggers
// an extra free-list check in the caller.
template <typename T>
static inline bool IsThingPoisoned(T* thing) {
  const uint8_t poisonBytes[] = {
      JS_FRESH_NURSERY_PATTERN,     JS_SWEPT_NURSERY_PATTERN,
      JS_ALLOCATED_NURSERY_PATTERN, JS_FRESH_TENURED_PATTERN,
      JS_MOVED_TENURED_PATTERN,     JS_SWEPT_TENURED_PATTERN,
      JS_ALLOCATED_TENURED_PATTERN, JS_FREED_HEAP_PTR_PATTERN,
      JS_FREED_CHUNK_PATTERN,       JS_FREED_ARENA_PATTERN,
      JS_SWEPT_TI_PATTERN,          JS_SWEPT_CODE_PATTERN,
      JS_RESET_VALUE_PATTERN,       JS_POISONED_JSSCRIPT_DATA_PATTERN,
      JS_OOB_PARSE_NODE_PATTERN,    JS_LIFO_UNDEFINED_PATTERN,
      JS_LIFO_UNINITIALIZED_PATTERN,
  };
  // Skip the first word: for a free thing it holds FreeSpan data, so inspect
  // the word after it instead.
  uint32_t* p =
      reinterpret_cast<uint32_t*>(reinterpret_cast<FreeSpan*>(thing) + 1);
  // Note: all free patterns are odd to make the common, not-poisoned case a
  // single test.
  if ((*p & 1) == 0) {
    return false;
  }
  for (uint8_t pb : poisonBytes) {
    // Replicate the pattern byte across a 32-bit word. The shifts are done in
    // uint32_t: a plain `pb << 24` promotes to signed int, and shifting a
    // pattern byte >= 0x80 into the sign bit is undefined behavior.
    const uint32_t pw = uint32_t(pb) | (uint32_t(pb) << 8) |
                        (uint32_t(pb) << 16) | (uint32_t(pb) << 24);
    if (*p == pw) {
      return true;
    }
  }
  return false;
}
#endif
// Returns true when |thing| belongs to a runtime other than |rt|. Only
// permanent/shared things or things in the self-hosting zone may legally be
// visible across runtimes, which the assertion checks.
template <typename T>
static inline bool IsOwnedByOtherRuntime(JSRuntime* rt, T thing) {
  const bool foreign = thing->runtimeFromAnyThread() != rt;
  MOZ_ASSERT_IF(foreign, thing->isPermanentAndMayBeShared() ||
                             thing->zoneFromAnyThread()->isSelfHostingZone());
  return foreign;
}
// Debug-only sanity checks applied to every thing reached during tracing:
// validates the tracer, the thing's alignment and trace kind, thread access
// rights, zone marking state, and (best-effort) that the thing is allocated.
// Compiles to nothing in release builds.
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T* thing) {
#ifdef DEBUG
  MOZ_ASSERT(trc);
  MOZ_ASSERT(thing);

  // Forwarding pointers are only legal while cells are being moved.
  if (IsForwarded(thing)) {
    MOZ_ASSERT(IsTracerKind(trc, JS::TracerKind::Moving) ||
               trc->isTenuringTracer());
    thing = Forwarded(thing);
  }

  /* This function uses data that's not available in the nursery. */
  if (IsInsideNursery(thing)) {
    return;
  }

  /*
   * Permanent atoms and things in the self-hosting zone are not associated
   * with this runtime, but will be ignored during marking.
   */
  if (IsOwnedByOtherRuntime(trc->runtime(), thing)) {
    return;
  }

  Zone* zone = thing->zoneFromAnyThread();
  JSRuntime* rt = trc->runtime();
  MOZ_ASSERT(zone->runtimeFromAnyThread() == rt);

  bool isGcMarkingTracer = trc->isMarkingTracer();
  bool isUnmarkGrayTracer = IsTracerKind(trc, JS::TracerKind::UnmarkGray);
  bool isClearEdgesTracer = IsTracerKind(trc, JS::TracerKind::ClearEdges);

  if (TlsContext.get()->isMainThreadContext()) {
    // If we're on the main thread we must have access to the runtime and zone.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  } else {
    // Off the main thread, only these tracer kinds are expected to run.
    MOZ_ASSERT(isGcMarkingTracer || isUnmarkGrayTracer || isClearEdgesTracer ||
               IsTracerKind(trc, JS::TracerKind::Moving) ||
               IsTracerKind(trc, JS::TracerKind::GrayBuffering) ||
               IsTracerKind(trc, JS::TracerKind::Sweeping));
    MOZ_ASSERT_IF(!isClearEdgesTracer, CurrentThreadIsPerformingGC());
  }

  MOZ_ASSERT(thing->isAligned());
  MOZ_ASSERT(MapTypeToTraceKind<std::remove_pointer_t<T>>::kind ==
             thing->getTraceKind());

  if (isGcMarkingTracer) {
    GCMarker* gcMarker = GCMarker::fromTracer(trc);
    MOZ_ASSERT(zone->shouldMarkInZone());
    MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                  zone->isCollectingFromAnyThread() || zone->isAtomsZone());
    MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
                  !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());
    MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
                 zone->isGCCompacting()));

    // Check that we don't stray from the current compartment and zone without
    // using TraceCrossCompartmentEdge.
    Compartment* comp = thing->maybeCompartment();
    MOZ_ASSERT_IF(gcMarker->tracingCompartment && comp,
                  gcMarker->tracingCompartment == comp);
    MOZ_ASSERT_IF(gcMarker->tracingZone,
                  gcMarker->tracingZone == zone || zone->isAtomsZone());
  }

  /*
   * Try to assert that the thing is allocated.
   *
   * We would like to assert that the thing is not in the free list, but this
   * check is very slow. Instead we check whether the thing has been poisoned:
   * if it has not then we assume it is allocated, but if it has then it is
   * either free or uninitialized in which case we check the free list.
   *
   * Further complications are that background sweeping may be running and
   * concurrently modifying the free list and that tracing is done off
   * thread during compacting GC and reading the contents of the thing by
   * IsThingPoisoned would be racy in this case.
   */
  MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy() && !zone->isGCSweeping() &&
                    !zone->isGCFinished() && !zone->isGCCompacting(),
                !IsThingPoisoned(thing) ||
                    !InFreeList(thing->asTenured().arena(), thing));
#endif
}
// Tagged-value overload: unpack the GC thing and forward to the pointer
// overload above. The lambda must capture and pass |trc| — the pointer
// overload of CheckTracedThing takes the tracer as its first argument, so
// calling it with the thing alone does not match any overload.
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T thing) {
  ApplyGCThingTyped(thing, [trc](auto t) { CheckTracedThing(trc, t); });
}
namespace js {
// Explicitly instantiate CheckTracedThing for every concrete trace-kind type.
#define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
  template void CheckTracedThing<type>(JSTracer*, type*);
JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
#undef IMPL_CHECK_TRACED_THING
}  // namespace js
// Decide whether a marking tracer should follow the cross-compartment edge
// from |src| to |dstCell|, preserving the black/gray marking invariants
// required by the cycle collector.
static inline bool ShouldMarkCrossCompartment(GCMarker* marker, JSObject* src,
                                              Cell* dstCell) {
  MarkColor color = marker->markColor();

  // Nursery things are never marked here; only black marking may see them.
  if (!dstCell->isTenured()) {
    MOZ_ASSERT(color == MarkColor::Black);
    return false;
  }
  TenuredCell& dst = dstCell->asTenured();

  JS::Zone* dstZone = dst.zone();
  if (!src->zone()->isGCMarking() && !dstZone->isGCMarking()) {
    return false;
  }

  if (color == MarkColor::Black) {
    // Check our sweep groups are correct: we should never have to
    // mark something in a zone that we have started sweeping.
    MOZ_ASSERT_IF(!dst.isMarkedBlack(), !dstZone->isGCSweeping());

    /*
     * Having black->gray edges violates our promise to the cycle collector so
     * we ensure that gray things we encounter when marking black end up getting
     * marked black.
     *
     * This can happen for two reasons:
     *
     * 1) If we're collecting a compartment and it has an edge to an uncollected
     * compartment it's possible that the source and destination of the
     * cross-compartment edge should be gray, but the source was marked black by
     * the write barrier.
     *
     * 2) If we yield during gray marking and the write barrier marks a gray
     * thing black.
     *
     * We handle the first case before returning whereas the second case happens
     * as part of normal marking.
     */
    if (dst.isMarkedGray() && !dstZone->isGCMarking()) {
      UnmarkGrayGCThingUnchecked(marker->runtime(),
                                 JS::GCCellPtr(&dst, dst.getTraceKind()));
      return false;
    }

    return dstZone->isGCMarking();
  } else {
    // Check our sweep groups are correct as above.
    MOZ_ASSERT_IF(!dst.isMarkedAny(), !dstZone->isGCSweeping());

    if (dstZone->isGCMarkingBlackOnly()) {
      /*
       * The destination compartment is not being marked gray now, but it
       * will be later, so record the cell so it can be marked gray at the
       * appropriate time.
       */
      if (!dst.isMarkedAny()) {
        DelayCrossCompartmentGrayMarking(src);
      }
      return false;
    }

    return dstZone->isGCMarkingBlackAndGray();
  }
}
// Non-marking tracers always follow cross-compartment edges; marking tracers
// defer to ShouldMarkCrossCompartment for the invariant checks.
static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        Cell* dstCell) {
  if (!trc->isMarkingTracer()) {
    return true;
  }
  GCMarker* marker = GCMarker::fromTracer(trc);
  return ShouldMarkCrossCompartment(marker, src, dstCell);
}
// Value overload: only GC-thing values carry an edge worth tracing.
static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        const Value& val) {
  if (!val.isGCThing()) {
    return false;
  }
  return ShouldTraceCrossCompartment(trc, src, val.toGCThing());
}
// Debug check: a tenured cell reached during marking must be in a zone that
// is being marked.
static void AssertShouldMarkInZone(Cell* thing) {
  Zone* zone = thing->asTenured().zone();
  MOZ_ASSERT(zone->shouldMarkInZone());
}
// Debug check for strings: strings may also live in the atoms zone, which is
// marked on behalf of all zones.
static void AssertShouldMarkInZone(JSString* str) {
#ifdef DEBUG
  Zone* strZone = str->zone();
  MOZ_ASSERT(strZone->shouldMarkInZone() || strZone->isAtomsZone());
#endif
}
// Debug check for symbols: like strings, symbols may live in the atoms zone.
static void AssertShouldMarkInZone(JS::Symbol* sym) {
#ifdef DEBUG
  Zone* symZone = sym->asTenured().zone();
  MOZ_ASSERT(symZone->shouldMarkInZone() || symZone->isAtomsZone());
#endif
}
#ifdef DEBUG
// Roots may only be traced by a marking tracer before marking proper begins
// (MarkRoots) or outside a collection entirely (NotActive).
void js::gc::AssertRootMarkingPhase(JSTracer* trc) {
  if (trc->isMarkingTracer()) {
    State state = trc->runtime()->gc.state();
    MOZ_ASSERT(state == State::NotActive || state == State::MarkRoots);
  }
}
#endif
/*** Tracing Interface ******************************************************/
// Forward declarations: the tracer-specific implementations that
// TraceEdgeInternal dispatches to below.
template <typename T>
bool DoCallback(GenericTracer* trc, T** thingp, const char* name);
template <typename T>
bool DoCallback(GenericTracer* trc, T* thingp, const char* name);
template <typename T>
void DoMarking(GCMarker* gcmarker, T* thing);
template <typename T>
void DoMarking(GCMarker* gcmarker, const T& thing);
// Helper for the public TraceExternalEdge entry points: asserts the edge is
// markable, then forwards to the internal tracing machinery.
template <typename T>
static void TraceExternalEdgeHelper(JSTracer* trc, T* thingp,
                                    const char* name) {
  MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
// Public entry point for tracing a JSObject* edge whose write barriers are
// the caller's responsibility.
JS_PUBLIC_API void js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc,
                                                        JSObject** thingp,
                                                        const char* name) {
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
// Helper for the public UnsafeTraceRoot entry points; tolerates a null
// *thingp by delegating to TraceNullableRoot.
template <typename T>
static void UnsafeTraceRootHelper(JSTracer* trc, T* thingp, const char* name) {
  MOZ_ASSERT(thingp);
  js::TraceNullableRoot(trc, thingp, name);
}
// Forward declarations so the UnsafeTraceRoot instantiations below can name
// these internal types.
namespace js {
class AbstractGeneratorObject;
class SavedFrame;
}  // namespace js

// Stamp out one public TraceExternalEdge overload per GC pointer type.
#define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type)                           \
  JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
                                               const char* name) {          \
    TraceExternalEdgeHelper(trc, thingp, name);                             \
  }

// Define TraceExternalEdge for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
#undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION

// Stamp out one public UnsafeTraceRoot overload per GC pointer type.
#define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type)                       \
  JS_PUBLIC_API void JS::UnsafeTraceRoot(JSTracer* trc, type* thingp, \
                                         const char* name) {          \
    UnsafeTraceRootHelper(trc, thingp, name);                         \
  }

// Define UnsafeTraceRoot for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)

// Also, for the moment, define UnsafeTraceRoot for internal GC pointer types.
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame*)
#undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
namespace js {
namespace gc {

// Instantiate the internal edge/range tracing templates for every type that
// may flow into TraceEdgeInternal / TraceRangeInternal.
#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type)                      \
  template bool TraceEdgeInternal<type>(JSTracer*, type*, const char*); \
  template void TraceRangeInternal<type>(JSTracer*, size_t len, type*,  \
                                         const char*);

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
  INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS)

#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS

}  // namespace gc
}  // namespace js
// In debug builds, makes a note of the current compartment before calling a
// trace hook or traceChildren() method on a GC thing.
class MOZ_RAII AutoSetTracingSource {
#ifdef DEBUG
  // Non-null only while a marking tracer has a recorded source thing.
  GCMarker* marker = nullptr;
#endif

 public:
  // Record |thing|'s zone and compartment as the expected source of edges
  // traced inside this scope (debug-only; a no-op in release builds).
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {
#ifdef DEBUG
    if (trc->isMarkingTracer() && thing) {
      marker = GCMarker::fromTracer(trc);
      MOZ_ASSERT(!marker->tracingZone);
      marker->tracingZone = thing->asTenured().zone();
      MOZ_ASSERT(!marker->tracingCompartment);
      marker->tracingCompartment = thing->maybeCompartment();
    }
#endif
  }

  ~AutoSetTracingSource() {
#ifdef DEBUG
    if (marker) {
      marker->tracingZone = nullptr;
      marker->tracingCompartment = nullptr;
    }
#endif
  }
};
// In debug builds, clear the trace hook compartment. This happens
// after the trace hook has called back into one of our trace APIs and we've
// checked the traced thing.
class MOZ_RAII AutoClearTracingSource {
#ifdef DEBUG
  GCMarker* marker = nullptr;
  // Previous values, restored when the scope exits.
  JS::Zone* prevZone = nullptr;
  Compartment* prevCompartment = nullptr;
#endif

 public:
  // Temporarily clear the recorded tracing source, e.g. around edges that are
  // legitimately cross-compartment (debug-only; a no-op in release builds).
  explicit AutoClearTracingSource(JSTracer* trc) {
#ifdef DEBUG
    if (trc->isMarkingTracer()) {
      marker = GCMarker::fromTracer(trc);
      prevZone = marker->tracingZone;
      marker->tracingZone = nullptr;
      prevCompartment = marker->tracingCompartment;
      marker->tracingCompartment = nullptr;
    }
#endif
  }

  ~AutoClearTracingSource() {
#ifdef DEBUG
    if (marker) {
      marker->tracingZone = prevZone;
      marker->tracingCompartment = prevCompartment;
    }
#endif
  }
};
// Trace a cross-compartment edge whose barriers the caller manages manually.
// The edge is traced only if ShouldTraceCrossCompartment approves it.
template <typename T>
void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc,
                                                    JSObject* src, T* dst,
                                                    const char* name) {
  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);
  if (ShouldTraceCrossCompartment(trc, src, *dst)) {
    TraceEdgeInternal(trc, dst, name);
  }
}
template void js::TraceManuallyBarrieredCrossCompartmentEdge<Value>(
    JSTracer*, JSObject*, Value*, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
    JSTracer*, JSObject*, JSObject**, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<BaseScript*>(
    JSTracer*, JSObject*, BaseScript**, const char*);
// Trace the key edge of a weak map entry. Unlike normal cross-compartment
// edges this cannot be used to delay gray marking, so instead we assert that
// the map's zone agrees with the key's zone on marking state.
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T** thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark it.
#ifdef DEBUG
  auto thing = *thingp;
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() == thing->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}
template void js::TraceWeakMapKeyEdgeInternal<JSObject>(JSTracer*, Zone*,
                                                        JSObject**,
                                                        const char*);
template void js::TraceWeakMapKeyEdgeInternal<BaseScript>(JSTracer*, Zone*,
                                                          BaseScript**,
                                                          const char*);
// Mark a process-wide (permanent/shared) root such as a permanent atom or a
// well-known symbol. These are skipped by the normal DoMarking path.
template <typename T>
void js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name) {
  AssertRootMarkingPhase(trc);
  MOZ_ASSERT(thing->isPermanentAndMayBeShared());

  // We have to mark permanent atoms and well-known symbols through a special
  // method because the default DoMarking implementation automatically skips
  // them. Fortunately, atoms (permanent and non) cannot refer to other GC
  // things so they do not need to go through the mark stack and may simply
  // be marked directly. Moreover, well-known symbols can refer only to
  // permanent atoms, so likewise require no subsequent marking.
  CheckTracedThing(trc, *ConvertToBase(&thing));
  AutoClearTracingSource acts(trc);
  if (trc->isMarkingTracer()) {
    thing->asTenured().markIfUnmarked(gc::MarkColor::Black);
  } else {
    DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
  }
}
template void js::TraceProcessGlobalRoot<JSAtom>(JSTracer*, JSAtom*,
                                                 const char*);
template void js::TraceProcessGlobalRoot<JS::Symbol>(JSTracer*, JS::Symbol*,
                                                     const char*);
// Dispatch on |kind| and trace |thing| as a root of that concrete type,
// returning the (possibly updated) cell pointer after tracing.
static Cell* TraceGenericPointerRootAndType(JSTracer* trc, Cell* thing,
                                            JS::TraceKind kind,
                                            const char* name) {
  return MapGCThingTyped(thing, kind, [trc, name](auto t) -> Cell* {
    TraceRoot(trc, &t, name);
    return t;
  });
}
// Trace a root stored as an untyped Cell*, determining the trace kind
// dynamically and writing back the pointer if tracing updated it.
void js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp,
                                 const char* name) {
  MOZ_ASSERT(thingp);
  Cell* cell = *thingp;
  if (!cell) {
    return;
  }

  Cell* post =
      TraceGenericPointerRootAndType(trc, cell, cell->getTraceKind(), name);
  if (post != cell) {
    *thingp = post;
  }
}
// Trace an edge stored as an untyped Cell*, dispatching on the cell's dynamic
// trace kind and writing back the pointer if tracing updated it.
void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp,
                                                  const char* name) {
  MOZ_ASSERT(thingp);
  Cell* cell = *thingp;
  if (!cell) {
    return;
  }

  auto post = MapGCThingTyped(cell, cell->getTraceKind(),
                              [trc, name](auto t) -> Cell* {
                                TraceManuallyBarrieredEdge(trc, &t, name);
                                return t;
                              });
  if (post != cell) {
    *thingp = post;
  }
}
// Trace a root held as a tagged GCCellPtr, rebuilding the tagged pointer if
// tracing nulled or relocated the cell.
void js::TraceGCCellPtrRoot(JSTracer* trc, JS::GCCellPtr* thingp,
                            const char* name) {
  Cell* cell = thingp->asCell();
  if (!cell) {
    return;
  }

  Cell* post = TraceGenericPointerRootAndType(trc, cell, thingp->kind(), name);
  if (!post) {
    *thingp = JS::GCCellPtr();
  } else if (post != cell) {
    *thingp = JS::GCCellPtr(post, thingp->kind());
  }
}
// This method is responsible for dynamic dispatch to the real tracer
// implementation. Consider replacing this choke point with virtual dispatch:
// a sufficiently smart C++ compiler may be able to devirtualize some paths.
template <typename T>
bool js::gc::TraceEdgeInternal(JSTracer* trc, T* thingp, const char* name) {
#define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type*, T> ||
  static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR)
                        std::is_same_v<T, JS::Value> ||
                    std::is_same_v<T, jsid> || std::is_same_v<T, TaggedProto>,
                "Only the base cell layout types are allowed into "
                "marking/tracing internals");
#undef IS_SAME_TYPE_OR

  // Marking tracer: mark the target (and transitively its children).
  if (trc->isMarkingTracer()) {
    DoMarking(GCMarker::fromTracer(trc), *thingp);
    return true;
  }

  // Tenuring tracer: may update the edge in place, hence the pointer.
  if (trc->isTenuringTracer()) {
    static_cast<TenuringTracer*>(trc)->traverse(thingp);
    return true;
  }

  // Otherwise invoke the generic per-edge callback.
  MOZ_ASSERT(trc->isGenericTracer());
  return DoCallback(trc->asGenericTracer(), thingp, name);
}
// Trace a contiguous range of |len| edges starting at |vec|, skipping entries
// that are not markable. The AutoTracingIndex keeps tracers informed of the
// current element index.
template <typename T>
void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec,
                                const char* name) {
  JS::AutoTracingIndex index(trc);
  for (size_t i = 0; i < len; ++i) {
    if (InternalBarrierMethods<T>::isMarkable(vec[i])) {
      TraceEdgeInternal(trc, &vec[i], name);
    }
    ++index;
  }
}
/*** GC Marking Interface ***************************************************/
namespace js {

// Marker type used when a key type has no implicit (ephemeron) edges.
using HasNoImplicitEdgesType = bool;

template <typename T>
struct ImplicitEdgeHolderType {
  using Type = HasNoImplicitEdgesType;
};

// For now, we only handle JSObject* and BaseScript* keys, but the linear time
// algorithm can be easily extended by adding in more types here, then making
// GCMarker::traverse<T> call markImplicitEdges.
template <>
struct ImplicitEdgeHolderType<JSObject*> {
  using Type = JSObject*;
};

template <>
struct ImplicitEdgeHolderType<BaseScript*> {
  using Type = BaseScript*;
};
// Mark the weakmap values associated with |markedCell| now that the key is
// known to be marked (ephemeron marking step).
void GCMarker::markEphemeronValues(gc::Cell* markedCell,
                                   WeakEntryVector& values) {
  DebugOnly<size_t> initialLen = values.length();

  for (const auto& markable : values) {
    markable.weakmap->markKey(this, markedCell, markable.key);
  }

  // The vector should not be appended to during iteration because the key is
  // already marked, and even in cases where we have a multipart key, we
  // should only be inserting entries for the unmarked portions.
  MOZ_ASSERT(values.length() == initialLen);
}
void GCMarker::forgetWeakKey(js::gc::WeakKeyTable& weakKeys, WeakMapBase* map,
                             gc::Cell* keyOrDelegate, gc::Cell* keyToRemove) {
  // Find and remove the exact pair <map,keyToRemove> from the values of the
  // weak keys table.
  //
  // This function is called when 'keyToRemove' is removed from a weakmap
  // 'map'. If 'keyToRemove' has a delegate, then the delegate will be used as
  // the lookup key in gcWeakKeys; otherwise, 'keyToRemove' itself will be. In
  // either case, 'keyToRemove' is what we will be filtering out of the
  // Markable values in the weakKey table.
  auto p = weakKeys.get(keyOrDelegate);

  // Note that this is not guaranteed to find anything. The key will have
  // only been inserted into the weakKeys table if it was unmarked when the
  // map was traced.
  if (p) {
    // Entries should only have been added to weakKeys if the map was marked.
    for (auto r = p->value.all(); !r.empty(); r.popFront()) {
      MOZ_ASSERT(r.front().weakmap->mapColor);
    }

    p->value.eraseIfEqual(WeakMarkable(map, keyToRemove));
  }
}
// Remove every entry referring to |map| from both of the zone's weak-key
// tables (nursery and tenured), e.g. when the map itself dies.
void GCMarker::forgetWeakMap(WeakMapBase* map, Zone* zone) {
  auto isForMap = [map](const WeakMarkable& markable) -> bool {
    return markable.weakmap == map;
  };
  for (auto table : {&zone->gcNurseryWeakKeys(), &zone->gcWeakKeys()}) {
    for (auto entry = table->all(); !entry.empty(); entry.popFront()) {
      entry.front().value.eraseIf(isForMap);
    }
  }
}
// 'delegate' is no longer the delegate of 'key'.
void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) {
  JS::Zone* zone = delegate->zone();
  if (!zone->needsIncrementalBarrier()) {
    MOZ_ASSERT(!zone->gcWeakKeys(delegate).get(delegate),
               "non-collecting zone should not have populated gcWeakKeys");
    return;
  }
  auto p = zone->gcWeakKeys(delegate).get(delegate);
  if (!p) {
    return;
  }

  // Remove all <weakmap, key> pairs associated with this delegate and key, and
  // call postSeverDelegate on each of the maps found to record the key
  // instead.
  //
  // But be careful: if key and delegate are in different compartments but the
  // same zone, then the same gcWeakKeys table will be mutated by both the
  // eraseIf and the postSeverDelegate, so we cannot nest them.
  js::Vector<WeakMapBase*, 10, SystemAllocPolicy> severedKeyMaps;
  p->value.eraseIf(
      [key, &severedKeyMaps](const WeakMarkable& markable) -> bool {
        if (markable.key != key) {
          return false;
        }
        // Append cannot be allowed to fail here, or we would lose track of
        // a map that still needs its key re-recorded.
        AutoEnterOOMUnsafeRegion oomUnsafe;
        if (!severedKeyMaps.append(markable.weakmap)) {
          oomUnsafe.crash("OOM while recording all weakmaps with severed key");
        }
        return true;
      });

  for (WeakMapBase* weakmap : severedKeyMaps) {
    if (weakmap->zone()->needsIncrementalBarrier()) {
      weakmap->postSeverDelegate(this, key);
    }
  }
}
// 'delegate' is now the delegate of 'key'. Update weakmap marking state.
void GCMarker::restoreWeakDelegate(JSObject* key, JSObject* delegate) {
  if (!key->zone()->needsIncrementalBarrier() ||
      !delegate->zone()->needsIncrementalBarrier()) {
    MOZ_ASSERT(!key->zone()->gcWeakKeys(key).get(key),
               "non-collecting zone should not have populated gcWeakKeys");
    return;
  }
  auto p = key->zone()->gcWeakKeys(key).get(key);
  if (!p) {
    return;
  }

  // Collect the affected maps first, then notify them; see the comment in
  // severWeakDelegate about why these steps cannot be nested.
  js::Vector<WeakMapBase*, 10, SystemAllocPolicy> maps;
  p->value.eraseIf([key, &maps](const WeakMarkable& markable) -> bool {
    if (markable.key != key) {
      return false;
    }
    AutoEnterOOMUnsafeRegion oomUnsafe;
    // NOTE(review): this crash message says "severed key" but this is the
    // restore path — confirm whether the message is intentional.
    if (!maps.append(markable.weakmap)) {
      oomUnsafe.crash("OOM while recording all weakmaps with severed key");
    }
    return true;
  });

  for (WeakMapBase* weakmap : maps) {
    if (weakmap->zone()->needsIncrementalBarrier()) {
      weakmap->postRestoreDelegate(this, key, delegate);
    }
  }
}
// If |markedThing| is a key in any weakmap currently being marked, mark the
// corresponding values now (linear-time ephemeron marking).
template <typename T>
void GCMarker::markImplicitEdgesHelper(T markedThing) {
  if (!isWeakMarking()) {
    return;
  }

  Zone* zone = markedThing->asTenured().zone();
  MOZ_ASSERT(zone->isGCMarking());
  MOZ_ASSERT(!zone->isGCSweeping());

  auto p = zone->gcWeakKeys().get(markedThing);
  if (!p) {
    return;
  }
  WeakEntryVector& markables = p->value;

  // markedThing might be a key in a debugger weakmap, which can end up marking
  // values that are in a different compartment.
  AutoClearTracingSource acts(this);

  markEphemeronValues(markedThing, markables);
  markables.clear();  // If key address is reused, it should do nothing
}

// Key types with no implicit edges: nothing to do.
template <>
void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType) {}

template <typename T>
void GCMarker::markImplicitEdges(T* thing) {
  markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
}

template void GCMarker::markImplicitEdges(JSObject*);
template void GCMarker::markImplicitEdges(BaseScript*);

}  // namespace js
// Generic precondition for marking: skip things owned by another runtime and,
// during a per-zone GC, things in zones not selected for marking.
template <typename T>
static inline bool ShouldMark(GCMarker* gcmarker, T thing) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), thing)) {
    return false;
  }
  return thing->zone()->shouldMarkInZone();
}
template <>
bool ShouldMark<JSObject*>(GCMarker* gcmarker, JSObject* obj) {
  // Don't trace things that are owned by another runtime.
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), obj)) {
    return false;
  }

  // We may mark a Nursery thing outside the context of the
  // MinorCollectionTracer because of a pre-barrier. The pre-barrier is not
  // needed in this case because we perform a minor collection before each
  // incremental slice.
  if (IsInsideNursery(obj)) {
    return false;
  }

  // The object is known to be tenured at this point, so it is faster to
  // consult its own arena's zone directly for the per-zone GC check.
  Zone* objZone = obj->asTenured().zone();
  return objZone->shouldMarkInZone();
}
// JSStrings can also be in the nursery. See ShouldMark<JSObject*> for comments.
template <>
bool ShouldMark<JSString*>(GCMarker* gcmarker, JSString* str) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), str) ||
      IsInsideNursery(str)) {
    return false;
  }
  return str->asTenured().zone()->shouldMarkInZone();
}
// BigInts can also be in the nursery. See ShouldMark<JSObject*> for comments.
template <>
bool ShouldMark<JS::BigInt*>(GCMarker* gcmarker, JS::BigInt* bi) {
  if (IsOwnedByOtherRuntime(gcmarker->runtime(), bi) || IsInsideNursery(bi)) {
    return false;
  }
  return bi->asTenured().zone()->shouldMarkInZone();
}
// Entry point for marking a single thing: applies the per-type preconditions,
// debug checks, and then traverses the thing.
template <typename T>
void DoMarking(GCMarker* gcmarker, T* thing) {
  // Do per-type marking precondition checks.
  if (!ShouldMark(gcmarker, thing)) {
    // Anything skipped here must already be effectively black, or the
    // marking invariants would be broken.
    MOZ_ASSERT(gc::detail::GetEffectiveColor(gcmarker->runtime(), thing) ==
               js::gc::CellColor::Black);
    return;
  }

  CheckTracedThing(gcmarker, thing);
  AutoClearTracingSource acts(gcmarker);
  gcmarker->traverse(thing);

  // Mark the compartment as live.
  SetMaybeAliveFlag(thing);
}

// Tagged-value overload: unpack and dispatch to the pointer overload above.
template <typename T>
void DoMarking(GCMarker* gcmarker, const T& thing) {
  ApplyGCThingTyped(thing, [gcmarker](auto t) { DoMarking(gcmarker, t); });
}
JS_PUBLIC_API void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing) {
  // Optimized marking for read barriers. This is called from
  // ExposeGCThingToActiveJS which has already checked the prerequisites for
  // performing a read barrier. This means we can skip a bunch of checks and
  // call into the tracer directly.

  AutoGeckoProfilerEntry profilingStackFrame(
      TlsContext.get(), "PerformIncrementalReadBarrier",
      JS::ProfilingCategoryPair::GCCC_Barrier);

  MOZ_ASSERT(thing);
  MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());

  TenuredCell* cell = &thing.asCell()->asTenured();
  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());

  // Mark the argument, as DoMarking above.
  ApplyGCThingTyped(thing, [gcmarker](auto thing) {
    MOZ_ASSERT(ShouldMark(gcmarker, thing));
    CheckTracedThing(gcmarker, thing);
    AutoClearTracingSource acts(gcmarker);
    gcmarker->traverse(thing);
  });
}
// The simplest traversal calls out to the fully generic traceChildren function
// to visit the child edges. In the absence of other traversal mechanisms, this
// function will rapidly grow the stack past its bounds and crash the process.
// Thus, this generic tracing should only be used in cases where subsequent
// tracing will not recurse.
template <typename T>
void js::GCMarker::markAndTraceChildren(T* thing) {
  // Permanent/shared things are never marked by a runtime's GC.
  if (thing->isPermanentAndMayBeShared()) {
    return;
  }
  // Trace children only on the unmarked -> marked transition.
  if (!mark(thing)) {
    return;
  }
  AutoSetTracingSource asts(this, thing);
  thing->traceChildren(this);
}
namespace js {
// These types cannot recurse deeply, so the generic markAndTraceChildren
// traversal is safe for them.
template <>
void GCMarker::traverse(BaseShape* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(JS::Symbol* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(JS::BigInt* thing) {
  markAndTraceChildren(thing);
}
template <>
void GCMarker::traverse(RegExpShared* thing) {
  markAndTraceChildren(thing);
}
}  // namespace js
// Strings, Shapes, and Scopes are extremely common, but have simple patterns
// of recursion. We traverse trees of these edges immediately, with aggressive,
// manual inlining, implemented by eagerlyTraceChildren.
template <typename T>
void js::GCMarker::markAndScan(T* thing) {
  // Permanent/shared things are never marked by a runtime's GC.
  if (thing->isPermanentAndMayBeShared()) {
    return;
  }
  // Eagerly scan children only on the unmarked -> marked transition.
  if (!mark(thing)) {
    return;
  }
  eagerlyMarkChildren(thing);
}
namespace js {
// Types marked with the eager-scanning path; see markAndScan above.
template <>
void GCMarker::traverse(JSString* thing) {
  markAndScan(thing);
}
template <>
void GCMarker::traverse(Shape* thing) {
  markAndScan(thing);
}
template <>
void GCMarker::traverse(js::Scope* thing) {
  markAndScan(thing);
}
}  // namespace js
// Object and ObjectGroup are extremely common and can contain arbitrarily
// nested graphs, so are not trivially inlined. In this case we use a mark
// stack to control recursion. JitCode shares none of these properties, but is
// included for historical reasons. JSScript normally cannot recurse, but may
// be used as a weakmap key and thereby recurse into weakmapped values.
template <typename T>
void js::GCMarker::markAndPush(T* thing) {
  // Push for later processing only on the unmarked -> marked transition.
  if (mark(thing)) {
    pushTaggedPtr(thing);
  }
}
namespace js {
// Types marked via the mark stack; see markAndPush above.
template <>
void GCMarker::traverse(JSObject* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(ObjectGroup* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(jit::JitCode* thing) {
  markAndPush(thing);
}
template <>
void GCMarker::traverse(BaseScript* thing) {
  markAndPush(thing);
}
}  // namespace js
namespace js {
// AccessorShapes must always be traversed via their Shape base type; reaching
// this specialization indicates a type-dispatch bug.
template <>
void GCMarker::traverse(AccessorShape* thing) {
  MOZ_CRASH("AccessorShape must be marked as a Shape");
}
}  // namespace js
// Debug-only validation of a marking edge from |source| to |target|: checks
// zone and compartment agreement and, for atoms, that the atom is marked in
// the source zone's atom marking bitmap. Compiles to nothing in release
// builds.
template <typename S, typename T>
static inline void CheckTraversedEdge(S source, T* target) {
#ifdef DEBUG
  // Atoms and Symbols do not have or mark their internal pointers,
  // respectively.
  MOZ_ASSERT(!source->isPermanentAndMayBeShared());

  if (target->isPermanentAndMayBeShared()) {
    MOZ_ASSERT(!target->maybeCompartment());

    // No further checks for permanent/shared things.
    return;
  }

  Zone* sourceZone = source->zone();
  Zone* targetZone = target->zone();

  // Atoms and Symbols do not have access to a compartment pointer, or we'd need
  // to adjust the subsequent check to catch that case.
  MOZ_ASSERT_IF(targetZone->isAtomsZone(), !target->maybeCompartment());

  // The Zones must match, unless the target is an atom.
  MOZ_ASSERT(targetZone == sourceZone || targetZone->isAtomsZone());

  // If we are marking an atom, that atom must be marked in the source zone's
  // atom bitmap.
  if (!sourceZone->isAtomsZone() && targetZone->isAtomsZone()) {
    // We can't currently check this if the helper thread lock is held.
    if (!gHelperThreadLock.ownedByCurrentThread()) {
      MOZ_ASSERT(target->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(
          sourceZone, reinterpret_cast<TenuredCell*>(target)));
    }
  }

  // If we have access to a compartment pointer for both things, they must
  // match.
  MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                source->maybeCompartment() == target->maybeCompartment());
#endif
}
// Traverse an edge from |source| to the pointer |target|: validate the edge
// in debug builds, then mark (and possibly scan/push) the target.
template <typename S, typename T>
void js::GCMarker::traverseEdge(S source, T* target) {
  CheckTraversedEdge(source, target);
  traverse(target);
}

// Overload for tagged values: dispatch on the GC thing contained in |thing|,
// if any, and traverse the edge to it.
template <typename S, typename T>
void js::GCMarker::traverseEdge(S source, const T& thing) {
  ApplyGCThingTyped(thing,
                    [this, source](auto t) { this->traverseEdge(source, t); });
}
namespace {
// Compile-time map from a GC thing type to whether that trace kind may ever
// be marked gray. Specializations are generated for every trace kind from
// JS_FOR_EACH_TRACEKIND's canBeGray column.
template <typename T>
struct TraceKindCanBeGray {};
#define EXPAND_TRACEKIND_DEF(_, type, canBeGray, _1) \
  template <>                                        \
  struct TraceKindCanBeGray<type> {                  \
    static const bool value = canBeGray;             \
  };
JS_FOR_EACH_TRACEKIND(EXPAND_TRACEKIND_DEF)
#undef EXPAND_TRACEKIND_DEF
}  // namespace

// Functor adapter so the compile-time table above can be queried with a
// runtime JS::TraceKind via DispatchTraceKindTyped.
struct TraceKindCanBeGrayFunctor {
  template <typename T>
  bool operator()() {
    return TraceKindCanBeGray<T>::value;
  }
};

// Runtime query: can things of |kind| ever be marked gray?
static bool TraceKindCanBeMarkedGray(JS::TraceKind kind) {
  return DispatchTraceKindTyped(TraceKindCanBeGrayFunctor(), kind);
}
template <typename T>
bool js::GCMarker::mark(T* thing) {
if (!thing->isTenured()) {
return false;
}
AssertShouldMarkInZone(thing);
TenuredCell* cell = &thing->asTenured();
MarkColor color =
TraceKindCanBeGray<T>::value ? markColor() : MarkColor::Black;
bool marked = cell->markIfUnmarked(color);
if (marked) {
markCount++;
}
return marked;
}
/*** Inline, Eager GC Marking ***********************************************/
// Each of the eager, inline marking paths is directly preceded by the
// out-of-line, generic tracing code for comparison. Both paths must end up
// traversing equivalent subgraphs.
// Trace all GC-thing edges owned by this script: its function/global, source
// object, warm-up data, script data, and (for scripts with bytecode) any
// per-runtime/per-zone side tables such as debug scripts.
void BaseScript::traceChildren(JSTracer* trc) {
  TraceEdge(trc, &functionOrGlobal_, "function");
  TraceEdge(trc, &sourceObject_, "sourceObject");

  warmUpData_.trace(trc);

  if (data_) {
    data_->trace(trc);
  }

  // Scripts with bytecode may have optional data stored in per-runtime or
  // per-zone maps. Note that a failed compilation must not have entries since
  // the script itself will not be marked as having bytecode.
  if (hasBytecode()) {
    JSScript* script = this->asJSScript();

    if (hasDebugScript()) {
      DebugAPI::traceDebugScript(trc, script);
    }
  }

  // When marking, also mark implicit edges, e.g. those arising from this
  // script being used as a weakmap key (see the markAndPush comment above).
  if (trc->isMarkingTracer()) {
    GCMarker::fromTracer(trc)->markImplicitEdges(this);
  }
}
// Trace all GC-thing edges owned by this shape: its BaseShape (stored in the
// cell header), property id, parent shape, an optional object in dictNext,
// and getter/setter objects for accessor shapes.
void Shape::traceChildren(JSTracer* trc) {
  TraceCellHeaderEdge(trc, this, "base");
  TraceEdge(trc, &propidRef(), "propid");
  if (parent) {
    TraceEdge(trc, &parent, "parent");
  }

  // dictNext may hold a JSObject; trace it manually and write the possibly
  // updated pointer back if the tracer moved the object.
  if (dictNext.isObject()) {
    JSObject* obj = dictNext.toObject();
    TraceManuallyBarrieredEdge(trc, &obj, "dictNext object");
    if (obj != dictNext.toObject()) {
      dictNext.setObject(obj);
    }
  }

  // Getter/setter objects live on the AccessorShape subclass.
  if (hasGetterObject()) {
    TraceManuallyBarrieredEdge(trc, &asAccessorShape().getterObj, "getter");
  }
  if (hasSetterObject()) {
    TraceManuallyBarrieredEdge(trc, &asAccessorShape().setterObj, "setter");
  }
}
// Eager inline equivalent of Shape::traceChildren: walk the parent chain
// iteratively, marking each shape and its children, stopping at the first
// already-marked ancestor. Must traverse the same edges as traceChildren.
inline void js::GCMarker::eagerlyMarkChildren(Shape* shape) {
  MOZ_ASSERT(shape->isMarked(markColor()));
  do {
    // Special case: if a base shape has a shape table then all its pointers
    // must point to this shape or an ancestor. Since these pointers will
    // be traced by this loop they do not need to be traced here as well.
    BaseShape* base = shape->base();
    CheckTraversedEdge(shape, base);
    if (mark(base)) {
      MOZ_ASSERT(base->canSkipMarkingShapeCache(shape));
      base->traceChildrenSkipShapeCache(this);
    }

    traverseEdge(shape, shape->propidRef().get());

    // Normally only the last shape in a dictionary list can have a pointer to
    // an object here, but it's possible that we can see this if we trace
    // barriers while removing a shape from a dictionary list.
    if (shape->dictNext.isObject()) {
      traverseEdge(shape, shape->dictNext.toObject());
    }

    // When triggered between slices on behalf of a barrier, these
    // objects may reside in the nursery, so require an extra check.
    // FIXME: Bug 1157967 - remove the isTenured checks.
    if (shape->hasGetterObject() && shape->getterObject()->isTenured()) {
      traverseEdge(shape, shape->getterObject());
    }
    if (shape->hasSetterObject() && shape->setterObject()->isTenured()) {
      traverseEdge(shape, shape->setterObject());
    }

    // Continue up the lineage until a parent is already marked (mark()
    // returning false) or we run out of parents.
    shape = shape->previous();
  } while (shape && mark(shape));
}
// Trace a string's GC-thing children: a dependent string traces its base;
// a rope traces both of its children. Other representations have none.
void JSString::traceChildren(JSTracer* trc) {
  if (hasBase()) {
    traceBase(trc);
  } else if (isRope()) {
    asRope().traceChildren(trc);
  }
}
// Eagerly scan a string's children, dispatching on its concrete
// representation: linear strings (which may have a base chain) vs. ropes.
inline void GCMarker::eagerlyMarkChildren(JSString* str) {
  if (str->isLinear()) {
    eagerlyMarkChildren(&str->asLinear());
    return;
  }
  eagerlyMarkChildren(&str->asRope());
}
// Trace the base-string edge of a dependent string. Caller must ensure the
// string actually has a base.
void JSString::traceBase(JSTracer* trc) {
  MOZ_ASSERT(hasBase());
  TraceManuallyBarrieredEdge(trc, &d.s.u3.base, "base");
}
// Eagerly mark the base chain of a linear string. The chain consists only of
// linear strings, so this is a simple loop rather than a recursion.
inline void js::GCMarker::eagerlyMarkChildren(JSLinearString* linearStr) {
  AssertShouldMarkInZone(linearStr);
  MOZ_ASSERT(linearStr->isMarkedAny());
  MOZ_ASSERT(linearStr->JSString::isLinear());

  // Use iterative marking to avoid blowing out the stack.
  while (linearStr->hasBase()) {
    linearStr = linearStr->base();
    MOZ_ASSERT(linearStr->JSString::isLinear());
    // Permanent atoms are never marked.
    if (linearStr->isPermanentAtom()) {
      break;
    }
    AssertShouldMarkInZone(linearStr);
    // Stop at the first already-marked base: the rest of the chain was
    // marked when that string was first visited.
    if (!mark(static_cast<JSString*>(linearStr))) {
      break;
    }
  }
}
// Trace both child edges of a rope.
void JSRope::traceChildren(JSTracer* trc) {
  js::TraceManuallyBarrieredEdge(trc, &d.s.u2.left, "left child");
  js::TraceManuallyBarrieredEdge(trc, &d.s.u3.right, "right child");
}
inline void js::GCMarker::eagerlyMarkChildren(JSRope* rope) {
  // This function tries to scan the whole rope tree using the marking stack
  // as temporary storage. If that becomes full, the unscanned ropes are
  // added to the delayed marking list. When the function returns, the
  // marking stack is at the same depth as it was on entry. This way we avoid
  // using tags when pushing ropes to the stack as ropes never leak to other
  // users of the stack. This also assumes that a rope can only point to
  // other ropes or linear strings, it cannot refer to GC things of other
  // types.
  gc::MarkStack& stack = currentStack();
  size_t savedPos = stack.position();
  MOZ_DIAGNOSTIC_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
  while (true) {
    MOZ_DIAGNOSTIC_ASSERT(rope->getTraceKind() == JS::TraceKind::String);
    MOZ_DIAGNOSTIC_ASSERT(rope->JSString::isRope());
    AssertShouldMarkInZone(rope);
    MOZ_ASSERT(rope->isMarkedAny());

    // The next unscanned rope child to descend into, if any.
    JSRope* next = nullptr;

    // Mark the right child; scan linear children immediately, defer ropes.
    JSString* right = rope->rightChild();
    if (!right->isPermanentAtom() && mark(right)) {
      if (right->isLinear()) {
        eagerlyMarkChildren(&right->asLinear());
      } else {
        next = &right->asRope();
      }
    }

    // Likewise for the left child.
    JSString* left = rope->leftChild();
    if (!left->isPermanentAtom() && mark(left)) {
      if (left->isLinear()) {
        eagerlyMarkChildren(&left->asLinear());
      } else {
        // When both children are ropes, set aside the right one to
        // scan it later.
        if (next && !stack.pushTempRope(next)) {
          delayMarkingChildren(next);
        }
        next = &left->asRope();
      }
    }

    if (next) {
      // Descend into the chosen child rope.
      rope = next;
    } else if (savedPos != stack.position()) {
      // No unscanned child here; pop a rope that was set aside earlier.
      MOZ_ASSERT(savedPos < stack.position());
      rope = stack.popPtr().asTempRope();
    } else {
      break;
    }
  }
  // The stack must be back at its entry depth.
  MOZ_ASSERT(savedPos == stack.position());
}
// Trace the name of every binding in |names|. All entries must be non-null;
// use TraceNullableBindingNames when names may be absent. The name is traced
// through a local copy because BindingName has no barriered storage.
static inline void TraceBindingNames(JSTracer* trc, BindingName* names,
                                     uint32_t length) {
  for (uint32_t i = 0; i < length; i++) {
    JSAtom* name = names[i].name();
    MOZ_ASSERT(name);
    TraceManuallyBarrieredEdge(trc, &name, "scope name");
  }
}
// Trace the name of each binding in |names|, skipping null (anonymous)
// entries. See TraceBindingNames for the non-nullable variant.
static inline void TraceNullableBindingNames(JSTracer* trc, BindingName* names,
                                             uint32_t length) {
  for (uint32_t i = 0; i < length; i++) {
    if (JSAtom* name = names[i].name()) {
      TraceManuallyBarrieredEdge(trc, &name, "scope name");
    }
  }
}
// Trace a single binding's name, if present (a binding's name may be null).
template <>
void AbstractBindingName<JSAtom>::trace(JSTracer* trc) {
  if (JSAtom* atom = name()) {
    TraceManuallyBarrieredEdge(trc, &atom, "binding name");
  }
}

// Trace the names of all bindings covered by this iterator; names may be
// null, so use the nullable variant.
void BindingIter::trace(JSTracer* trc) {
  TraceNullableBindingNames(trc, names_, length_);
}
// trace() specializations for the JSAtom instantiations of each scope kind's
// data. Each traces the trailing binding-names array plus any kind-specific
// GC edge (canonical function, module, wasm instance).
template <>
void LexicalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void FunctionScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &canonicalFunction, "scope canonical function");
  // Function scope bindings may be anonymous, hence the nullable variant.
  TraceNullableBindingNames(trc, trailingNames.start(), length);
}
template <>
void VarScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void GlobalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void EvalScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void ModuleScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &module, "scope module");
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void WasmInstanceScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &instance, "wasm instance");
  TraceBindingNames(trc, trailingNames.start(), length);
}
template <>
void WasmFunctionScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
  TraceBindingNames(trc, trailingNames.start(), length);
}
// Trace a scope's GC-thing children: its environment shape, enclosing scope,
// and kind-specific data (dispatched by applyScopeDataTyped).
void Scope::traceChildren(JSTracer* trc) {
  TraceNullableEdge(trc, &environmentShape_, "scope env shape");
  TraceNullableEdge(trc, &enclosingScope_, "scope enclosing");
  applyScopeDataTyped([trc](auto data) { data->trace(trc); });
}
// Eager inline equivalent of Scope::traceChildren: iteratively mark the chain
// of enclosing scopes, traversing each scope's environment shape, binding
// names, and any kind-specific object edge. Must visit the same edges as
// traceChildren above.
inline void js::GCMarker::eagerlyMarkChildren(Scope* scope) {
  do {
    if (scope->environmentShape()) {
      traverseEdge(scope, scope->environmentShape());
    }

    // Locate the trailing binding-names array and its length for this scope
    // kind, traversing any kind-specific edges on the way. For ScopeKind::With
    // there are no bindings, so |names| stays null and |length| zero.
    AbstractTrailingNamesArray<JSAtom>* names = nullptr;
    uint32_t length = 0;
    switch (scope->kind()) {
      case ScopeKind::Function: {
        FunctionScope::Data& data = scope->as<FunctionScope>().data();
        if (data.canonicalFunction) {
          traverseObjectEdge(scope, data.canonicalFunction);
        }
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::FunctionBodyVar: {
        VarScope::Data& data = scope->as<VarScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::Lexical:
      case ScopeKind::SimpleCatch:
      case ScopeKind::Catch:
      case ScopeKind::NamedLambda:
      case ScopeKind::StrictNamedLambda:
      case ScopeKind::FunctionLexical:
      case ScopeKind::ClassBody: {
        LexicalScope::Data& data = scope->as<LexicalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::Global:
      case ScopeKind::NonSyntactic: {
        GlobalScope::Data& data = scope->as<GlobalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::Eval:
      case ScopeKind::StrictEval: {
        EvalScope::Data& data = scope->as<EvalScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::Module: {
        ModuleScope::Data& data = scope->as<ModuleScope>().data();
        if (data.module) {
          traverseObjectEdge(scope, data.module);
        }
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::With:
        break;

      case ScopeKind::WasmInstance: {
        WasmInstanceScope::Data& data = scope->as<WasmInstanceScope>().data();
        traverseObjectEdge(scope, data.instance);
        names = &data.trailingNames;
        length = data.length;
        break;
      }

      case ScopeKind::WasmFunction: {
        WasmFunctionScope::Data& data = scope->as<WasmFunctionScope>().data();
        names = &data.trailingNames;
        length = data.length;
        break;
      }
    }

    if (scope->kind_ == ScopeKind::Function) {
      // Function scope bindings may be anonymous (null name); skip those.
      for (uint32_t i = 0; i < length; i++) {
        if (JSAtom* name = names->get(i).name()) {
          traverseStringEdge(scope, name);
        }
      }
    } else {
      // All other kinds' bindings are expected to have non-null names.
      for (uint32_t i = 0; i < length; i++) {
        traverseStringEdge(scope, names->get(i).name());
      }
    }

    // Walk outward until an enclosing scope is already marked or absent.
    scope = scope->enclosing();
  } while (scope && mark(scope));
}
void js::ObjectGroup::traceChildren(JSTracer* trc) {
AutoSweepObjectGroup sweep(this);
if (!trc->canSkipJsids()) {
unsigned count = getPropertyCount(sweep);
for (unsigned i = 0; i < count; i++) {
if (ObjectGroup::Property* prop = getProperty(sweep, i)) {
TraceEdge(trc, &prop->id, "group_property");
}
}
}
if (proto().isObject()) {
TraceEdge(trc, &proto(), "group_proto");
}
// Note: the realm's global can be nullptr if we GC while creating the global.