/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*
 * JS bytecode generation.
 */
#include "frontend/BytecodeEmitter.h"
#include "mozilla/Casting.h" // mozilla::AssertedCast
#include "mozilla/DebugOnly.h" // mozilla::DebugOnly
#include "mozilla/FloatingPoint.h" // mozilla::NumberEqualsInt32, mozilla::NumberIsInt32
#include "mozilla/HashTable.h" // mozilla::HashSet
#include "mozilla/Maybe.h" // mozilla::{Maybe,Nothing,Some}
#include "mozilla/Saturate.h"
#include "mozilla/Variant.h" // mozilla::AsVariant
#include <algorithm>
#include <iterator>
#include <string.h>
#include "jstypes.h" // JS_BIT
#include "frontend/AbstractScopePtr.h" // ScopeIndex
#include "frontend/BytecodeControlStructures.h" // NestableControl, BreakableControl, LabelControl, LoopControl, TryFinallyControl
#include "frontend/CallOrNewEmitter.h" // CallOrNewEmitter
#include "frontend/CForEmitter.h" // CForEmitter
#include "frontend/DecoratorEmitter.h" // DecoratorEmitter
#include "frontend/DefaultEmitter.h" // DefaultEmitter
#include "frontend/DoWhileEmitter.h" // DoWhileEmitter
#include "frontend/ElemOpEmitter.h" // ElemOpEmitter
#include "frontend/EmitterScope.h" // EmitterScope
#include "frontend/ExpressionStatementEmitter.h" // ExpressionStatementEmitter
#include "frontend/ForInEmitter.h" // ForInEmitter
#include "frontend/ForOfEmitter.h" // ForOfEmitter
#include "frontend/FunctionEmitter.h" // FunctionEmitter, FunctionScriptEmitter, FunctionParamsEmitter
#include "frontend/IfEmitter.h" // IfEmitter, InternalIfEmitter, CondEmitter
#include "frontend/LabelEmitter.h" // LabelEmitter
#include "frontend/LexicalScopeEmitter.h" // LexicalScopeEmitter
#include "frontend/ModuleSharedContext.h" // ModuleSharedContext
#include "frontend/NameAnalysisTypes.h" // PrivateNameKind
#include "frontend/NameFunctions.h" // NameFunctions
#include "frontend/NameOpEmitter.h" // NameOpEmitter
#include "frontend/ObjectEmitter.h" // PropertyEmitter, ObjectEmitter, ClassEmitter
#include "frontend/OptionalEmitter.h" // OptionalEmitter
#include "frontend/ParseContext.h" // ParseContext::Scope
#include "frontend/ParseNode.h" // ParseNodeKind, ParseNode and subclasses
#include "frontend/Parser.h" // Parser
#include "frontend/ParserAtom.h" // ParserAtomsTable, ParserAtom
#include "frontend/PrivateOpEmitter.h" // PrivateOpEmitter
#include "frontend/PropOpEmitter.h" // PropOpEmitter
#include "frontend/SourceNotes.h" // SrcNote, SrcNoteType, SrcNoteWriter
#include "frontend/SwitchEmitter.h" // SwitchEmitter
#include "frontend/TaggedParserAtomIndexHasher.h" // TaggedParserAtomIndexHasher
#include "frontend/TDZCheckCache.h" // TDZCheckCache
#include "frontend/TryEmitter.h" // TryEmitter
#include "frontend/UsingEmitter.h" // UsingEmitter
#include "frontend/WhileEmitter.h" // WhileEmitter
#include "js/ColumnNumber.h" // JS::LimitedColumnNumberOneOrigin, JS::ColumnNumberOffset
#include "js/friend/ErrorMessages.h" // JSMSG_*
#include "js/friend/StackLimits.h" // AutoCheckRecursionLimit
#include "util/StringBuilder.h" // StringBuilder
#include "vm/BytecodeUtil.h" // JOF_*, IsArgOp, IsLocalOp, SET_UINT24, SET_ICINDEX, BytecodeFallsThrough, BytecodeIsJumpTarget
#include "vm/CompletionKind.h" // CompletionKind
#include "vm/FunctionPrefixKind.h" // FunctionPrefixKind
#include "vm/GeneratorObject.h" // AbstractGeneratorObject
#include "vm/Opcodes.h" // JSOp, JSOpLength_*
#include "vm/PropMap.h" // SharedPropMap::MaxPropsForNonDictionary
#include "vm/Scope.h" // GetScopeDataTrailingNames
#include "vm/SharedStencil.h" // ScopeNote
#include "vm/ThrowMsgKind.h" // ThrowMsgKind
#include "vm/TypeofEqOperand.h" // TypeofEqOperand
using namespace js;
using namespace js::frontend;
using mozilla::AssertedCast;
using mozilla::AsVariant;
using mozilla::DebugOnly;
using mozilla::Maybe;
using mozilla::Nothing;
using mozilla::NumberEqualsInt32;
using mozilla::NumberIsInt32;
using mozilla::Some;
static bool ParseNodeRequiresSpecialLineNumberNotes(ParseNode* pn) {
// The few node types listed below are exceptions to the usual
// location-source-note-emitting code in BytecodeEmitter::emitTree().
// Single-line `while` loops and C-style `for` loops require careful
// handling to avoid strange stepping behavior.
ParseNodeKind kind = pn->getKind();
return kind == ParseNodeKind::WhileStmt || kind == ParseNodeKind::ForStmt ||
kind == ParseNodeKind::Function;
}
static bool NeedsFieldInitializer(ParseNode* member, bool inStaticContext) {
// For the purposes of bytecode emission, StaticClassBlocks are treated as if
// they were static initializers.
return (member->is<StaticClassBlock>() && inStaticContext) ||
(member->is<ClassField>() &&
member->as<ClassField>().isStatic() == inStaticContext);
}
static bool NeedsAccessorInitializer(ParseNode* member, bool isStatic) {
if (isStatic) {
return false;
}
return member->is<ClassMethod>() &&
member->as<ClassMethod>().name().isKind(ParseNodeKind::PrivateName) &&
!member->as<ClassMethod>().isStatic() &&
member->as<ClassMethod>().accessorType() != AccessorType::None;
}
static bool ShouldSuppressBreakpointsAndSourceNotes(
SharedContext* sc, BytecodeEmitter::EmitterMode emitterMode) {
// Suppress for all self-hosting code.
if (emitterMode == BytecodeEmitter::EmitterMode::SelfHosting) {
return true;
}
// Suppress for synthesized class constructors.
if (sc->isFunctionBox()) {
FunctionBox* funbox = sc->asFunctionBox();
return funbox->isSyntheticFunction() && funbox->isClassConstructor();
}
return false;
}
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent, FrontendContext* fc,
SharedContext* sc,
const ErrorReporter& errorReporter,
CompilationState& compilationState,
EmitterMode emitterMode)
: sc(sc),
fc(fc),
parent(parent),
bytecodeSection_(fc, sc->extent().lineno,
JS::LimitedColumnNumberOneOrigin(sc->extent().column)),
perScriptData_(fc, compilationState),
errorReporter_(errorReporter),
compilationState(compilationState),
suppressBreakpointsAndSourceNotes(
ShouldSuppressBreakpointsAndSourceNotes(sc, emitterMode)),
emitterMode(emitterMode) {
MOZ_ASSERT_IF(parent, fc == parent->fc);
}
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent, SharedContext* sc)
: BytecodeEmitter(parent, parent->fc, sc, parent->errorReporter_,
parent->compilationState, parent->emitterMode) {}
BytecodeEmitter::BytecodeEmitter(FrontendContext* fc,
const EitherParser& parser, SharedContext* sc,
CompilationState& compilationState,
EmitterMode emitterMode)
: BytecodeEmitter(nullptr, fc, sc, parser.errorReporter(), compilationState,
emitterMode) {
ep_.emplace(parser);
}
void BytecodeEmitter::initFromBodyPosition(TokenPos bodyPosition) {
setScriptStartOffsetIfUnset(bodyPosition.begin);
setFunctionBodyEndPos(bodyPosition.end);
}
bool BytecodeEmitter::init() {
if (!parent) {
if (!compilationState.prepareSharedDataStorage(fc)) {
return false;
}
}
return perScriptData_.init(fc);
}
bool BytecodeEmitter::init(TokenPos bodyPosition) {
initFromBodyPosition(bodyPosition);
return init();
}
template <typename T>
T* BytecodeEmitter::findInnermostNestableControl() const {
return NestableControl::findNearest<T>(innermostNestableControl);
}
template <typename T, typename Predicate /* (T*) -> bool */>
T* BytecodeEmitter::findInnermostNestableControl(Predicate predicate) const {
return NestableControl::findNearest<T>(innermostNestableControl, predicate);
}
NameLocation BytecodeEmitter::lookupName(TaggedParserAtomIndex name) {
return innermostEmitterScope()->lookup(this, name);
}
void BytecodeEmitter::lookupPrivate(TaggedParserAtomIndex name,
NameLocation& loc,
Maybe<NameLocation>& brandLoc) {
innermostEmitterScope()->lookupPrivate(this, name, loc, brandLoc);
}
Maybe<NameLocation> BytecodeEmitter::locationOfNameBoundInScope(
TaggedParserAtomIndex name, EmitterScope* target) {
return innermostEmitterScope()->locationBoundInScope(name, target);
}
template <typename T>
Maybe<NameLocation> BytecodeEmitter::locationOfNameBoundInScopeType(
TaggedParserAtomIndex name, EmitterScope* source) {
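  // Walk outward from |source| to the innermost enclosing scope of kind T,
  // then look up |name|'s location with respect to that scope.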
EmitterScope* aScope = source;
while (!aScope->scope(this).is<T>()) {
aScope = aScope->enclosingInFrame();
}
return source->locationBoundInScope(name, aScope);
}
bool BytecodeEmitter::markStepBreakpoint() {
if (skipBreakpointSrcNotes()) {
return true;
}
if (!newSrcNote(SrcNoteType::BreakpointStepSep)) {
return false;
}
// We track the location of the most recent separator for use in
// markSimpleBreakpoint. Note that this means that the position must already
// be set before markStepBreakpoint is called.
bytecodeSection().updateSeparatorPosition();
return true;
}
bool BytecodeEmitter::markSimpleBreakpoint() {
if (skipBreakpointSrcNotes()) {
return true;
}
// If a breakable call ends up being the same location as the most recent
// expression start, we need to skip marking it breakable in order to avoid
// having two breakpoints with the same line/column position.
// Note: This assumes that the position for the call has already been set.
if (!bytecodeSection().isDuplicateLocation()) {
if (!newSrcNote(SrcNoteType::Breakpoint)) {
return false;
}
}
return true;
}
bool BytecodeEmitter::emitCheck(JSOp op, ptrdiff_t delta,
BytecodeOffset* offset) {
size_t oldLength = bytecodeSection().code().length();
*offset = BytecodeOffset(oldLength);
size_t newLength = oldLength + size_t(delta);
if (MOZ_UNLIKELY(newLength > MaxBytecodeLength)) {
ReportAllocationOverflow(fc);
return false;
}
if (!bytecodeSection().code().growByUninitialized(delta)) {
return false;
}
if (BytecodeOpHasIC(op)) {
// Even if every bytecode op is a JOF_IC op and the function has ARGC_LIMIT
// arguments, numICEntries cannot overflow.
static_assert(MaxBytecodeLength + 1 /* this */ + ARGC_LIMIT <= UINT32_MAX,
"numICEntries must not overflow");
bytecodeSection().incrementNumICEntries();
}
return true;
}
#ifdef DEBUG
bool BytecodeEmitter::checkStrictOrSloppy(JSOp op) const {
if (IsCheckStrictOp(op) && !sc->strict()) {
return false;
}
if (IsCheckSloppyOp(op) && sc->strict()) {
return false;
}
return true;
}
#endif
bool BytecodeEmitter::emit1(JSOp op) {
MOZ_ASSERT(checkStrictOrSloppy(op));
MOZ_ASSERT(GetOpLength(op) == 1);
BytecodeOffset offset;
if (!emitCheck(op, 1, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(op);
bytecodeSection().updateDepth(op, offset);
return true;
}
bool BytecodeEmitter::emit2(JSOp op, uint8_t op1) {
MOZ_ASSERT(checkStrictOrSloppy(op));
MOZ_ASSERT(GetOpLength(op) == 2);
BytecodeOffset offset;
if (!emitCheck(op, 2, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(op);
code[1] = jsbytecode(op1);
bytecodeSection().updateDepth(op, offset);
return true;
}
bool BytecodeEmitter::emit3(JSOp op, jsbytecode op1, jsbytecode op2) {
MOZ_ASSERT(checkStrictOrSloppy(op));
MOZ_ASSERT(GetOpLength(op) == 3);
/* These should filter through emitVarOp. */
MOZ_ASSERT(!IsArgOp(op));
MOZ_ASSERT(!IsLocalOp(op));
BytecodeOffset offset;
if (!emitCheck(op, 3, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(op);
code[1] = op1;
code[2] = op2;
bytecodeSection().updateDepth(op, offset);
return true;
}
bool BytecodeEmitter::emitN(JSOp op, size_t extra, BytecodeOffset* offset) {
MOZ_ASSERT(checkStrictOrSloppy(op));
ptrdiff_t length = 1 + ptrdiff_t(extra);
BytecodeOffset off;
if (!emitCheck(op, length, &off)) {
return false;
}
jsbytecode* code = bytecodeSection().code(off);
code[0] = jsbytecode(op);
/* The remaining |extra| bytes are set by the caller */
/*
* Don't updateDepth if op's use-count comes from the immediate
* operand yet to be stored in the extra bytes after op.
*/
if (CodeSpec(op).nuses >= 0) {
bytecodeSection().updateDepth(op, off);
}
if (offset) {
*offset = off;
}
return true;
}
bool BytecodeEmitter::emitJumpTargetOp(JSOp op, BytecodeOffset* off) {
MOZ_ASSERT(BytecodeIsJumpTarget(op));
// Record the current IC-entry index at start of this op.
uint32_t numEntries = bytecodeSection().numICEntries();
size_t n = GetOpLength(op) - 1;
MOZ_ASSERT(GetOpLength(op) >= 1 + ICINDEX_LEN);
if (!emitN(op, n, off)) {
return false;
}
SET_ICINDEX(bytecodeSection().code(*off), numEntries);
return true;
}
bool BytecodeEmitter::emitJumpTarget(JumpTarget* target) {
BytecodeOffset off = bytecodeSection().offset();
// Alias consecutive jump targets.
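  // If the op just emitted was itself a JumpTarget ending exactly at the
  // current offset, reuse its offset rather than emitting a second,
  // back-to-back JumpTarget instruction.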
if (bytecodeSection().lastTargetOffset().valid() &&
off == bytecodeSection().lastTargetOffset() +
BytecodeOffsetDiff(JSOpLength_JumpTarget)) {
target->offset = bytecodeSection().lastTargetOffset();
return true;
}
target->offset = off;
bytecodeSection().setLastTargetOffset(off);
BytecodeOffset opOff;
return emitJumpTargetOp(JSOp::JumpTarget, &opOff);
}
bool BytecodeEmitter::emitJumpNoFallthrough(JSOp op, JumpList* jump) {
BytecodeOffset offset;
if (!emitCheck(op, 5, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(op);
MOZ_ASSERT(!jump->offset.valid() ||
(0 <= jump->offset.value() && jump->offset < offset));
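  // The jump operand is left unpatched here; push() records this jump on
  // |jump| so that patchJumpsToTarget can later fix up every pending jump in
  // the list once the target offset is known.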
jump->push(bytecodeSection().code(BytecodeOffset(0)), offset);
bytecodeSection().updateDepth(op, offset);
return true;
}
bool BytecodeEmitter::emitJump(JSOp op, JumpList* jump) {
if (!emitJumpNoFallthrough(op, jump)) {
return false;
}
if (BytecodeFallsThrough(op)) {
JumpTarget fallthrough;
if (!emitJumpTarget(&fallthrough)) {
return false;
}
}
return true;
}
void BytecodeEmitter::patchJumpsToTarget(JumpList jump, JumpTarget target) {
MOZ_ASSERT(
!jump.offset.valid() ||
(0 <= jump.offset.value() && jump.offset <= bytecodeSection().offset()));
MOZ_ASSERT(0 <= target.offset.value() &&
target.offset <= bytecodeSection().offset());
MOZ_ASSERT_IF(
jump.offset.valid() &&
target.offset + BytecodeOffsetDiff(4) <= bytecodeSection().offset(),
BytecodeIsJumpTarget(JSOp(*bytecodeSection().code(target.offset))));
jump.patchAll(bytecodeSection().code(BytecodeOffset(0)), target);
}
bool BytecodeEmitter::emitJumpTargetAndPatch(JumpList jump) {
if (!jump.offset.valid()) {
return true;
}
JumpTarget target;
if (!emitJumpTarget(&target)) {
return false;
}
patchJumpsToTarget(jump, target);
return true;
}
bool BytecodeEmitter::emitCall(JSOp op, uint16_t argc,
const Maybe<uint32_t>& sourceCoordOffset) {
if (sourceCoordOffset.isSome()) {
if (!updateSourceCoordNotes(*sourceCoordOffset)) {
return false;
}
}
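  // The argument count is a 16-bit immediate, emitted as two operand bytes,
  // low byte first.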
return emit3(op, ARGC_LO(argc), ARGC_HI(argc));
}
bool BytecodeEmitter::emitCall(JSOp op, uint16_t argc, ParseNode* pn) {
return emitCall(op, argc, pn ? Some(pn->pn_pos.begin) : Nothing());
}
bool BytecodeEmitter::emitDupAt(unsigned slotFromTop, unsigned count) {
MOZ_ASSERT(slotFromTop < unsigned(bytecodeSection().stackDepth()));
MOZ_ASSERT(slotFromTop + 1 >= count);
if (slotFromTop == 0 && count == 1) {
return emit1(JSOp::Dup);
}
if (slotFromTop == 1 && count == 2) {
return emit1(JSOp::Dup2);
}
if (slotFromTop >= Bit(24)) {
reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
return false;
}
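  // Each JSOp::DupAt pushes one value, so the slot that was originally one
  // below the previous source ends up at the same relative depth from the
  // new stack top; the same |slotFromTop| operand works for every iteration.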
for (unsigned i = 0; i < count; i++) {
BytecodeOffset off;
if (!emitN(JSOp::DupAt, 3, &off)) {
return false;
}
jsbytecode* pc = bytecodeSection().code(off);
SET_UINT24(pc, slotFromTop);
}
return true;
}
bool BytecodeEmitter::emitPopN(unsigned n) {
MOZ_ASSERT(n != 0);
if (n == 1) {
return emit1(JSOp::Pop);
}
// 2 JSOp::Pop instructions (2 bytes) are shorter than JSOp::PopN (3 bytes).
if (n == 2) {
return emit1(JSOp::Pop) && emit1(JSOp::Pop);
}
return emitUint16Operand(JSOp::PopN, n);
}
bool BytecodeEmitter::emitPickN(uint8_t n) {
MOZ_ASSERT(n != 0);
if (n == 1) {
return emit1(JSOp::Swap);
}
return emit2(JSOp::Pick, n);
}
bool BytecodeEmitter::emitUnpickN(uint8_t n) {
MOZ_ASSERT(n != 0);
if (n == 1) {
return emit1(JSOp::Swap);
}
return emit2(JSOp::Unpick, n);
}
bool BytecodeEmitter::emitCheckIsObj(CheckIsObjectKind kind) {
return emit2(JSOp::CheckIsObj, uint8_t(kind));
}
bool BytecodeEmitter::emitBuiltinObject(BuiltinObjectKind kind) {
return emit2(JSOp::BuiltinObject, uint8_t(kind));
}
/* Updates line number notes, not column notes. */
bool BytecodeEmitter::updateLineNumberNotes(uint32_t offset) {
if (skipLocationSrcNotes()) {
return true;
}
const ErrorReporter& er = errorReporter();
std::optional<bool> onThisLineStatus =
er.isOnThisLine(offset, bytecodeSection().currentLine());
if (!onThisLineStatus.has_value()) {
er.errorNoOffset(JSMSG_OUT_OF_MEMORY);
return false;
}
bool onThisLine = *onThisLineStatus;
if (!onThisLine) {
unsigned line = er.lineAt(offset);
unsigned delta = line - bytecodeSection().currentLine();
    // If we use a `SetLine` note below, we want it to be relative to the
    // script's initial line number for a better chance of sharing.
unsigned initialLine = sc->extent().lineno;
MOZ_ASSERT(line >= initialLine);
/*
* Encode any change in the current source line number by using
* either several SrcNoteType::NewLine notes or just one
* SrcNoteType::SetLine note, whichever consumes less space.
*
* NB: We handle backward line number deltas (possible with for
* loops where the update part is emitted after the body, but its
* line number is <= any line number in the body) here by letting
     * unsigned delta wrap to a very large number, which triggers a
* SrcNoteType::SetLine.
*/
bytecodeSection().setCurrentLine(line, offset);
if (delta >= SrcNote::SetLine::lengthFor(line, initialLine)) {
if (!newSrcNote2(SrcNoteType::SetLine,
SrcNote::SetLine::toOperand(line, initialLine))) {
return false;
}
} else {
do {
if (!newSrcNote(SrcNoteType::NewLine)) {
return false;
}
} while (--delta != 0);
}
bytecodeSection().updateSeparatorPositionIfPresent();
}
return true;
}
/* Updates the line number and column number information in the source notes. */
bool BytecodeEmitter::updateSourceCoordNotes(uint32_t offset) {
if (skipLocationSrcNotes()) {
return true;
}
if (!updateLineNumberNotes(offset)) {
return false;
}
JS::LimitedColumnNumberOneOrigin columnIndex =
errorReporter().columnAt(offset);
// Assert colspan is always representable.
static_assert((0 - ptrdiff_t(JS::LimitedColumnNumberOneOrigin::Limit)) >=
SrcNote::ColSpan::MinColSpan);
static_assert((ptrdiff_t(JS::LimitedColumnNumberOneOrigin::Limit) - 0) <=
SrcNote::ColSpan::MaxColSpan);
JS::ColumnNumberOffset colspan = columnIndex - bytecodeSection().lastColumn();
if (colspan != JS::ColumnNumberOffset::zero()) {
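    // If the most recently emitted note only recorded a line change, fold the
    // column information into it (NewLine -> NewLineColumn, SetLine ->
    // SetLineColumn); otherwise emit a standalone ColSpan note for the delta.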
if (lastLineOnlySrcNoteIndex != LastSrcNoteIsNotLineOnly) {
MOZ_ASSERT(bytecodeSection().lastColumn() ==
JS::LimitedColumnNumberOneOrigin());
const SrcNotesVector& notes = bytecodeSection().notes();
SrcNoteType type = notes[lastLineOnlySrcNoteIndex].type();
if (type == SrcNoteType::NewLine) {
if (!convertLastNewLineToNewLineColumn(columnIndex)) {
return false;
}
} else {
MOZ_ASSERT(type == SrcNoteType::SetLine);
if (!convertLastSetLineToSetLineColumn(columnIndex)) {
return false;
}
}
} else {
if (!newSrcNote2(SrcNoteType::ColSpan,
SrcNote::ColSpan::toOperand(colspan))) {
return false;
}
}
bytecodeSection().setLastColumn(columnIndex, offset);
bytecodeSection().updateSeparatorPositionIfPresent();
}
return true;
}
bool BytecodeEmitter::updateSourceCoordNotesIfNonLiteral(ParseNode* node) {
if (node->isLiteral()) {
return true;
}
return updateSourceCoordNotes(node->pn_pos.begin);
}
uint32_t BytecodeEmitter::getOffsetForLoop(ParseNode* nextpn) const {
// Try to give the JSOp::LoopHead the same line number as the next
// instruction. nextpn is often a block, in which case the next instruction
// typically comes from the first statement inside.
if (nextpn->is<LexicalScopeNode>()) {
nextpn = nextpn->as<LexicalScopeNode>().scopeBody();
}
if (nextpn->isKind(ParseNodeKind::StatementList)) {
if (ParseNode* firstStatement = nextpn->as<ListNode>().head()) {
nextpn = firstStatement;
}
}
return nextpn->pn_pos.begin;
}
bool BytecodeEmitter::emitUint16Operand(JSOp op, uint32_t operand) {
MOZ_ASSERT(operand <= UINT16_MAX);
if (!emit3(op, UINT16_LO(operand), UINT16_HI(operand))) {
return false;
}
return true;
}
bool BytecodeEmitter::emitUint32Operand(JSOp op, uint32_t operand) {
BytecodeOffset off;
if (!emitN(op, 4, &off)) {
return false;
}
SET_UINT32(bytecodeSection().code(off), operand);
return true;
}
bool BytecodeEmitter::emitGoto(NestableControl* target, GotoKind kind) {
NonLocalExitControl nle(this, kind == GotoKind::Continue
? NonLocalExitKind::Continue
: NonLocalExitKind::Break);
return nle.emitNonLocalJump(target);
}
AbstractScopePtr BytecodeEmitter::innermostScope() const {
return innermostEmitterScope()->scope(this);
}
ScopeIndex BytecodeEmitter::innermostScopeIndex() const {
return *innermostEmitterScope()->scopeIndex(this);
}
bool BytecodeEmitter::emitGCIndexOp(JSOp op, GCThingIndex index) {
MOZ_ASSERT(checkStrictOrSloppy(op));
constexpr size_t OpLength = 1 + GCTHING_INDEX_LEN;
MOZ_ASSERT(GetOpLength(op) == OpLength);
BytecodeOffset offset;
if (!emitCheck(op, OpLength, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(op);
SET_GCTHING_INDEX(code, index);
bytecodeSection().updateDepth(op, offset);
return true;
}
bool BytecodeEmitter::emitAtomOp(JSOp op, TaggedParserAtomIndex atom) {
MOZ_ASSERT(atom);
// .generator lookups should be emitted as JSOp::GetAliasedVar instead of
// JSOp::GetName etc, to bypass |with| objects on the scope chain.
// It's safe to emit .this lookups though because |with| objects skip
// those.
MOZ_ASSERT_IF(op == JSOp::GetName || op == JSOp::GetGName,
atom != TaggedParserAtomIndex::WellKnown::dot_generator_());
GCThingIndex index;
if (!makeAtomIndex(atom, ParserAtom::Atomize::Yes, &index)) {
return false;
}
return emitAtomOp(op, index);
}
bool BytecodeEmitter::emitAtomOp(JSOp op, GCThingIndex atomIndex) {
MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
#ifdef DEBUG
auto atom = perScriptData().gcThingList().getAtom(atomIndex);
MOZ_ASSERT(compilationState.parserAtoms.isInstantiatedAsJSAtom(atom));
#endif
return emitGCIndexOp(op, atomIndex);
}
bool BytecodeEmitter::emitStringOp(JSOp op, TaggedParserAtomIndex atom) {
MOZ_ASSERT(atom);
GCThingIndex index;
if (!makeAtomIndex(atom, ParserAtom::Atomize::No, &index)) {
return false;
}
return emitStringOp(op, index);
}
bool BytecodeEmitter::emitStringOp(JSOp op, GCThingIndex atomIndex) {
MOZ_ASSERT(JOF_OPTYPE(op) == JOF_STRING);
return emitGCIndexOp(op, atomIndex);
}
bool BytecodeEmitter::emitInternedScopeOp(GCThingIndex index, JSOp op) {
MOZ_ASSERT(JOF_OPTYPE(op) == JOF_SCOPE);
MOZ_ASSERT(index < perScriptData().gcThingList().length());
return emitGCIndexOp(op, index);
}
bool BytecodeEmitter::emitInternedObjectOp(GCThingIndex index, JSOp op) {
MOZ_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
MOZ_ASSERT(index < perScriptData().gcThingList().length());
return emitGCIndexOp(op, index);
}
bool BytecodeEmitter::emitRegExp(GCThingIndex index) {
return emitGCIndexOp(JSOp::RegExp, index);
}
bool BytecodeEmitter::emitLocalOp(JSOp op, uint32_t slot) {
MOZ_ASSERT(JOF_OPTYPE(op) != JOF_ENVCOORD);
MOZ_ASSERT(IsLocalOp(op));
BytecodeOffset off;
if (!emitN(op, LOCALNO_LEN, &off)) {
return false;
}
SET_LOCALNO(bytecodeSection().code(off), slot);
return true;
}
bool BytecodeEmitter::emitArgOp(JSOp op, uint16_t slot) {
MOZ_ASSERT(IsArgOp(op));
BytecodeOffset off;
if (!emitN(op, ARGNO_LEN, &off)) {
return false;
}
SET_ARGNO(bytecodeSection().code(off), slot);
return true;
}
bool BytecodeEmitter::emitEnvCoordOp(JSOp op, EnvironmentCoordinate ec) {
MOZ_ASSERT(JOF_OPTYPE(op) == JOF_ENVCOORD ||
JOF_OPTYPE(op) == JOF_DEBUGCOORD);
constexpr size_t N = ENVCOORD_HOPS_LEN + ENVCOORD_SLOT_LEN;
MOZ_ASSERT(GetOpLength(op) == 1 + N);
BytecodeOffset off;
if (!emitN(op, N, &off)) {
return false;
}
jsbytecode* pc = bytecodeSection().code(off);
SET_ENVCOORD_HOPS(pc, ec.hops());
pc += ENVCOORD_HOPS_LEN;
SET_ENVCOORD_SLOT(pc, ec.slot());
pc += ENVCOORD_SLOT_LEN;
return true;
}
bool BytecodeEmitter::checkSideEffects(ParseNode* pn, bool* answer) const {
AutoCheckRecursionLimit recursion(fc);
if (!recursion.check(fc)) {
return false;
}
restart:
switch (pn->getKind()) {
// Trivial cases with no side effects.
case ParseNodeKind::EmptyStmt:
case ParseNodeKind::TrueExpr:
case ParseNodeKind::FalseExpr:
case ParseNodeKind::NullExpr:
case ParseNodeKind::RawUndefinedExpr:
case ParseNodeKind::Elision:
case ParseNodeKind::Generator:
MOZ_ASSERT(pn->is<NullaryNode>());
*answer = false;
return true;
case ParseNodeKind::ObjectPropertyName:
case ParseNodeKind::PrivateName: // no side effects, unlike
// ParseNodeKind::Name
case ParseNodeKind::StringExpr:
case ParseNodeKind::TemplateStringExpr:
MOZ_ASSERT(pn->is<NameNode>());
*answer = false;
return true;
case ParseNodeKind::RegExpExpr:
MOZ_ASSERT(pn->is<RegExpLiteral>());
*answer = false;
return true;
case ParseNodeKind::NumberExpr:
MOZ_ASSERT(pn->is<NumericLiteral>());
*answer = false;
return true;
case ParseNodeKind::BigIntExpr:
MOZ_ASSERT(pn->is<BigIntLiteral>());
*answer = false;
return true;
// |this| can throw in derived class constructors, including nested arrow
// functions or eval.
case ParseNodeKind::ThisExpr:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = sc->needsThisTDZChecks();
return true;
// |new.target| doesn't have any side-effects.
case ParseNodeKind::NewTargetExpr: {
MOZ_ASSERT(pn->is<NewTargetNode>());
*answer = false;
return true;
}
// Trivial binary nodes with more token pos holders.
case ParseNodeKind::ImportMetaExpr: {
MOZ_ASSERT(pn->as<BinaryNode>().left()->isKind(ParseNodeKind::PosHolder));
MOZ_ASSERT(
pn->as<BinaryNode>().right()->isKind(ParseNodeKind::PosHolder));
*answer = false;
return true;
}
case ParseNodeKind::BreakStmt:
MOZ_ASSERT(pn->is<BreakStatement>());
*answer = true;
return true;
case ParseNodeKind::ContinueStmt:
MOZ_ASSERT(pn->is<ContinueStatement>());
*answer = true;
return true;
case ParseNodeKind::DebuggerStmt:
MOZ_ASSERT(pn->is<DebuggerStatement>());
*answer = true;
return true;
// Watch out for getters!
case ParseNodeKind::OptionalDotExpr:
case ParseNodeKind::DotExpr:
case ParseNodeKind::ArgumentsLength:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
// Unary cases with side effects only if the child has them.
case ParseNodeKind::TypeOfExpr:
case ParseNodeKind::VoidExpr:
case ParseNodeKind::NotExpr:
return checkSideEffects(pn->as<UnaryNode>().kid(), answer);
// Even if the name expression is effect-free, performing ToPropertyKey on
// it might not be effect-free:
//
// RegExp.prototype.toString = () => { throw 42; };
// ({ [/regex/]: 0 }); // ToPropertyKey(/regex/) throws 42
//
// function Q() {
// ({ [new.target]: 0 });
// }
// Q.toString = () => { throw 17; };
// new Q; // new.target will be Q, ToPropertyKey(Q) throws 17
case ParseNodeKind::ComputedName:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// Looking up or evaluating the associated name could throw.
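    // (For example, |typeof x| throws a ReferenceError when |x| is an
    // uninitialized lexical binding.)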
case ParseNodeKind::TypeOfNameExpr:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// This unary case has side effects on the enclosing object, sure. But
// that's not the question this function answers: it's whether the
// operation may have a side effect on something *other* than the result
// of the overall operation in which it's embedded. The answer to that
// is no, because an object literal having a mutated prototype only
// produces a value, without affecting anything else.
case ParseNodeKind::MutateProto:
return checkSideEffects(pn->as<UnaryNode>().kid(), answer);
// Unary cases with obvious side effects.
case ParseNodeKind::PreIncrementExpr:
case ParseNodeKind::PostIncrementExpr:
case ParseNodeKind::PreDecrementExpr:
case ParseNodeKind::PostDecrementExpr:
case ParseNodeKind::ThrowStmt:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// These might invoke valueOf/toString, even with a subexpression without
// side effects! Consider |+{ valueOf: null, toString: null }|.
case ParseNodeKind::BitNotExpr:
case ParseNodeKind::PosExpr:
case ParseNodeKind::NegExpr:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// This invokes the (user-controllable) iterator protocol.
case ParseNodeKind::Spread:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
case ParseNodeKind::InitialYield:
case ParseNodeKind::YieldStarExpr:
case ParseNodeKind::YieldExpr:
case ParseNodeKind::AwaitExpr:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// Deletion generally has side effects, even if isolated cases have none.
case ParseNodeKind::DeleteNameExpr:
case ParseNodeKind::DeletePropExpr:
case ParseNodeKind::DeleteElemExpr:
case ParseNodeKind::DeleteOptionalChainExpr:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// Deletion of a non-Reference expression has side effects only through
// evaluating the expression.
case ParseNodeKind::DeleteExpr: {
ParseNode* expr = pn->as<UnaryNode>().kid();
return checkSideEffects(expr, answer);
}
case ParseNodeKind::ExpressionStmt:
return checkSideEffects(pn->as<UnaryNode>().kid(), answer);
// Binary cases with obvious side effects.
case ParseNodeKind::InitExpr:
*answer = true;
return true;
case ParseNodeKind::AssignExpr:
case ParseNodeKind::AddAssignExpr:
case ParseNodeKind::SubAssignExpr:
case ParseNodeKind::CoalesceAssignExpr:
case ParseNodeKind::OrAssignExpr:
case ParseNodeKind::AndAssignExpr:
case ParseNodeKind::BitOrAssignExpr:
case ParseNodeKind::BitXorAssignExpr:
case ParseNodeKind::BitAndAssignExpr:
case ParseNodeKind::LshAssignExpr:
case ParseNodeKind::RshAssignExpr:
case ParseNodeKind::UrshAssignExpr:
case ParseNodeKind::MulAssignExpr:
case ParseNodeKind::DivAssignExpr:
case ParseNodeKind::ModAssignExpr:
case ParseNodeKind::PowAssignExpr:
MOZ_ASSERT(pn->is<AssignmentNode>());
*answer = true;
return true;
case ParseNodeKind::SetThis:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
case ParseNodeKind::StatementList:
// Strict equality operations and short circuit operators are well-behaved
// and perform no conversions.
case ParseNodeKind::CoalesceExpr:
case ParseNodeKind::OrExpr:
case ParseNodeKind::AndExpr:
case ParseNodeKind::StrictEqExpr:
case ParseNodeKind::StrictNeExpr:
// Any subexpression of a comma expression could be effectful.
case ParseNodeKind::CommaExpr:
MOZ_ASSERT(!pn->as<ListNode>().empty());
[[fallthrough]];
// Subcomponents of a literal may be effectful.
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
for (ParseNode* item : pn->as<ListNode>().contents()) {
if (!checkSideEffects(item, answer)) {
return false;
}
if (*answer) {
return true;
}
}
return true;
#ifdef ENABLE_DECORATORS
case ParseNodeKind::DecoratorList:
MOZ_CRASH("Decorators are not supported yet");
#endif
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
case ParseNodeKind::UsingDecl:
case ParseNodeKind::AwaitUsingDecl:
MOZ_CRASH("Using declarations are not supported yet");
#endif
// Most other binary operations (parsed as lists in SpiderMonkey) may
// perform conversions triggering side effects. Math operations perform
// ToNumber and may fail invoking invalid user-defined toString/valueOf:
// |5 < { toString: null }|. |instanceof| throws if provided a
// non-object constructor: |null instanceof null|. |in| throws if given
// a non-object RHS: |5 in null|.
case ParseNodeKind::BitOrExpr:
case ParseNodeKind::BitXorExpr:
case ParseNodeKind::BitAndExpr:
case ParseNodeKind::EqExpr:
case ParseNodeKind::NeExpr:
case ParseNodeKind::LtExpr:
case ParseNodeKind::LeExpr:
case ParseNodeKind::GtExpr:
case ParseNodeKind::GeExpr:
case ParseNodeKind::InstanceOfExpr:
case ParseNodeKind::InExpr:
case ParseNodeKind::PrivateInExpr:
case ParseNodeKind::LshExpr:
case ParseNodeKind::RshExpr:
case ParseNodeKind::UrshExpr:
case ParseNodeKind::AddExpr:
case ParseNodeKind::SubExpr:
case ParseNodeKind::MulExpr:
case ParseNodeKind::DivExpr:
case ParseNodeKind::ModExpr:
case ParseNodeKind::PowExpr:
MOZ_ASSERT(pn->as<ListNode>().count() >= 2);
*answer = true;
return true;
case ParseNodeKind::PropertyDefinition:
case ParseNodeKind::Case: {
BinaryNode* node = &pn->as<BinaryNode>();
if (!checkSideEffects(node->left(), answer)) {
return false;
}
if (*answer) {
return true;
}
return checkSideEffects(node->right(), answer);
}
// More getters.
case ParseNodeKind::ElemExpr:
case ParseNodeKind::OptionalElemExpr:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
// Throws if the operand is not of the right class. Can also call a private
// getter.
case ParseNodeKind::PrivateMemberExpr:
case ParseNodeKind::OptionalPrivateMemberExpr:
*answer = true;
return true;
// These affect visible names in this code, or in other code.
case ParseNodeKind::ImportDecl:
case ParseNodeKind::ExportFromStmt:
case ParseNodeKind::ExportDefaultStmt:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
// Likewise.
case ParseNodeKind::ExportStmt:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
case ParseNodeKind::CallImportExpr:
case ParseNodeKind::CallImportSpec:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
// Every part of a loop might be effect-free, but looping infinitely *is*
// an effect. (Language lawyer trivia: C++ says threads can be assumed
// to exit or have side effects, C++14 [intro.multithread]p27, so a C++
// implementation's equivalent of the below could set |*answer = false;|
// if all loop sub-nodes set |*answer = false|!)
case ParseNodeKind::DoWhileStmt:
case ParseNodeKind::WhileStmt:
case ParseNodeKind::ForStmt:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
// Declarations affect the name set of the relevant scope.
case ParseNodeKind::VarStmt:
case ParseNodeKind::ConstDecl:
case ParseNodeKind::LetDecl:
MOZ_ASSERT(pn->is<ListNode>());
*answer = true;
return true;
case ParseNodeKind::IfStmt:
case ParseNodeKind::ConditionalExpr: {
TernaryNode* node = &pn->as<TernaryNode>();
if (!checkSideEffects(node->kid1(), answer)) {
return false;
}
if (*answer) {
return true;
}
if (!checkSideEffects(node->kid2(), answer)) {
return false;
}
if (*answer) {
return true;
}
if ((pn = node->kid3())) {
goto restart;
}
return true;
}
// Function calls can invoke non-local code.
case ParseNodeKind::NewExpr:
case ParseNodeKind::CallExpr:
case ParseNodeKind::OptionalCallExpr:
case ParseNodeKind::TaggedTemplateExpr:
case ParseNodeKind::SuperCallExpr:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
    // Function arg lists can contain arbitrary expressions. Technically
    // this only causes side-effects if one of the arguments does, but since
    // the call being made will always trigger side-effects, we don't check
    // the arguments individually.
case ParseNodeKind::Arguments:
MOZ_ASSERT(pn->is<ListNode>());
*answer = true;
return true;
case ParseNodeKind::OptionalChain:
MOZ_ASSERT(pn->is<UnaryNode>());
*answer = true;
return true;
// Classes typically introduce names. Even if no name is introduced,
// the heritage and/or class body (through computed property names)
// usually have effects.
case ParseNodeKind::ClassDecl:
MOZ_ASSERT(pn->is<ClassNode>());
*answer = true;
return true;
// |with| calls |ToObject| on its expression and so throws if that value
// is null/undefined.
case ParseNodeKind::WithStmt:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
case ParseNodeKind::ReturnStmt:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
case ParseNodeKind::Name:
MOZ_ASSERT(pn->is<NameNode>());
*answer = true;
return true;
// Shorthands could trigger getters: the |x| in the object literal in
// |with ({ get x() { throw 42; } }) ({ x });|, for example, triggers
// one. (Of course, it isn't necessary to use |with| for a shorthand to
// trigger a getter.)
case ParseNodeKind::Shorthand:
MOZ_ASSERT(pn->is<BinaryNode>());
*answer = true;
return true;
case ParseNodeKind::Function:
MOZ_ASSERT(pn->is<FunctionNode>());
/*
* A named function, contrary to ES3, is no longer effectful, because
* we bind its name lexically (using JSOp::Callee) instead of creating
* an Object instance and binding a readonly, permanent property in it
* (the object and binding can be detected and hijacked or captured).
* This is a bug fix to ES3; it is fixed in ES3.1 drafts.
*/
*answer = false;
return true;
case ParseNodeKind::Module:
*answer = false;
return true;
case ParseNodeKind::TryStmt: {
TryNode* tryNode = &pn->as<TryNode>();
if (!checkSideEffects(tryNode->body(), answer)) {
return false;
}
if (*answer) {
return true;
}
if (LexicalScopeNode* catchScope = tryNode->catchScope()) {
if (!checkSideEffects(catchScope, answer)) {
return false;
}
if (*answer) {
return true;
}
}
if (ParseNode* finallyBlock = tryNode->finallyBlock()) {
if (!checkSideEffects(finallyBlock, answer)) {
return false;
}
}
return true;
}
case ParseNodeKind::Catch: {
BinaryNode* catchClause = &pn->as<BinaryNode>();
if (ParseNode* name = catchClause->left()) {
if (!checkSideEffects(name, answer)) {
return false;
}
if (*answer) {
return true;
}
}
return checkSideEffects(catchClause->right(), answer);
}
case ParseNodeKind::SwitchStmt: {
SwitchStatement* switchStmt = &pn->as<SwitchStatement>();
if (!checkSideEffects(&switchStmt->discriminant(), answer)) {
return false;
}
return *answer ||
checkSideEffects(&switchStmt->lexicalForCaseList(), answer);
}
case ParseNodeKind::LabelStmt:
return checkSideEffects(pn->as<LabeledStatement>().statement(), answer);
case ParseNodeKind::LexicalScope:
return checkSideEffects(pn->as<LexicalScopeNode>().scopeBody(), answer);
// We could methodically check every interpolated expression, but it's
// probably not worth the trouble. Treat template strings as effect-free
// only if they don't contain any substitutions.
case ParseNodeKind::TemplateStringListExpr: {
ListNode* list = &pn->as<ListNode>();
MOZ_ASSERT(!list->empty());
MOZ_ASSERT((list->count() % 2) == 1,
"template strings must alternate template and substitution "
"parts");
*answer = list->count() > 1;
return true;
}
// This should be unreachable but is left as-is for now.
case ParseNodeKind::ParamsBody:
*answer = true;
return true;
case ParseNodeKind::ForIn: // by ParseNodeKind::For
case ParseNodeKind::ForOf: // by ParseNodeKind::For
case ParseNodeKind::ForHead: // by ParseNodeKind::For
case ParseNodeKind::DefaultConstructor: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ClassBodyScope: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ClassMethod: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ClassField: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ClassNames: // by ParseNodeKind::ClassDecl
case ParseNodeKind::StaticClassBlock: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ClassMemberList: // by ParseNodeKind::ClassDecl
case ParseNodeKind::ImportSpecList: // by ParseNodeKind::Import
case ParseNodeKind::ImportSpec: // by ParseNodeKind::Import
case ParseNodeKind::ImportNamespaceSpec: // by ParseNodeKind::Import
case ParseNodeKind::ImportAttribute: // by ParseNodeKind::Import
case ParseNodeKind::ImportAttributeList: // by ParseNodeKind::Import
case ParseNodeKind::ImportModuleRequest: // by ParseNodeKind::Import
case ParseNodeKind::ExportBatchSpecStmt: // by ParseNodeKind::Export
case ParseNodeKind::ExportSpecList: // by ParseNodeKind::Export
case ParseNodeKind::ExportSpec: // by ParseNodeKind::Export
case ParseNodeKind::ExportNamespaceSpec: // by ParseNodeKind::Export
case ParseNodeKind::CallSiteObj: // by ParseNodeKind::TaggedTemplate
case ParseNodeKind::PosHolder: // by ParseNodeKind::NewTarget
case ParseNodeKind::SuperBase: // by ParseNodeKind::Elem and others
case ParseNodeKind::PropertyNameExpr: // by ParseNodeKind::Dot
MOZ_CRASH("handled by parent nodes");
case ParseNodeKind::LastUnused:
case ParseNodeKind::Limit:
MOZ_CRASH("invalid node kind");
}
MOZ_CRASH(
"invalid, unenumerated ParseNodeKind value encountered in "
"BytecodeEmitter::checkSideEffects");
}
bool BytecodeEmitter::isInLoop() const {
return findInnermostNestableControl<LoopControl>();
}
bool BytecodeEmitter::checkSingletonContext() const {
MOZ_ASSERT_IF(sc->treatAsRunOnce(), sc->isTopLevelContext());
return sc->treatAsRunOnce() && !isInLoop();
}
bool BytecodeEmitter::needsImplicitThis() const {
// Short-circuit if there is an enclosing 'with' scope.
if (sc->inWith()) {
return true;
}
// Otherwise see if the current point is under a 'with'.
for (EmitterScope* es = innermostEmitterScope(); es;
es = es->enclosingInFrame()) {
if (es->scope(this).kind() == ScopeKind::With) {
return true;
}
}
return false;
}
size_t BytecodeEmitter::countThisEnvironmentHops() const {
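  // Count the environment-chain hops from the innermost scope out to the
  // nearest non-arrow function scope, i.e. the function that owns the |this|
  // binding. Arrow functions inherit |this|, so they are skipped.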
unsigned numHops = 0;
for (const auto* current = this; current; current = current->parent) {
for (EmitterScope* es = current->innermostEmitterScope(); es;
es = es->enclosingInFrame()) {
if (es->scope(current).is<FunctionScope>()) {
if (!es->scope(current).isArrow()) {
          // The Parser is responsible for marking the environment as either
          // closed-over or used-by-eval, which ensures that it must exist.
MOZ_ASSERT(es->scope(current).hasEnvironment());
return numHops;
}
}
if (es->scope(current).hasEnvironment()) {
numHops++;
}
}
}
// The "this" environment exists outside of the compilation, but the
// `ScopeContext` recorded the number of additional hops needed, so add
// those in now.
MOZ_ASSERT(sc->allowSuperProperty());
numHops += compilationState.scopeContext.enclosingThisEnvironmentHops;
return numHops;
}
bool BytecodeEmitter::emitThisEnvironmentCallee() {
// Get the innermost enclosing function that has a |this| binding.
// Directly load callee from the frame if possible.
if (sc->isFunctionBox() && !sc->asFunctionBox()->isArrow()) {
return emit1(JSOp::Callee);
}
// We have to load the callee from the environment chain.
size_t numHops = countThisEnvironmentHops();
static_assert(
ENVCOORD_HOPS_LIMIT - 1 <= UINT8_MAX,
"JSOp::EnvCallee operand size should match ENVCOORD_HOPS_LIMIT");
MOZ_ASSERT(numHops < ENVCOORD_HOPS_LIMIT - 1);
return emit2(JSOp::EnvCallee, numHops);
}
bool BytecodeEmitter::emitSuperBase() {
if (!emitThisEnvironmentCallee()) {
return false;
}
return emit1(JSOp::SuperBase);
}
void BytecodeEmitter::reportError(ParseNode* pn, unsigned errorNumber,
...) const {
uint32_t offset = pn ? pn->pn_pos.begin : *scriptStartOffset;
va_list args;
va_start(args, errorNumber);
errorReporter().errorWithNotesAtVA(nullptr, AsVariant(offset), errorNumber,
&args);
va_end(args);
}
void BytecodeEmitter::reportError(uint32_t offset, unsigned errorNumber,
...) const {
va_list args;
va_start(args, errorNumber);
errorReporter().errorWithNotesAtVA(nullptr, AsVariant(offset), errorNumber,
&args);
va_end(args);
}
bool BytecodeEmitter::addObjLiteralData(ObjLiteralWriter& writer,
GCThingIndex* outIndex) {
if (!writer.checkForDuplicatedNames(fc)) {
return false;
}
size_t len = writer.getCode().size();
auto* code = compilationState.alloc.newArrayUninitialized<uint8_t>(len);
if (!code) {
js::ReportOutOfMemory(fc);
return false;
}
memcpy(code, writer.getCode().data(), len);
ObjLiteralIndex objIndex(compilationState.objLiteralData.length());
if (uint32_t(objIndex) >= TaggedScriptThingIndex::IndexLimit) {
ReportAllocationOverflow(fc);
return false;
}
if (!compilationState.objLiteralData.emplaceBack(code, len, writer.getKind(),
writer.getFlags(),
writer.getPropertyCount())) {
js::ReportOutOfMemory(fc);
return false;
}
return perScriptData().gcThingList().append(objIndex, outIndex);
}
bool BytecodeEmitter::emitPrepareIteratorResult() {
constexpr JSOp op = JSOp::NewObject;
ObjLiteralWriter writer;
writer.beginShape(op);
writer.setPropNameNoDuplicateCheck(parserAtoms(),
TaggedParserAtomIndex::WellKnown::value());
if (!writer.propWithUndefinedValue(fc)) {
return false;
}
writer.setPropNameNoDuplicateCheck(parserAtoms(),
TaggedParserAtomIndex::WellKnown::done());
if (!writer.propWithUndefinedValue(fc)) {
return false;
}
GCThingIndex shape;
if (!addObjLiteralData(writer, &shape)) {
return false;
}
return emitGCIndexOp(op, shape);
}
bool BytecodeEmitter::emitFinishIteratorResult(bool done) {
if (!emitAtomOp(JSOp::InitProp, TaggedParserAtomIndex::WellKnown::value())) {
return false;
}
if (!emit1(done ? JSOp::True : JSOp::False)) {
return false;
}
if (!emitAtomOp(JSOp::InitProp, TaggedParserAtomIndex::WellKnown::done())) {
return false;
}
return true;
}
bool BytecodeEmitter::emitGetNameAtLocation(TaggedParserAtomIndex name,
const NameLocation& loc) {
NameOpEmitter noe(this, name, loc, NameOpEmitter::Kind::Get);
if (!noe.emitGet()) {
return false;
}
return true;
}
bool BytecodeEmitter::emitGetName(NameNode* name) {
MOZ_ASSERT(name->isKind(ParseNodeKind::Name));
return emitGetName(name->name());
}
bool BytecodeEmitter::emitGetPrivateName(NameNode* name) {
MOZ_ASSERT(name->isKind(ParseNodeKind::PrivateName));
return emitGetPrivateName(name->name());
}
bool BytecodeEmitter::emitGetPrivateName(TaggedParserAtomIndex nameAtom) {
// The parser ensures the private name is present on the environment chain,
// but its location can be Dynamic or Global when emitting debugger
// eval-in-frame code.
NameLocation location = lookupName(nameAtom);
MOZ_ASSERT(location.kind() == NameLocation::Kind::FrameSlot ||
location.kind() == NameLocation::Kind::EnvironmentCoordinate ||
location.kind() == NameLocation::Kind::Dynamic ||
location.kind() == NameLocation::Kind::Global);
return emitGetNameAtLocation(nameAtom, location);
}
bool BytecodeEmitter::emitTDZCheckIfNeeded(TaggedParserAtomIndex name,
const NameLocation& loc,
ValueIsOnStack isOnStack) {
// Dynamic accesses have TDZ checks built into their VM code and should
// never emit explicit TDZ checks.
MOZ_ASSERT(loc.hasKnownSlot());
MOZ_ASSERT(loc.isLexical() || loc.isPrivateMethod() || loc.isSynthetic());
// Private names are implemented as lexical bindings, but it's just an
// implementation detail. Per spec there's no TDZ check when using them.
if (parserAtoms().isPrivateName(name)) {
return true;
}
Maybe<MaybeCheckTDZ> check =
innermostTDZCheckCache->needsTDZCheck(this, name);
if (!check) {
return false;
}
// We've already emitted a check in this basic block.
if (*check == DontCheckTDZ) {
return true;
}
// If the value is not on the stack, we have to load it first.
if (isOnStack == ValueIsOnStack::No) {
if (loc.kind() == NameLocation::Kind::FrameSlot) {
if (!emitLocalOp(JSOp::GetLocal, loc.frameSlot())) {
return false;
}
} else {
if (!emitEnvCoordOp(JSOp::GetAliasedVar, loc.environmentCoordinate())) {
return false;
}
}
}
// Emit the lexical check.
if (loc.kind() == NameLocation::Kind::FrameSlot) {
if (!emitLocalOp(JSOp::CheckLexical, loc.frameSlot())) {
return false;
}
} else {
if (!emitEnvCoordOp(JSOp::CheckAliasedLexical,
loc.environmentCoordinate())) {
return false;
}
}
// Pop the value if needed.
if (isOnStack == ValueIsOnStack::No) {
if (!emit1(JSOp::Pop)) {
return false;
}
}
return innermostTDZCheckCache->noteTDZCheck(this, name, DontCheckTDZ);
}
bool BytecodeEmitter::emitPropLHS(PropertyAccess* prop) {
MOZ_ASSERT(!prop->isSuper());
ParseNode* expr = &prop->expression();
if (!expr->is<PropertyAccess>() || expr->as<PropertyAccess>().isSuper()) {
// The non-optimized case.
return emitTree(expr);
}
// If the object operand is also a dotted property reference, reverse the
// list linked via expression() temporarily so we can iterate over it from
// the bottom up (reversing again as we go), to avoid excessive recursion.
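  // For example, |a.b.c.d| reaches here as the |.d| access whose expression
  // is |a.b.c|; the loop below emits |a| once and then a plain JSOp::GetProp
  // for |b| and |c| without recursing into emitTree for each level.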
PropertyAccess* pndot = &expr->as<PropertyAccess>();
ParseNode* pnup = nullptr;
ParseNode* pndown;
for (;;) {
// Reverse pndot->expression() to point up, not down.
pndown = &pndot->expression();
pndot->setExpression(pnup);
if (!pndown->is<PropertyAccess>() ||
pndown->as<PropertyAccess>().isSuper()) {
break;
}
pnup = pndot;
pndot = &pndown->as<PropertyAccess>();
}
// pndown is a primary expression, not a dotted property reference.
if (!emitTree(pndown)) {
return false;
}
while (true) {
// Walk back up the list, emitting annotated name ops.
if (!emitAtomOp(JSOp::GetProp, pndot->key().atom())) {
return false;
}
// Reverse the pndot->expression() link again.
pnup = pndot->maybeExpression();
pndot->setExpression(pndown);
pndown = pndot;
if (!pnup) {
break;
}
pndot = &pnup->as<PropertyAccess>();
}
return true;
}
bool BytecodeEmitter::emitPropIncDec(UnaryNode* incDec, ValueUsage valueUsage) {
PropertyAccess* prop = &incDec->kid()->as<PropertyAccess>();
bool isSuper = prop->isSuper();
ParseNodeKind kind = incDec->getKind();
PropOpEmitter poe(
this,
kind == ParseNodeKind::PostIncrementExpr
? PropOpEmitter::Kind::PostIncrement
: kind == ParseNodeKind::PreIncrementExpr
? PropOpEmitter::Kind::PreIncrement
: kind == ParseNodeKind::PostDecrementExpr
? PropOpEmitter::Kind::PostDecrement
: PropOpEmitter::Kind::PreDecrement,
isSuper ? PropOpEmitter::ObjKind::Super : PropOpEmitter::ObjKind::Other);
if (!poe.prepareForObj()) {
return false;
}
if (isSuper) {
UnaryNode* base = &prop->expression().as<UnaryNode>();
if (!emitGetThisForSuperBase(base)) {
// [stack] THIS
return false;
}
} else {
if (!emitPropLHS(prop)) {
// [stack] OBJ
return false;
}
}
if (!poe.emitIncDec(prop->key().atom(), valueUsage)) {
// [stack] RESULT
return false;
}
return true;
}
bool BytecodeEmitter::emitNameIncDec(UnaryNode* incDec, ValueUsage valueUsage) {
MOZ_ASSERT(incDec->kid()->isKind(ParseNodeKind::Name));
ParseNodeKind kind = incDec->getKind();
NameNode* name = &incDec->kid()->as<NameNode>();
NameOpEmitter noe(this, name->atom(),
kind == ParseNodeKind::PostIncrementExpr
? NameOpEmitter::Kind::PostIncrement
: kind == ParseNodeKind::PreIncrementExpr
? NameOpEmitter::Kind::PreIncrement
: kind == ParseNodeKind::PostDecrementExpr
? NameOpEmitter::Kind::PostDecrement
: NameOpEmitter::Kind::PreDecrement);
if (!noe.emitIncDec(valueUsage)) {
return false;
}
return true;
}
bool BytecodeEmitter::emitElemObjAndKey(PropertyByValue* elem,
ElemOpEmitter& eoe) {
ParseNode* exprOrSuper = &elem->expression();
ParseNode* key = &elem->key();
if (!eoe.prepareForObj()) {
// [stack]
return false;
}
if (elem->isSuper()) {
auto* base = &exprOrSuper->as<UnaryNode>();
if (!emitGetThisForSuperBase(base)) {
// [stack] THIS
return false;
}
} else {
if (!emitTree(exprOrSuper)) {
// [stack] OBJ
return false;
}
}
if (!eoe.prepareForKey()) {
// [stack] # if Super
// [stack] THIS? THIS
// [stack] # otherwise
// [stack] OBJ? OBJ
return false;
}
if (!emitTree(key)) {
// [stack] # if Super
// [stack] THIS? THIS KEY
// [stack] # otherwise
// [stack] OBJ? OBJ KEY
return false;
}
return true;
}
bool BytecodeEmitter::emitElemOpBase(JSOp op) {
if (!emit1(op)) {
return false;
}
return true;
}
static ElemOpEmitter::Kind ConvertIncDecKind(ParseNodeKind kind) {
switch (kind) {
case ParseNodeKind::PostIncrementExpr:
return ElemOpEmitter::Kind::PostIncrement;
case ParseNodeKind::PreIncrementExpr:
return ElemOpEmitter::Kind::PreIncrement;
case ParseNodeKind::PostDecrementExpr:
return ElemOpEmitter::Kind::PostDecrement;
case ParseNodeKind::PreDecrementExpr:
return ElemOpEmitter::Kind::PreDecrement;
default:
MOZ_CRASH("unexpected inc/dec node kind");
}
}
static PrivateOpEmitter::Kind PrivateConvertIncDecKind(ParseNodeKind kind) {
switch (kind) {
case ParseNodeKind::PostIncrementExpr:
return PrivateOpEmitter::Kind::PostIncrement;
case ParseNodeKind::PreIncrementExpr:
return PrivateOpEmitter::Kind::PreIncrement;
case ParseNodeKind::PostDecrementExpr:
return PrivateOpEmitter::Kind::PostDecrement;
case ParseNodeKind::PreDecrementExpr:
return PrivateOpEmitter::Kind::PreDecrement;
default:
MOZ_CRASH("unexpected inc/dec node kind");
}
}
bool BytecodeEmitter::emitElemIncDec(UnaryNode* incDec, ValueUsage valueUsage) {
PropertyByValue* elemExpr = &incDec->kid()->as<PropertyByValue>();
bool isSuper = elemExpr->isSuper();
MOZ_ASSERT(!elemExpr->key().isKind(ParseNodeKind::PrivateName));
ParseNodeKind kind = incDec->getKind();
ElemOpEmitter eoe(
this, ConvertIncDecKind(kind),
isSuper ? ElemOpEmitter::ObjKind::Super : ElemOpEmitter::ObjKind::Other);
if (!emitElemObjAndKey(elemExpr, eoe)) {
// [stack] # if Super
// [stack] THIS KEY
// [stack] # otherwise
// [stack] OBJ KEY
return false;
}
if (!eoe.emitIncDec(valueUsage)) {
// [stack] RESULT
return false;
}
return true;
}
bool BytecodeEmitter::emitCallIncDec(UnaryNode* incDec) {
MOZ_ASSERT(incDec->isKind(ParseNodeKind::PreIncrementExpr) ||
incDec->isKind(ParseNodeKind::PostIncrementExpr) ||
incDec->isKind(ParseNodeKind::PreDecrementExpr) ||
incDec->isKind(ParseNodeKind::PostDecrementExpr));
ParseNode* call = incDec->kid();
MOZ_ASSERT(call->isKind(ParseNodeKind::CallExpr));
if (!emitTree(call)) {
// [stack] CALLRESULT
return false;
}
if (!emit1(JSOp::ToNumeric)) {
// [stack] N
return false;
}
// The increment/decrement has no side effects, so proceed to throw for
// invalid assignment target.
return emit2(JSOp::ThrowMsg, uint8_t(ThrowMsgKind::AssignToCall));
}
bool BytecodeEmitter::emitPrivateIncDec(UnaryNode* incDec,
ValueUsage valueUsage) {
PrivateMemberAccess* privateExpr = &incDec->kid()->as<PrivateMemberAccess>();
ParseNodeKind kind = incDec->getKind();
PrivateOpEmitter xoe(this, PrivateConvertIncDecKind(kind),
privateExpr->privateName().name());
if (!emitTree(&privateExpr->expression())) {
// [stack] OBJ
return false;
}
if (!xoe.emitReference()) {
// [stack] OBJ NAME
return false;
}
if (!xoe.emitIncDec(valueUsage)) {
// [stack] RESULT
return false;
}
return true;
}
bool BytecodeEmitter::emitDouble(double d) {
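  // JSOp::Double carries the full 64-bit value as an inline immediate, so the
  // instruction occupies 1 opcode byte plus 8 value bytes (the 9 below).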
BytecodeOffset offset;
if (!emitCheck(JSOp::Double, 9, &offset)) {
return false;
}
jsbytecode* code = bytecodeSection().code(offset);
code[0] = jsbytecode(JSOp::Double);
SET_INLINE_VALUE(code, DoubleValue(d));
bytecodeSection().updateDepth(JSOp::Double, offset);
return true;
}
bool BytecodeEmitter::emitNumberOp(double dval) {
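  // Use the most compact encoding that can represent |dval|: Zero/One for
  // those constants, Int8/Uint16/Uint24/Int32 for other int32 values, and a
  // full double otherwise.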
int32_t ival;
if (NumberIsInt32(dval, &ival)) {
if (ival == 0) {
return emit1(JSOp::Zero);
}
if (ival == 1) {
return emit1(JSOp::One);
}
if ((int)(int8_t)ival == ival) {
return emit2(JSOp::Int8, uint8_t(int8_t(ival)));
}
uint32_t u = uint32_t(ival);
if (u < Bit(16)) {
if (!emitUint16Operand(JSOp::Uint16, u)) {
return false;
}
} else if (u < Bit(24)) {
BytecodeOffset off;
if (!emitN(JSOp::Uint24, 3, &off)) {
return false;
}
SET_UINT24(bytecodeSection().code(off), u);
} else {
BytecodeOffset off;
if (!emitN(JSOp::Int32, 4, &off)) {
return false;
}
SET_INT32(bytecodeSection().code(off), ival);
}
return true;
}
return emitDouble(dval);
}
/*
* Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
* LLVM is deciding to inline this function which uses a lot of stack space
* into emitTree which is recursive and uses relatively little stack space.
*/
MOZ_NEVER_INLINE bool BytecodeEmitter::emitSwitch(SwitchStatement* switchStmt) {
LexicalScopeNode& lexical = switchStmt->lexicalForCaseList();
MOZ_ASSERT(lexical.isKind(ParseNodeKind::LexicalScope));
ListNode* cases = &lexical.scopeBody()->as<ListNode>();
MOZ_ASSERT(cases->isKind(ParseNodeKind::StatementList));
SwitchEmitter se(this);
if (!se.emitDiscriminant(switchStmt->discriminant().pn_pos.begin)) {
return false;
}
if (!markStepBreakpoint()) {
return false;
}
if (!emitTree(&switchStmt->discriminant())) {
return false;
}
// Enter the scope before pushing the switch BreakableControl since all
// breaks are under this scope.
if (!lexical.isEmptyScope()) {
if (!se.emitLexical(lexical.scopeBindings())) {
return false;
}
// A switch statement may contain hoisted functions inside its
// cases. The hasTopLevelFunctionDeclarations flag is propagated from the
// StatementList bodies of the cases to the case list.
if (cases->hasTopLevelFunctionDeclarations()) {
for (ParseNode* item : cases->contents()) {
CaseClause* caseClause = &item->as<CaseClause>();
ListNode* statements = caseClause->statementList();
if (statements->hasTopLevelFunctionDeclarations()) {
if (!emitHoistedFunctionsInList(statements)) {
return false;
}
}
}
}
} else {
MOZ_ASSERT(!cases->hasTopLevelFunctionDeclarations());
}
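  // Scan the non-default case values: a jump-table switch is only attempted
  // when every case expression is a numeric literal whose value is an int32;
  // otherwise (or if the TableGenerator rejects the value set) we fall back
  // to a sequence of conditional comparisons below.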
SwitchEmitter::TableGenerator tableGen(this);
uint32_t caseCount = cases->count() - (switchStmt->hasDefault() ? 1 : 0);
if (caseCount == 0) {
tableGen.finish(0);
} else {
for (ParseNode* item : cases->contents()) {
CaseClause* caseClause = &item->as<CaseClause>();
if (caseClause->isDefault()) {
continue;
}
ParseNode* caseValue = caseClause->caseExpression();
if (caseValue->getKind() != ParseNodeKind::NumberExpr) {
tableGen.setInvalid();
break;
}
int32_t i;
if (!NumberEqualsInt32(caseValue->as<NumericLiteral>().value(), &i)) {
tableGen.setInvalid();
break;
}
if (!tableGen.addNumber(i)) {
return false;
}
}
tableGen.finish(caseCount);
}
if (!se.validateCaseCount(caseCount)) {
return false;
}
bool isTableSwitch = tableGen.isValid();
if (isTableSwitch) {
if (!se.emitTable(tableGen)) {
return false;
}
} else {
if (!se.emitCond()) {
return false;
}
// Emit code for evaluating cases and jumping to case statements.
for (ParseNode* item : cases->contents()) {
CaseClause* caseClause = &item->as<CaseClause>();
if (caseClause->isDefault()) {
continue;
}
if (!se.prepareForCaseValue()) {
return false;
}
ParseNode* caseValue = caseClause->caseExpression();
// If the expression is a literal, suppress line number emission so
// that debugging works more naturally.
if (!emitTree(
caseValue, ValueUsage::WantValue,
caseValue->isLiteral() ? SUPPRESS_LINENOTE : EMIT_LINENOTE)) {
return false;
}
if (!se.emitCaseJump()) {
return false;
}
}
}
// Emit code for each case's statements.
for (ParseNode* item : cases->contents()) {
CaseClause* caseClause = &item->as<CaseClause>();
if (caseClause->isDefault()) {
if (!se.emitDefaultBody()) {
return false;
}
} else {
if (isTableSwitch) {
ParseNode* caseValue = caseClause->caseExpression();
MOZ_ASSERT(caseValue->isKind(ParseNodeKind::NumberExpr));
NumericLiteral* literal = &caseValue->as<NumericLiteral>();
#ifdef DEBUG
// Use NumberEqualsInt32 here because switches compare using
// strict equality, which will equate -0 and +0. In contrast
// NumberIsInt32 would return false for -0.
int32_t v;
MOZ_ASSERT(mozilla::NumberEqualsInt32(literal->value(), &v));
#endif
int32_t i = int32_t(literal->value());
if (!se.emitCaseBody(i, tableGen)) {
return false;
}
} else {
if (!se.emitCaseBody()) {
return false;
}
}
}
if (!emitTree(caseClause->statementList())) {
return false;
}
}
if (!se.emitEnd()) {
return false;
}
return true;
}
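// Resume indices number the resume points (initial yield, yield, await) of a
// generator or async script. Each index maps to an entry in
// bytecodeSection().resumeOffsetList(); the index is capped at BitMask(24)
// below so that it fits in a 24-bit bytecode operand.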
bool BytecodeEmitter::allocateResumeIndex(BytecodeOffset offset,
uint32_t* resumeIndex) {
static constexpr uint32_t MaxResumeIndex = BitMask(24);
static_assert(
MaxResumeIndex < uint32_t(AbstractGeneratorObject::RESUME_INDEX_RUNNING),
"resumeIndex should not include magic AbstractGeneratorObject "
"resumeIndex values");
static_assert(
MaxResumeIndex <= INT32_MAX / sizeof(uintptr_t),
"resumeIndex * sizeof(uintptr_t) must fit in an int32. JIT code relies "
"on this when loading resume entries from BaselineScript");
*resumeIndex = bytecodeSection().resumeOffsetList().length();
if (*resumeIndex > MaxResumeIndex) {
reportError(nullptr, JSMSG_TOO_MANY_RESUME_INDEXES);
return false;
}
return bytecodeSection().resumeOffsetList().append(offset.value());
}
bool BytecodeEmitter::allocateResumeIndexRange(
mozilla::Span<BytecodeOffset> offsets, uint32_t* firstResumeIndex) {
*firstResumeIndex = 0;
for (size_t i = 0, len = offsets.size(); i < len; i++) {
uint32_t resumeIndex;
if (!allocateResumeIndex(offsets[i], &resumeIndex)) {
return false;
}
if (i == 0) {
*firstResumeIndex = resumeIndex;
}
}
return true;
}
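// Emit a yield-like op (InitialYield, Yield, Await, or FinalYieldRval).
// Except for FinalYieldRval, the op carries a resume-index operand allocated
// from the current bytecode offset and is followed by a JSOp::AfterYield jump
// target marking the resumption point.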
bool BytecodeEmitter::emitYieldOp(JSOp op) {
// ParseContext::Scope::setOwnStackSlotCount should check the fixed slot
// limit for the following cases, and it should prevent using fixed slots
// if there are too many bindings:
//   * generator or async function
//   * module code after top-level await
MOZ_ASSERT(innermostEmitterScopeNoCheck()->frameSlotEnd() <=
ParseContext::Scope::FixedSlotLimit);
if (op == JSOp::FinalYieldRval) {
return emit1(JSOp::FinalYieldRval);
}
MOZ_ASSERT(op == JSOp::InitialYield || op == JSOp::Yield ||
op == JSOp::Await);
BytecodeOffset off;
if (!emitN(op, 3, &off)) {
return false;
}
if (op == JSOp::InitialYield || op == JSOp::Yield) {
bytecodeSection().addNumYields();
}
uint32_t resumeIndex;
if (!allocateResumeIndex(bytecodeSection().offset(), &resumeIndex)) {
return false;
}
SET_RESUMEINDEX(bytecodeSection().code(off), resumeIndex);
BytecodeOffset unusedOffset;
return emitJumpTargetOp(JSOp::AfterYield, &unusedOffset);
}
bool BytecodeEmitter::emitPushResumeKind(GeneratorResumeKind kind) {
return emit2(JSOp::ResumeKind, uint8_t(kind));
}
bool BytecodeEmitter::emitSetThis(BinaryNode* setThisNode) {
// ParseNodeKind::SetThis is used to update |this| after a super() call
// in a derived class constructor.
MOZ_ASSERT(setThisNode->isKind(ParseNodeKind::SetThis));
MOZ_ASSERT(setThisNode->left()->isKind(ParseNodeKind::Name));
auto name = setThisNode->left()->as<NameNode>().name();
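// Roughly, the sequence emitted below corresponds to:
//
//   newThis = <evaluate setThisNode->right()>;   // the super() result
//   if (<this already initialized>) throw;       // JSOp::CheckThisReinit
//   this = newThis;                              // lexical-style init
//   <initialize instance members>;
//
// (Sketch only; the [stack] annotations below show the exact ops.)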
// The 'this' binding is not lexical, but due to super() semantics this
// initialization needs to be treated as a lexical one.
NameLocation loc = lookupName(name);
NameLocation lexicalLoc;
if (loc.kind() == NameLocation::Kind::FrameSlot) {
lexicalLoc = NameLocation::FrameSlot(BindingKind::Let, loc.frameSlot());
} else if (loc.kind() == NameLocation::Kind::EnvironmentCoordinate) {
EnvironmentCoordinate coord = loc.environmentCoordinate();
uint8_t hops = AssertedCast<uint8_t>(coord.hops());
lexicalLoc = NameLocation::EnvironmentCoordinate(BindingKind::Let, hops,
coord.slot());
} else {
MOZ_ASSERT(loc.kind() == NameLocation::Kind::Dynamic);
lexicalLoc = loc;
}
NameOpEmitter noe(this, name, lexicalLoc, NameOpEmitter::Kind::Initialize);
if (!noe.prepareForRhs()) {
// [stack]
return false;
}
// Emit the new |this| value.
if (!emitTree(setThisNode->right())) {
// [stack] NEWTHIS
return false;
}
// Get the original |this| and throw if we already initialized it. Do *not*
// pass the special lexical NameLocation computed above; it exists only to
// implement the super() initialization semantics.
if (!emitGetName(name)) {
// [stack] NEWTHIS THIS
return false;
}
if (!emit1(JSOp::CheckThisReinit)) {
// [stack] NEWTHIS THIS
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] NEWTHIS
return false;
}
if (!noe.emitAssignment()) {
// [stack] NEWTHIS
return false;
}
if (!emitInitializeInstanceMembers(true)) {
return false;
}
return true;
}
bool BytecodeEmitter::defineHoistedTopLevelFunctions(ParseNode* body) {
MOZ_ASSERT(inPrologue());
MOZ_ASSERT(sc->isGlobalContext() || (sc->isEvalContext() && !sc->strict()));
MOZ_ASSERT(body->is<LexicalScopeNode>() || body->is<ListNode>());
if (body->is<LexicalScopeNode>()) {
body = body->as<LexicalScopeNode>().scopeBody();
MOZ_ASSERT(body->is<ListNode>());
}
if (!body->as<ListNode>().hasTopLevelFunctionDeclarations()) {
return true;
}
return emitHoistedFunctionsInList(&body->as<ListNode>());
}
// For Global and sloppy-Eval scripts, this performs most of the steps of the
// spec's [GlobalDeclarationInstantiation] and [EvalDeclarationInstantiation]
// operations.
//
// Note that while strict-Eval is handled in the same part of the spec, it never
// fails for global-redeclaration checks so those scripts initialize directly in
// their bytecode.
bool BytecodeEmitter::emitDeclarationInstantiation(ParseNode* body) {
if (sc->isModuleContext()) {
// ES Modules have dedicated variable and lexical environments and therefore
// do not have to perform redeclaration checks. We initialize their bindings
// elsewhere in bytecode.
return true;
}
if (sc->isEvalContext() && sc->strict()) {
// Strict Eval has dedicated variable (and lexical) environments and
// therefore does not have to perform redeclaration checks. We initialize
// its bindings elsewhere in the bytecode.
return true;
}
// If we have no variable bindings, then we are done!
if (sc->isGlobalContext()) {
if (!sc->asGlobalContext()->bindings) {
return true;
}
} else {
MOZ_ASSERT(sc->isEvalContext());
if (!sc->asEvalContext()->bindings) {
return true;
}
}
#ifdef DEBUG
// There should be no emitted functions yet.
for (const auto& thing : perScriptData().gcThingList().objects()) {
MOZ_ASSERT(thing.isEmptyGlobalScope() || thing.isScope());
}
#endif
// Emit the hoisted functions to gc-things list. There is no bytecode
// generated yet to bind them.
if (!defineHoistedTopLevelFunctions(body)) {
return false;
}
// Save the last GCThingIndex emitted. The hoisted functions are contained in
// the gc-things list up until this point. This set of gc-things also contains
// the initial scopes (of which there must be at least one).
MOZ_ASSERT(perScriptData().gcThingList().length() > 0);
GCThingIndex lastFun =
GCThingIndex(perScriptData().gcThingList().length() - 1);
#ifdef DEBUG
for (const auto& thing : perScriptData().gcThingList().objects()) {
MOZ_ASSERT(thing.isEmptyGlobalScope() || thing.isScope() ||
thing.isFunction());
}
#endif
// Check for declaration conflicts and initialize the bindings.
// NOTE: The self-hosting top-level script should not populate the builtins
// directly on the GlobalObject (and instead uses JSOp::GetIntrinsic for
// lookups).
if (emitterMode == BytecodeEmitter::EmitterMode::Normal) {
if (!emitGCIndexOp(JSOp::GlobalOrEvalDeclInstantiation, lastFun)) {
return false;
}
}
return true;
}
bool BytecodeEmitter::emitScript(ParseNode* body) {
setScriptStartOffsetIfUnset(body->pn_pos.begin);
MOZ_ASSERT(inPrologue());
TDZCheckCache tdzCache(this);
EmitterScope emitterScope(this);
Maybe<AsyncEmitter> topLevelAwait;
if (sc->isGlobalContext()) {
if (!emitterScope.enterGlobal(this, sc->asGlobalContext())) {
return false;
}
} else if (sc->isEvalContext()) {
if (!emitterScope.enterEval(this, sc->asEvalContext())) {
return false;
}
} else {
MOZ_ASSERT(sc->isModuleContext());
if (!emitterScope.enterModule(this, sc->asModuleContext())) {
return false;
}
if (sc->asModuleContext()->isAsync()) {
topLevelAwait.emplace(this);
}
}
setFunctionBodyEndPos(body->pn_pos.end);
bool isSloppyEval = sc->isEvalContext() && !sc->strict();
if (isSloppyEval && body->is<LexicalScopeNode>() &&
!body->as<LexicalScopeNode>().isEmptyScope()) {
// Sloppy eval scripts may emit hoisted function bindings with a
// `JSOp::GlobalOrEvalDeclInstantiation` opcode below. If this eval needs a
// top-level lexical environment, we must ensure that environment is created
// before those functions are created and bound.
//
// This differs from the global-script case below because the global-lexical
// environment exists outside the script itself. In the case of strict eval
// scripts, the `emitterScope` above is already sufficient.
EmitterScope lexicalEmitterScope(this);
LexicalScopeNode* scope = &body->as<LexicalScopeNode>();
if (!lexicalEmitterScope.enterLexical(this, ScopeKind::Lexical,
scope->scopeBindings())) {
return false;
}
if (!emitDeclarationInstantiation(scope->scopeBody())) {
return false;
}
switchToMain();
ParseNode* scopeBody = scope->scopeBody();
if (!emitLexicalScopeBody(scopeBody)) {
return false;
}
if (!updateSourceCoordNotes(scopeBody->pn_pos.end)) {
return false;
}
if (!lexicalEmitterScope.leave(this)) {
return false;
}
} else {
if (!emitDeclarationInstantiation(body)) {
return false;
}
if (topLevelAwait) {
if (!topLevelAwait->prepareForModule()) {
return false;
}
}
switchToMain();
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
if (!emitterScope.prepareForModuleDisposableScopeBody(this)) {
return false;
}
#endif
if (topLevelAwait) {
if (!topLevelAwait->prepareForBody()) {
return false;
}
}
if (!emitTree(body)) {
// [stack]
return false;
}
if (!updateSourceCoordNotes(body->pn_pos.end)) {
return false;
}
}
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
if (!emitterScope.emitModuleDisposableScopeBodyEnd(this)) {
return false;
}
#endif
if (topLevelAwait) {
if (!topLevelAwait->emitEndModule()) {
return false;
}
}
if (!markSimpleBreakpoint()) {
return false;
}
if (!emitReturnRval()) {
return false;
}
if (!emitterScope.leave(this)) {
return false;
}
if (!NameFunctions(fc, parserAtoms(), body)) {
return false;
}
// Create a Stencil and convert it into a JSScript.
return intoScriptStencil(CompilationStencil::TopLevelIndex);
}
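// Package the emitted bytecode, source notes, resume offsets, scope notes and
// try notes into an ImmutableScriptData. Returns nullptr on failure, e.g. if
// the total slot count would not fit in uint32_t.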
js::UniquePtr<ImmutableScriptData>
BytecodeEmitter::createImmutableScriptData() {
uint32_t nslots;
if (!getNslots(&nslots)) {
return nullptr;
}
bool isFunction = sc->isFunctionBox();
uint16_t funLength = isFunction ? sc->asFunctionBox()->length() : 0;
mozilla::SaturateUint8 propertyCountEstimate = propertyAdditionEstimate;
// Add fields to the property count estimate.
if (isFunction && sc->asFunctionBox()->useMemberInitializers()) {
propertyCountEstimate +=
sc->asFunctionBox()->memberInitializers().numMemberInitializers;
}
return ImmutableScriptData::new_(
fc, mainOffset(), maxFixedSlots, nslots, bodyScopeIndex,
bytecodeSection().numICEntries(), isFunction, funLength,
propertyCountEstimate.value(), bytecodeSection().code(),
bytecodeSection().notes(), bytecodeSection().resumeOffsetList().span(),
bytecodeSection().scopeNoteList().span(),
bytecodeSection().tryNoteList().span());
}
#if defined(ENABLE_DECORATORS) || defined(ENABLE_EXPLICIT_RESOURCE_MANAGEMENT)
bool BytecodeEmitter::emitCheckIsCallable() {
// This emits code to check if the value at the top of the stack is
// callable. The value is left on the stack.
// [stack] VAL
if (!emitAtomOp(JSOp::GetIntrinsic,
TaggedParserAtomIndex::WellKnown::IsCallable())) {
// [stack] VAL ISCALLABLE
return false;
}
if (!emit1(JSOp::Undefined)) {
// [stack] VAL ISCALLABLE UNDEFINED
return false;
}
if (!emitDupAt(2)) {
// [stack] VAL ISCALLABLE UNDEFINED VAL
return false;
}
return emitCall(JSOp::Call, 1);
// [stack] VAL ISCALLABLE_RESULT
}
#endif
bool BytecodeEmitter::getNslots(uint32_t* nslots) const {
uint64_t nslots64 =
maxFixedSlots + static_cast<uint64_t>(bytecodeSection().maxStackDepth());
if (nslots64 > UINT32_MAX) {
reportError(nullptr, JSMSG_NEED_DIET, "script");
return false;
}
*nslots = nslots64;
return true;
}
bool BytecodeEmitter::emitFunctionScript(FunctionNode* funNode) {
MOZ_ASSERT(inPrologue());
ParamsBodyNode* paramsBody = funNode->body();
FunctionBox* funbox = sc->asFunctionBox();
setScriptStartOffsetIfUnset(paramsBody->pn_pos.begin);
// [stack]
FunctionScriptEmitter fse(this, funbox, Some(paramsBody->pn_pos.begin),
Some(paramsBody->pn_pos.end));
if (!fse.prepareForParameters()) {
// [stack]
return false;
}
if (!emitFunctionFormalParameters(paramsBody)) {
// [stack]
return false;
}
if (!fse.prepareForBody()) {
// [stack]
return false;
}
if (!emitTree(paramsBody->body())) {
// [stack]
return false;
}
if (!fse.emitEndBody()) {
// [stack]
return false;
}
if (funbox->index() == CompilationStencil::TopLevelIndex) {
if (!NameFunctions(fc, parserAtoms(), funNode)) {
return false;
}
}
return fse.intoStencil();
}
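// Holds the emitter for the reference of a single destructuring target
// between the two phases of destructuring: emitDestructuringLHSRef, which
// evaluates the reference and may push slots for it, and
// emitSetOrInitializeDestructuring, which performs the actual assignment.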
class js::frontend::DestructuringLHSRef {
struct None {
size_t numReferenceSlots() const { return 0; }
};
mozilla::Variant<None, NameOpEmitter, PropOpEmitter, ElemOpEmitter,
PrivateOpEmitter>
emitter_ = AsVariant(None{});
public:
template <typename T>
void from(T&& emitter) {
emitter_.emplace<T>(std::forward<T>(emitter));
}
template <typename T>
T& emitter() {
return emitter_.as<T>();
}
/**
* Return the number of values pushed onto the stack.
*/
size_t numReferenceSlots() const {
return emitter_.match([](auto& e) { return e.numReferenceSlots(); });
}
};
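// Evaluate the reference for a destructuring target (e.g. the object and key
// of |obj.prop| or |obj[key]|), push whatever slots that reference needs, and
// store the corresponding emitter in |lref| for use by
// emitSetOrInitializeDestructuring. Array and object subpatterns need no
// reference and push nothing.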
bool BytecodeEmitter::emitDestructuringLHSRef(ParseNode* target,
DestructuringFlavor flav,
DestructuringLHSRef& lref) {
#ifdef DEBUG
int depth = bytecodeSection().stackDepth();
#endif
switch (target->getKind()) {
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
// No need to recurse into ParseNodeKind::Array and ParseNodeKind::Object
// subpatterns here, since emitSetOrInitializeDestructuring does the
// recursion when setting or initializing the value.
break;
case ParseNodeKind::Name: {
auto* name = &target->as<NameNode>();
NameOpEmitter noe(this, name->atom(),
flav == DestructuringFlavor::Assignment
? NameOpEmitter::Kind::SimpleAssignment
: NameOpEmitter::Kind::Initialize);
if (!noe.prepareForRhs()) {
return false;
}
lref.from(std::move(noe));
return true;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &target->as<PropertyAccess>();
bool isSuper = prop->isSuper();
PropOpEmitter poe(this, PropOpEmitter::Kind::SimpleAssignment,
isSuper ? PropOpEmitter::ObjKind::Super
: PropOpEmitter::ObjKind::Other);
if (!poe.prepareForObj()) {
return false;
}
if (isSuper) {
UnaryNode* base = &prop->expression().as<UnaryNode>();
if (!emitGetThisForSuperBase(base)) {
// [stack] THIS SUPERBASE
return false;
}
} else {
if (!emitTree(&prop->expression())) {
// [stack] OBJ
return false;
}
}
if (!poe.prepareForRhs()) {
// [stack] # if Super
// [stack] THIS SUPERBASE
// [stack] # otherwise
// [stack] OBJ
return false;
}
lref.from(std::move(poe));
break;
}
case ParseNodeKind::ElemExpr: {
PropertyByValue* elem = &target->as<PropertyByValue>();
bool isSuper = elem->isSuper();
MOZ_ASSERT(!elem->key().isKind(ParseNodeKind::PrivateName));
ElemOpEmitter eoe(this, ElemOpEmitter::Kind::SimpleAssignment,
isSuper ? ElemOpEmitter::ObjKind::Super
: ElemOpEmitter::ObjKind::Other);
if (!emitElemObjAndKey(elem, eoe)) {
// [stack] # if Super
// [stack] THIS KEY
// [stack] # otherwise
// [stack] OBJ KEY
return false;
}
if (!eoe.prepareForRhs()) {
// [stack] # if Super
// [stack] THIS KEY SUPERBASE
// [stack] # otherwise
// [stack] OBJ KEY
return false;
}
lref.from(std::move(eoe));
break;
}
case ParseNodeKind::PrivateMemberExpr: {
PrivateMemberAccess* privateExpr = &target->as<PrivateMemberAccess>();
PrivateOpEmitter xoe(this, PrivateOpEmitter::Kind::SimpleAssignment,
privateExpr->privateName().name());
if (!emitTree(&privateExpr->expression())) {
// [stack] OBJ
return false;
}
if (!xoe.emitReference()) {
// [stack] OBJ NAME
return false;
}
lref.from(std::move(xoe));
break;
}
case ParseNodeKind::CallExpr:
MOZ_ASSERT_UNREACHABLE(
"Parser::reportIfNotValidSimpleAssignmentTarget "
"rejects function calls as assignment "
"targets in destructuring assignments");
break;
default:
MOZ_CRASH("emitDestructuringLHSRef: bad lhs kind");
}
MOZ_ASSERT(bytecodeSection().stackDepth() ==
depth + int(lref.numReferenceSlots()));
return true;
}
bool BytecodeEmitter::emitSetOrInitializeDestructuring(
ParseNode* target, DestructuringFlavor flav, DestructuringLHSRef& lref) {
// Now emit the lvalue opcode sequence. If the lvalue is a nested
// destructuring initialiser-form, call ourselves to handle it, then pop
// the matched value. Otherwise emit an lvalue bytecode sequence followed
// by an assignment op.
switch (target->getKind()) {
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
if (!emitDestructuringOps(&target->as<ListNode>(), flav)) {
return false;
}
// emitDestructuringOps leaves the assigned (to-be-destructured) value on
// top of the stack.
break;
case ParseNodeKind::Name: {
// The environment is already pushed by emitDestructuringLHSRef.
// [stack] ENV? VAL
auto& noe = lref.emitter<NameOpEmitter>();
if (!noe.emitAssignment()) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
// The reference is already pushed by emitDestructuringLHSRef.
// [stack] # if Super
// [stack] THIS SUPERBASE VAL
// [stack] # otherwise
// [stack] OBJ VAL
auto& poe = lref.emitter<PropOpEmitter>();
auto* prop = &target->as<PropertyAccess>();
if (!poe.emitAssignment(prop->key().atom())) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::ElemExpr: {
// The reference is already pushed by emitDestructuringLHSRef.
// [stack] # if Super
// [stack] THIS KEY SUPERBASE VAL
// [stack] # otherwise
// [stack] OBJ KEY VAL
auto& eoe = lref.emitter<ElemOpEmitter>();
if (!eoe.emitAssignment()) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::PrivateMemberExpr: {
// The reference is already pushed by emitDestructuringLHSRef.
// [stack] OBJ NAME VAL
auto& xoe = lref.emitter<PrivateOpEmitter>();
if (!xoe.emitAssignment()) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::CallExpr:
MOZ_ASSERT_UNREACHABLE(
"Parser::reportIfNotValidSimpleAssignmentTarget "
"rejects function calls as assignment "
"targets in destructuring assignments");
break;
default:
MOZ_CRASH("emitSetOrInitializeDestructuring: bad lhs kind");
}
// Pop the assigned value.
if (!emit1(JSOp::Pop)) {
// [stack] # empty
return false;
}
return true;
}
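// In self-hosted code an iterator-protocol call can invoke user (content)
// functions, so map Call/CallIter to their Content variants there; in normal
// scripts the op is returned unchanged.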
JSOp BytecodeEmitter::getIterCallOp(JSOp callOp,
SelfHostedIter selfHostedIter) const {
if (emitterMode == BytecodeEmitter::SelfHosting) {
MOZ_ASSERT(selfHostedIter != SelfHostedIter::Deny);
switch (callOp) {
case JSOp::Call:
return JSOp::CallContent;
case JSOp::CallIter:
return JSOp::CallContentIter;
default:
MOZ_CRASH("Unknown iterator call op");
}
}
return callOp;
}
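// With NEXT and ITER on the stack, this emits roughly:
//
//   result = Call(next, iter);
//   if (iterKind == Async) result = await result;
//   CheckIsObj(result);   // the iterator result must be an object
//
// leaving RESULT on the stack.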
bool BytecodeEmitter::emitIteratorNext(
const Maybe<uint32_t>& callSourceCoordOffset,
IteratorKind iterKind /* = IteratorKind::Sync */,
SelfHostedIter selfHostedIter /* = SelfHostedIter::Deny */) {
MOZ_ASSERT(selfHostedIter != SelfHostedIter::Deny ||
emitterMode != BytecodeEmitter::SelfHosting,
".next() iteration is prohibited in self-hosted code because it "
"can run user-modifiable iteration code");
// [stack] ... NEXT ITER
MOZ_ASSERT(bytecodeSection().stackDepth() >= 2);
if (!emitCall(getIterCallOp(JSOp::Call, selfHostedIter), 0,
callSourceCoordOffset)) {
// [stack] ... RESULT
return false;
}
if (iterKind == IteratorKind::Async) {
if (!emitAwaitInInnermostScope()) {
// [stack] ... RESULT
return false;
}
}
if (!emitCheckIsObj(CheckIsObjectKind::IteratorNext)) {
// [stack] ... RESULT
return false;
}
return true;
}
bool BytecodeEmitter::emitIteratorCloseInScope(
EmitterScope& currentScope,
IteratorKind iterKind /* = IteratorKind::Sync */,
CompletionKind completionKind /* = CompletionKind::Normal */,
SelfHostedIter selfHostedIter /* = SelfHostedIter::Deny */) {
MOZ_ASSERT(selfHostedIter != SelfHostedIter::Deny ||
emitterMode != BytecodeEmitter::SelfHosting,
".close() on iterators is prohibited in self-hosted code because "
"it can run user-modifiable iteration code");
if (iterKind == IteratorKind::Sync) {
return emit2(JSOp::CloseIter, uint8_t(completionKind));
}
// Generate inline logic corresponding to IteratorClose (ES2021 7.4.6) and
// AsyncIteratorClose (ES2021 7.4.7). Step numbers apply to both operations.
//
// Callers need to ensure that the iterator object is at the top of the
// stack.
// For non-Throw completions, we emit the equivalent of:
//
// var returnMethod = GetMethod(iterator, "return");
// if (returnMethod !== undefined) {
// var innerResult = [Await] Call(returnMethod, iterator);
// CheckIsObj(innerResult);
// }
//
// Whereas for Throw completions, we emit:
//
// try {
// var returnMethod = GetMethod(iterator, "return");
// if (returnMethod !== undefined) {
// [Await] Call(returnMethod, iterator);
// }
// } catch {}
Maybe<TryEmitter> tryCatch;
if (completionKind == CompletionKind::Throw) {
tryCatch.emplace(this, TryEmitter::Kind::TryCatch,
TryEmitter::ControlKind::NonSyntactic);
if (!tryCatch->emitTry()) {
// [stack] ... ITER
return false;
}
}
if (!emit1(JSOp::Dup)) {
// [stack] ... ITER ITER
return false;
}
// Steps 1-2 are assertions, step 3 is implicit.
// Step 4.
//
// Get the "return" method.
if (!emitAtomOp(JSOp::GetProp, TaggedParserAtomIndex::WellKnown::return_())) {
// [stack] ... ITER RET
return false;
}
// Step 5.
//
// Do nothing if "return" is undefined or null.
InternalIfEmitter ifReturnMethodIsDefined(this);
if (!emit1(JSOp::IsNullOrUndefined)) {
// [stack] ... ITER RET NULL-OR-UNDEF
return false;
}
if (!ifReturnMethodIsDefined.emitThenElse(
IfEmitter::ConditionKind::Negative)) {
// [stack] ... ITER RET
return false;
}
// Steps 5.c, 7.
//
// Call the "return" method.
if (!emit1(JSOp::Swap)) {
// [stack] ... RET ITER
return false;
}
if (!emitCall(getIterCallOp(JSOp::Call, selfHostedIter), 0)) {
// [stack] ... RESULT
return false;
}
// 7.4.7 AsyncIteratorClose, step 5.d.
if (iterKind == IteratorKind::Async) {
if (completionKind != CompletionKind::Throw) {
// Await clobbers rval, so save the current rval.
if (!emit1(JSOp::GetRval)) {
// [stack] ... RESULT RVAL
return false;
}
if (!emit1(JSOp::Swap)) {
// [stack] ... RVAL RESULT
return false;
}
}
if (!emitAwaitInScope(currentScope)) {
// [stack] ... RVAL? RESULT
return false;
}
if (completionKind != CompletionKind::Throw) {
if (!emit1(JSOp::Swap)) {
// [stack] ... RESULT RVAL
return false;
}
if (!emit1(JSOp::SetRval)) {
// [stack] ... RESULT
return false;
}
}
}
// Step 6 (Handled in caller).
// Step 8.
if (completionKind != CompletionKind::Throw) {
// Check that the "return" result is an object.
if (!emitCheckIsObj(CheckIsObjectKind::IteratorReturn)) {
// [stack] ... RESULT
return false;
}
}
if (!ifReturnMethodIsDefined.emitElse()) {
// [stack] ... ITER RET
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... ITER
return false;
}
if (!ifReturnMethodIsDefined.emitEnd()) {
return false;
}
if (completionKind == CompletionKind::Throw) {
if (!tryCatch->emitCatch()) {
// [stack] ... ITER EXC
return false;
}
// Just ignore the exception thrown by the call and await.
if (!emit1(JSOp::Pop)) {
// [stack] ... ITER
return false;
}
if (!tryCatch->emitEnd()) {
// [stack] ... ITER
return false;
}
}
// Step 9 (Handled in caller).
return emit1(JSOp::Pop);
// [stack] ...
}
template <typename InnerEmitter>
bool BytecodeEmitter::wrapWithDestructuringTryNote(int32_t iterDepth,
InnerEmitter emitter) {
MOZ_ASSERT(bytecodeSection().stackDepth() >= iterDepth);
// Pad a nop at the beginning of the bytecode covered by the trynote so
// that when unwinding environments, we may unwind to the scope
// corresponding to the pc *before* the start, in case the first bytecode
// emitted by |emitter| is the start of an inner scope. See comment above
// UnwindEnvironmentToTryPc.
if (!emit1(JSOp::TryDestructuring)) {
return false;
}
BytecodeOffset start = bytecodeSection().offset();
if (!emitter(this)) {
return false;
}
BytecodeOffset end = bytecodeSection().offset();
if (start != end) {
return addTryNote(TryNoteKind::Destructuring, iterDepth, start, end);
}
return true;
}
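// Replace the value on top of the stack with the default expression when that
// value is undefined, i.e. the |= defaultExpr| part of a destructuring target
// or parameter. Roughly:
//
//   if (value === undefined) value = <evaluate defaultExpr>;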
bool BytecodeEmitter::emitDefault(ParseNode* defaultExpr, ParseNode* pattern) {
// [stack] VALUE
DefaultEmitter de(this);
if (!de.prepareForDefault()) {
// [stack]
return false;
}
if (!emitInitializer(defaultExpr, pattern)) {
// [stack] DEFAULTVALUE
return false;
}
if (!de.emitEnd()) {
// [stack] VALUE/DEFAULTVALUE
return false;
}
return true;
}
bool BytecodeEmitter::emitAnonymousFunctionWithName(
ParseNode* node, TaggedParserAtomIndex name) {
MOZ_ASSERT(node->isDirectRHSAnonFunction());
if (node->is<FunctionNode>()) {
// Function doesn't have 'name' property at this point.
// Set function's name at compile time.
setFunName(node->as<FunctionNode>().funbox(), name);
return emitTree(node);
}
MOZ_ASSERT(node->is<ClassNode>());
return emitClass(&node->as<ClassNode>(), ClassNameKind::InferredName, name);
}
bool BytecodeEmitter::emitAnonymousFunctionWithComputedName(
ParseNode* node, FunctionPrefixKind prefixKind) {
MOZ_ASSERT(node->isDirectRHSAnonFunction());
if (node->is<FunctionNode>()) {
if (!emitTree(node)) {
// [stack] NAME FUN
return false;
}
if (!emitDupAt(1)) {
// [stack] NAME FUN NAME
return false;
}
if (!emit2(JSOp::SetFunName, uint8_t(prefixKind))) {
// [stack] NAME FUN
return false;
}
return true;
}
MOZ_ASSERT(node->is<ClassNode>());
MOZ_ASSERT(prefixKind == FunctionPrefixKind::None);
return emitClass(&node->as<ClassNode>(), ClassNameKind::ComputedName);
}
void BytecodeEmitter::setFunName(FunctionBox* funbox,
TaggedParserAtomIndex name) const {
// The inferred name may already be set if this function is an interpreted
// lazy function and we OOM'ed after we set the inferred name the first
// time.
if (funbox->hasInferredName()) {
MOZ_ASSERT(!funbox->emitBytecode);
MOZ_ASSERT(funbox->displayAtom() == name);
} else {
funbox->setInferredName(name);
}
}
bool BytecodeEmitter::emitInitializer(ParseNode* initializer,
ParseNode* pattern) {
if (initializer->isDirectRHSAnonFunction()) {
MOZ_ASSERT(!pattern->isInParens());
auto name = pattern->as<NameNode>().name();
if (!emitAnonymousFunctionWithName(initializer, name)) {
return false;
}
} else {
if (!emitTree(initializer)) {
return false;
}
}
return true;
}
bool BytecodeEmitter::emitDestructuringOpsArray(ListNode* pattern,
DestructuringFlavor flav) {
MOZ_ASSERT(getSelfHostedIterFor(pattern) == SelfHostedIter::Deny,
"array destructuring is prohibited in self-hosted code because it"
"can run user-modifiable iteration code");
MOZ_ASSERT(pattern->isKind(ParseNodeKind::ArrayExpr));
MOZ_ASSERT(bytecodeSection().stackDepth() != 0);
// Here's pseudo code for |let [a, b, , c=y, ...d] = x;|
//
// Lines that are annotated "covered by trynote" mean that upon throwing
// an exception, IteratorClose is called on iter only if done is false.
//
// let x, y;
// let a, b, c, d;
// let iter, next, lref, result, done, value; // stack values
//
// // NOTE: the fast path for this example is not applicable, because of
// // the spread and the assignment |c=y|, but it is documented here for a
// // simpler example, |let [a,b] = x;|
// //
// // if (IsOptimizableArray(x)) {
// // a = x[0];
// // b = x[1];
// // goto end: // (skip everything below)
// // }
//
// iter = x[Symbol.iterator]();
// next = iter.next;
//
// // ==== emitted by loop for a ====
// lref = GetReference(a); // covered by trynote
//
// result = Call(next, iter);
// done = result.done;
//
// if (done)
// value = undefined;
// else
// value = result.value;
//
// SetOrInitialize(lref, value); // covered by trynote
//
// // ==== emitted by loop for b ====
// lref = GetReference(b); // covered by trynote
//
// if (done) {
// value = undefined;
// } else {
// result = Call(next, iter);
// done = result.done;
// if (done)
// value = undefined;
// else
// value = result.value;
// }
//
// SetOrInitialize(lref, value); // covered by trynote
//
// // ==== emitted by loop for elision ====
// if (done) {
// value = undefined;
// } else {
// result = Call(next, iter);
// done = result.done;
// if (done)
// value = undefined;
// else
// value = result.value;
// }
//
// // ==== emitted by loop for c ====
// lref = GetReference(c); // covered by trynote
//
// if (done) {
// value = undefined;
// } else {
// result = Call(next, iter);
// done = result.done;
// if (done)
// value = undefined;
// else
// value = result.value;
// }
//
// if (value === undefined)
// value = y; // covered by trynote
//
// SetOrInitialize(lref, value); // covered by trynote
//
// // ==== emitted by loop for d ====
// lref = GetReference(d); // covered by trynote
//
// if (done)
// value = [];
// else
// value = [...iter];
//
// SetOrInitialize(lref, value); // covered by trynote
//
// // === emitted after loop ===
// if (!done)
// IteratorClose(iter);
//
// end:
bool isEligibleForArrayOptimizations = true;
for (ParseNode* member : pattern->contents()) {
switch (member->getKind()) {
case ParseNodeKind::Elision:
break;
case ParseNodeKind::Name: {
auto name = member->as<NameNode>().name();
NameLocation loc = lookupName(name);
if (loc.kind() != NameLocation::Kind::ArgumentSlot &&
loc.kind() != NameLocation::Kind::FrameSlot &&
loc.kind() != NameLocation::Kind::EnvironmentCoordinate) {
isEligibleForArrayOptimizations = false;
}
break;
}
default:
// Unfortunately we can't handle any recursive destructuring,
// because we can't guarantee that the recursed-into parts
// won't run code which invalidates our constraints. We also
// cannot handle ParseNodeKind::AssignExpr for similar reasons.
isEligibleForArrayOptimizations = false;
break;
}
if (!isEligibleForArrayOptimizations) {
break;
}
}
// Use an iterator to destructure the RHS, instead of index lookup. We
// must leave the *original* value on the stack.
if (!emit1(JSOp::Dup)) {
// [stack] ... OBJ OBJ
return false;
}
Maybe<InternalIfEmitter> ifArrayOptimizable;
if (isEligibleForArrayOptimizations) {
ifArrayOptimizable.emplace(
this, BranchEmitterBase::LexicalKind::MayContainLexicalAccessInBranch);
if (!emit1(JSOp::Dup)) {
// [stack] OBJ OBJ
return false;
}
if (!emit1(JSOp::OptimizeGetIterator)) {
// [stack] OBJ OBJ IS_OPTIMIZABLE
return false;
}
if (!ifArrayOptimizable->emitThenElse()) {
// [stack] OBJ OBJ
return false;
}
if (!emitAtomOp(JSOp::GetProp,
TaggedParserAtomIndex::WellKnown::length())) {
// [stack] OBJ LENGTH
return false;
}
if (!emit1(JSOp::Swap)) {
// [stack] LENGTH OBJ
return false;
}
uint32_t idx = 0;
for (ParseNode* member : pattern->contents()) {
if (member->isKind(ParseNodeKind::Elision)) {
idx += 1;
continue;
}
MOZ_ASSERT(member->isKind(ParseNodeKind::Name));
if (!emit1(JSOp::Dup)) {
// [stack] LENGTH OBJ OBJ
return false;
}
if (!emitNumberOp(idx)) {
// [stack] LENGTH OBJ OBJ IDX
return false;
}
if (!emit1(JSOp::Dup)) {
// [stack] LENGTH OBJ OBJ IDX IDX
return false;
}
if (!emitDupAt(4)) {
// [stack] LENGTH OBJ OBJ IDX IDX LENGTH
return false;
}
if (!emit1(JSOp::Lt)) {
// [stack] LENGTH OBJ OBJ IDX IS_IN_DENSE_BOUNDS
return false;
}
InternalIfEmitter isInDenseBounds(this);
if (!isInDenseBounds.emitThenElse()) {
// [stack] LENGTH OBJ OBJ IDX
return false;
}
if (!emit1(JSOp::GetElem)) {
// [stack] LENGTH OBJ VALUE
return false;
}
if (!isInDenseBounds.emitElse()) {
// [stack] LENGTH OBJ OBJ IDX
return false;
}
if (!emitPopN(2)) {
// [stack] LENGTH OBJ
return false;
}
if (!emit1(JSOp::Undefined)) {
// [stack] LENGTH OBJ UNDEFINED
return false;
}
if (!isInDenseBounds.emitEnd()) {
// [stack] LENGTH OBJ VALUE|UNDEFINED
return false;
}
DestructuringLHSRef lref;
if (!emitDestructuringLHSRef(member, flav, lref)) {
// [stack] LENGTH OBJ
return false;
}
if (!emitSetOrInitializeDestructuring(member, flav, lref)) {
// [stack] LENGTH OBJ
return false;
}
idx += 1;
}
if (!emit1(JSOp::Swap)) {
// [stack] OBJ LENGTH
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] OBJ
return false;
}
if (!ifArrayOptimizable->emitElse()) {
// [stack] OBJ OBJ
return false;
}
}
if (!emitIterator(SelfHostedIter::Deny)) {
// [stack] ... OBJ NEXT ITER
return false;
}
// For an empty pattern [], call IteratorClose unconditionally. Nothing
// else needs to be done.
if (!pattern->head()) {
if (!emit1(JSOp::Swap)) {
// [stack] ... OBJ ITER NEXT
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ ITER
return false;
}
if (!emitIteratorCloseInInnermostScope()) {
// [stack] ... OBJ
return false;
}
if (ifArrayOptimizable.isSome()) {
if (!ifArrayOptimizable->emitEnd()) {
// [stack] OBJ
return false;
}
}
return true;
}
// Push an initial FALSE value for DONE.
if (!emit1(JSOp::False)) {
// [stack] ... OBJ NEXT ITER FALSE
return false;
}
// TryNoteKind::Destructuring expects the iterator and the done value
// to be the second to top and the top of the stack, respectively.
// IteratorClose is called upon exception only if done is false.
int32_t tryNoteDepth = bytecodeSection().stackDepth();
for (ParseNode* member : pattern->contents()) {
bool isFirst = member == pattern->head();
DebugOnly<bool> hasNext = !!member->pn_next;
ParseNode* subpattern;
if (member->isKind(ParseNodeKind::Spread)) {
subpattern = member->as<UnaryNode>().kid();
MOZ_ASSERT(!subpattern->isKind(ParseNodeKind::AssignExpr));
} else {
subpattern = member;
}
ParseNode* lhsPattern = subpattern;
ParseNode* pndefault = nullptr;
if (subpattern->isKind(ParseNodeKind::AssignExpr)) {
lhsPattern = subpattern->as<AssignmentNode>().left();
pndefault = subpattern->as<AssignmentNode>().right();
}
// Spec requires LHS reference to be evaluated first.
DestructuringLHSRef lref;
bool isElision = lhsPattern->isKind(ParseNodeKind::Elision);
if (!isElision) {
auto emitLHSRef = [lhsPattern, flav, &lref](BytecodeEmitter* bce) {
return bce->emitDestructuringLHSRef(lhsPattern, flav, lref);
// [stack] ... OBJ NEXT ITER DONE LREF*
};
if (!wrapWithDestructuringTryNote(tryNoteDepth, emitLHSRef)) {
return false;
}
}
// Number of stack slots emitted for the LHS reference.
size_t emitted = lref.numReferenceSlots();
// Pick the DONE value to the top of the stack.
if (emitted) {
if (!emitPickN(emitted)) {
// [stack] ... OBJ NEXT ITER LREF* DONE
return false;
}
}
if (isFirst) {
// If this element is the first, DONE is always FALSE, so pop it.
//
// Non-first elements should emit if-else depending on the
// member pattern, below.
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ NEXT ITER LREF*
return false;
}
}
if (member->isKind(ParseNodeKind::Spread)) {
InternalIfEmitter ifThenElse(this);
if (!isFirst) {
// If the spread is not the first element of the pattern, the
// iterator may already be completed.
// [stack] ... OBJ NEXT ITER LREF* DONE
if (!ifThenElse.emitThenElse()) {
// [stack] ... OBJ NEXT ITER LREF*
return false;
}
if (!emitUint32Operand(JSOp::NewArray, 0)) {
// [stack] ... OBJ NEXT ITER LREF* ARRAY
return false;
}
if (!ifThenElse.emitElse()) {
// [stack] ... OBJ NEXT ITER LREF*
return false;
}
}
// If iterator is not completed, create a new array with the rest
// of the iterator.
if (!emitDupAt(emitted + 1, 2)) {
// [stack] ... OBJ NEXT ITER LREF* NEXT ITER
return false;
}
if (!emitUint32Operand(JSOp::NewArray, 0)) {
// [stack] ... OBJ NEXT ITER LREF* NEXT ITER ARRAY
return false;
}
if (!emitNumberOp(0)) {
// [stack] ... OBJ NEXT ITER LREF* NEXT ITER ARRAY INDEX
return false;
}
if (!emitSpread(SelfHostedIter::Deny)) {
// [stack] ... OBJ NEXT ITER LREF* ARRAY INDEX
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ NEXT ITER LREF* ARRAY
return false;
}
if (!isFirst) {
if (!ifThenElse.emitEnd()) {
return false;
}
MOZ_ASSERT(ifThenElse.pushed() == 1);
}
// At this point the iterator is done. Unpick a TRUE value for DONE above
// ITER.
if (!emit1(JSOp::True)) {
// [stack] ... OBJ NEXT ITER LREF* ARRAY TRUE
return false;
}
if (!emitUnpickN(emitted + 1)) {
// [stack] ... OBJ NEXT ITER TRUE LREF* ARRAY
return false;
}
auto emitAssignment = [lhsPattern, flav, &lref](BytecodeEmitter* bce) {
return bce->emitSetOrInitializeDestructuring(lhsPattern, flav, lref);
// [stack] ... OBJ NEXT ITER TRUE
};
if (!wrapWithDestructuringTryNote(tryNoteDepth, emitAssignment)) {
return false;
}
MOZ_ASSERT(!hasNext);
break;
}
InternalIfEmitter ifAlreadyDone(this);
if (!isFirst) {
// [stack] ... OBJ NEXT ITER LREF* DONE
if (!ifAlreadyDone.emitThenElse()) {
// [stack] ... OBJ NEXT ITER LREF*
return false;
}
if (!emit1(JSOp::Undefined)) {
// [stack] ... OBJ NEXT ITER LREF* UNDEF
return false;
}
if (!emit1(JSOp::NopDestructuring)) {
// [stack] ... OBJ NEXT ITER LREF* UNDEF
return false;
}
// The iterator is done. Unpick a TRUE value for DONE above ITER.
if (!emit1(JSOp::True)) {
// [stack] ... OBJ NEXT ITER LREF* UNDEF TRUE
return false;
}
if (!emitUnpickN(emitted + 1)) {
// [stack] ... OBJ NEXT ITER TRUE LREF* UNDEF
return false;
}
if (!ifAlreadyDone.emitElse()) {
// [stack] ... OBJ NEXT ITER LREF*
return false;
}
}
if (!emitDupAt(emitted + 1, 2)) {
// [stack] ... OBJ NEXT ITER LREF* NEXT ITER
return false;
}
if (!emitIteratorNext(Some(pattern->pn_pos.begin))) {
// [stack] ... OBJ NEXT ITER LREF* RESULT
return false;
}
if (!emit1(JSOp::Dup)) {
// [stack] ... OBJ NEXT ITER LREF* RESULT RESULT
return false;
}
if (!emitAtomOp(JSOp::GetProp, TaggedParserAtomIndex::WellKnown::done())) {
// [stack] ... OBJ NEXT ITER LREF* RESULT DONE
return false;
}
if (!emit1(JSOp::Dup)) {
// [stack] ... OBJ NEXT ITER LREF* RESULT DONE DONE
return false;
}
if (!emitUnpickN(emitted + 2)) {
// [stack] ... OBJ NEXT ITER DONE LREF* RESULT DONE
return false;
}
InternalIfEmitter ifDone(this);
if (!ifDone.emitThenElse()) {
// [stack] ... OBJ NEXT ITER DONE LREF* RESULT
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ NEXT ITER DONE LREF*
return false;
}
if (!emit1(JSOp::Undefined)) {
// [stack] ... OBJ NEXT ITER DONE LREF* UNDEF
return false;
}
if (!emit1(JSOp::NopDestructuring)) {
// [stack] ... OBJ NEXT ITER DONE LREF* UNDEF
return false;
}
if (!ifDone.emitElse()) {
// [stack] ... OBJ NEXT ITER DONE LREF* RESULT
return false;
}
if (!emitAtomOp(JSOp::GetProp, TaggedParserAtomIndex::WellKnown::value())) {
// [stack] ... OBJ NEXT ITER DONE LREF* VALUE
return false;
}
if (!ifDone.emitEnd()) {
return false;
}
MOZ_ASSERT(ifDone.pushed() == 0);
if (!isFirst) {
if (!ifAlreadyDone.emitEnd()) {
return false;
}
MOZ_ASSERT(ifAlreadyDone.pushed() == 2);
}
if (pndefault) {
auto emitDefault = [pndefault, lhsPattern](BytecodeEmitter* bce) {
return bce->emitDefault(pndefault, lhsPattern);
// [stack] ... OBJ NEXT ITER DONE LREF* VALUE
};
if (!wrapWithDestructuringTryNote(tryNoteDepth, emitDefault)) {
return false;
}
}
if (!isElision) {
auto emitAssignment = [lhsPattern, flav, &lref](BytecodeEmitter* bce) {
return bce->emitSetOrInitializeDestructuring(lhsPattern, flav, lref);
// [stack] ... OBJ NEXT ITER DONE
};
if (!wrapWithDestructuringTryNote(tryNoteDepth, emitAssignment)) {
return false;
}
} else {
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ NEXT ITER DONE
return false;
}
}
}
// The last DONE value is on top of the stack. If not DONE, call
// IteratorClose.
// [stack] ... OBJ NEXT ITER DONE
InternalIfEmitter ifDone(this);
if (!ifDone.emitThenElse()) {
// [stack] ... OBJ NEXT ITER
return false;
}
if (!emitPopN(2)) {
// [stack] ... OBJ
return false;
}
if (!ifDone.emitElse()) {
// [stack] ... OBJ NEXT ITER
return false;
}
if (!emit1(JSOp::Swap)) {
// [stack] ... OBJ ITER NEXT
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... OBJ ITER
return false;
}
if (!emitIteratorCloseInInnermostScope()) {
// [stack] ... OBJ
return false;
}
if (!ifDone.emitEnd()) {
return false;
}
if (ifArrayOptimizable.isSome()) {
if (!ifArrayOptimizable->emitEnd()) {
// [stack] OBJ
return false;
}
}
return true;
}
bool BytecodeEmitter::emitComputedPropertyName(UnaryNode* computedPropName) {
MOZ_ASSERT(computedPropName->isKind(ParseNodeKind::ComputedName));
return emitTree(computedPropName->kid()) && emit1(JSOp::ToPropertyKey);
}
bool BytecodeEmitter::emitDestructuringOpsObject(ListNode* pattern,
DestructuringFlavor flav) {
MOZ_ASSERT(pattern->isKind(ParseNodeKind::ObjectExpr));
// [stack] ... RHS
MOZ_ASSERT(bytecodeSection().stackDepth() > 0);
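// Rough sketch of what is emitted for |let {a, b: c = y, ...rest} = x;|
// (illustrative only; the [stack] annotations below show the exact stack
// shuffling):
//
//   CheckObjCoercible(x);
//   set = {a: undefined, b: undefined};     // rest-exclusion set (rest only)
//
//   // ==== emitted per member, e.g. for |b: c = y| ====
//   lref = GetReference(c);
//   value = x.b;                            // GetProp, or GetElem for
//                                           // computed keys
//   if (value === undefined) value = y;     // default, if present
//   SetOrInitialize(lref, value);
//
//   // ==== emitted for |...rest| ====
//   lref = GetReference(rest);
//   target = {};
//   CopyDataProperties(target, x, set);     // copy all but the excluded keys
//   SetOrInitialize(lref, target);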
if (!emit1(JSOp::CheckObjCoercible)) {
// [stack] ... RHS
return false;
}
bool needsRestPropertyExcludedSet =
pattern->count() > 1 && pattern->last()->isKind(ParseNodeKind::Spread);
if (needsRestPropertyExcludedSet) {
if (!emitDestructuringObjRestExclusionSet(pattern)) {
// [stack] ... RHS SET
return false;
}
if (!emit1(JSOp::Swap)) {
// [stack] ... SET RHS
return false;
}
}
for (ParseNode* member : pattern->contents()) {
ParseNode* subpattern;
bool hasKeyOnStack = false;
if (member->isKind(ParseNodeKind::MutateProto) ||
member->isKind(ParseNodeKind::Spread)) {
subpattern = member->as<UnaryNode>().kid();
MOZ_ASSERT_IF(member->isKind(ParseNodeKind::Spread),
!subpattern->isKind(ParseNodeKind::AssignExpr));
} else {
MOZ_ASSERT(member->isKind(ParseNodeKind::PropertyDefinition) ||
member->isKind(ParseNodeKind::Shorthand));
subpattern = member->as<BinaryNode>().right();
// Computed property names are evaluated before the subpattern.
ParseNode* key = member->as<BinaryNode>().left();
if (key->isKind(ParseNodeKind::ComputedName)) {
if (!emitComputedPropertyName(&key->as<UnaryNode>())) {
// [stack] ... SET? RHS KEY
return false;
}
hasKeyOnStack = true;
}
}
ParseNode* lhs = subpattern;
ParseNode* pndefault = nullptr;
if (subpattern->isKind(ParseNodeKind::AssignExpr)) {
lhs = subpattern->as<AssignmentNode>().left();
pndefault = subpattern->as<AssignmentNode>().right();
}
// Spec requires LHS reference to be evaluated first.
DestructuringLHSRef lref;
if (!emitDestructuringLHSRef(lhs, flav, lref)) {
// [stack] ... SET? RHS KEY? LREF*
return false;
}
// Number of stack slots emitted for the LHS reference.
size_t emitted = lref.numReferenceSlots();
// Duplicate the value being destructured to use as a reference base.
if (!emitDupAt(emitted + hasKeyOnStack)) {
// [stack] ... SET? RHS KEY? LREF* RHS
return false;
}
if (member->isKind(ParseNodeKind::Spread)) {
if (!updateSourceCoordNotes(member->pn_pos.begin)) {
return false;
}
if (!emit2(JSOp::NewInit, 0)) {
// [stack] ... SET? RHS LREF* RHS TARGET
return false;
}
if (!emit1(JSOp::Dup)) {
// [stack] ... SET? RHS LREF* RHS TARGET TARGET
return false;
}
if (!emit2(JSOp::Pick, 2)) {
// [stack] ... SET? RHS LREF* TARGET TARGET RHS
return false;
}
if (needsRestPropertyExcludedSet) {
if (!emit2(JSOp::Pick, emitted + 4)) {
// [stack] ... RHS LREF* TARGET TARGET RHS SET
return false;
}
}
CopyOption option = needsRestPropertyExcludedSet ? CopyOption::Filtered
: CopyOption::Unfiltered;
if (!emitCopyDataProperties(option)) {
// [stack] ... RHS LREF* TARGET
return false;
}
// Destructure TARGET per this member's lhs.
if (!emitSetOrInitializeDestructuring(lhs, flav, lref)) {
// [stack] ... RHS
return false;
}
MOZ_ASSERT(member == pattern->last(), "Rest property is always last");
break;
}
// Now push the property value currently being matched, which is the value
// of the current property name "label" on the left of a colon in the object
// initialiser.
if (member->isKind(ParseNodeKind::MutateProto)) {
if (!emitAtomOp(JSOp::GetProp,
TaggedParserAtomIndex::WellKnown::proto_())) {
// [stack] ... SET? RHS LREF* PROP
return false;
}
} else {
MOZ_ASSERT(member->isKind(ParseNodeKind::PropertyDefinition) ||
member->isKind(ParseNodeKind::Shorthand));
ParseNode* key = member->as<BinaryNode>().left();
if (key->isKind(ParseNodeKind::ObjectPropertyName) ||
key->isKind(ParseNodeKind::StringExpr)) {
if (!emitAtomOp(JSOp::GetProp, key->as<NameNode>().atom())) {
// [stack] ... SET? RHS LREF* PROP
return false;
}
} else {
if (key->isKind(ParseNodeKind::NumberExpr)) {
if (!emitNumberOp(key->as<NumericLiteral>().value())) {
// [stack] ... SET? RHS LREF* RHS KEY
return false;
}
} else {
// Otherwise this is a computed property name. BigInt keys are parsed
// as (synthetic) computed property names, too.
MOZ_ASSERT(key->isKind(ParseNodeKind::ComputedName));
MOZ_ASSERT(hasKeyOnStack);
if (!emit2(JSOp::Pick, emitted + 1)) {
// [stack] ... SET? RHS LREF* RHS KEY
return false;
}
// Add the computed property key to the exclusion set.
if (needsRestPropertyExcludedSet) {
if (!emitDupAt(emitted + 3)) {
// [stack] ... SET RHS LREF* RHS KEY SET
return false;
}
if (!emitDupAt(1)) {
// [stack] ... SET RHS LREF* RHS KEY SET KEY
return false;
}
if (!emit1(JSOp::Undefined)) {
// [stack] ... SET RHS LREF* RHS KEY SET KEY UNDEFINED
return false;
}
if (!emit1(JSOp::InitElem)) {
// [stack] ... SET RHS LREF* RHS KEY SET
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack] ... SET RHS LREF* RHS KEY
return false;
}
}
}
// Get the property value.
if (!emitElemOpBase(JSOp::GetElem)) {
// [stack] ... SET? RHS LREF* PROP
return false;
}
}
}
if (pndefault) {
if (!emitDefault(pndefault, lhs)) {
// [stack] ... SET? RHS LREF* VALUE
return false;
}
}
// Destructure PROP per this member's lhs.
if (!emitSetOrInitializeDestructuring(lhs, flav, lref)) {
// [stack] ... SET? RHS
return false;
}
}
return true;
}
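// For object destructuring with a rest property, |{a, b, ...rest} = obj|, the
// rest target receives a copy of |obj| minus the already-matched keys. That
// filtering is driven by an "exclusion set": a plain object whose own keys are
// the property names to exclude (their values are just |undefined|). The
// helpers below decide whether that set can be emitted as a single object
// literal and build it.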
static bool IsDestructuringRestExclusionSetObjLiteralCompatible(
ListNode* pattern) {
uint32_t propCount = 0;
for (ParseNode* member : pattern->contents()) {
if (member->isKind(ParseNodeKind::Spread)) {
MOZ_ASSERT(!member->pn_next, "unexpected trailing element after spread");
break;
}
propCount++;
if (member->isKind(ParseNodeKind::MutateProto)) {
continue;
}
ParseNode* key = member->as<BinaryNode>().left();
if (key->isKind(ParseNodeKind::ObjectPropertyName) ||
key->isKind(ParseNodeKind::StringExpr)) {
continue;
}
// Number and BigInt keys aren't yet supported. Computed property names need
// to be added dynamically.
MOZ_ASSERT(key->isKind(ParseNodeKind::NumberExpr) ||
key->isKind(ParseNodeKind::BigIntExpr) ||
key->isKind(ParseNodeKind::ComputedName));
return false;
}
if (propCount > SharedPropMap::MaxPropsForNonDictionary) {
// JSOp::NewObject cannot accept dictionary-mode objects.
return false;
}
return true;
}
bool BytecodeEmitter::emitDestructuringObjRestExclusionSet(ListNode* pattern) {
MOZ_ASSERT(pattern->isKind(ParseNodeKind::ObjectExpr));
MOZ_ASSERT(pattern->last()->isKind(ParseNodeKind::Spread));
// See if we can use ObjLiteral to construct the exclusion set object.
if (IsDestructuringRestExclusionSetObjLiteralCompatible(pattern)) {
if (!emitDestructuringRestExclusionSetObjLiteral(pattern)) {
// [stack] OBJ
return false;
}
} else {
// Take the slow but sure way and start off with a blank object.
if (!emit2(JSOp::NewInit, 0)) {
// [stack] OBJ
return false;
}
}
for (ParseNode* member : pattern->contents()) {
if (member->isKind(ParseNodeKind::Spread)) {
MOZ_ASSERT(!member->pn_next, "unexpected trailing element after spread");
break;
}
TaggedParserAtomIndex pnatom;
if (member->isKind(ParseNodeKind::MutateProto)) {
pnatom = TaggedParserAtomIndex::WellKnown::proto_();
} else {
ParseNode* key = member->as<BinaryNode>().left();
if (key->isKind(ParseNodeKind::ObjectPropertyName) ||
key->isKind(ParseNodeKind::StringExpr)) {
pnatom = key->as<NameNode>().atom();
} else if (key->isKind(ParseNodeKind::NumberExpr)) {
if (!emitNumberOp(key->as<NumericLiteral>().value())) {
return false;
}
} else {
// Otherwise this is a computed property name which needs to be added
// dynamically. BigInt keys are parsed as (synthetic) computed property
// names, too.
MOZ_ASSERT(key->isKind(ParseNodeKind::ComputedName));
continue;
}
}
// Initialize elements with |undefined|.
if (!emit1(JSOp::Undefined)) {
return false;
}
if (!pnatom) {
if (!emit1(JSOp::InitElem)) {
return false;
}
} else {
if (!emitAtomOp(JSOp::InitProp, pnatom)) {
return false;
}
}
}
return true;
}
bool BytecodeEmitter::emitDestructuringOps(ListNode* pattern,
DestructuringFlavor flav) {
if (pattern->isKind(ParseNodeKind::ArrayExpr)) {
return emitDestructuringOpsArray(pattern, flav);
}
return emitDestructuringOpsObject(pattern, flav);
}
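// Emit a template string such as |`a${b}c`| as a left-to-right concatenation:
// push the first non-empty piece, then for each following piece push it
// (converting interpolated expressions with JSOp::ToString) and JSOp::Add.
// Empty literal pieces are skipped; if every piece is empty, a single empty
// string is pushed instead.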
bool BytecodeEmitter::emitTemplateString(ListNode* templateString) {
bool pushedString = false;
for (ParseNode* item : templateString->contents()) {
bool isString = (item->getKind() == ParseNodeKind::StringExpr ||
item->getKind() == ParseNodeKind::TemplateStringExpr);
// Skip empty strings. These are very common: a template string like
// `${a}${b}` has three empty strings and without this optimization
// we'd emit four JSOp::Add operations instead of just one.
if (isString && item->as<NameNode>().atom() ==
TaggedParserAtomIndex::WellKnown::empty()) {
continue;
}
if (!isString) {
// We update source notes before emitting the expression
if (!updateSourceCoordNotes(item->pn_pos.begin)) {
return false;
}
}
if (!emitTree(item)) {
return false;
}
if (!isString) {
// We need to convert the expression to a string
if (!emit1(JSOp::ToString)) {
return false;
}
}
if (pushedString) {
// We've pushed two strings onto the stack. Add them together, leaving
// just one.
if (!emit1(JSOp::Add)) {
return false;
}
} else {
pushedString = true;
}
}
if (!pushedString) {
// All strings were empty, this can happen for something like `${""}`.
// Just push an empty string.
if (!emitStringOp(JSOp::String,
TaggedParserAtomIndex::WellKnown::empty())) {
return false;
}
}
return true;
}
bool BytecodeEmitter::emitDeclarationList(ListNode* declList) {
for (ParseNode* decl : declList->contents()) {
ParseNode* pattern;
ParseNode* initializer;
if (decl->isKind(ParseNodeKind::Name)) {
pattern = decl;
initializer = nullptr;
} else {
AssignmentNode* assignNode = &decl->as<AssignmentNode>();
pattern = assignNode->left();
initializer = assignNode->right();
}
if (pattern->isKind(ParseNodeKind::Name)) {
// initializer can be null here.
if (!emitSingleDeclaration(declList, &pattern->as<NameNode>(),
initializer)) {
return false;
}
} else {
MOZ_ASSERT(pattern->isKind(ParseNodeKind::ArrayExpr) ||
pattern->isKind(ParseNodeKind::ObjectExpr));
MOZ_ASSERT(initializer != nullptr);
if (!updateSourceCoordNotes(initializer->pn_pos.begin)) {
return false;
}
if (!markStepBreakpoint()) {
return false;
}
if (!emitTree(initializer)) {
return false;
}
if (!emitDestructuringOps(&pattern->as<ListNode>(),
DestructuringFlavor::Declaration)) {
return false;
}
if (!emit1(JSOp::Pop)) {
return false;
}
}
}
return true;
}
bool BytecodeEmitter::emitSingleDeclaration(ListNode* declList, NameNode* decl,
ParseNode* initializer) {
MOZ_ASSERT(decl->isKind(ParseNodeKind::Name));
// Nothing to do for initializer-less 'var' declarations, as there's no TDZ.
if (!initializer && declList->isKind(ParseNodeKind::VarStmt)) {
return true;
}
auto nameAtom = decl->name();
NameOpEmitter noe(this, nameAtom, NameOpEmitter::Kind::Initialize);
if (!noe.prepareForRhs()) {
// [stack] ENV?
return false;
}
if (!initializer) {
// Lexical declarations without an initializer are initialized to
// undefined.
MOZ_ASSERT(declList->isKind(ParseNodeKind::LetDecl),
"var declarations without initializers handled above, "
"and const declarations must have initializers");
if (!emit1(JSOp::Undefined)) {
// [stack] ENV? UNDEF
return false;
}
} else {
MOZ_ASSERT(initializer);
if (!updateSourceCoordNotes(initializer->pn_pos.begin)) {
return false;
}
if (!markStepBreakpoint()) {
return false;
}
if (!emitInitializer(initializer, decl)) {
// [stack] ENV? V
return false;
}
}
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
if (declList->isKind(ParseNodeKind::UsingDecl)) {
if (!innermostEmitterScope()->prepareForDisposableAssignment(
UsingHint::Sync)) {
// [stack] ENV? V
return false;
}
} else if (declList->isKind(ParseNodeKind::AwaitUsingDecl)) {
if (!innermostEmitterScope()->prepareForDisposableAssignment(
UsingHint::Async)) {
// [stack] ENV? V
return false;
}
}
#endif
if (!noe.emitAssignment()) {
// [stack] V
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack]
return false;
}
return true;
}
bool BytecodeEmitter::emitAssignmentRhs(
ParseNode* rhs, TaggedParserAtomIndex anonFunctionName) {
if (rhs->isDirectRHSAnonFunction()) {
if (anonFunctionName) {
return emitAnonymousFunctionWithName(rhs, anonFunctionName);
}
return emitAnonymousFunctionWithComputedName(rhs, FunctionPrefixKind::None);
}
return emitTree(rhs);
}
// The RHS value to assign is already on the stack, i.e., the next enumeration
// value in a for-in or for-of loop. Offset is the location in the stack of the
// already-emitted rhs. If we emitted a JSOp::BindUnqualifiedName or
// JSOp::BindUnqualifiedGName, then the scope is on the top of the stack and we
// need to dig one deeper to get the right RHS value.
bool BytecodeEmitter::emitAssignmentRhs(uint8_t offset) {
if (offset != 1) {
return emitPickN(offset - 1);
}
return true;
}
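// Map a compound-assignment parse node kind to the binary JSOp implementing
// it (e.g. |+=| to JSOp::Add). Plain assignment and init return JSOp::Nop,
// which callers use to detect the non-compound case.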
static inline JSOp CompoundAssignmentParseNodeKindToJSOp(ParseNodeKind pnk) {
switch (pnk) {
case ParseNodeKind::InitExpr:
return JSOp::Nop;
case ParseNodeKind::AssignExpr:
return JSOp::Nop;
case ParseNodeKind::AddAssignExpr:
return JSOp::Add;
case ParseNodeKind::SubAssignExpr:
return JSOp::Sub;
case ParseNodeKind::BitOrAssignExpr:
return JSOp::BitOr;
case ParseNodeKind::BitXorAssignExpr:
return JSOp::BitXor;
case ParseNodeKind::BitAndAssignExpr:
return JSOp::BitAnd;
case ParseNodeKind::LshAssignExpr:
return JSOp::Lsh;
case ParseNodeKind::RshAssignExpr:
return JSOp::Rsh;
case ParseNodeKind::UrshAssignExpr:
return JSOp::Ursh;
case ParseNodeKind::MulAssignExpr:
return JSOp::Mul;
case ParseNodeKind::DivAssignExpr:
return JSOp::Div;
case ParseNodeKind::ModAssignExpr:
return JSOp::Mod;
case ParseNodeKind::PowAssignExpr:
return JSOp::Pow;
case ParseNodeKind::CoalesceAssignExpr:
case ParseNodeKind::OrAssignExpr:
case ParseNodeKind::AndAssignExpr:
// Short-circuit assignment operators are handled elsewhere.
[[fallthrough]];
default:
MOZ_CRASH("unexpected compound assignment op");
}
}
bool BytecodeEmitter::emitAssignmentOrInit(ParseNodeKind kind, ParseNode* lhs,
ParseNode* rhs) {
JSOp compoundOp = CompoundAssignmentParseNodeKindToJSOp(kind);
bool isCompound = compoundOp != JSOp::Nop;
bool isInit = kind == ParseNodeKind::InitExpr;
// We estimate the number of properties this function could create when used
// as a constructor, simply by counting |this.foo = ...| assignment and init
// expressions.
//
// This currently doesn't handle |this[x] = foo;|.
if (isInit || kind == ParseNodeKind::AssignExpr) {
if (lhs->isKind(ParseNodeKind::DotExpr)) {
if (lhs->as<PropertyAccess>().expression().isKind(
ParseNodeKind::ThisExpr)) {
propertyAdditionEstimate++;
}
}
}
MOZ_ASSERT_IF(isInit, lhs->isKind(ParseNodeKind::DotExpr) ||
lhs->isKind(ParseNodeKind::ElemExpr) ||
lhs->isKind(ParseNodeKind::PrivateMemberExpr));
// |name| is used within NameOpEmitter, so its lifetime must surpass |noe|.
TaggedParserAtomIndex name;
Maybe<NameOpEmitter> noe;
Maybe<PropOpEmitter> poe;
Maybe<ElemOpEmitter> eoe;
Maybe<PrivateOpEmitter> xoe;
// Deal with non-name assignments.
uint8_t offset = 1;
// Purpose of anonFunctionName:
//
// In normal name assignments (`f = function(){}`), an anonymous function gets
// an inferred name based on the left-hand side name node.
//
// In normal property assignments (`obj.x = function(){}`), the anonymous
// function does not have a computed name, and rhs->isDirectRHSAnonFunction()
// will be false (and anonFunctionName will not be used). However, in field
// initializers (`class C { x = function(){} }`), field initialization is
// implemented via a property or elem assignment (where we are now), and
// rhs->isDirectRHSAnonFunction() is set - so we'll assign the name of the
// function.
TaggedParserAtomIndex anonFunctionName;
switch (lhs->getKind()) {
case ParseNodeKind::Name: {
name = lhs->as<NameNode>().name();
anonFunctionName = name;
noe.emplace(this, name,
isCompound ? NameOpEmitter::Kind::CompoundAssignment
: NameOpEmitter::Kind::SimpleAssignment);
break;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &lhs->as<PropertyAccess>();
bool isSuper = prop->isSuper();
poe.emplace(this,
isCompound ? PropOpEmitter::Kind::CompoundAssignment
: isInit ? PropOpEmitter::Kind::PropInit
: PropOpEmitter::Kind::SimpleAssignment,
isSuper ? PropOpEmitter::ObjKind::Super
: PropOpEmitter::ObjKind::Other);
if (!poe->prepareForObj()) {
return false;
}
anonFunctionName = prop->name();
if (isSuper) {
UnaryNode* base = &prop->expression().as<UnaryNode>();
if (!emitGetThisForSuperBase(base)) {
// [stack] THIS SUPERBASE
return false;
}
// SUPERBASE is pushed onto THIS later in poe->emitGet below.
offset += 2;
} else {
if (!emitTree(&prop->expression())) {
// [stack] OBJ
return false;
}
offset += 1;
}
break;
}
case ParseNodeKind::ElemExpr: {
PropertyByValue* elem = &lhs->as<PropertyByValue>();
bool isSuper = elem->isSuper();
MOZ_ASSERT(!elem->key().isKind(ParseNodeKind::PrivateName));
eoe.emplace(this,
isCompound ? ElemOpEmitter::Kind::CompoundAssignment
: isInit ? ElemOpEmitter::Kind::PropInit
: ElemOpEmitter::Kind::SimpleAssignment,
isSuper ? ElemOpEmitter::ObjKind::Super
: ElemOpEmitter::ObjKind::Other);
if (!emitElemObjAndKey(elem, *eoe)) {
// [stack] # if Super
// [stack] THIS KEY
// [stack] # otherwise
// [stack] OBJ KEY
return false;
}
if (isSuper) {
// SUPERBASE is pushed onto KEY in eoe->emitGet below.
offset += 3;
} else {
offset += 2;
}
break;
}
case ParseNodeKind::PrivateMemberExpr: {
PrivateMemberAccess* privateExpr = &lhs->as<PrivateMemberAccess>();
xoe.emplace(this,
isCompound ? PrivateOpEmitter::Kind::CompoundAssignment
: isInit ? PrivateOpEmitter::Kind::PropInit
: PrivateOpEmitter::Kind::SimpleAssignment,
privateExpr->privateName().name());
if (!emitTree(&privateExpr->expression())) {
// [stack] OBJ
return false;
}
if (!xoe->emitReference()) {
// [stack] OBJ KEY
return false;
}
offset += xoe->numReferenceSlots();
break;
}
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
break;
case ParseNodeKind::CallExpr:
if (!emitTree(lhs)) {
return false;
}
// Assignment to function calls is forbidden, but we have to make the
// call first. Now we can throw.
if (!emit2(JSOp::ThrowMsg, uint8_t(ThrowMsgKind::AssignToCall))) {
return false;
}
// Rebalance the stack to placate stack-depth assertions.
if (!emit1(JSOp::Pop)) {
return false;
}
break;
default:
MOZ_ASSERT(0);
}
if (isCompound) {
MOZ_ASSERT(rhs);
switch (lhs->getKind()) {
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &lhs->as<PropertyAccess>();
if (!poe->emitGet(prop->key().atom())) {
// [stack] # if Super
// [stack] THIS SUPERBASE PROP
// [stack] # otherwise
// [stack] OBJ PROP
return false;
}
break;
}
case ParseNodeKind::ElemExpr: {
if (!eoe->emitGet()) {
// [stack] KEY THIS OBJ ELEM
return false;
}
break;
}
case ParseNodeKind::PrivateMemberExpr: {
if (!xoe->emitGet()) {
// [stack] OBJ KEY VALUE
return false;
}
break;
}
case ParseNodeKind::CallExpr:
// We just emitted a JSOp::ThrowMsg and popped the call's return
// value. Push a dummy value so the stack depth stays correct.
if (!emit1(JSOp::Null)) {
// [stack] NULL
return false;
}
break;
default:;
}
}
switch (lhs->getKind()) {
case ParseNodeKind::Name:
if (!noe->prepareForRhs()) {
// [stack] ENV? VAL?
return false;
}
offset += noe->emittedBindOp();
break;
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr:
if (!poe->prepareForRhs()) {
// [stack] # if Simple Assignment with Super
// [stack] THIS SUPERBASE
// [stack] # if Simple Assignment with other
// [stack] OBJ
// [stack] # if Compound Assignment with Super
// [stack] THIS SUPERBASE PROP
// [stack] # if Compound Assignment with other
// [stack] OBJ PROP
return false;
}
break;
case ParseNodeKind::ElemExpr:
if (!eoe->prepareForRhs()) {
// [stack] # if Simple Assignment with Super
// [stack] THIS KEY SUPERBASE
// [stack] # if Simple Assignment with other
// [stack] OBJ KEY
// [stack] # if Compound Assignment with Super
// [stack] THIS KEY SUPERBASE ELEM
// [stack] # if Compound Assignment with other
// [stack] OBJ KEY ELEM
return false;
}
break;
case ParseNodeKind::PrivateMemberExpr:
// no stack adjustment needed
break;
default:
break;
}
if (rhs) {
if (!emitAssignmentRhs(rhs, anonFunctionName)) {
// [stack] ... VAL? RHS
return false;
}
} else {
// Assumption: Things with pre-emitted RHS values never need to be named.
if (!emitAssignmentRhs(offset)) {
// [stack] ... VAL? RHS
return false;
}
}
/* If += etc., emit the binary operator with a hint for the decompiler. */
if (isCompound) {
if (!emit1(compoundOp)) {
// [stack] ... VAL
return false;
}
if (!emit1(JSOp::NopIsAssignOp)) {
// [stack] ... VAL
return false;
}
}
/* Finally, emit the specialized assignment bytecode. */
switch (lhs->getKind()) {
case ParseNodeKind::Name: {
if (!noe->emitAssignment()) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &lhs->as<PropertyAccess>();
if (!poe->emitAssignment(prop->key().atom())) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::CallExpr:
// We threw above, so nothing to do here.
break;
case ParseNodeKind::ElemExpr: {
if (!eoe->emitAssignment()) {
// [stack] VAL
return false;
}
break;
}
case ParseNodeKind::PrivateMemberExpr:
if (!xoe->emitAssignment()) {
// [stack] VAL
return false;
}
break;
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
if (!emitDestructuringOps(&lhs->as<ListNode>(),
DestructuringFlavor::Assignment)) {
return false;
}
break;
default:
MOZ_ASSERT(0);
}
return true;
}
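// Emit a logical assignment such as `a ??= b`, `a ||= b`, or `a &&= b`. The
// left-hand side is evaluated once and tested with JSOp::Coalesce/Or/And; the
// right-hand side is evaluated and assigned only when the short-circuit
// condition does not hold, otherwise the original LHS value is the result.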
bool BytecodeEmitter::emitShortCircuitAssignment(AssignmentNode* node) {
TDZCheckCache tdzCache(this);
JSOp op;
switch (node->getKind()) {
case ParseNodeKind::CoalesceAssignExpr:
op = JSOp::Coalesce;
break;
case ParseNodeKind::OrAssignExpr:
op = JSOp::Or;
break;
case ParseNodeKind::AndAssignExpr:
op = JSOp::And;
break;
default:
MOZ_CRASH("Unexpected ParseNodeKind");
}
ParseNode* lhs = node->left();
ParseNode* rhs = node->right();
// |name| is used within NameOpEmitter, so its lifetime must surpass |noe|.
TaggedParserAtomIndex name;
// Select the appropriate emitter based on the left-hand side.
Maybe<NameOpEmitter> noe;
Maybe<PropOpEmitter> poe;
Maybe<ElemOpEmitter> eoe;
Maybe<PrivateOpEmitter> xoe;
int32_t depth = bytecodeSection().stackDepth();
// Number of values pushed onto the stack in addition to the lhs value.
int32_t numPushed;
// Evaluate the left-hand side expression and compute any stack values needed
// for the assignment.
switch (lhs->getKind()) {
case ParseNodeKind::Name: {
name = lhs->as<NameNode>().name();
noe.emplace(this, name, NameOpEmitter::Kind::CompoundAssignment);
if (!noe->prepareForRhs()) {
// [stack] ENV? LHS
return false;
}
numPushed = noe->emittedBindOp();
break;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &lhs->as<PropertyAccess>();
bool isSuper = prop->isSuper();
poe.emplace(this, PropOpEmitter::Kind::CompoundAssignment,
isSuper ? PropOpEmitter::ObjKind::Super
: PropOpEmitter::ObjKind::Other);
if (!poe->prepareForObj()) {
return false;
}
if (isSuper) {
UnaryNode* base = &prop->expression().as<UnaryNode>();
if (!emitGetThisForSuperBase(base)) {
// [stack] THIS SUPERBASE
return false;
}
} else {
if (!emitTree(&prop->expression())) {
// [stack] OBJ
return false;
}
}
if (!poe->emitGet(prop->key().atom())) {
// [stack] # if Super
// [stack] THIS SUPERBASE LHS
// [stack] # otherwise
// [stack] OBJ LHS
return false;
}
if (!poe->prepareForRhs()) {
// [stack] # if Super
// [stack] THIS SUPERBASE LHS
// [stack] # otherwise
// [stack] OBJ LHS
return false;
}
numPushed = 1 + isSuper;
break;
}
case ParseNodeKind::ElemExpr: {
PropertyByValue* elem = &lhs->as<PropertyByValue>();
bool isSuper = elem->isSuper();
MOZ_ASSERT(!elem->key().isKind(ParseNodeKind::PrivateName));
eoe.emplace(this, ElemOpEmitter::Kind::CompoundAssignment,
isSuper ? ElemOpEmitter::ObjKind::Super
: ElemOpEmitter::ObjKind::Other);
if (!emitElemObjAndKey(elem, *eoe)) {
// [stack] # if Super
// [stack] THIS KEY
// [stack] # otherwise
// [stack] OBJ KEY
return false;
}
if (!eoe->emitGet()) {
// [stack] # if Super
// [stack] THIS KEY SUPERBASE LHS
// [stack] # otherwise
// [stack] OBJ KEY LHS
return false;
}
if (!eoe->prepareForRhs()) {
// [stack] # if Super
// [stack] THIS KEY SUPERBASE LHS
// [stack] # otherwise
// [stack] OBJ KEY LHS
return false;
}
numPushed = 2 + isSuper;
break;
}
case ParseNodeKind::PrivateMemberExpr: {
PrivateMemberAccess* privateExpr = &lhs->as<PrivateMemberAccess>();
xoe.emplace(this, PrivateOpEmitter::Kind::CompoundAssignment,
privateExpr->privateName().name());
if (!emitTree(&privateExpr->expression())) {
// [stack] OBJ
return false;
}
if (!xoe->emitReference()) {
// [stack] OBJ NAME
return false;
}
if (!xoe->emitGet()) {
// [stack] OBJ NAME LHS
return false;
}
numPushed = xoe->numReferenceSlots();
break;
}
default:
MOZ_CRASH();
}
MOZ_ASSERT(bytecodeSection().stackDepth() == depth + numPushed + 1);
// Test for the short-circuit condition.
JumpList jump;
if (!emitJump(op, &jump)) {
// [stack] ... LHS
return false;
}
// The short-circuit condition wasn't fulfilled, pop the left-hand side value
// which was kept on the stack.
if (!emit1(JSOp::Pop)) {
// [stack] ...
return false;
}
if (!emitAssignmentRhs(rhs, name)) {
// [stack] ... RHS
return false;
}
// Perform the actual assignment.
switch (lhs->getKind()) {
case ParseNodeKind::Name: {
if (!noe->emitAssignment()) {
// [stack] RHS
return false;
}
break;
}
case ParseNodeKind::ArgumentsLength:
case ParseNodeKind::DotExpr: {
PropertyAccess* prop = &lhs->as<PropertyAccess>();
if (!poe->emitAssignment(prop->key().atom())) {
// [stack] RHS
return false;
}
break;
}
case ParseNodeKind::ElemExpr: {
if (!eoe->emitAssignment()) {
// [stack] RHS
return false;
}
break;
}
case ParseNodeKind::PrivateMemberExpr:
if (!xoe->emitAssignment()) {
// [stack] RHS
return false;
}
break;
default:
MOZ_CRASH();
}
MOZ_ASSERT(bytecodeSection().stackDepth() == depth + 1);
// Join with the short-circuit jump and pop anything left on the stack.
if (numPushed > 0) {
JumpList jumpAroundPop;
if (!emitJump(JSOp::Goto, &jumpAroundPop)) {
// [stack] RHS
return false;
}
if (!emitJumpTargetAndPatch(jump)) {
// [stack] ... LHS
return false;
}
// Reconstruct the stack depth after the jump.
bytecodeSection().setStackDepth(depth + 1 + numPushed);
// Move the left-hand side value to the bottom and pop the rest.
if (!emitUnpickN(numPushed)) {
// [stack] LHS ...
return false;
}
if (!emitPopN(numPushed)) {
// [stack] LHS
return false;
}
if (!emitJumpTargetAndPatch(jumpAroundPop)) {
// [stack] LHS | RHS
return false;
}
} else {
if (!emitJumpTargetAndPatch(jump)) {
// [stack] LHS | RHS
return false;
}
}
MOZ_ASSERT(bytecodeSection().stackDepth() == depth + 1);
return true;
}
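// Write the elements of one of the two template call-site arrays (the cooked
// strings or the raw strings) into |writer|, starting at |head|. |count| is
// only used to assert that the expected number of elements was emitted.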
bool BytecodeEmitter::emitCallSiteObjectArray(ObjLiteralWriter& writer,
ListNode* cookedOrRaw,
ParseNode* head, uint32_t count) {
DebugOnly<size_t> idx = 0;
for (ParseNode* pn : cookedOrRaw->contentsFrom(head)) {
MOZ_ASSERT(pn->isKind(ParseNodeKind::TemplateStringExpr) ||
pn->isKind(ParseNodeKind::RawUndefinedExpr));
if (!emitObjLiteralValue(writer, pn)) {
return false;
}
idx++;
}
MOZ_ASSERT(idx == count);
return true;
}
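// Emit the call-site object for a tagged template call (e.g. tag`a${x}b`).
// The cooked strings and the raw strings are written into a single object
// literal and referenced by JSOp::CallSiteObj.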
bool BytecodeEmitter::emitCallSiteObject(CallSiteNode* callSiteObj) {
constexpr JSOp op = JSOp::CallSiteObj;
// The first element of a call-site node is the raw-values list. Skip over it.
ListNode* raw = callSiteObj->rawNodes();
MOZ_ASSERT(raw->isKind(ParseNodeKind::ArrayExpr));
ParseNode* head = callSiteObj->head()->pn_next;
uint32_t count = callSiteObj->count() - 1;
MOZ_ASSERT(count == raw->count());
ObjLiteralWriter writer;
writer.beginCallSiteObj(op);
writer.beginDenseArrayElements();
// Write elements of the two arrays: the 'cooked' values followed by the
// 'raw' values.
MOZ_RELEASE_ASSERT(count < UINT32_MAX / 2,
"Number of elements for both arrays must fit in uint32_t");
if (!emitCallSiteObjectArray(writer, callSiteObj, head, count)) {
return false;
}
if (!emitCallSiteObjectArray(writer, raw, raw->head(), count)) {
return false;
}
GCThingIndex cookedIndex;
if (!addObjLiteralData(writer, &cookedIndex)) {
return false;
}
MOZ_ASSERT(sc->hasCallSiteObj());
return emitInternedObjectOp(cookedIndex, op);
}
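// Emit a catch clause. On entry the exception value is on the stack: it is
// either discarded (catch without a binding), bound to a simple name, or
// destructured into an array/object pattern, before the catch body runs.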
bool BytecodeEmitter::emitCatch(BinaryNode* catchClause) {
// We must be nested under a try-finally statement.
MOZ_ASSERT(innermostNestableControl->is<TryFinallyControl>());
ParseNode* param = catchClause->left();
if (!param) {
// Catch parameter was omitted; just discard the exception.
if (!emit1(JSOp::Pop)) {
return false;
}
} else {
switch (param->getKind()) {
case ParseNodeKind::ArrayExpr:
case ParseNodeKind::ObjectExpr:
if (!emitDestructuringOps(&param->as<ListNode>(),
DestructuringFlavor::Declaration)) {
return false;
}
if (!emit1(JSOp::Pop)) {
return false;
}
break;
case ParseNodeKind::Name:
if (!emitLexicalInitialization(&param->as<NameNode>())) {
return false;
}
if (!emit1(JSOp::Pop)) {
return false;
}
break;
default:
MOZ_ASSERT(0);
}
}
/* Emit the catch body. */
return emitTree(catchClause->right());
}
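// Emit a `try` statement. The TryEmitter kind is chosen from which of the
// catch clause and finally block are present; at least one of them must be.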
// Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
// comment on EmitSwitch.
MOZ_NEVER_INLINE bool BytecodeEmitter::emitTry(TryNode* tryNode) {
LexicalScopeNode* catchScope = tryNode->catchScope();
ParseNode* finallyNode = tryNode->finallyBlock();
TryEmitter::Kind kind;
if (catchScope) {
if (finallyNode) {
kind = TryEmitter::Kind::TryCatchFinally;
} else {
kind = TryEmitter::Kind::TryCatch;
}
} else {
MOZ_ASSERT(finallyNode);
kind = TryEmitter::Kind::TryFinally;
}
TryEmitter tryCatch(this, kind, TryEmitter::ControlKind::Syntactic);
if (!tryCatch.emitTry()) {
return false;
}
if (!emitTree(tryNode->body())) {
return false;
}
// If this try has a catch block, emit it.
if (catchScope) {
// The emitted code for a catch block looks like:
//
// [pushlexicalenv] only if any local aliased
// exception
// setlocal 0; pop assign or possibly destructure exception
// < catch block contents >
// debugleaveblock
// [poplexicalenv] only if any local aliased
// if there is a finally block:
// goto <finally>
// [jump target for returning from finally]
// goto <after finally>
if (!tryCatch.emitCatch()) {
return false;
}
// Emit the lexical scope and catch body.
if (!emitTree(catchScope)) {
return false;
}
}
// Emit the finally handler, if there is one.
if (finallyNode) {
if (!tryCatch.emitFinally(Some(finallyNode->pn_pos.begin))) {
return false;
}
if (!emitTree(finallyNode)) {
return false;
}
}
if (!tryCatch.emitEnd()) {
return false;
}
return true;
}
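// Emit a non-exceptional entry into a finally block: push the continuation
// index |idx|, a null exception stack, and a false |throwing| flag, then jump
// to the finally block itself.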
[[nodiscard]] bool BytecodeEmitter::emitJumpToFinally(JumpList* jump,
uint32_t idx) {
// Push the continuation index.
if (!emitNumberOp(idx)) {
return false;
}
// Push |exception_stack|.
if (!emit1(JSOp::Null)) {
return false;
}
// Push |throwing|.
if (!emit1(JSOp::False)) {
return false;
}
// Jump to the finally block.
if (!emitJumpNoFallthrough(JSOp::Goto, jump)) {
return false;
}
return true;
}
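// Emit an if/else-if/else chain. Instead of recursing on each `else if`, the
// chain is flattened with the |if_again| loop below. A negated condition
// (`if (!x)`) is unwrapped and handled via ConditionKind::Negative.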
bool BytecodeEmitter::emitIf(TernaryNode* ifNode) {
IfEmitter ifThenElse(this);
if (!ifThenElse.emitIf(Some(ifNode->kid1()->pn_pos.begin))) {
return false;
}
if_again:
ParseNode* testNode = ifNode->kid1();
auto conditionKind = IfEmitter::ConditionKind::Positive;
if (testNode->isKind(ParseNodeKind::NotExpr)) {
testNode = testNode->as<UnaryNode>().kid();
conditionKind = IfEmitter::ConditionKind::Negative;
}
if (!markStepBreakpoint()) {
return false;
}
// Emit code for the condition before pushing stmtInfo.
// NOTE: NotExpr of testNode may be unwrapped, and in that case the negation
// is handled by conditionKind.
if (!emitTree(testNode)) {
return false;
}
ParseNode* elseNode = ifNode->kid3();
if (elseNode) {
if (!ifThenElse.emitThenElse(conditionKind)) {
return false;
}
} else {
if (!ifThenElse.emitThen(conditionKind)) {
return false;
}
}
/* Emit code for the then part. */
if (!emitTree(ifNode->kid2())) {
return false;
}
if (elseNode) {
if (elseNode->isKind(ParseNodeKind::IfStmt)) {
ifNode = &elseNode->as<TernaryNode>();
if (!ifThenElse.emitElseIf(Some(ifNode->kid1()->pn_pos.begin))) {
return false;
}
goto if_again;
}
if (!ifThenElse.emitElse()) {
return false;
}
/* Emit code for the else part. */
if (!emitTree(elseNode)) {
return false;
}
}
if (!ifThenElse.emitEnd()) {
return false;
}
return true;
}
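// Emit the hoisted function declarations of a statement list as a separate
// pass, before the rest of the list. In sloppy code a hoisted function
// declaration may be wrapped in one or more labels (`l: function f() {}`), so
// labels are unwrapped when looking for it. This may be called more than once
// for sloppy-eval scopes; the work is only done the first time.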
bool BytecodeEmitter::emitHoistedFunctionsInList(ListNode* stmtList) {
MOZ_ASSERT(stmtList->hasTopLevelFunctionDeclarations());
// We can call this multiple times for sloppy eval scopes.
if (stmtList->emittedTopLevelFunctionDeclarations()) {
return true;
}
stmtList->setEmittedTopLevelFunctionDeclarations();
for (ParseNode* stmt : stmtList->contents()) {
ParseNode* maybeFun = stmt;
if (!sc->strict()) {
while (maybeFun->isKind(ParseNodeKind::LabelStmt)) {
maybeFun = maybeFun->as<LabeledStatement>().statement();
}
}
if (maybeFun->is<FunctionNode>() &&
maybeFun->as<FunctionNode>().functionIsHoisted()) {
if (!emitTree(maybeFun)) {
return false;
}
}
}
return true;
}
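// Emit the body of a lexical scope. Hoisted function declarations in the
// body, if any, are emitted in a separate pass first.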
bool BytecodeEmitter::emitLexicalScopeBody(
ParseNode* body, EmitLineNumberNote emitLineNote /* = EMIT_LINENOTE */) {
if (body->isKind(ParseNodeKind::StatementList) &&
body->as<ListNode>().hasTopLevelFunctionDeclarations()) {
// This block contains function statements whose definitions are
// hoisted to the top of the block. Emit these as a separate pass
// before the rest of the block.
if (!emitHoistedFunctionsInList(&body->as<ListNode>())) {
return false;
}
}
// Line notes were updated by emitLexicalScope or emitScript.
return emitTree(body, ValueUsage::WantValue, emitLineNote);
}
// Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
// the comment on emitSwitch.
MOZ_NEVER_INLINE bool BytecodeEmitter::emitLexicalScope(
LexicalScopeNode* lexicalScope) {
LexicalScopeEmitter lse(this);
ParseNode* body = lexicalScope->scopeBody();
if (lexicalScope->isEmptyScope()) {
if (!lse.emitEmptyScope()) {
return false;
}
if (!emitLexicalScopeBody(body)) {
return false;
}
if (!lse.emitEnd()) {
return false;
}
return true;
}
// We are about to emit some bytecode for what the spec calls "declaration
// instantiation". Assign these instructions to the opening `{` of the
// block. (Using the location of each declaration we're instantiating is
// too weird when stepping in the debugger.)
if (!ParseNodeRequiresSpecialLineNumberNotes(body)) {
if (!updateSourceCoordNotes(lexicalScope->pn_pos.begin)) {
return false;
}
}
ScopeKind kind;
if (body->isKind(ParseNodeKind::Catch)) {
BinaryNode* catchNode = &body->as<BinaryNode>();
kind =
(!catchNode->left() || catchNode->left()->isKind(ParseNodeKind::Name))
? ScopeKind::SimpleCatch
: ScopeKind::Catch;
} else {
kind = lexicalScope->kind();
}
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
BlockKind blockKind = BlockKind::Other;
if (body->isKind(ParseNodeKind::ForStmt) &&
body->as<ForNode>().head()->isKind(ParseNodeKind::ForOf)) {
MOZ_ASSERT(kind == ScopeKind::Lexical);
blockKind = BlockKind::ForOf;
}
#endif
if (!lse.emitScope(kind, lexicalScope->scopeBindings()
#ifdef ENABLE_EXPLICIT_RESOURCE_MANAGEMENT
,
blockKind
#endif
)) {
return false;
}
if (body->isKind(ParseNodeKind::ForStmt)) {
// for loops need to emit JSOp::FreshenLexicalEnv/JSOp::RecreateLexicalEnv
// if there are lexical declarations in the head. Signal this by passing a
// non-nullptr lexical scope.
if (!emitFor(&body->as<ForNode>(), &lse.emitterScope())) {
return false;
}
} else {
if (!emitLexicalScopeBody(body, SUPPRESS_LINENOTE)) {
return false;
}
}
if (!lse.emitEnd()) {
return false;
}
return true;
}
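// Emit a |with| statement: evaluate the object expression, enter a |with|
// environment scope, emit the body, then leave the scope.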
bool BytecodeEmitter::emitWith(BinaryNode* withNode) {
// Ensure that the column of the 'with' is set properly.
if (!updateSourceCoordNotes(withNode->left()->pn_pos.begin)) {
return false;
}
if (!markStepBreakpoint()) {
return false;
}
if (!emitTree(withNode->left())) {
return false;
}
EmitterScope emitterScope(this);
if (!emitterScope.enterWith(this)) {
return false;
}
if (!emitTree(withNode->right())) {
return false;
}
return emitterScope.leave(this);
}
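// Emit a call to the self-hosted CopyDataProperties intrinsic (filtered by a
// set of excluded keys) or the CopyDataPropertiesUnfiltered intrinsic. The
// TARGET, SOURCE, and optional SET operands are consumed from the stack and
// nothing is pushed in their place.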
bool BytecodeEmitter::emitCopyDataProperties(CopyOption option) {
DebugOnly<int32_t> depth = bytecodeSection().stackDepth();
uint32_t argc;
if (option == CopyOption::Filtered) {
MOZ_ASSERT(depth > 2);
// [stack] TARGET SOURCE SET
argc = 3;
if (!emitAtomOp(JSOp::GetIntrinsic,
TaggedParserAtomIndex::WellKnown::CopyDataProperties())) {
// [stack] TARGET SOURCE SET COPYDATAPROPERTIES
return false;
}
} else {
MOZ_ASSERT(depth > 1);
// [stack] TARGET SOURCE
argc = 2;
if (!emitAtomOp(
JSOp::GetIntrinsic,
TaggedParserAtomIndex::WellKnown::CopyDataPropertiesUnfiltered())) {
// [stack] TARGET SOURCE COPYDATAPROPERTIES
return false;
}
}
if (!emit1(JSOp::Undefined)) {
// [stack] TARGET SOURCE SET? COPYDATAPROPERTIES
// UNDEFINED
return false;
}
if (!emit2(JSOp::Pick, argc + 1)) {
// [stack] SOURCE SET? COPYDATAPROPERTIES UNDEFINED
// TARGET
return false;
}
if (!emit2(JSOp::Pick, argc + 1)) {
// [stack] SET? COPYDATAPROPERTIES UNDEFINED TARGET
// SOURCE
return false;
}
if (option == CopyOption::Filtered) {
if (!emit2(JSOp::Pick, argc + 1)) {
// [stack] COPYDATAPROPERTIES UNDEFINED TARGET SOURCE SET
return false;
}
}
// Callee is always a self-hosted intrinsic, and cannot be a content function.
if (!emitCall(JSOp::CallIgnoresRv, argc)) {
// [stack] IGNORED
return false;
}
if (!emit1(JSOp::Pop)) {
// [stack]
return false;
}
MOZ_ASSERT(depth - int(argc) == bytecodeSection().stackDepth());
return true;
}
bool BytecodeEmitter::emitBigIntOp(BigIntLiteral* bigint) {