/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
*
* Copyright 2016 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef wasm_op_iter_h
#define wasm_op_iter_h
#include "mozilla/CompactPair.h"
#include "mozilla/Poison.h"
#include <type_traits>
#include "js/Printf.h"
#include "wasm/WasmBinary.h"
#include "wasm/WasmBuiltinModule.h"
#include "wasm/WasmMetadata.h"
#include "wasm/WasmUtility.h"
namespace js {
namespace wasm {
// The kind of a control-flow stack item.
enum class LabelKind : uint8_t {
Body,
Block,
Loop,
Then,
Else,
Try,
Catch,
CatchAll,
TryTable,
};
// The type of values on the operand stack during validation. This is either a
// ValType or the special type "Bottom".
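//
// "Bottom" arises only in unreachable code (for example after `unreachable`
// or an unconditional branch): popping from a block whose remaining stack is
// polymorphic yields StackType::bottom(), which validates against any
// expected type.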
class StackType {
PackedTypeCode tc_;
explicit StackType(PackedTypeCode tc) : tc_(tc) {}
public:
StackType() : tc_(PackedTypeCode::invalid()) {}
explicit StackType(const ValType& t) : tc_(t.packed()) {
MOZ_ASSERT(tc_.isValid());
MOZ_ASSERT(!isStackBottom());
}
static StackType bottom() {
return StackType(PackedTypeCode::pack(TypeCode::Limit));
}
bool isStackBottom() const {
MOZ_ASSERT(tc_.isValid());
return tc_.typeCode() == TypeCode::Limit;
}
// Returns whether this input is nullable when interpreted as an operand.
// When the type is bottom for unreachable code, this returns false as that
// is the most permissive option.
bool isNullableAsOperand() const {
MOZ_ASSERT(tc_.isValid());
return isStackBottom() ? false : tc_.isNullable();
}
ValType valType() const {
MOZ_ASSERT(tc_.isValid());
MOZ_ASSERT(!isStackBottom());
return ValType(tc_);
}
ValType valTypeOr(ValType ifBottom) const {
MOZ_ASSERT(tc_.isValid());
if (isStackBottom()) {
return ifBottom;
}
return valType();
}
ValType asNonNullable() const {
MOZ_ASSERT(tc_.isValid());
MOZ_ASSERT(!isStackBottom());
return ValType(tc_.withIsNullable(false));
}
bool isValidForUntypedSelect() const {
MOZ_ASSERT(tc_.isValid());
if (isStackBottom()) {
return true;
}
switch (valType().kind()) {
case ValType::I32:
case ValType::F32:
case ValType::I64:
case ValType::F64:
#ifdef ENABLE_WASM_SIMD
case ValType::V128:
#endif
return true;
default:
return false;
}
}
bool operator==(const StackType& that) const {
MOZ_ASSERT(tc_.isValid() && that.tc_.isValid());
return tc_ == that.tc_;
}
bool operator!=(const StackType& that) const {
MOZ_ASSERT(tc_.isValid() && that.tc_.isValid());
return tc_ != that.tc_;
}
};
#ifdef DEBUG
// Families of opcodes that share a signature and validation logic.
enum class OpKind {
Block,
Loop,
Unreachable,
Drop,
I32,
I64,
F32,
F64,
V128,
Br,
BrIf,
BrTable,
Nop,
Unary,
Binary,
Ternary,
Comparison,
Conversion,
Load,
Store,
TeeStore,
MemorySize,
MemoryGrow,
Select,
GetLocal,
SetLocal,
TeeLocal,
GetGlobal,
SetGlobal,
TeeGlobal,
Call,
ReturnCall,
CallIndirect,
ReturnCallIndirect,
# ifdef ENABLE_WASM_GC
CallRef,
ReturnCallRef,
# endif
OldCallDirect,
OldCallIndirect,
Return,
If,
Else,
End,
Wait,
Wake,
Fence,
AtomicLoad,
AtomicStore,
AtomicBinOp,
AtomicCompareExchange,
MemOrTableCopy,
DataOrElemDrop,
MemFill,
MemOrTableInit,
TableFill,
MemDiscard,
TableGet,
TableGrow,
TableSet,
TableSize,
RefNull,
RefFunc,
RefAsNonNull,
BrOnNull,
BrOnNonNull,
StructNew,
StructNewDefault,
StructGet,
StructSet,
ArrayNew,
ArrayNewFixed,
ArrayNewDefault,
ArrayNewData,
ArrayNewElem,
ArrayInitData,
ArrayInitElem,
ArrayGet,
ArraySet,
ArrayLen,
ArrayCopy,
ArrayFill,
RefTest,
RefCast,
BrOnCast,
RefConversion,
# ifdef ENABLE_WASM_SIMD
ExtractLane,
ReplaceLane,
LoadLane,
StoreLane,
VectorShift,
VectorShuffle,
# endif
Catch,
CatchAll,
Delegate,
Throw,
ThrowRef,
Rethrow,
Try,
TryTable,
CallBuiltinModuleFunc,
StackSwitch,
};
// Return the OpKind for a given Op. This is used for sanity-checking that
// API users use the correct read function for a given Op.
OpKind Classify(OpBytes op);
#endif
// Common fields for linear memory access.
template <typename Value>
struct LinearMemoryAddress {
Value base;
uint32_t memoryIndex;
uint64_t offset;
uint32_t align;
LinearMemoryAddress() : memoryIndex(0), offset(0), align(0) {}
LinearMemoryAddress(Value base, uint32_t memoryIndex, uint64_t offset,
uint32_t align)
: base(base), memoryIndex(memoryIndex), offset(offset), align(align) {}
};
template <typename ControlItem>
class ControlStackEntry {
// Use a pair to optimize away empty ControlItem.
mozilla::CompactPair<BlockType, ControlItem> typeAndItem_;
// The "base" of a control stack entry is valueStack_.length() minus
// type().params().length(), i.e., the size of the value stack "below"
// this block.
uint32_t valueStackBase_;
bool polymorphicBase_;
LabelKind kind_;
public:
ControlStackEntry(LabelKind kind, BlockType type, uint32_t valueStackBase)
: typeAndItem_(type, ControlItem()),
valueStackBase_(valueStackBase),
polymorphicBase_(false),
kind_(kind) {
MOZ_ASSERT(type != BlockType());
}
LabelKind kind() const { return kind_; }
BlockType type() const { return typeAndItem_.first(); }
ResultType resultType() const { return type().results(); }
ResultType branchTargetType() const {
return kind_ == LabelKind::Loop ? type().params() : type().results();
}
uint32_t valueStackBase() const { return valueStackBase_; }
ControlItem& controlItem() { return typeAndItem_.second(); }
void setPolymorphicBase() { polymorphicBase_ = true; }
bool polymorphicBase() const { return polymorphicBase_; }
void switchToElse() {
MOZ_ASSERT(kind() == LabelKind::Then);
kind_ = LabelKind::Else;
polymorphicBase_ = false;
}
void switchToCatch() {
MOZ_ASSERT(kind() == LabelKind::Try || kind() == LabelKind::Catch);
kind_ = LabelKind::Catch;
polymorphicBase_ = false;
}
void switchToCatchAll() {
MOZ_ASSERT(kind() == LabelKind::Try || kind() == LabelKind::Catch);
kind_ = LabelKind::CatchAll;
polymorphicBase_ = false;
}
};
// Track the state of non-defaultable locals. Every time such a local is
// initialized, the stack records at which depth and which local was set.
// On a block end, the "unset" state is rolled back to how it was before
// the block started.
//
// It is very likely only a few functions will have non-defaultable locals and
// very few locals will be non-defaultable. This class is optimized to be fast
// for this common case.
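//
// Illustrative sketch (hypothetical module text, not produced by this class):
// given a non-defaultable local such as `(local $r (ref $t))`, a
// `local.set $r` inside a block clears the "unset" bit for $r and pushes a
// SetLocalEntry recording the current control depth. When that block ends,
// resetToBlock() pops the entry and marks $r unset again, so a later
// `local.get $r` outside the block fails validation.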
class UnsetLocalsState {
struct SetLocalEntry {
uint32_t depth;
uint32_t localUnsetIndex;
SetLocalEntry(uint32_t depth_, uint32_t localUnsetIndex_)
: depth(depth_), localUnsetIndex(localUnsetIndex_) {}
};
using SetLocalsStack = Vector<SetLocalEntry, 16, SystemAllocPolicy>;
using UnsetLocals = Vector<uint32_t, 16, SystemAllocPolicy>;
static constexpr size_t WordSize = 4;
static constexpr size_t WordBits = WordSize * 8;
// Bit array of "unset" function locals. Stores only unset states of the
// locals that are declared after the first non-defaultable local.
UnsetLocals unsetLocals_;
// Stack of "set" operations. Contains pairs where the first field is a depth,
// and the second field is a local id (offset by firstNonDefaultLocal_).
SetLocalsStack setLocalsStack_;
uint32_t firstNonDefaultLocal_;
public:
UnsetLocalsState() : firstNonDefaultLocal_(UINT32_MAX) {}
[[nodiscard]] bool init(const ValTypeVector& locals, size_t numParams);
inline bool isUnset(uint32_t id) const {
if (MOZ_LIKELY(id < firstNonDefaultLocal_)) {
return false;
}
uint32_t localUnsetIndex = id - firstNonDefaultLocal_;
return unsetLocals_[localUnsetIndex / WordBits] &
(1 << (localUnsetIndex % WordBits));
}
inline void set(uint32_t id, uint32_t depth) {
MOZ_ASSERT(isUnset(id));
MOZ_ASSERT(id >= firstNonDefaultLocal_ &&
(id - firstNonDefaultLocal_) / WordBits < unsetLocals_.length());
uint32_t localUnsetIndex = id - firstNonDefaultLocal_;
unsetLocals_[localUnsetIndex / WordBits] ^= 1
<< (localUnsetIndex % WordBits);
// The setLocalsStack_ is reserved upfront in the UnsetLocalsState::init.
// A SetLocalEntry will be pushed only once per local.
setLocalsStack_.infallibleEmplaceBack(depth, localUnsetIndex);
}
inline void resetToBlock(uint32_t controlDepth) {
while (MOZ_UNLIKELY(setLocalsStack_.length() > 0) &&
setLocalsStack_.back().depth > controlDepth) {
uint32_t localUnsetIndex = setLocalsStack_.back().localUnsetIndex;
MOZ_ASSERT(!(unsetLocals_[localUnsetIndex / WordBits] &
(1 << (localUnsetIndex % WordBits))));
unsetLocals_[localUnsetIndex / WordBits] |=
1 << (localUnsetIndex % WordBits);
setLocalsStack_.popBack();
}
}
bool empty() const { return setLocalsStack_.empty(); }
};
template <typename Value>
class TypeAndValueT {
// Use a Pair to optimize away empty Value.
mozilla::CompactPair<StackType, Value> tv_;
public:
TypeAndValueT() : tv_(StackType::bottom(), Value()) {}
explicit TypeAndValueT(StackType type) : tv_(type, Value()) {}
explicit TypeAndValueT(ValType type) : tv_(StackType(type), Value()) {}
TypeAndValueT(StackType type, Value value) : tv_(type, value) {}
TypeAndValueT(ValType type, Value value) : tv_(StackType(type), value) {}
StackType type() const { return tv_.first(); }
void setType(StackType type) { tv_.first() = type; }
Value value() const { return tv_.second(); }
void setValue(Value value) { tv_.second() = value; }
};
// An iterator over the bytes of a function body. It performs validation
// and unpacks the data into a usable form.
//
// The MOZ_STACK_CLASS attribute here is because of the use of DebugOnly.
// There's otherwise nothing inherent in this class which would require
// it to be used on the stack.
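//
// Typical use is a read loop driven by the consumer. A rough, hypothetical
// sketch (`MyPolicy` and the surrounding plumbing are illustrative only):
//
//   OpIter<MyPolicy> iter(codeMeta, decoder);
//   if (!iter.startFunction(funcIndex, locals)) {
//     return false;
//   }
//   while (!iter.controlStackEmpty()) {
//     OpBytes op;
//     if (!iter.readOp(&op)) {
//       return false;
//     }
//     switch (op.b0) {
//       case uint16_t(Op::I32Add): {
//         MyPolicy::Value lhs, rhs;
//         if (!iter.readBinary(ValType::I32, &lhs, &rhs)) {
//           return false;
//         }
//         break;
//       }
//       // ... one case per opcode, each using the matching read* method,
//       // including readEnd()/popEnd(), which eventually empties the
//       // control stack ...
//     }
//   }
//   return iter.endFunction(bodyEnd);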
template <typename Policy>
class MOZ_STACK_CLASS OpIter : private Policy {
public:
using Value = typename Policy::Value;
using ValueVector = typename Policy::ValueVector;
using TypeAndValue = TypeAndValueT<Value>;
using TypeAndValueStack = Vector<TypeAndValue, 32, SystemAllocPolicy>;
using ControlItem = typename Policy::ControlItem;
using Control = ControlStackEntry<ControlItem>;
using ControlStack = Vector<Control, 16, SystemAllocPolicy>;
enum Kind {
Func,
InitExpr,
};
private:
Kind kind_;
Decoder& d_;
const CodeMetadata& codeMeta_;
TypeAndValueStack valueStack_;
TypeAndValueStack elseParamStack_;
ControlStack controlStack_;
UnsetLocalsState unsetLocals_;
// The exclusive max index of a global that can be accessed by global.get in
// this expression. When GC is enabled, this is any previously defined
// immutable global. Otherwise this is always set to zero, and only imported
// immutable globals are allowed.
uint32_t maxInitializedGlobalsIndexPlus1_;
FeatureUsage featureUsage_;
uint32_t lastBranchHintIndex_;
BranchHintVector* branchHintVector_;
#ifdef DEBUG
OpBytes op_;
#endif
size_t offsetOfLastReadOp_;
[[nodiscard]] bool readFixedU8(uint8_t* out) { return d_.readFixedU8(out); }
[[nodiscard]] bool readFixedU32(uint32_t* out) {
return d_.readFixedU32(out);
}
[[nodiscard]] bool readVarS32(int32_t* out) { return d_.readVarS32(out); }
[[nodiscard]] bool readVarU32(uint32_t* out) { return d_.readVarU32(out); }
[[nodiscard]] bool readVarS64(int64_t* out) { return d_.readVarS64(out); }
[[nodiscard]] bool readVarU64(uint64_t* out) { return d_.readVarU64(out); }
[[nodiscard]] bool readFixedF32(float* out) { return d_.readFixedF32(out); }
[[nodiscard]] bool readFixedF64(double* out) { return d_.readFixedF64(out); }
[[nodiscard]] bool readLinearMemoryAddress(uint32_t byteSize,
LinearMemoryAddress<Value>* addr);
[[nodiscard]] bool readLinearMemoryAddressAligned(
uint32_t byteSize, LinearMemoryAddress<Value>* addr);
[[nodiscard]] bool readBlockType(BlockType* type);
[[nodiscard]] bool readGcTypeIndex(uint32_t* typeIndex);
[[nodiscard]] bool readStructTypeIndex(uint32_t* typeIndex);
[[nodiscard]] bool readArrayTypeIndex(uint32_t* typeIndex);
[[nodiscard]] bool readFuncTypeIndex(uint32_t* typeIndex);
[[nodiscard]] bool readFieldIndex(uint32_t* fieldIndex,
const StructType& structType);
[[nodiscard]] bool popCallArgs(const ValTypeVector& expectedTypes,
ValueVector* values);
[[nodiscard]] bool failEmptyStack();
[[nodiscard]] bool popStackType(StackType* type, Value* value);
[[nodiscard]] bool popWithType(ValType expected, Value* value,
StackType* stackType);
[[nodiscard]] bool popWithType(ValType expected, Value* value);
[[nodiscard]] bool popWithType(ResultType expected, ValueVector* values);
template <typename ValTypeSpanT>
[[nodiscard]] bool popWithTypes(ValTypeSpanT expected, ValueVector* values);
[[nodiscard]] bool popWithRefType(Value* value, StackType* type);
// Check that the top of the value stack has type `expected`, bearing in
// mind that it may be a block type, hence involving multiple values.
//
// If the block's stack contains polymorphic values at its base (because we
// are in unreachable code) then suitable extra values are inserted into the
// value stack, as controlled by `rewriteStackTypes`: if this is true,
// polymorphic values have their types created/updated from `expected`. If
// it is false, such values are left as `StackType::bottom()`.
//
// If `values` is non-null, it is filled in with Value components of the
// relevant stack entries, including those of any new entries created.
[[nodiscard]] bool checkTopTypeMatches(ResultType expected,
ValueVector* values,
bool rewriteStackTypes);
[[nodiscard]] bool pushControl(LabelKind kind, BlockType type);
[[nodiscard]] bool checkStackAtEndOfBlock(ResultType* type,
ValueVector* values);
[[nodiscard]] bool getControl(uint32_t relativeDepth, Control** controlEntry);
[[nodiscard]] bool checkBranchValueAndPush(uint32_t relativeDepth,
ResultType* type,
ValueVector* values,
bool rewriteStackTypes);
[[nodiscard]] bool checkBrTableEntryAndPush(uint32_t* relativeDepth,
ResultType prevBranchType,
ResultType* branchType,
ValueVector* branchValues);
[[nodiscard]] bool push(StackType t) { return valueStack_.emplaceBack(t); }
[[nodiscard]] bool push(ValType t) { return valueStack_.emplaceBack(t); }
[[nodiscard]] bool push(TypeAndValue tv) { return valueStack_.append(tv); }
[[nodiscard]] bool push(ResultType t) {
for (size_t i = 0; i < t.length(); i++) {
if (!push(t[i])) {
return false;
}
}
return true;
}
void infalliblePush(StackType t) { valueStack_.infallibleEmplaceBack(t); }
void infalliblePush(ValType t) {
valueStack_.infallibleEmplaceBack(StackType(t));
}
void infalliblePush(TypeAndValue tv) { valueStack_.infallibleAppend(tv); }
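// After an unconditional transfer of control (e.g. br, return, unreachable),
// the rest of the current block is dead code: drop any values the block has
// pushed and mark its stack base as polymorphic so that later pops succeed
// with the bottom type.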
void afterUnconditionalBranch() {
valueStack_.shrinkTo(controlStack_.back().valueStackBase());
controlStack_.back().setPolymorphicBase();
}
inline bool checkIsSubtypeOf(StorageType actual, StorageType expected);
inline bool checkIsSubtypeOf(RefType actual, RefType expected) {
return checkIsSubtypeOf(ValType(actual).storageType(),
ValType(expected).storageType());
}
inline bool checkIsSubtypeOf(ValType actual, ValType expected) {
return checkIsSubtypeOf(actual.storageType(), expected.storageType());
}
inline bool checkIsSubtypeOf(ResultType params, ResultType results);
#ifdef ENABLE_WASM_GC
inline bool checkIsSubtypeOf(uint32_t actualTypeIndex,
uint32_t expectedTypeIndex);
#endif
public:
#ifdef DEBUG
explicit OpIter(const CodeMetadata& codeMeta, Decoder& decoder,
Kind kind = OpIter::Func)
: kind_(kind),
d_(decoder),
codeMeta_(codeMeta),
maxInitializedGlobalsIndexPlus1_(0),
featureUsage_(FeatureUsage::None),
branchHintVector_(nullptr),
op_(OpBytes(Op::Limit)),
offsetOfLastReadOp_(0) {}
#else
explicit OpIter(const CodeMetadata& codeMeta, Decoder& decoder,
Kind kind = OpIter::Func)
: kind_(kind),
d_(decoder),
codeMeta_(codeMeta),
maxInitializedGlobalsIndexPlus1_(0),
featureUsage_(FeatureUsage::None),
offsetOfLastReadOp_(0) {}
#endif
FeatureUsage featureUsage() const { return featureUsage_; }
void addFeatureUsage(FeatureUsage featureUsage) {
featureUsage_ |= featureUsage;
}
// Return the decoding byte offset.
uint32_t currentOffset() const { return d_.currentOffset(); }
// Return the offset within the entire module of the last-read op.
size_t lastOpcodeOffset() const {
return offsetOfLastReadOp_ ? offsetOfLastReadOp_ : d_.currentOffset();
}
// Return a BytecodeOffset describing where the current op should be reported
// to trap/call.
BytecodeOffset bytecodeOffset() const {
return BytecodeOffset(lastOpcodeOffset());
}
// Test whether the iterator has reached the end of the buffer.
bool done() const { return d_.done(); }
// Return a pointer to the end of the buffer being decoded by this iterator.
const uint8_t* end() const { return d_.end(); }
// Report a general failure.
[[nodiscard]] bool fail(const char* msg) MOZ_COLD;
// Report a general failure with a context
[[nodiscard]] bool fail_ctx(const char* fmt, const char* context) MOZ_COLD;
// Report an unrecognized opcode.
[[nodiscard]] bool unrecognizedOpcode(const OpBytes* expr) MOZ_COLD;
// Return whether the innermost block has a polymorphic base of its stack.
// Ideally this accessor would be removed; consider using something else.
bool currentBlockHasPolymorphicBase() const {
return !controlStack_.empty() && controlStack_.back().polymorphicBase();
}
// If it exists, return the BranchHint value for a given function index and
// branch offset.
// Branch hints are stored in a sorted vector. Because code is compiled in
// order, we keep track of the most recently accessed index.
// Retrieving branch hints is also done in order inside a function.
BranchHint getBranchHint(uint32_t funcIndex, uint32_t branchOffset) {
if (!codeMeta_.branchHintingEnabled()) {
return BranchHint::Invalid;
}
// Get the next hint in the collection
while (lastBranchHintIndex_ < branchHintVector_->length() &&
(*branchHintVector_)[lastBranchHintIndex_].branchOffset <
branchOffset) {
lastBranchHintIndex_++;
}
// No hint found for this branch.
if (lastBranchHintIndex_ >= branchHintVector_->length()) {
return BranchHint::Invalid;
}
// The last index is saved, now return the hint.
return (*branchHintVector_)[lastBranchHintIndex_].value;
}
// ------------------------------------------------------------------------
// Decoding and validation interface.
// Initialization and termination
[[nodiscard]] bool startFunction(uint32_t funcIndex,
const ValTypeVector& locals);
[[nodiscard]] bool endFunction(const uint8_t* bodyEnd);
[[nodiscard]] bool startInitExpr(ValType expected);
[[nodiscard]] bool endInitExpr();
// Value and reference types
[[nodiscard]] bool readValType(ValType* type);
[[nodiscard]] bool readHeapType(bool nullable, RefType* type);
// Instructions
[[nodiscard]] bool readOp(OpBytes* op);
[[nodiscard]] bool readReturn(ValueVector* values);
[[nodiscard]] bool readBlock(ResultType* paramType);
[[nodiscard]] bool readLoop(ResultType* paramType);
[[nodiscard]] bool readIf(ResultType* paramType, Value* condition);
[[nodiscard]] bool readElse(ResultType* paramType, ResultType* resultType,
ValueVector* thenResults);
[[nodiscard]] bool readEnd(LabelKind* kind, ResultType* type,
ValueVector* results,
ValueVector* resultsForEmptyElse);
void popEnd();
[[nodiscard]] bool readBr(uint32_t* relativeDepth, ResultType* type,
ValueVector* values);
[[nodiscard]] bool readBrIf(uint32_t* relativeDepth, ResultType* type,
ValueVector* values, Value* condition);
[[nodiscard]] bool readBrTable(Uint32Vector* depths, uint32_t* defaultDepth,
ResultType* defaultBranchType,
ValueVector* branchValues, Value* index);
[[nodiscard]] bool readTry(ResultType* type);
[[nodiscard]] bool readTryTable(ResultType* type,
TryTableCatchVector* catches);
[[nodiscard]] bool readCatch(LabelKind* kind, uint32_t* tagIndex,
ResultType* paramType, ResultType* resultType,
ValueVector* tryResults);
[[nodiscard]] bool readCatchAll(LabelKind* kind, ResultType* paramType,
ResultType* resultType,
ValueVector* tryResults);
[[nodiscard]] bool readDelegate(uint32_t* relativeDepth,
ResultType* resultType,
ValueVector* tryResults);
void popDelegate();
[[nodiscard]] bool readThrow(uint32_t* tagIndex, ValueVector* argValues);
[[nodiscard]] bool readThrowRef(Value* exnRef);
[[nodiscard]] bool readRethrow(uint32_t* relativeDepth);
[[nodiscard]] bool readUnreachable();
[[nodiscard]] bool readDrop();
[[nodiscard]] bool readUnary(ValType operandType, Value* input);
[[nodiscard]] bool readConversion(ValType operandType, ValType resultType,
Value* input);
[[nodiscard]] bool readBinary(ValType operandType, Value* lhs, Value* rhs);
[[nodiscard]] bool readComparison(ValType operandType, Value* lhs,
Value* rhs);
[[nodiscard]] bool readTernary(ValType operandType, Value* v0, Value* v1,
Value* v2);
[[nodiscard]] bool readLoad(ValType resultType, uint32_t byteSize,
LinearMemoryAddress<Value>* addr);
[[nodiscard]] bool readStore(ValType resultType, uint32_t byteSize,
LinearMemoryAddress<Value>* addr, Value* value);
[[nodiscard]] bool readTeeStore(ValType resultType, uint32_t byteSize,
LinearMemoryAddress<Value>* addr,
Value* value);
[[nodiscard]] bool readNop();
[[nodiscard]] bool readMemorySize(uint32_t* memoryIndex);
[[nodiscard]] bool readMemoryGrow(uint32_t* memoryIndex, Value* input);
[[nodiscard]] bool readSelect(bool typed, StackType* type, Value* trueValue,
Value* falseValue, Value* condition);
[[nodiscard]] bool readGetLocal(const ValTypeVector& locals, uint32_t* id);
[[nodiscard]] bool readSetLocal(const ValTypeVector& locals, uint32_t* id,
Value* value);
[[nodiscard]] bool readTeeLocal(const ValTypeVector& locals, uint32_t* id,
Value* value);
[[nodiscard]] bool readGetGlobal(uint32_t* id);
[[nodiscard]] bool readSetGlobal(uint32_t* id, Value* value);
[[nodiscard]] bool readTeeGlobal(uint32_t* id, Value* value);
[[nodiscard]] bool readI32Const(int32_t* i32);
[[nodiscard]] bool readI64Const(int64_t* i64);
[[nodiscard]] bool readF32Const(float* f32);
[[nodiscard]] bool readF64Const(double* f64);
[[nodiscard]] bool readRefFunc(uint32_t* funcIndex);
[[nodiscard]] bool readRefNull(RefType* type);
[[nodiscard]] bool readRefIsNull(Value* input);
[[nodiscard]] bool readRefAsNonNull(Value* input);
[[nodiscard]] bool readBrOnNull(uint32_t* relativeDepth, ResultType* type,
ValueVector* values, Value* condition);
[[nodiscard]] bool readBrOnNonNull(uint32_t* relativeDepth, ResultType* type,
ValueVector* values, Value* condition);
[[nodiscard]] bool readCall(uint32_t* funcIndex, ValueVector* argValues);
[[nodiscard]] bool readCallIndirect(uint32_t* funcTypeIndex,
uint32_t* tableIndex, Value* callee,
ValueVector* argValues);
#ifdef ENABLE_WASM_TAIL_CALLS
[[nodiscard]] bool readReturnCall(uint32_t* funcIndex,
ValueVector* argValues);
[[nodiscard]] bool readReturnCallIndirect(uint32_t* funcTypeIndex,
uint32_t* tableIndex, Value* callee,
ValueVector* argValues);
#endif
#ifdef ENABLE_WASM_GC
[[nodiscard]] bool readCallRef(const FuncType** funcType, Value* callee,
ValueVector* argValues);
# ifdef ENABLE_WASM_TAIL_CALLS
[[nodiscard]] bool readReturnCallRef(const FuncType** funcType, Value* callee,
ValueVector* argValues);
# endif
#endif
[[nodiscard]] bool readOldCallDirect(uint32_t numFuncImports,
uint32_t* funcIndex,
ValueVector* argValues);
[[nodiscard]] bool readOldCallIndirect(uint32_t* funcTypeIndex, Value* callee,
ValueVector* argValues);
[[nodiscard]] bool readWake(LinearMemoryAddress<Value>* addr, Value* count);
[[nodiscard]] bool readWait(LinearMemoryAddress<Value>* addr,
ValType valueType, uint32_t byteSize,
Value* value, Value* timeout);
[[nodiscard]] bool readFence();
[[nodiscard]] bool readAtomicLoad(LinearMemoryAddress<Value>* addr,
ValType resultType, uint32_t byteSize);
[[nodiscard]] bool readAtomicStore(LinearMemoryAddress<Value>* addr,
ValType resultType, uint32_t byteSize,
Value* value);
[[nodiscard]] bool readAtomicRMW(LinearMemoryAddress<Value>* addr,
ValType resultType, uint32_t byteSize,
Value* value);
[[nodiscard]] bool readAtomicCmpXchg(LinearMemoryAddress<Value>* addr,
ValType resultType, uint32_t byteSize,
Value* oldValue, Value* newValue);
[[nodiscard]] bool readMemOrTableCopy(bool isMem,
uint32_t* dstMemOrTableIndex,
Value* dst,
uint32_t* srcMemOrTableIndex,
Value* src, Value* len);
[[nodiscard]] bool readDataOrElemDrop(bool isData, uint32_t* segIndex);
[[nodiscard]] bool readMemFill(uint32_t* memoryIndex, Value* start,
Value* val, Value* len);
[[nodiscard]] bool readMemOrTableInit(bool isMem, uint32_t* segIndex,
uint32_t* dstMemOrTableIndex,
Value* dst, Value* src, Value* len);
[[nodiscard]] bool readTableFill(uint32_t* tableIndex, Value* start,
Value* val, Value* len);
[[nodiscard]] bool readMemDiscard(uint32_t* memoryIndex, Value* start,
Value* len);
[[nodiscard]] bool readTableGet(uint32_t* tableIndex, Value* index);
[[nodiscard]] bool readTableGrow(uint32_t* tableIndex, Value* initValue,
Value* delta);
[[nodiscard]] bool readTableSet(uint32_t* tableIndex, Value* index,
Value* value);
[[nodiscard]] bool readTableSize(uint32_t* tableIndex);
#ifdef ENABLE_WASM_GC
[[nodiscard]] bool readStructNew(uint32_t* typeIndex, ValueVector* argValues);
[[nodiscard]] bool readStructNewDefault(uint32_t* typeIndex);
[[nodiscard]] bool readStructGet(uint32_t* typeIndex, uint32_t* fieldIndex,
FieldWideningOp wideningOp, Value* ptr);
[[nodiscard]] bool readStructSet(uint32_t* typeIndex, uint32_t* fieldIndex,
Value* ptr, Value* val);
[[nodiscard]] bool readArrayNew(uint32_t* typeIndex, Value* numElements,
Value* argValue);
[[nodiscard]] bool readArrayNewFixed(uint32_t* typeIndex,
uint32_t* numElements,
ValueVector* values);
[[nodiscard]] bool readArrayNewDefault(uint32_t* typeIndex,
Value* numElements);
[[nodiscard]] bool readArrayNewData(uint32_t* typeIndex, uint32_t* segIndex,
Value* offset, Value* numElements);
[[nodiscard]] bool readArrayNewElem(uint32_t* typeIndex, uint32_t* segIndex,
Value* offset, Value* numElements);
[[nodiscard]] bool readArrayInitData(uint32_t* typeIndex, uint32_t* segIndex,
Value* array, Value* arrayIndex,
Value* segOffset, Value* length);
[[nodiscard]] bool readArrayInitElem(uint32_t* typeIndex, uint32_t* segIndex,
Value* array, Value* arrayIndex,
Value* segOffset, Value* length);
[[nodiscard]] bool readArrayGet(uint32_t* typeIndex,
FieldWideningOp wideningOp, Value* index,
Value* ptr);
[[nodiscard]] bool readArraySet(uint32_t* typeIndex, Value* val, Value* index,
Value* ptr);
[[nodiscard]] bool readArrayLen(Value* ptr);
[[nodiscard]] bool readArrayCopy(int32_t* elemSize, bool* elemsAreRefTyped,
Value* dstArray, Value* dstIndex,
Value* srcArray, Value* srcIndex,
Value* numElements);
[[nodiscard]] bool readArrayFill(uint32_t* typeIndex, Value* array,
Value* index, Value* val, Value* length);
[[nodiscard]] bool readRefTest(bool nullable, RefType* sourceType,
RefType* destType, Value* ref);
[[nodiscard]] bool readRefCast(bool nullable, RefType* sourceType,
RefType* destType, Value* ref);
[[nodiscard]] bool readBrOnCast(bool onSuccess, uint32_t* labelRelativeDepth,
RefType* sourceType, RefType* destType,
ResultType* labelType, ValueVector* values);
[[nodiscard]] bool readRefConversion(RefType operandType, RefType resultType,
Value* operandValue);
#endif
#ifdef ENABLE_WASM_SIMD
[[nodiscard]] bool readLaneIndex(uint32_t inputLanes, uint32_t* laneIndex);
[[nodiscard]] bool readExtractLane(ValType resultType, uint32_t inputLanes,
uint32_t* laneIndex, Value* input);
[[nodiscard]] bool readReplaceLane(ValType operandType, uint32_t inputLanes,
uint32_t* laneIndex, Value* baseValue,
Value* operand);
[[nodiscard]] bool readVectorShift(Value* baseValue, Value* shift);
[[nodiscard]] bool readVectorShuffle(Value* v1, Value* v2, V128* selectMask);
[[nodiscard]] bool readV128Const(V128* value);
[[nodiscard]] bool readLoadSplat(uint32_t byteSize,
LinearMemoryAddress<Value>* addr);
[[nodiscard]] bool readLoadExtend(LinearMemoryAddress<Value>* addr);
[[nodiscard]] bool readLoadLane(uint32_t byteSize,
LinearMemoryAddress<Value>* addr,
uint32_t* laneIndex, Value* input);
[[nodiscard]] bool readStoreLane(uint32_t byteSize,
LinearMemoryAddress<Value>* addr,
uint32_t* laneIndex, Value* input);
#endif
[[nodiscard]] bool readCallBuiltinModuleFunc(
const BuiltinModuleFunc** builtinModuleFunc, ValueVector* params);
#ifdef ENABLE_WASM_JSPI
[[nodiscard]] bool readStackSwitch(StackSwitchKind* kind, Value* suspender,
Value* fn, Value* data);
#endif
// At a location where readOp is allowed, peek at the next opcode
// without consuming it or updating any internal state.
// Never fails: returns uint16_t(Op::Limit) in op->b0 if it can't read.
void peekOp(OpBytes* op);
// ------------------------------------------------------------------------
// Stack management.
// Set the top N result values.
void setResults(size_t count, const ValueVector& values) {
MOZ_ASSERT(valueStack_.length() >= count);
size_t base = valueStack_.length() - count;
for (size_t i = 0; i < count; i++) {
valueStack_[base + i].setValue(values[i]);
}
}
bool getResults(size_t count, ValueVector* values) {
MOZ_ASSERT(valueStack_.length() >= count);
if (!values->resize(count)) {
return false;
}
size_t base = valueStack_.length() - count;
for (size_t i = 0; i < count; i++) {
(*values)[i] = valueStack_[base + i].value();
}
return true;
}
// Set the result value of the current top-of-value-stack expression.
void setResult(Value value) { valueStack_.back().setValue(value); }
// Return the result value of the current top-of-value-stack expression.
Value getResult() { return valueStack_.back().value(); }
// Return a reference to the top of the control stack.
ControlItem& controlItem() { return controlStack_.back().controlItem(); }
// Return a reference to an element in the control stack.
ControlItem& controlItem(uint32_t relativeDepth) {
return controlStack_[controlStack_.length() - 1 - relativeDepth]
.controlItem();
}
// Return the LabelKind of an element in the control stack.
LabelKind controlKind(uint32_t relativeDepth) {
return controlStack_[controlStack_.length() - 1 - relativeDepth].kind();
}
// Return a reference to the outermost element on the control stack.
ControlItem& controlOutermost() { return controlStack_[0].controlItem(); }
// Test whether the control-stack is empty, meaning we've consumed the final
// end of the function body.
bool controlStackEmpty() const { return controlStack_.empty(); }
// Return the depth of the control stack.
size_t controlStackDepth() const { return controlStack_.length(); }
// Find the innermost control item matching a predicate, starting the search
// at a given relative depth, and return true if such an item is found. The
// relative depth of the found item is returned via an out parameter.
template <typename Predicate>
bool controlFindInnermostFrom(Predicate predicate, uint32_t fromRelativeDepth,
uint32_t* foundRelativeDepth) {
int32_t fromAbsoluteDepth = controlStack_.length() - fromRelativeDepth - 1;
for (int32_t i = fromAbsoluteDepth; i >= 0; i--) {
if (predicate(controlStack_[i].kind(), controlStack_[i].controlItem())) {
*foundRelativeDepth = controlStack_.length() - 1 - i;
return true;
}
}
return false;
}
};
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(StorageType subType,
StorageType superType) {
return CheckIsSubtypeOf(d_, codeMeta_, lastOpcodeOffset(), subType,
superType);
}
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(ResultType params,
ResultType results) {
if (params.length() != results.length()) {
UniqueChars error(
JS_smprintf("type mismatch: expected %zu values, got %zu values",
results.length(), params.length()));
if (!error) {
return false;
}
return fail(error.get());
}
for (uint32_t i = 0; i < params.length(); i++) {
ValType param = params[i];
ValType result = results[i];
if (!checkIsSubtypeOf(param, result)) {
return false;
}
}
return true;
}
#ifdef ENABLE_WASM_GC
template <typename Policy>
inline bool OpIter<Policy>::checkIsSubtypeOf(uint32_t actualTypeIndex,
uint32_t expectedTypeIndex) {
const TypeDef& actualTypeDef = codeMeta_.types->type(actualTypeIndex);
const TypeDef& expectedTypeDef = codeMeta_.types->type(expectedTypeIndex);
return CheckIsSubtypeOf(
d_, codeMeta_, lastOpcodeOffset(),
ValType(RefType::fromTypeDef(&actualTypeDef, true)),
ValType(RefType::fromTypeDef(&expectedTypeDef, true)));
}
#endif
template <typename Policy>
inline bool OpIter<Policy>::unrecognizedOpcode(const OpBytes* expr) {
UniqueChars error(JS_smprintf("unrecognized opcode: %x %x", expr->b0,
IsPrefixByte(expr->b0) ? expr->b1 : 0));
if (!error) {
return false;
}
return fail(error.get());
}
template <typename Policy>
inline bool OpIter<Policy>::fail(const char* msg) {
return d_.fail(lastOpcodeOffset(), msg);
}
template <typename Policy>
inline bool OpIter<Policy>::fail_ctx(const char* fmt, const char* context) {
UniqueChars error(JS_smprintf(fmt, context));
if (!error) {
return false;
}
return fail(error.get());
}
template <typename Policy>
inline bool OpIter<Policy>::failEmptyStack() {
return valueStack_.empty() ? fail("popping value from empty stack")
: fail("popping value from outside block");
}
// This function pops exactly one value from the stack, yielding Bottom types in
// various cases and therefore making it the caller's responsibility to do the
// right thing for StackType::Bottom. Prefer (pop|top)WithType. This is an
// optimization for the super-common case where the caller is statically
// expecting the resulttype `[valtype]`.
template <typename Policy>
inline bool OpIter<Policy>::popStackType(StackType* type, Value* value) {
Control& block = controlStack_.back();
MOZ_ASSERT(valueStack_.length() >= block.valueStackBase());
if (MOZ_UNLIKELY(valueStack_.length() == block.valueStackBase())) {
// If the base of this block's stack is polymorphic, then we can pop a
// dummy value of the bottom type; it won't be used since we're in
// unreachable code.
if (block.polymorphicBase()) {
*type = StackType::bottom();
*value = Value();
// Maintain the invariant that, after a pop, there is always memory
// reserved to push a value infallibly.
return valueStack_.reserve(valueStack_.length() + 1);
}
return failEmptyStack();
}
TypeAndValue& tv = valueStack_.back();
*type = tv.type();
*value = tv.value();
valueStack_.popBack();
return true;
}
// This function pops exactly one value from the stack, checking that it has the
// expected type which can either be a specific value type or the bottom type.
template <typename Policy>
inline bool OpIter<Policy>::popWithType(ValType expectedType, Value* value,
StackType* stackType) {
if (!popStackType(stackType, value)) {
return false;
}
return stackType->isStackBottom() ||
checkIsSubtypeOf(stackType->valType(), expectedType);
}
// This function pops exactly one value from the stack, checking that it has the
// expected type which can either be a specific value type or the bottom type.
template <typename Policy>
inline bool OpIter<Policy>::popWithType(ValType expectedType, Value* value) {
StackType stackType;
return popWithType(expectedType, value, &stackType);
}
template <typename Policy>
inline bool OpIter<Policy>::popWithType(ResultType expected,
ValueVector* values) {
return popWithTypes(expected, values);
}
// Pops each of the given expected types (in reverse, because it's a stack).
template <typename Policy>
template <typename ValTypeSpanT>
inline bool OpIter<Policy>::popWithTypes(ValTypeSpanT expected,
ValueVector* values) {
size_t expectedLength = expected.size();
if (!values->resize(expectedLength)) {
return false;
}
for (size_t i = 0; i < expectedLength; i++) {
size_t reverseIndex = expectedLength - i - 1;
ValType expectedType = expected[reverseIndex];
Value* value = &(*values)[reverseIndex];
if (!popWithType(expectedType, value)) {
return false;
}
}
return true;
}
// This function pops exactly one value from the stack, checking that it is a
// reference type.
template <typename Policy>
inline bool OpIter<Policy>::popWithRefType(Value* value, StackType* type) {
if (!popStackType(type, value)) {
return false;
}
if (type->isStackBottom() || type->valType().isRefType()) {
return true;
}
UniqueChars actualText = ToString(type->valType(), codeMeta_.types);
if (!actualText) {
return false;
}
UniqueChars error(JS_smprintf(
"type mismatch: expression has type %s but expected a reference type",
actualText.get()));
if (!error) {
return false;
}
return fail(error.get());
}
template <typename Policy>
inline bool OpIter<Policy>::checkTopTypeMatches(ResultType expected,
ValueVector* values,
bool rewriteStackTypes) {
if (expected.empty()) {
return true;
}
Control& block = controlStack_.back();
size_t expectedLength = expected.length();
if (values && !values->resize(expectedLength)) {
return false;
}
for (size_t i = 0; i != expectedLength; i++) {
// We're iterating as-if we were popping each expected/actual type one by
// one, which means iterating the array of expected results backwards.
// The "current" value stack length refers to what the value stack length
// would have been if we were popping it.
size_t reverseIndex = expectedLength - i - 1;
ValType expectedType = expected[reverseIndex];
auto collectValue = [&](const Value& v) {
if (values) {
(*values)[reverseIndex] = v;
}
};
size_t currentValueStackLength = valueStack_.length() - i;
MOZ_ASSERT(currentValueStackLength >= block.valueStackBase());
if (currentValueStackLength == block.valueStackBase()) {
if (!block.polymorphicBase()) {
return failEmptyStack();
}
// If the base of this block's stack is polymorphic, then we can just
// pull out as many fake values as we need to validate, and create dummy
// stack entries accordingly; they won't be used since we're in
// unreachable code. However, if `rewriteStackTypes` is true, we must
// set the types on these new entries to whatever `expected` requires
// them to be.
TypeAndValue newTandV =
rewriteStackTypes ? TypeAndValue(expectedType) : TypeAndValue();
if (!valueStack_.insert(valueStack_.begin() + currentValueStackLength,
newTandV)) {
return false;
}
collectValue(Value());
} else {
TypeAndValue& observed = valueStack_[currentValueStackLength - 1];
if (observed.type().isStackBottom()) {
collectValue(Value());
} else {
if (!checkIsSubtypeOf(observed.type().valType(), expectedType)) {
return false;
}
collectValue(observed.value());
}
if (rewriteStackTypes) {
observed.setType(StackType(expectedType));
}
}
}
return true;
}
template <typename Policy>
inline bool OpIter<Policy>::pushControl(LabelKind kind, BlockType type) {
ResultType paramType = type.params();
ValueVector values;
if (!checkTopTypeMatches(paramType, &values, /*rewriteStackTypes=*/true)) {
return false;
}
MOZ_ASSERT(valueStack_.length() >= paramType.length());
uint32_t valueStackBase = valueStack_.length() - paramType.length();
return controlStack_.emplaceBack(kind, type, valueStackBase);
}
template <typename Policy>
inline bool OpIter<Policy>::checkStackAtEndOfBlock(ResultType* expectedType,
ValueVector* values) {
Control& block = controlStack_.back();
*expectedType = block.type().results();
MOZ_ASSERT(valueStack_.length() >= block.valueStackBase());
if (expectedType->length() < valueStack_.length() - block.valueStackBase()) {
return fail("unused values not explicitly dropped by end of block");
}
return checkTopTypeMatches(*expectedType, values,
/*rewriteStackTypes=*/true);
}
template <typename Policy>
inline bool OpIter<Policy>::getControl(uint32_t relativeDepth,
Control** controlEntry) {
if (relativeDepth >= controlStack_.length()) {
return fail("branch depth exceeds current nesting level");
}
*controlEntry = &controlStack_[controlStack_.length() - 1 - relativeDepth];
return true;
}
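// Read a block type. It is encoded either as TypeCode::BlockVoid (the empty
// block type), as a single value type (distinguished by the SLEB128 sign bit
// being set), or as a non-negative SLEB128 index into the type section that
// must refer to a function type.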
template <typename Policy>
inline bool OpIter<Policy>::readBlockType(BlockType* type) {
uint8_t nextByte;
if (!d_.peekByte(&nextByte)) {
return fail("unable to read block type");
}
if (nextByte == uint8_t(TypeCode::BlockVoid)) {
d_.uncheckedReadFixedU8();
*type = BlockType::VoidToVoid();
return true;
}
if ((nextByte & SLEB128SignMask) == SLEB128SignBit) {
ValType v;
if (!readValType(&v)) {
return false;
}
*type = BlockType::VoidToSingle(v);
return true;
}
int32_t x;
if (!d_.readVarS32(&x) || x < 0 || uint32_t(x) >= codeMeta_.types->length()) {
return fail("invalid block type type index");
}
const TypeDef* typeDef = &codeMeta_.types->type(x);
if (!typeDef->isFuncType()) {
return fail("block type type index must be func type");
}
*type = BlockType::Func(typeDef->funcType());
return true;
}
template <typename Policy>
inline bool OpIter<Policy>::readOp(OpBytes* op) {
MOZ_ASSERT(!controlStack_.empty());
offsetOfLastReadOp_ = d_.currentOffset();
if (MOZ_UNLIKELY(!d_.readOp(op))) {
return fail("unable to read opcode");
}
#ifdef DEBUG
op_ = *op;
#endif
return true;
}
template <typename Policy>
inline void OpIter<Policy>::peekOp(OpBytes* op) {
const uint8_t* pos = d_.currentPosition();
if (MOZ_UNLIKELY(!d_.readOp(op))) {
op->b0 = uint16_t(Op::Limit);
}
d_.rollbackPosition(pos);
}
template <typename Policy>
inline bool OpIter<Policy>::startFunction(uint32_t funcIndex,
const ValTypeVector& locals) {
MOZ_ASSERT(kind_ == OpIter::Func);
MOZ_ASSERT(elseParamStack_.empty());
MOZ_ASSERT(valueStack_.empty());
MOZ_ASSERT(controlStack_.empty());
MOZ_ASSERT(op_.b0 == uint16_t(Op::Limit));