/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
*
* Copyright 2014 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "wasm/WasmFrameIter.h"
#include "jit/JitFrames.h"
#include "jit/shared/IonAssemblerBuffer.h" // jit::BufferOffset
#include "js/ColumnNumber.h" // JS::WasmFunctionIndex, LimitedColumnNumberOneOrigin, JS::TaggedColumnNumberOneOrigin, JS::TaggedColumnNumberOneOrigin
#include "vm/JitActivation.h" // js::jit::JitActivation
#include "vm/JSContext.h"
#include "wasm/WasmBuiltinModuleGenerated.h"
#include "wasm/WasmDebugFrame.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmInstanceData.h"
#include "wasm/WasmPI.h"
#include "wasm/WasmStubs.h"
#include "jit/MacroAssembler-inl.h"
#include "wasm/WasmInstance-inl.h"
using namespace js;
using namespace js::jit;
using namespace js::wasm;
using mozilla::DebugOnly;
using mozilla::Maybe;
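// Cross-instance calls use a FrameWithInstances, which extends Frame with
// slots holding the caller's and callee's Instance*. These helpers read
// those slots at their fixed offsets from the frame pointer.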
static Instance* ExtractCallerInstanceFromFrameWithInstances(Frame* fp) {
return *reinterpret_cast<Instance**>(
reinterpret_cast<uint8_t*>(fp) +
FrameWithInstances::callerInstanceOffset());
}
static const Instance* ExtractCalleeInstanceFromFrameWithInstances(
const Frame* fp) {
return *reinterpret_cast<Instance* const*>(
reinterpret_cast<const uint8_t*>(fp) +
FrameWithInstances::calleeInstanceOffset());
}
/*****************************************************************************/
// WasmFrameIter implementation
WasmFrameIter::WasmFrameIter(JitActivation* activation, wasm::Frame* fp)
: activation_(activation),
code_(nullptr),
codeRange_(nullptr),
lineOrBytecode_(0),
fp_(fp ? fp : activation->wasmExitFP()),
instance_(nullptr),
unwoundCallerFP_(nullptr),
unwind_(Unwind::False),
unwoundAddressOfReturnAddress_(nullptr),
resumePCinCurrentFrame_(nullptr),
failedUnwindSignatureMismatch_(false),
stackSwitched_(false) {
MOZ_ASSERT(fp_);
instance_ = GetNearestEffectiveInstance(fp_);
// When the stack is captured during a trap (viz., to create the .stack
// for an Error object), use the pc/bytecode information captured by the
// signal handler in the runtime. Take care not to use this trap unwind
// state for wasm frames in the middle of a JitActivation, i.e., wasm frames
// that called into JIT frames before the trap.
if (activation->isWasmTrapping() && fp_ == activation->wasmExitFP()) {
const TrapData& trapData = activation->wasmTrapData();
void* unwoundPC = trapData.unwoundPC;
code_ = &instance_->code();
MOZ_ASSERT(code_ == LookupCode(unwoundPC));
codeRange_ = code_->lookupFuncRange(unwoundPC);
MOZ_ASSERT(codeRange_);
lineOrBytecode_ = trapData.bytecodeOffset;
failedUnwindSignatureMismatch_ = trapData.failedUnwindSignatureMismatch;
#ifdef ENABLE_WASM_TAIL_CALLS
// debugEnabled() relies on a valid resumePCinCurrentFrame_ value to
// identify a DebugFrame; normally this field is updated at popFrame().
// The only case where it must be set here is during IndirectCallBadSig
// trapping and stack unwinding: the top frame will never be at a
// ReturnStub callsite except during IndirectCallBadSig unwinding.
const CallSite* site = code_->lookupCallSite(unwoundPC);
if (site && site->kind() == CallSite::ReturnStub) {
MOZ_ASSERT(trapData.trap == Trap::IndirectCallBadSig);
resumePCinCurrentFrame_ = (uint8_t*)unwoundPC;
}
#endif
MOZ_ASSERT(!done());
return;
}
// Otherwise, execution exits wasm code via an exit stub which sets exitFP
// to the exit stub's frame. Thus, in this case, we want to start iteration
// at the caller of the exit frame, whose Code, CodeRange and CallSite are
// indicated by the returnAddress of the exit stub's frame. If the caller
// was Ion, we can just skip the wasm frames.
popFrame();
MOZ_ASSERT(!done() || unwoundCallerFP_);
}
WasmFrameIter::WasmFrameIter(FrameWithInstances* fp, void* returnAddress)
: activation_(nullptr),
code_(nullptr),
codeRange_(nullptr),
lineOrBytecode_(0),
fp_(fp),
instance_(fp->calleeInstance()),
unwoundCallerFP_(nullptr),
unwind_(Unwind::False),
unwoundAddressOfReturnAddress_(nullptr),
resumePCinCurrentFrame_((uint8_t*)returnAddress),
failedUnwindSignatureMismatch_(false),
stackSwitched_(false) {
// Specialized implementation to avoid popFrame() iteration.
// It is expected that the iterator starts at a callsite that is in
// the function body and has an instance reference.
code_ = LookupCode(returnAddress, &codeRange_);
MOZ_ASSERT(code_ && codeRange_ && codeRange_->kind() == CodeRange::Function);
const CallSite* callsite = code_->lookupCallSite(returnAddress);
MOZ_ASSERT(callsite && callsite->mightBeCrossInstance());
#ifdef ENABLE_WASM_JSPI
stackSwitched_ = callsite->isStackSwitch();
#endif
MOZ_ASSERT(code_ == &instance_->code());
lineOrBytecode_ = callsite->lineOrBytecode();
failedUnwindSignatureMismatch_ = false;
MOZ_ASSERT(!done());
}
bool WasmFrameIter::done() const {
MOZ_ASSERT(!!fp_ == !!code_);
MOZ_ASSERT(!!fp_ == !!codeRange_);
return !fp_;
}
void WasmFrameIter::operator++() {
MOZ_ASSERT(!done());
// When the iterator is set to unwind, each time the iterator pops a frame,
// the JitActivation is updated so that the just-popped frame is no longer
// visible. This is necessary since Debugger::onLeaveFrame is called before
// popping each frame and, once onLeaveFrame is called for a given frame,
// that frame must not be visible to subsequent stack iteration (or it
// could be added as a "new" frame just as it becomes garbage). When the
// frame is trapping, then exitFP is included in the callstack (otherwise,
// it is skipped, as explained above). So to unwind the innermost frame, we
// just clear the trapping state.
if (unwind_ == Unwind::True) {
if (activation_->isWasmTrapping()) {
activation_->finishWasmTrap();
}
activation_->setWasmExitFP(fp_);
}
popFrame();
}
static inline void AssertJitExitFrame(const void* fp,
jit::ExitFrameType expected) {
// Called via a JIT to wasm call: in this case, FP is pointing in the middle
// of the exit frame, right before the exit footer; ensure the exit frame type
// is the expected one.
#ifdef DEBUG
auto* jitCaller = (ExitFrameLayout*)fp;
MOZ_ASSERT(jitCaller->footer()->type() == expected);
#endif
}
static inline void AssertDirectJitCall(const void* fp) {
AssertJitExitFrame(fp, jit::ExitFrameType::DirectWasmJitCall);
}
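// Unwind one frame. The caller's code is identified by the return address:
// ordinary wasm callers have a Code/CodeRange/CallSite, while entry stubs
// (interpreter entry, JIT entry) and direct JIT callers terminate the
// iteration and, when unwinding, update the activation's exit FP state.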
void WasmFrameIter::popFrame() {
uint8_t* returnAddress = fp_->returnAddress();
code_ = LookupCode(returnAddress, &codeRange_);
#ifdef ENABLE_WASM_JSPI
stackSwitched_ = false;
#endif
if (!code_) {
// This is a direct call from the jit into the wasm function's body. The
// call stack resembles this at this point:
//
// |---------------------|
// | JIT FRAME |
// | JIT FAKE EXIT FRAME | <-- fp_->callerFP_
// | WASM FRAME | <-- fp_
// |---------------------|
//
// fp_->callerFP_ points to the fake exit frame set up by the jit caller,
// and the return-address-to-fp is in JIT code, thus doesn't belong to any
// wasm instance's code (in particular, there's no associated CodeRange).
// Mark the frame as such.
AssertDirectJitCall(fp_->jitEntryCaller());
unwoundCallerFP_ = fp_->jitEntryCaller();
hasUnwoundJitFrame_ = true;
if (unwind_ == Unwind::True) {
activation_->setJSExitFP(unwoundCallerFP());
unwoundAddressOfReturnAddress_ = fp_->addressOfReturnAddress();
}
fp_ = nullptr;
code_ = nullptr;
codeRange_ = nullptr;
MOZ_ASSERT(done());
return;
}
MOZ_ASSERT(codeRange_);
Frame* prevFP = fp_;
fp_ = fp_->wasmCaller();
resumePCinCurrentFrame_ = returnAddress;
if (codeRange_->isInterpEntry()) {
// Interpreter entry has a simple frame; record FP from it.
unwoundCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
MOZ_ASSERT(!hasUnwoundJitFrame_);
fp_ = nullptr;
code_ = nullptr;
codeRange_ = nullptr;
if (unwind_ == Unwind::True) {
// We're exiting via the interpreter entry; we can safely reset
// exitFP.
activation_->setWasmExitFP(nullptr);
unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();
}
MOZ_ASSERT(done());
return;
}
if (codeRange_->isJitEntry()) {
// This wasm function has been called through the generic JIT entry by
// a JIT caller, so the call stack resembles this:
//
// |---------------------|
// | JIT FRAME |
// | JSJIT TO WASM EXIT | <-- fp_
// | WASM JIT ENTRY | <-- prevFP (already unwound)
// | WASM FRAME | (already unwound)
// |---------------------|
//
// The next value of FP is a jit exit frame with type WasmGenericJitEntry.
// This lets us transition to a JSJit frame iterator.
unwoundCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
hasUnwoundJitFrame_ = true;
AssertJitExitFrame(unwoundCallerFP_,
jit::ExitFrameType::WasmGenericJitEntry);
fp_ = nullptr;
code_ = nullptr;
codeRange_ = nullptr;
if (unwind_ == Unwind::True) {
activation_->setJSExitFP(unwoundCallerFP());
unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();
}
MOZ_ASSERT(done());
return;
}
MOZ_ASSERT(codeRange_->kind() == CodeRange::Function);
const CallSite* callsite = code_->lookupCallSite(returnAddress);
MOZ_ASSERT(callsite);
if (callsite->mightBeCrossInstance()) {
instance_ = ExtractCallerInstanceFromFrameWithInstances(prevFP);
}
#ifdef ENABLE_WASM_JSPI
stackSwitched_ = callsite->isStackSwitch();
#endif
MOZ_ASSERT(code_ == &instance()->code());
lineOrBytecode_ = callsite->lineOrBytecode();
failedUnwindSignatureMismatch_ = false;
MOZ_ASSERT(!done());
}
const char* WasmFrameIter::filename() const {
MOZ_ASSERT(!done());
return code_->codeMeta().scriptedCaller().filename.get();
}
const char16_t* WasmFrameIter::displayURL() const {
MOZ_ASSERT(!done());
return code_->codeMetaForAsmJS()
? code_->codeMetaForAsmJS()->displayURL() // asm.js
: nullptr; // wasm
}
bool WasmFrameIter::mutedErrors() const {
MOZ_ASSERT(!done());
return code_->codeMetaForAsmJS()
? code_->codeMetaForAsmJS()->mutedErrors() // asm.js
: false; // wasm
}
JSAtom* WasmFrameIter::functionDisplayAtom() const {
MOZ_ASSERT(!done());
JSContext* cx = activation_->cx();
JSAtom* atom = instance()->getFuncDisplayAtom(cx, codeRange_->funcIndex());
if (!atom) {
cx->clearPendingException();
return cx->names().empty_;
}
return atom;
}
unsigned WasmFrameIter::lineOrBytecode() const {
MOZ_ASSERT(!done());
return lineOrBytecode_;
}
uint32_t WasmFrameIter::funcIndex() const {
MOZ_ASSERT(!done());
return codeRange_->funcIndex();
}
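// Compute the line (and optionally the column) to report for this frame.
// For asm.js, lineOrBytecode_ is an actual source line number; for wasm it
// is a bytecode offset, returned as the "line" with the function index
// encoded in the column via JS::WasmFunctionIndex.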
unsigned WasmFrameIter::computeLine(
JS::TaggedColumnNumberOneOrigin* column) const {
if (instance()->isAsmJS()) {
if (column) {
*column =
JS::TaggedColumnNumberOneOrigin(JS::LimitedColumnNumberOneOrigin(
JS::WasmFunctionIndex::DefaultBinarySourceColumnNumberOneOrigin));
}
return lineOrBytecode_;
}
MOZ_ASSERT(!(codeRange_->funcIndex() &
JS::TaggedColumnNumberOneOrigin::WasmFunctionTag));
if (column) {
*column = JS::TaggedColumnNumberOneOrigin(
JS::WasmFunctionIndex(codeRange_->funcIndex()));
}
return lineOrBytecode_;
}
void** WasmFrameIter::unwoundAddressOfReturnAddress() const {
MOZ_ASSERT(done());
MOZ_ASSERT(unwind_ == Unwind::True);
MOZ_ASSERT(unwoundAddressOfReturnAddress_);
return unwoundAddressOfReturnAddress_;
}
bool WasmFrameIter::debugEnabled() const {
MOZ_ASSERT(!done());
// Metadata::debugEnabled is only set if debugging is actually enabled (both
// requested, and available via baseline compilation), and Tier::Debug code
// will be available.
if (!code_->codeMeta().debugEnabled) {
return false;
}
// Debug information is not available in the prologue when the iterator
// fails to unwind an invalid-signature trap.
if (failedUnwindSignatureMismatch_) {
return false;
}
// Only non-imported functions can have debug frames.
if (codeRange_->funcIndex() < code_->funcImports().length()) {
return false;
}
#ifdef ENABLE_WASM_TAIL_CALLS
// A debug frame is not present at a return stub.
const CallSite* site = code_->lookupCallSite((void*)resumePCinCurrentFrame_);
if (site && site->kind() == CallSite::ReturnStub) {
return false;
}
#endif
return true;
}
DebugFrame* WasmFrameIter::debugFrame() const {
MOZ_ASSERT(!done());
return DebugFrame::from(fp_);
}
bool WasmFrameIter::hasUnwoundJitFrame() const {
MOZ_ASSERT_IF(hasUnwoundJitFrame_, unwoundCallerFP_);
return hasUnwoundJitFrame_;
}
uint8_t* WasmFrameIter::resumePCinCurrentFrame() const {
if (resumePCinCurrentFrame_) {
return resumePCinCurrentFrame_;
}
MOZ_ASSERT(activation_->isWasmTrapping());
// The next instruction is the instruction following the trap instruction.
return (uint8_t*)activation_->wasmTrapData().resumePC;
}
/*****************************************************************************/
// Prologue/epilogue code generation
// These constants reflect statically-determined offsets in the
// prologue/epilogue. The offsets are dynamically asserted during code
// generation.
#if defined(JS_CODEGEN_X64)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 4;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_X86)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 3;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM)
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4;
static const unsigned PushedFP = 8;
static const unsigned SetFP = 12;
static const unsigned PoppedFP = 0;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_ARM64)
// On ARM64 we do not use push or pop; the prologues and epilogues are
// structured differently due to restrictions on SP alignment. Even so,
// PushedRetAddr and PushedFP are used in some restricted contexts
// and must be superficially meaningful.
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 12;
static const unsigned SetFP = 16;
static const unsigned PoppedFP = 8;
static const unsigned PoppedFPJitEntry = 8;
static_assert(BeforePushRetAddr == 0, "Required by StartUnwinding");
static_assert(PushedFP > PushedRetAddr, "Required by StartUnwinding");
#elif defined(JS_CODEGEN_MIPS64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_LOONG64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_RISCV64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 4;
static const unsigned PoppedFPJitEntry = 0;
#elif defined(JS_CODEGEN_NONE) || defined(JS_CODEGEN_WASM32)
// Synthetic values to satisfy asserts and avoid compiler warnings.
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 2;
static const unsigned PoppedFP = 3;
static const unsigned PoppedFPJitEntry = 4;
#else
# error "Unknown architecture!"
#endif
static void LoadActivation(MacroAssembler& masm, const Register& dest) {
// WasmCall pushes a JitActivation.
masm.loadPtr(Address(InstanceReg, wasm::Instance::offsetOfCx()), dest);
masm.loadPtr(Address(dest, JSContext::offsetOfActivation()), dest);
}
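// Store the FP (tagged with ExitFPTag to mark it as a wasm exit) and the
// exit reason in the JitActivation so stack iterators can recognize and
// unwind the exit frame. The tag bit is removed from FramePointer again
// before the exit code uses it.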
void wasm::SetExitFP(MacroAssembler& masm, ExitReason reason,
Register scratch) {
MOZ_ASSERT(!reason.isNone());
LoadActivation(masm, scratch);
masm.store32(
Imm32(reason.encode()),
Address(scratch, JitActivation::offsetOfEncodedWasmExitReason()));
masm.orPtr(Imm32(ExitFPTag), FramePointer);
masm.storePtr(FramePointer,
Address(scratch, JitActivation::offsetOfPackedExitFP()));
masm.andPtr(Imm32(int32_t(~ExitFPTag)), FramePointer);
}
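// Inverse of SetExitFP: null out packedExitFP and the encoded exit reason
// when control returns from the exit back into wasm code.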
void wasm::ClearExitFP(MacroAssembler& masm, Register scratch) {
LoadActivation(masm, scratch);
masm.storePtr(ImmWord(0x0),
Address(scratch, JitActivation::offsetOfPackedExitFP()));
masm.store32(
Imm32(0x0),
Address(scratch, JitActivation::offsetOfEncodedWasmExitReason()));
}
static void GenerateCallablePrologue(MacroAssembler& masm, uint32_t* entry) {
AutoCreatedBy acb(masm, "GenerateCallablePrologue");
masm.setFramePushed(0);
// ProfilingFrameIterator needs to know the offsets of several key
// instructions from entry. To save space, we make these offsets static
// constants and assert that they match the actual codegen below. On ARM,
// this requires AutoForbidPoolsAndNops to prevent a constant pool from being
// randomly inserted between two instructions.
#if defined(JS_CODEGEN_MIPS64)
{
*entry = masm.currentOffset();
masm.ma_push(ra);
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
masm.ma_push(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
masm.moveStackPtrTo(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
}
#elif defined(JS_CODEGEN_LOONG64)
{
*entry = masm.currentOffset();
masm.ma_push(ra);
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
masm.ma_push(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
masm.moveStackPtrTo(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
}
#elif defined(JS_CODEGEN_RISCV64)
{
*entry = masm.currentOffset();
BlockTrampolinePoolScope block_trampoline_pool(&masm, 5);
masm.ma_push(ra);
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
masm.ma_push(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
masm.moveStackPtrTo(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
}
#elif defined(JS_CODEGEN_ARM64)
{
// We do not use the PseudoStackPointer. However, we may be called in a
// context -- compilation using Ion -- in which the PseudoStackPointer is
// in use. Rather than risk confusion in the uses of `masm` here, let's
// just switch in the real SP, do what we need to do, and restore the
// existing setting afterwards.
const vixl::Register stashedSPreg = masm.GetStackPointer64();
masm.SetStackPointer64(vixl::sp);
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 4);
*entry = masm.currentOffset();
masm.Sub(sp, sp, sizeof(Frame));
masm.Str(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
masm.Str(ARMRegister(FramePointer, 64),
MemOperand(sp, Frame::callerFPOffset()));
MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
masm.Mov(ARMRegister(FramePointer, 64), sp);
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
// And restore the SP-reg setting, per comment above.
masm.SetStackPointer64(stashedSPreg);
}
#else
{
# if defined(JS_CODEGEN_ARM)
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 3);
*entry = masm.currentOffset();
static_assert(BeforePushRetAddr == 0);
masm.push(lr);
# else
*entry = masm.currentOffset();
// The x86/x64 call instruction pushes the return address.
# endif
MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
masm.push(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
masm.moveStackPtrTo(FramePointer);
MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
}
#endif
}
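// Generate the epilogue matching GenerateCallablePrologue: free the frame's
// stack, clear the exit FP if this was an exit, then restore FramePointer
// and return. poppedFP records the code offset used to validate the
// per-architecture PoppedFP constant.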
static void GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed,
ExitReason reason, uint32_t* ret) {
AutoCreatedBy acb(masm, "GenerateCallableEpilogue");
if (framePushed) {
masm.freeStack(framePushed);
}
if (!reason.isNone()) {
ClearExitFP(masm, ABINonArgReturnVolatileReg);
}
DebugOnly<uint32_t> poppedFP{};
#if defined(JS_CODEGEN_MIPS64)
masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
poppedFP = masm.currentOffset();
masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);
*ret = masm.currentOffset();
masm.as_jr(ra);
masm.addToStackPtr(Imm32(sizeof(Frame)));
#elif defined(JS_CODEGEN_LOONG64)
masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
poppedFP = masm.currentOffset();
masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);
*ret = masm.currentOffset();
masm.addToStackPtr(Imm32(sizeof(Frame)));
masm.as_jirl(zero, ra, BOffImm16(0));
#elif defined(JS_CODEGEN_RISCV64)
{
BlockTrampolinePoolScope block_trampoline_pool(&masm, 20);
masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
poppedFP = masm.currentOffset();
masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);
*ret = masm.currentOffset();
masm.addToStackPtr(Imm32(sizeof(Frame)));
masm.jalr(zero, ra, 0);
masm.nop();
}
#elif defined(JS_CODEGEN_ARM64)
// See comment at equivalent place in |GenerateCallablePrologue| above.
const vixl::Register stashedSPreg = masm.GetStackPointer64();
masm.SetStackPointer64(vixl::sp);
AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 5);
masm.Ldr(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
masm.Ldr(ARMRegister(FramePointer, 64),
MemOperand(sp, Frame::callerFPOffset()));
poppedFP = masm.currentOffset();
masm.Add(sp, sp, sizeof(Frame));
// Reinitialise PSP from SP. This is less than elegant because the prologue
// operates on the raw stack pointer SP and does not keep the PSP in sync.
// We can't use initPseudoStackPtr here because we just set up masm to not
// use it. Hence we have to do it "by hand".
masm.Mov(PseudoStackPointer64, vixl::sp);
*ret = masm.currentOffset();
masm.Ret(ARMRegister(lr, 64));
// See comment at equivalent place in |GenerateCallablePrologue| above.
masm.SetStackPointer64(stashedSPreg);
#else
// Forbid pools for the same reason as described in GenerateCallablePrologue.
# if defined(JS_CODEGEN_ARM)
AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 6);
# endif
// There is an important ordering constraint here: fp must be repointed to
// the caller's frame before any field of the frame currently pointed to by
// fp is popped: asynchronous signal handlers (which use stack space
// starting at sp) could otherwise clobber these fields while they are still
// accessible via fp (fp fields are read during frame iteration which is
// *also* done asynchronously).
masm.pop(FramePointer);
poppedFP = masm.currentOffset();
*ret = masm.currentOffset();
masm.ret();
#endif
MOZ_ASSERT_IF(!masm.oom(), PoppedFP == *ret - poppedFP);
}
void wasm::GenerateFunctionPrologue(MacroAssembler& masm,
const CallIndirectId& callIndirectId,
const Maybe<uint32_t>& tier1FuncIndex,
FuncOffsets* offsets) {
AutoCreatedBy acb(masm, "wasm::GenerateFunctionPrologue");
// We are going to generate this code layout:
// ---------------------------------------------
// checked call entry: callable prologue
// check signature
// jump functionBody ──┐
// unchecked call entry: callable prologue │
// functionBody <─────┘
// -----------------------------------------------
// checked call entry - used for call_indirect when we have to check the
// signature.
//
// unchecked call entry - used for regular direct same-instance calls.
// The checked call entry is a call target, so must have CodeAlignment.
// Its offset is normally zero.
static_assert(WasmCheckedCallEntryOffset % CodeAlignment == 0,
"code aligned");
// Flush pending pools so they do not get dumped between the 'begin' and
// 'uncheckedCallEntry' offsets since the difference must be less than
// UINT8_MAX to be stored in CodeRange::funcbeginToUncheckedCallEntry_.
// (Pending pools can be large.)
masm.flushBuffer();
masm.haltingAlign(CodeAlignment);
Label functionBody;
offsets->begin = masm.currentOffset();
// Only first-class functions (those that can be referenced in a table) need
// the checked call prologue w/ signature check. It is impossible to perform
// a checked call otherwise.
//
// asm.js function tables are homogeneous and don't need a signature check.
// However, they can be put in tables which expect a checked call entry point,
// so we generate a no-op entry point for consistency. If asm.js performance
// were important, we could refine this in the future.
if (callIndirectId.kind() != CallIndirectIdKind::None) {
// Generate checked call entry. The BytecodeOffset of the trap is fixed up
// to be the bytecode offset of the callsite by
// JitActivation::startWasmTrap.
MOZ_ASSERT_IF(!masm.oom(), masm.currentOffset() - offsets->begin ==
WasmCheckedCallEntryOffset);
uint32_t dummy;
GenerateCallablePrologue(masm, &dummy);
switch (callIndirectId.kind()) {
case CallIndirectIdKind::Global: {
Label fail;
Register scratch1 = WasmTableCallScratchReg0;
Register scratch2 = WasmTableCallScratchReg1;
// Check if this function's type is exactly the expected function type
masm.loadPtr(
Address(InstanceReg,
Instance::offsetInData(
callIndirectId.instanceDataOffset() +
offsetof(wasm::TypeDefInstanceData, superTypeVector))),
scratch1);
masm.branchPtr(Assembler::Condition::Equal, WasmTableCallSigReg,
scratch1, &functionBody);
// Otherwise, we need to see if this function's type is a sub type of
// the expected function type. This requires us to check if the
// expected's type is in the super type vector of this function's type.
//
// We can skip this if our function type has no super types.
if (callIndirectId.hasSuperType()) {
// Check if the expected function type was an immediate, not a
// type definition. Because we only allow the immediate form for
// final types without super types, this implies that we have a
// signature mismatch.
masm.branchTestPtr(Assembler::Condition::NonZero, WasmTableCallSigReg,
Imm32(FuncType::ImmediateBit), &fail);
// Load the subtyping depth of the expected function type. Re-use the
// index register, as it's no longer needed.
Register subTypingDepth = WasmTableCallIndexReg;
masm.load32(
Address(WasmTableCallSigReg,
int32_t(SuperTypeVector::offsetOfSubTypingDepth())),
subTypingDepth);
// Perform the check
masm.branchWasmSTVIsSubtypeDynamicDepth(scratch1, WasmTableCallSigReg,
subTypingDepth, scratch2,
&functionBody, true);
}
masm.bind(&fail);
masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
break;
}
case CallIndirectIdKind::Immediate: {
masm.branch32(Assembler::Condition::Equal, WasmTableCallSigReg,
Imm32(callIndirectId.immediate()), &functionBody);
masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
break;
}
case CallIndirectIdKind::AsmJS:
masm.jump(&functionBody);
break;
case CallIndirectIdKind::None:
break;
}
// The preceding code may have generated a small constant pool to support
// the comparison in the signature check. But if we flush the pool here we
// will also force the creation of an unused branch veneer in the pool for
// the jump to functionBody from the signature check on some platforms, thus
// needlessly inflating the size of the prologue.
//
// On no supported platform that uses a pool (arm, arm64) is there any risk
// at present of that branch or other elements in the pool going out of
// range while we're generating the following padding and prologue,
// therefore no pool elements will be emitted in the prologue, therefore it
// is safe not to flush here.
//
// We assert that this holds at runtime by comparing the expected entry
// offset to the recorded ditto; if they are not the same then
// GenerateCallablePrologue flushed a pool before the prologue code,
// contrary to assumption.
masm.nopAlign(CodeAlignment);
}
// Generate unchecked call entry:
DebugOnly<uint32_t> expectedEntry = masm.currentOffset();
GenerateCallablePrologue(masm, &offsets->uncheckedCallEntry);
MOZ_ASSERT(expectedEntry == offsets->uncheckedCallEntry);
masm.bind(&functionBody);
#ifdef JS_CODEGEN_ARM64
// GenerateCallablePrologue creates a prologue which operates on the raw
// stack pointer and does not keep the PSP in sync. So we have to resync it
// here. But we can't use initPseudoStackPtr here because masm may not be
// set up to use it, depending on which compiler is in use. Hence do it
// "manually".
masm.Mov(PseudoStackPointer64, vixl::sp);
#endif
// See comment block in WasmCompile.cpp for an explanation of tiering.
if (tier1FuncIndex) {
Register scratch = ABINonArgReg0;
masm.loadPtr(Address(InstanceReg, Instance::offsetOfJumpTable()), scratch);
masm.jump(Address(scratch, *tier1FuncIndex * sizeof(uintptr_t)));
}
offsets->tierEntry = masm.currentOffset();
MOZ_ASSERT(masm.framePushed() == 0);
}
void wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed,
FuncOffsets* offsets) {
// Inverse of GenerateFunctionPrologue:
MOZ_ASSERT(masm.framePushed() == framePushed);
GenerateCallableEpilogue(masm, framePushed, ExitReason::None(),
&offsets->ret);
MOZ_ASSERT(masm.framePushed() == 0);
}
void wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed,
ExitReason reason, CallableOffsets* offsets) {
masm.haltingAlign(CodeAlignment);
GenerateCallablePrologue(masm, &offsets->begin);
// This frame will be exiting compiled code to C++ so record the fp and
// reason in the JitActivation so the frame iterators can unwind.
SetExitFP(masm, reason, ABINonArgReturnVolatileReg);
MOZ_ASSERT(masm.framePushed() == 0);
masm.reserveStack(framePushed);
}
void wasm::GenerateExitEpilogue(MacroAssembler& masm, unsigned framePushed,
ExitReason reason, CallableOffsets* offsets) {
// Inverse of GenerateExitPrologue:
MOZ_ASSERT(masm.framePushed() == framePushed);
GenerateCallableEpilogue(masm, framePushed, reason, &offsets->ret);
MOZ_ASSERT(masm.framePushed() == 0);
}
static void AssertNoWasmExitFPInJitExit(MacroAssembler& masm) {
// As a general stack invariant, if Activation::packedExitFP is tagged as
// wasm, it must point to a valid wasm::Frame. The JIT exit stub calls into
// JIT code and thus does not really exit; thus, when entering/leaving the
// JIT exit stub from/to normal wasm code, packedExitFP is not tagged as wasm.
#ifdef DEBUG
Register scratch = ABINonArgReturnReg0;
LoadActivation(masm, scratch);
Label ok;
masm.branchTestPtr(Assembler::Zero,
Address(scratch, JitActivation::offsetOfPackedExitFP()),
Imm32(ExitFPTag), &ok);
masm.breakpoint();
masm.bind(&ok);
#endif
}
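// Generate the JIT exit prologue. With JSPI enabled, first test
// Instance::onSuspendableStack and branch to the fallback code at
// fallbackOffset when the import call happens on a suspendable stack;
// otherwise fall through into the ordinary callable prologue.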
void wasm::GenerateJitExitPrologue(MacroAssembler& masm, unsigned framePushed,
uint32_t fallbackOffset,
ImportOffsets* offsets) {
masm.haltingAlign(CodeAlignment);
#ifdef ENABLE_WASM_JSPI
{
# if defined(JS_CODEGEN_ARM64)
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 2);
# endif
offsets->begin = masm.currentOffset();
Label fallback;
masm.bind(&fallback, BufferOffset(fallbackOffset));
const Register scratch = ABINonArgReg0;
masm.load32(Address(InstanceReg, Instance::offsetOfOnSuspendableStack()),
scratch);
masm.branchTest32(Assembler::NonZero, scratch, scratch, &fallback);
}
uint32_t entryOffset;
GenerateCallablePrologue(masm, &entryOffset);
offsets->afterFallbackCheck = entryOffset;
#else
GenerateCallablePrologue(masm, &offsets->begin);
offsets->afterFallbackCheck = offsets->begin;
#endif // ENABLE_WASM_JSPI
AssertNoWasmExitFPInJitExit(masm);
MOZ_ASSERT(masm.framePushed() == 0);
masm.reserveStack(framePushed);
}
void wasm::GenerateJitExitEpilogue(MacroAssembler& masm, unsigned framePushed,
CallableOffsets* offsets) {
// Inverse of GenerateJitExitPrologue:
MOZ_ASSERT(masm.framePushed() == framePushed);
AssertNoWasmExitFPInJitExit(masm);
GenerateCallableEpilogue(masm, framePushed, ExitReason::None(),
&offsets->ret);
MOZ_ASSERT(masm.framePushed() == 0);
}
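// Generate the JIT entry prologue: push the return address in an
// architecture-specific way (on ARM64, storing it at the offset expected
// by JitFrameLayout) so JS JIT iterators can traverse the frame.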
void wasm::GenerateJitEntryPrologue(MacroAssembler& masm,
CallableOffsets* offsets) {
masm.haltingAlign(CodeAlignment);
{
// Push the return address.
#if defined(JS_CODEGEN_ARM)
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 3);
offsets->begin = masm.currentOffset();
static_assert(BeforePushRetAddr == 0);
masm.push(lr);
#elif defined(JS_CODEGEN_MIPS64)
offsets->begin = masm.currentOffset();
masm.push(ra);
#elif defined(JS_CODEGEN_LOONG64)
offsets->begin = masm.currentOffset();
masm.push(ra);
#elif defined(JS_CODEGEN_RISCV64)
BlockTrampolinePoolScope block_trampoline_pool(&masm, 10);
offsets->begin = masm.currentOffset();
masm.push(ra);
#elif defined(JS_CODEGEN_ARM64)
{
AutoForbidPoolsAndNops afp(&masm,
/* number of instructions in scope = */ 4);
offsets->begin = masm.currentOffset();
static_assert(BeforePushRetAddr == 0);
// Subtract from SP first as SP must be aligned before offsetting.
masm.Sub(sp, sp, 16);
static_assert(JitFrameLayout::offsetOfReturnAddress() == 8);
masm.Str(ARMRegister(lr, 64), MemOperand(sp, 8));
}
#else
// The x86/x64 call instruction pushes the return address.
offsets->begin = masm.currentOffset();
#endif