Source code

Revision control

Other Tools

1
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2
* vim: set ts=8 sts=2 et sw=2 tw=80:
3
*
4
* Copyright 2017 Mozilla Foundation
5
*
6
* Licensed under the Apache License, Version 2.0 (the "License");
7
* you may not use this file except in compliance with the License.
8
* You may obtain a copy of the License at
9
*
11
*
12
* Unless required by applicable law or agreed to in writing, software
13
* distributed under the License is distributed on an "AS IS" BASIS,
14
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
* See the License for the specific language governing permissions and
16
* limitations under the License.
17
*/
18
19
#include "wasm/WasmBuiltins.h"
20
21
#include "mozilla/Atomics.h"
22
23
#include "fdlibm.h"
24
#include "jslibmath.h"
25
26
#include "jit/AtomicOperations.h"
27
#include "jit/InlinableNatives.h"
28
#include "jit/MacroAssembler.h"
29
#include "jit/Simulator.h"
30
#include "threading/Mutex.h"
31
#include "util/Memory.h"
32
#include "util/Poison.h"
33
#include "vm/BigIntType.h"
34
#include "wasm/WasmInstance.h"
35
#include "wasm/WasmStubs.h"
36
#include "wasm/WasmTypes.h"
37
38
#include "debugger/DebugAPI-inl.h"
39
#include "vm/Stack-inl.h"
40
41
using namespace js;
42
using namespace jit;
43
using namespace wasm;
44
45
using mozilla::HashGeneric;
46
using mozilla::IsNaN;
47
using mozilla::MakeEnumeratedRange;
48
49
// Size (in bytes) of the LIFO arena used for builtin thunk code.
// NOTE(review): the allocation site is outside this chunk -- confirm there.
static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;
50
51
// ============================================================================
52
// WebAssembly builtin C++ functions called from wasm code to implement internal
53
// wasm operations: type descriptions.
54
55
// Some abbreviations, for the sake of conciseness, used by the signature
// table below. _END is the sentinel terminating each argument-type list and
// _VOID marks a builtin with no result. The macros are #undef'd again right
// after the table.
#define _F64 MIRType::Double
#define _F32 MIRType::Float32
#define _I32 MIRType::Int32
#define _I64 MIRType::Int64
#define _PTR MIRType::Pointer
#define _RoN MIRType::RefOrNull
#define _VOID MIRType::None
#define _END MIRType::None
#define _Infallible FailureMode::Infallible
#define _FailOnNegI32 FailureMode::FailOnNegI32
#define _FailOnNullPtr FailureMode::FailOnNullPtr
#define _FailOnInvalidRef FailureMode::FailOnInvalidRef
68
69
namespace js {
namespace wasm {

// One signature descriptor per builtin callable from wasm code. Each entry
// records: the SymbolicAddress, the MIR return type, how failure is signalled
// (see FailureMode), the argument count, and the argument MIR types
// (terminated by the _END sentinel).

const SymbolicAddressSignature SASigSinD = {
    SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCosD = {
    SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTanD = {
    SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigASinD = {
    SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigACosD = {
    SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigATanD = {
    SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilD = {
    SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigCeilF = {
    SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigFloorD = {
    SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigFloorF = {
    SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigTruncD = {
    SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigTruncF = {
    SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigNearbyIntD = {
    SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigNearbyIntF = {
    SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}};
const SymbolicAddressSignature SASigExpD = {
    SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigLogD = {
    SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}};
const SymbolicAddressSignature SASigPowD = {
    SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigATan2D = {
    SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}};
const SymbolicAddressSignature SASigMemoryGrow = {
    SymbolicAddress::MemoryGrow, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemorySize = {
    SymbolicAddress::MemorySize, _I32, _Infallible, 1, {_PTR, _END}};
const SymbolicAddressSignature SASigWaitI32 = {
    SymbolicAddress::WaitI32, _I32, _FailOnNegI32, 4,
    {_PTR, _I32, _I32, _I64, _END}};
const SymbolicAddressSignature SASigWaitI64 = {
    SymbolicAddress::WaitI64, _I32, _FailOnNegI32, 4,
    {_PTR, _I32, _I64, _I64, _END}};
const SymbolicAddressSignature SASigWake = {
    SymbolicAddress::Wake, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigMemCopy = {
    SymbolicAddress::MemCopy, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemCopyShared = {
    SymbolicAddress::MemCopyShared, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigDataDrop = {
    SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigMemFill = {
    SymbolicAddress::MemFill, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemFillShared = {
    SymbolicAddress::MemFillShared, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _I32, _I32, _PTR, _END}};
const SymbolicAddressSignature SASigMemInit = {
    SymbolicAddress::MemInit, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableCopy = {
    SymbolicAddress::TableCopy, _VOID, _FailOnNegI32, 6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigElemDrop = {
    SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigTableFill = {
    SymbolicAddress::TableFill, _VOID, _FailOnNegI32, 5,
    {_PTR, _I32, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGet = {
    SymbolicAddress::TableGet, _RoN, _FailOnInvalidRef, 3,
    {_PTR, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableGrow = {
    SymbolicAddress::TableGrow, _I32, _Infallible, 4,
    {_PTR, _RoN, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableInit = {
    SymbolicAddress::TableInit, _VOID, _FailOnNegI32, 6,
    {_PTR, _I32, _I32, _I32, _I32, _I32, _END}};
const SymbolicAddressSignature SASigTableSet = {
    SymbolicAddress::TableSet, _VOID, _FailOnNegI32, 4,
    {_PTR, _I32, _RoN, _I32, _END}};
const SymbolicAddressSignature SASigTableSize = {
    SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigFuncRef = {
    SymbolicAddress::FuncRef, _RoN, _FailOnInvalidRef, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigPostBarrier = {
    SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigPostBarrierFiltering = {
    SymbolicAddress::PostBarrierFiltering, _VOID, _Infallible, 2,
    {_PTR, _PTR, _END}};
const SymbolicAddressSignature SASigStructNew = {
    SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _I32, _END}};
const SymbolicAddressSignature SASigStructNarrow = {
    SymbolicAddress::StructNarrow, _RoN, _Infallible, 4,
    {_PTR, _I32, _I32, _RoN, _END}};

}  // namespace wasm
}  // namespace js
215
216
// Undefine the abbreviation macros so they cannot leak into the rest of this
// file. Every macro #define'd above the signature table is undone here;
// _FailOnInvalidRef was previously missing from this list.
#undef _F64
#undef _F32
#undef _I32
#undef _I64
#undef _PTR
#undef _RoN
#undef _VOID
#undef _END
#undef _Infallible
#undef _FailOnNegI32
#undef _FailOnNullPtr
#undef _FailOnInvalidRef
227
228
#ifdef DEBUG
229
// Map a failure mode onto the ABI classification of the value that carries
// the failure signal. Infallible has no failure value and is not accepted.
ABIArgType ToABIType(FailureMode mode) {
  if (mode == FailureMode::FailOnNegI32) {
    return ArgType_Int32;
  }
  if (mode == FailureMode::FailOnNullPtr ||
      mode == FailureMode::FailOnInvalidRef) {
    return ArgType_General;
  }
  MOZ_CRASH("unexpected failure mode");
}
240
241
// Map a MIR type used in a builtin signature onto its ABI classification.
ABIArgType ToABIType(MIRType type) {
  if (type == MIRType::None || type == MIRType::Int32) {
    return ArgType_Int32;
  }
  if (type == MIRType::Int64) {
    return ArgType_Int64;
  }
  if (type == MIRType::Pointer || type == MIRType::RefOrNull) {
    return ArgType_General;
  }
  if (type == MIRType::Float32) {
    return ArgType_Float32;
  }
  if (type == MIRType::Double) {
    return ArgType_Float64;
  }
  MOZ_CRASH("unexpected type");
}
259
260
// Encode a whole builtin signature as an ABIFunctionType: the return type in
// the RetType field and each argument in successive ArgType fields.
ABIFunctionType ToABIType(const SymbolicAddressSignature& sig) {
  // A fallible builtin must signal failure through its return value, so the
  // failure mode's ABI type has to agree with the return ABI type.
  MOZ_ASSERT_IF(sig.failureMode != FailureMode::Infallible,
                ToABIType(sig.failureMode) == ToABIType(sig.retType));
  int encoded = ToABIType(sig.retType) << RetType_Shift;
  int shift = ArgType_Shift;
  for (int arg = 0; arg < sig.numArgs; arg++, shift += ArgType_Shift) {
    encoded |= ToABIType(sig.argTypes[arg]) << shift;
  }
  return ABIFunctionType(encoded);
}
269
#endif
270
271
// ============================================================================
272
// WebAssembly builtin C++ functions called from wasm code to implement internal
273
// wasm operations: implementations.
274
275
#if defined(JS_CODEGEN_ARM)
276
extern "C" {
277
278
extern MOZ_EXPORT int64_t __aeabi_idivmod(int, int);
279
280
extern MOZ_EXPORT int64_t __aeabi_uidivmod(int, int);
281
}
282
#endif
283
284
// This utility function can only be called for builtins that are called
285
// directly from wasm code.
286
static JitActivation* CallingActivation() {
  // Builtins called directly from wasm run under a JitActivation whose wasm
  // exit FP has been set, which is what the assertion checks.
  JSContext* cx = TlsContext.get();
  Activation* activation = cx->activation();
  MOZ_ASSERT(activation->asJit()->hasWasmExitFP());
  return activation->asJit();
}
291
292
// Called from the debug trap stub at function entry/leave and at
// breakpoint/single-step sites. Dispatches to the DebugAPI hooks and returns
// false when an error has been reported and the activation must be unwound.
static bool WasmHandleDebugTrap() {
  JitActivation* activation = CallingActivation();
  JSContext* cx = activation->cx();
  Frame* fp = activation->wasmExitFP();
  Instance* instance = fp->tls->instance;
  const Code& code = instance->code();
  MOZ_ASSERT(code.metadata().debugEnabled);

  // The debug trap stub is the innermost frame. Its return address is the
  // actual trap site.
  const CallSite* site = code.lookupCallSite(fp->returnAddress);
  MOZ_ASSERT(site);

  // Advance to the actual trapping frame.
  fp = fp->callerFP;
  DebugFrame* debugFrame = DebugFrame::from(fp);

  if (site->kind() == CallSite::EnterFrame) {
    if (!instance->debug().enterFrameTrapsEnabled()) {
      return true;
    }
    // Mark the frame as observed by the debugger before firing the hook.
    debugFrame->setIsDebuggee();
    debugFrame->observe(cx);
    ResumeMode mode = DebugAPI::onEnterFrame(cx, debugFrame);
    if (mode == ResumeMode::Return) {
      // Ignoring forced return (ResumeMode::Return) -- changing code execution
      // order is not yet implemented in the wasm baseline.
      // TODO properly handle ResumeMode::Return and resume wasm execution.
      JS_ReportErrorASCII(cx, "Unexpected resumption value from onEnterFrame");
      return false;
    }
    return mode == ResumeMode::Continue;
  }
  if (site->kind() == CallSite::LeaveFrame) {
    // Capture the return value before the leave hook can observe it.
    if (!debugFrame->updateReturnJSValue()) {
      return false;
    }
    bool ok = DebugAPI::onLeaveFrame(cx, debugFrame, nullptr, true);
    debugFrame->leave(cx);
    return ok;
  }

  // Neither entry nor leave: this is a breakpoint and/or single-step site.
  DebugState& debug = instance->debug();
  MOZ_ASSERT(debug.hasBreakpointTrapAtOffset(site->lineOrBytecode()));
  if (debug.stepModeEnabled(debugFrame->funcIndex())) {
    RootedValue result(cx, UndefinedValue());
    ResumeMode mode = DebugAPI::onSingleStep(cx, &result);
    if (mode == ResumeMode::Return) {
      // TODO properly handle ResumeMode::Return.
      JS_ReportErrorASCII(cx, "Unexpected resumption value from onSingleStep");
      return false;
    }
    if (mode != ResumeMode::Continue) {
      return false;
    }
  }
  if (debug.hasBreakpointSite(site->lineOrBytecode())) {
    RootedValue result(cx, UndefinedValue());
    ResumeMode mode = DebugAPI::onTrap(cx, &result);
    if (mode == ResumeMode::Return) {
      // TODO properly handle ResumeMode::Return.
      JS_ReportErrorASCII(
          cx, "Unexpected resumption value from breakpoint handler");
      return false;
    }
    if (mode != ResumeMode::Continue) {
      return false;
    }
  }
  return true;
}
363
364
// Unwind the entire activation in response to a thrown exception. This function
365
// is responsible for notifying the debugger of each unwound frame. The return
366
// value is the new stack address which the calling stub will set to the sp
367
// register before executing a return instruction.
368
369
// Precondition (asserted below): |iter| belongs to the calling activation and
// is positioned on a frame (not done). Notifies the debugger of each unwound
// frame and returns the unwound address-of-return-address for the throw stub.
void* wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter) {
  // WasmFrameIter iterates down wasm frames in the activation starting at
  // JitActivation::wasmExitFP(). Calling WasmFrameIter::startUnwinding pops
  // JitActivation::wasmExitFP() once each time WasmFrameIter is incremented,
  // ultimately leaving exit FP null when the WasmFrameIter is done(). This
  // is necessary to prevent a DebugFrame from being observed again after we
  // just called onLeaveFrame (which would lead to the frame being re-added
  // to the map of live frames, right as it becomes trash).

  MOZ_ASSERT(CallingActivation() == iter.activation());
  MOZ_ASSERT(!iter.done());
  iter.setUnwind(WasmFrameIter::Unwind::True);

  // Live wasm code on the stack is kept alive (in TraceJitActivation) by
  // marking the instance of every wasm::Frame found by WasmFrameIter.
  // However, as explained above, we're popping frames while iterating which
  // means that a GC during this loop could collect the code of frames whose
  // code is still on the stack. This is actually mostly fine: as soon as we
  // return to the throw stub, the entire stack will be popped as a whole,
  // returning to the C++ caller. However, we must keep the throw stub alive
  // itself which is owned by the innermost instance.
  RootedWasmInstanceObject keepAlive(cx, iter.instance()->object());

  for (; !iter.done(); ++iter) {
    // Wasm code can enter same-compartment realms, so reset cx->realm to
    // this frame's realm.
    cx->setRealmForJitExceptionHandler(iter.instance()->realm());

    if (!iter.debugEnabled()) {
      continue;
    }

    DebugFrame* frame = iter.debugFrame();
    frame->clearReturnJSValue();

    // Assume ResumeMode::Terminate if no exception is pending --
    // no onExceptionUnwind handlers must be fired.
    if (cx->isExceptionPending()) {
      ResumeMode mode = DebugAPI::onExceptionUnwind(cx, frame);
      if (mode == ResumeMode::Return) {
        // Unexpected trap return -- raising error since throw recovery
        // is not yet implemented in the wasm baseline.
        // TODO properly handle ResumeMode::Return and resume wasm execution.
        JS_ReportErrorASCII(
            cx, "Unexpected resumption value from onExceptionUnwind");
      }
    }

    bool ok = DebugAPI::onLeaveFrame(cx, frame, nullptr, false);
    if (ok) {
      // Unexpected success from the handler onLeaveFrame -- raising error
      // since throw recovery is not yet implemented in the wasm baseline.
      // TODO properly handle success and resume wasm execution.
      JS_ReportErrorASCII(cx, "Unexpected success from onLeaveFrame");
    }
    frame->leave(cx);
  }

  MOZ_ASSERT(!cx->activation()->asJit()->isWasmTrapping(),
             "unwinding clears the trapping state");

  return iter.unwoundAddressOfReturnAddress();
}
432
433
static void* WasmHandleThrow() {
434
JitActivation* activation = CallingActivation();
435
JSContext* cx = activation->cx();
436
WasmFrameIter iter(activation);
437
return HandleThrow(cx, iter);
438
}
439
440
// Unconditionally returns nullptr per calling convention of HandleTrap().
441
// Report |errorNumber| on |cx| and unconditionally return nullptr, per the
// calling convention of HandleTrap(). (Also drops a stray semicolon that
// followed the function's closing brace.)
static void* ReportError(JSContext* cx, unsigned errorNumber) {
  JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, errorNumber);
  return nullptr;
}
445
446
// Has the same return-value convention as HandleTrap().
447
static void* CheckInterrupt(JSContext* cx, JitActivation* activation) {
  // Clear the pending-interrupt state before servicing it.
  ResetInterruptState(cx);

  if (!CheckForInterrupt(cx)) {
    // Interrupt handling raised an error: tell the stub to unwind.
    return nullptr;
  }

  // Resume at the PC recorded when the trap was taken, and clear the
  // activation's trapping state.
  void* resumePC = activation->wasmTrapData().resumePC;
  activation->finishWasmTrap();
  return resumePC;
}
458
459
// The calling convention between this function and its caller in the stub
460
// generated by GenerateTrapExit() is:
461
// - return nullptr if the stub should jump to the throw stub to unwind
462
// the activation;
463
// - return the (non-null) resumePC that should be jumped if execution should
464
// resume after the trap.
465
static void* WasmHandleTrap() {
  JitActivation* activation = CallingActivation();
  JSContext* cx = activation->cx();

  // Most traps simply report a JS error; ReportError returns nullptr, which
  // tells the trap stub to unwind the activation.
  switch (activation->wasmTrapData().trap) {
    case Trap::Unreachable:
      return ReportError(cx, JSMSG_WASM_UNREACHABLE);
    case Trap::IntegerOverflow:
      return ReportError(cx, JSMSG_WASM_INTEGER_OVERFLOW);
    case Trap::InvalidConversionToInteger:
      return ReportError(cx, JSMSG_WASM_INVALID_CONVERSION);
    case Trap::IntegerDivideByZero:
      return ReportError(cx, JSMSG_WASM_INT_DIVIDE_BY_ZERO);
    case Trap::IndirectCallToNull:
      return ReportError(cx, JSMSG_WASM_IND_CALL_TO_NULL);
    case Trap::IndirectCallBadSig:
      return ReportError(cx, JSMSG_WASM_IND_CALL_BAD_SIG);
    case Trap::NullPointerDereference:
      return ReportError(cx, JSMSG_WASM_DEREF_NULL);
    case Trap::OutOfBounds:
      return ReportError(cx, JSMSG_WASM_OUT_OF_BOUNDS);
    case Trap::UnalignedAccess:
      return ReportError(cx, JSMSG_WASM_UNALIGNED_ACCESS);
    case Trap::CheckInterrupt:
      return CheckInterrupt(cx, activation);
    case Trap::StackOverflow:
      // TlsData::setInterrupt() causes a fake stack overflow. Since
      // TlsData::setInterrupt() is called racily, it's possible for a real
      // stack overflow to trap, followed by a racy call to setInterrupt().
      // Thus, we must check for a real stack overflow first before we
      // CheckInterrupt() and possibly resume execution.
      if (!CheckRecursionLimit(cx)) {
        return nullptr;
      }
      if (activation->wasmExitFP()->tls->isInterrupted()) {
        return CheckInterrupt(cx, activation);
      }
      return ReportError(cx, JSMSG_OVER_RECURSED);
    case Trap::ThrowReported:
      // Error was already reported under another name.
      return nullptr;
    case Trap::Limit:
      break;
  }

  MOZ_CRASH("unexpected trap");
}
512
513
static void WasmReportInt64JSCall() {
514
JSContext* cx = TlsContext.get();
515
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
516
JSMSG_WASM_BAD_I64_TYPE);
517
}
518
519
// Coerce *rawVal to an Int32 value in place; on failure the slot is
// overwritten with a poison value and false is returned.
static int32_t CoerceInPlace_ToInt32(Value* rawVal) {
  JSContext* cx = TlsContext.get();

  RootedValue boxed(cx, *rawVal);
  int32_t result;
  if (ToInt32(cx, boxed, &result)) {
    *rawVal = Int32Value(result);
    return true;
  }

  *rawVal = PoisonedObjectValue(0x42);
  return false;
}
532
533
#ifdef ENABLE_WASM_BIGINT
// Coerce *rawVal to a BigInt value in place; on failure the slot is
// overwritten with a poison value and false is returned.
static int32_t CoerceInPlace_ToBigInt(Value* rawVal) {
  JSContext* cx = TlsContext.get();

  RootedValue boxed(cx, *rawVal);
  if (BigInt* result = ToBigInt(cx, boxed)) {
    *rawVal = BigIntValue(result);
    return true;
  }

  *rawVal = PoisonedObjectValue(0x43);
  return false;
}
#endif
548
549
// Coerce *rawVal to a Double value in place; on failure the slot is
// overwritten with a poison value and false is returned.
static int32_t CoerceInPlace_ToNumber(Value* rawVal) {
  JSContext* cx = TlsContext.get();

  RootedValue boxed(cx, *rawVal);
  double number;
  if (ToNumber(cx, boxed, &number)) {
    *rawVal = DoubleValue(number);
    return true;
  }

  *rawVal = PoisonedObjectValue(0x42);
  return false;
}
562
563
// Box a JS value as an anyref suitable for compiled wasm code; returns
// nullptr if boxing fails.
static void* BoxValue_Anyref(Value* rawVal) {
  JSContext* cx = TlsContext.get();
  RootedValue boxed(cx, *rawVal);
  RootedAnyRef anyref(cx, AnyRef::null());
  if (BoxAnyRef(cx, boxed, &anyref)) {
    return anyref.get().forCompiledCode();
  }
  return nullptr;
}
572
573
// Coerce, in place, each JS value in |argv| to the corresponding wasm
// argument type of the function export |funcExportIndex| of the instance
// hanging off |tlsData|. Returns false (with an exception pending on the
// context) if any coercion fails.
static int32_t CoerceInPlace_JitEntry(int funcExportIndex, TlsData* tlsData,
                                      Value* argv) {
  JSContext* cx = CallingActivation()->cx();

  const Code& code = tlsData->instance->code();
  const FuncExport& fe =
      code.metadata(code.stableTier()).funcExports[funcExportIndex];

  for (size_t i = 0; i < fe.funcType().args().length(); i++) {
    HandleValue arg = HandleValue::fromMarkedLocation(&argv[i]);
    switch (fe.funcType().args()[i].code()) {
      case ValType::I32: {
        int32_t i32;
        if (!ToInt32(cx, arg, &i32)) {
          return false;
        }
        argv[i] = Int32Value(i32);
        break;
      }
      case ValType::F32:
      case ValType::F64: {
        double dbl;
        if (!ToNumber(cx, arg, &dbl)) {
          return false;
        }
        // No need to convert double-to-float for f32, it's done inline
        // in the wasm stub later.
        argv[i] = DoubleValue(dbl);
        break;
      }
      case ValType::AnyRef: {
        // Leave Object and Null alone, we will unbox inline. All we need to do
        // is convert other values to an Object representation.
        if (!arg.isObjectOrNull()) {
          RootedAnyRef result(cx, AnyRef::null());
          if (!BoxAnyRef(cx, arg, &result)) {
            return false;
          }
          argv[i].setObject(*result.get().asJSObject());
        }
        break;
      }
#ifdef ENABLE_WASM_BIGINT
      case ValType::I64: {
        // In this case we store a BigInt value as there is no value type
        // corresponding directly to an I64. The conversion to I64 happens
        // in the JIT entry stub.
        BigInt* bigint = ToBigInt(cx, arg);
        if (!bigint) {
          return false;
        }
        argv[i] = BigIntValue(bigint);
        break;
      }
#endif
      default: {
        MOZ_CRASH("unexpected input argument in CoerceInPlace_JitEntry");
      }
    }
  }

  return true;
}
636
637
#ifdef ENABLE_WASM_BIGINT
// Allocate a BigInt without GC, corresponds to the similar VMFunction.
// Returns nullptr when the no-GC allocation cannot be satisfied.
static BigInt* AllocateBigInt() {
  return js::Allocate<BigInt, NoGC>(TlsContext.get());
}
#endif
645
646
static int64_t DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
647
uint32_t y_lo) {
648
int64_t x = ((uint64_t)x_hi << 32) + x_lo;
649
int64_t y = ((uint64_t)y_hi << 32) + y_lo;
650
MOZ_ASSERT(x != INT64_MIN || y != -1);
651
MOZ_ASSERT(y != 0);
652
return x / y;
653
}
654
655
static int64_t UDivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
656
uint32_t y_lo) {
657
uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
658
uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
659
MOZ_ASSERT(y != 0);
660
return x / y;
661
}
662
663
static int64_t ModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
664
uint32_t y_lo) {
665
int64_t x = ((uint64_t)x_hi << 32) + x_lo;
666
int64_t y = ((uint64_t)y_hi << 32) + y_lo;
667
MOZ_ASSERT(x != INT64_MIN || y != -1);
668
MOZ_ASSERT(y != 0);
669
return x % y;
670
}
671
672
static int64_t UModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi,
673
uint32_t y_lo) {
674
uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
675
uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
676
MOZ_ASSERT(y != 0);
677
return x % y;
678
}
679
680
static int64_t TruncateDoubleToInt64(double input) {
681
// Note: INT64_MAX is not representable in double. It is actually
682
// INT64_MAX + 1. Therefore also sending the failure value.
683
if (input >= double(INT64_MAX) || input < double(INT64_MIN) || IsNaN(input)) {
684
return 0x8000000000000000;
685
}
686
return int64_t(input);
687
}
688
689
static uint64_t TruncateDoubleToUint64(double input) {
690
// Note: UINT64_MAX is not representable in double. It is actually
691
// UINT64_MAX + 1. Therefore also sending the failure value.
692
if (input >= double(UINT64_MAX) || input <= -1.0 || IsNaN(input)) {
693
return 0x8000000000000000;
694
}
695
return uint64_t(input);
696
}
697
698
static int64_t SaturatingTruncateDoubleToInt64(double input) {
699
// Handle in-range values (except INT64_MIN).
700
if (fabs(input) < -double(INT64_MIN)) {
701
return int64_t(input);
702
}
703
// Handle NaN.
704
if (IsNaN(input)) {
705
return 0;
706
}
707
// Handle positive overflow.
708
if (input > 0) {
709
return INT64_MAX;
710
}
711
// Handle negative overflow.
712
return INT64_MIN;
713
}
714
715
static uint64_t SaturatingTruncateDoubleToUint64(double input) {
  // The in-range interval is (-1, 2^64); note -double(INT64_MIN) * 2.0
  // computes 2^64 exactly.
  if (input > -1.0 && input < -double(INT64_MIN) * 2.0) {
    return uint64_t(input);
  }
  // Positive overflow saturates to UINT64_MAX; NaN and negative values
  // (both of which fail the >= comparison) saturate to 0.
  return input >= -double(INT64_MIN) * 2.0 ? UINT64_MAX : 0;
}
727
728
// Join the sign-carrying high half and the unsigned low half into an i64 and
// convert to double.
static double Int64ToDouble(int32_t x_hi, uint32_t x_lo) {
  uint64_t hiBits = uint64_t(x_hi) << 32;
  int64_t joined = int64_t(hiBits) + int64_t(x_lo);
  return double(joined);
}
732
733
// Join the sign-carrying high half and the unsigned low half into an i64 and
// round to float.
static float Int64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  uint64_t hiBits = uint64_t(x_hi) << 32;
  int64_t joined = int64_t(hiBits) + int64_t(x_lo);
  return float(joined);
}
737
738
// Join the two halves into a u64 (the high half arrives as int32_t but only
// its low 32 bits survive the shift) and convert to double.
static double Uint64ToDouble(int32_t x_hi, uint32_t x_lo) {
  uint64_t joined = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
  return double(joined);
}
742
743
// Join the two halves into a u64 (the high half arrives as int32_t but only
// its low 32 bits survive the shift) and round to float.
static float Uint64ToFloat32(int32_t x_hi, uint32_t x_lo) {
  uint64_t joined = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
  return float(joined);
}
747
748
// Convert a C++ function pointer into the plain code address handed to wasm
// thunks. Under the simulator the call must instead go through a redirection
// that knows the callee's ABI, which is why |abiType| is threaded through.
template <class F>
static inline void* FuncCast(F* funcPtr, ABIFunctionType abiType) {
  void* pf = JS_FUNC_TO_DATA_PTR(void*, funcPtr);
#ifdef JS_SIMULATOR
  pf = Simulator::RedirectNativeFunction(pf, abiType);
#endif
  return pf;
}
756
757
#ifdef WASM_CODEGEN_DEBUG
758
void wasm::PrintI32(int32_t val) { fprintf(stderr, "i32(%d) ", val); }
759
760
void wasm::PrintPtr(uint8_t* val) { fprintf(stderr, "ptr(%p) ", val); }
761
762
void wasm::PrintF32(float val) { fprintf(stderr, "f32(%f) ", val); }
763
764
void wasm::PrintF64(double val) { fprintf(stderr, "f64(%lf) ", val); }
765
766
void wasm::PrintText(const char* out) { fprintf(stderr, "%s", out); }
767
#endif
768
769
void* wasm::AddressOf(SymbolicAddress imm, ABIFunctionType* abiType) {
770
switch (imm) {
771
case SymbolicAddress::HandleDebugTrap:
772
*abiType = Args_General0;
773
return FuncCast(WasmHandleDebugTrap, *abiType);
774
case SymbolicAddress::HandleThrow:
775
*abiType = Args_General0;
776
return FuncCast(WasmHandleThrow, *abiType);
777
case SymbolicAddress::HandleTrap:
778
*abiType = Args_General0;
779
return FuncCast(WasmHandleTrap, *abiType);
780
case SymbolicAddress::ReportInt64JSCall:
781
*abiType = Args_General0;
782
return FuncCast(WasmReportInt64JSCall, *abiType);
783
case SymbolicAddress::CallImport_Void:
784
*abiType = MakeABIFunctionType(
785
ArgType_Int32,
786
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
787
return FuncCast(Instance::callImport_void, *abiType);
788
case SymbolicAddress::CallImport_I32:
789
*abiType = MakeABIFunctionType(
790
ArgType_Int32,
791
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
792
return FuncCast(Instance::callImport_i32, *abiType);
793
case SymbolicAddress::CallImport_I64:
794
*abiType = MakeABIFunctionType(
795
ArgType_Int32,
796
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
797
return FuncCast(Instance::callImport_i64, *abiType);
798
case SymbolicAddress::CallImport_F64:
799
*abiType = MakeABIFunctionType(
800
ArgType_Int32,
801
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
802
return FuncCast(Instance::callImport_f64, *abiType);
803
case SymbolicAddress::CallImport_FuncRef:
804
*abiType = MakeABIFunctionType(
805
ArgType_Int32,
806
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
807
return FuncCast(Instance::callImport_funcref, *abiType);
808
case SymbolicAddress::CallImport_AnyRef:
809
*abiType = MakeABIFunctionType(
810
ArgType_Int32,
811
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
812
return FuncCast(Instance::callImport_anyref, *abiType);
813
case SymbolicAddress::CoerceInPlace_ToInt32:
814
*abiType = Args_General1;
815
return FuncCast(CoerceInPlace_ToInt32, *abiType);
816
#ifdef ENABLE_WASM_BIGINT
817
case SymbolicAddress::CoerceInPlace_ToBigInt:
818
*abiType = Args_General1;
819
return FuncCast(CoerceInPlace_ToBigInt, *abiType);
820
#endif
821
case SymbolicAddress::CoerceInPlace_ToNumber:
822
*abiType = Args_General1;
823
return FuncCast(CoerceInPlace_ToNumber, *abiType);
824
case SymbolicAddress::CoerceInPlace_JitEntry:
825
*abiType = Args_General3;
826
return FuncCast(CoerceInPlace_JitEntry, *abiType);
827
case SymbolicAddress::ToInt32:
828
*abiType = Args_Int_Double;
829
return FuncCast<int32_t(double)>(JS::ToInt32, *abiType);
830
case SymbolicAddress::BoxValue_Anyref:
831
*abiType = Args_General1;
832
return FuncCast(BoxValue_Anyref, *abiType);
833
#ifdef ENABLE_WASM_BIGINT
834
case SymbolicAddress::AllocateBigInt:
835
*abiType = Args_General0;
836
return FuncCast(AllocateBigInt, *abiType);
837
#endif
838
case SymbolicAddress::DivI64:
839
*abiType = Args_General4;
840
return FuncCast(DivI64, *abiType);
841
case SymbolicAddress::UDivI64:
842
*abiType = Args_General4;
843
return FuncCast(UDivI64, *abiType);
844
case SymbolicAddress::ModI64:
845
*abiType = Args_General4;
846
return FuncCast(ModI64, *abiType);
847
case SymbolicAddress::UModI64:
848
*abiType = Args_General4;
849
return FuncCast(UModI64, *abiType);
850
case SymbolicAddress::TruncateDoubleToUint64:
851
*abiType = Args_Int64_Double;
852
return FuncCast(TruncateDoubleToUint64, *abiType);
853
case SymbolicAddress::TruncateDoubleToInt64:
854
*abiType = Args_Int64_Double;
855
return FuncCast(TruncateDoubleToInt64, *abiType);
856
case SymbolicAddress::SaturatingTruncateDoubleToUint64:
857
*abiType = Args_Int64_Double;
858
return FuncCast(SaturatingTruncateDoubleToUint64, *abiType);
859
case SymbolicAddress::SaturatingTruncateDoubleToInt64:
860
*abiType = Args_Int64_Double;
861
return FuncCast(SaturatingTruncateDoubleToInt64, *abiType);
862
case SymbolicAddress::Uint64ToDouble:
863
*abiType = Args_Double_IntInt;
864
return FuncCast(Uint64ToDouble, *abiType);
865
case SymbolicAddress::Uint64ToFloat32:
866
*abiType = Args_Float32_IntInt;
867
return FuncCast(Uint64ToFloat32, *abiType);
868
case SymbolicAddress::Int64ToDouble:
869
*abiType = Args_Double_IntInt;
870
return FuncCast(Int64ToDouble, *abiType);
871
case SymbolicAddress::Int64ToFloat32:
872
*abiType = Args_Float32_IntInt;
873
return FuncCast(Int64ToFloat32, *abiType);
874
#if defined(JS_CODEGEN_ARM)
875
case SymbolicAddress::aeabi_idivmod:
876
*abiType = Args_General2;
877
return FuncCast(__aeabi_idivmod, *abiType);
878
case SymbolicAddress::aeabi_uidivmod:
879
*abiType = Args_General2;
880
return FuncCast(__aeabi_uidivmod, *abiType);
881
#endif
882
case SymbolicAddress::ModD:
883
*abiType = Args_Double_DoubleDouble;
884
return FuncCast(NumberMod, *abiType);
885
case SymbolicAddress::SinD:
886
*abiType = Args_Double_Double;
887
return FuncCast<double(double)>(sin, *abiType);
888
case SymbolicAddress::CosD:
889
*abiType = Args_Double_Double;
890
return FuncCast<double(double)>(cos, *abiType);
891
case SymbolicAddress::TanD:
892
*abiType = Args_Double_Double;
893
return FuncCast<double(double)>(tan, *abiType);
894
case SymbolicAddress::ASinD:
895
*abiType = Args_Double_Double;
896
return FuncCast<double(double)>(fdlibm::asin, *abiType);
897
case SymbolicAddress::ACosD:
898
*abiType = Args_Double_Double;
899
return FuncCast<double(double)>(fdlibm::acos, *abiType);
900
case SymbolicAddress::ATanD:
901
*abiType = Args_Double_Double;
902
return FuncCast<double(double)>(fdlibm::atan, *abiType);
903
case SymbolicAddress::CeilD:
904
*abiType = Args_Double_Double;
905
return FuncCast<double(double)>(fdlibm::ceil, *abiType);
906
case SymbolicAddress::CeilF:
907
*abiType = Args_Float32_Float32;
908
return FuncCast<float(float)>(fdlibm::ceilf, *abiType);
909
case SymbolicAddress::FloorD:
910
*abiType = Args_Double_Double;
911
return FuncCast<double(double)>(fdlibm::floor, *abiType);
912
case SymbolicAddress::FloorF:
913
*abiType = Args_Float32_Float32;
914
return FuncCast<float(float)>(fdlibm::floorf, *abiType);
915
case SymbolicAddress::TruncD:
916
*abiType = Args_Double_Double;
917
return FuncCast<double(double)>(fdlibm::trunc, *abiType);
918
case SymbolicAddress::TruncF:
919
*abiType = Args_Float32_Float32;
920
return FuncCast<float(float)>(fdlibm::truncf, *abiType);
921
case SymbolicAddress::NearbyIntD:
922
*abiType = Args_Double_Double;
923
return FuncCast<double(double)>(fdlibm::nearbyint, *abiType);
924
case SymbolicAddress::NearbyIntF:
925
*abiType = Args_Float32_Float32;
926
return FuncCast<float(float)>(fdlibm::nearbyintf, *abiType);
927
case SymbolicAddress::ExpD:
928
*abiType = Args_Double_Double;
929
return FuncCast<double(double)>(fdlibm::exp, *abiType);
930
case SymbolicAddress::LogD:
931
*abiType = Args_Double_Double;
932
return FuncCast<double(double)>(fdlibm::log, *abiType);
933
case SymbolicAddress::PowD:
934
*abiType = Args_Double_DoubleDouble;
935
return FuncCast(ecmaPow, *abiType);
936
case SymbolicAddress::ATan2D:
937
*abiType = Args_Double_DoubleDouble;
938
return FuncCast(ecmaAtan2, *abiType);
939
940
case SymbolicAddress::MemoryGrow:
941
*abiType =
942
MakeABIFunctionType(ArgType_Int32, {ArgType_General, ArgType_Int32});
943
MOZ_ASSERT(*abiType == ToABIType(SASigMemoryGrow));
944
return FuncCast(Instance::memoryGrow_i32, *abiType);
945
case SymbolicAddress::MemorySize:
946
*abiType = MakeABIFunctionType(ArgType_Int32, {ArgType_General});
947
MOZ_ASSERT(*abiType == ToABIType(SASigMemorySize));
948
return FuncCast(Instance::memorySize_i32, *abiType);
949
case SymbolicAddress::WaitI32:
950
*abiType = MakeABIFunctionType(
951
ArgType_Int32,
952
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_Int64});
953
MOZ_ASSERT(*abiType == ToABIType(SASigWaitI32));
954
return FuncCast(Instance::wait_i32, *abiType);
955
case SymbolicAddress::WaitI64:
956
*abiType = MakeABIFunctionType(
957
ArgType_Int32,
958
{ArgType_General, ArgType_Int32, ArgType_Int64, ArgType_Int64});
959
MOZ_ASSERT(*abiType == ToABIType(SASigWaitI64));
960
return FuncCast(Instance::wait_i64, *abiType);
961
case SymbolicAddress::Wake:
962
*abiType = MakeABIFunctionType(
963
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32});
964
MOZ_ASSERT(*abiType == ToABIType(SASigWake));
965
return FuncCast(Instance::wake, *abiType);
966
case SymbolicAddress::MemCopy:
967
*abiType = MakeABIFunctionType(
968
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
969
ArgType_Int32, ArgType_General});
970
MOZ_ASSERT(*abiType == ToABIType(SASigMemCopy));
971
return FuncCast(Instance::memCopy, *abiType);
972
case SymbolicAddress::MemCopyShared:
973
*abiType = MakeABIFunctionType(
974
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
975
ArgType_Int32, ArgType_General});
976
MOZ_ASSERT(*abiType == ToABIType(SASigMemCopyShared));
977
return FuncCast(Instance::memCopyShared, *abiType);
978
case SymbolicAddress::DataDrop:
979
*abiType =
980
MakeABIFunctionType(ArgType_Int32, {ArgType_General, ArgType_Int32});
981
MOZ_ASSERT(*abiType == ToABIType(SASigDataDrop));
982
return FuncCast(Instance::dataDrop, *abiType);
983
case SymbolicAddress::MemFill:
984
*abiType = MakeABIFunctionType(
985
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
986
ArgType_Int32, ArgType_General});
987
MOZ_ASSERT(*abiType == ToABIType(SASigMemFill));
988
return FuncCast(Instance::memFill, *abiType);
989
case SymbolicAddress::MemFillShared:
990
*abiType = MakeABIFunctionType(
991
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
992
ArgType_Int32, ArgType_General});
993
MOZ_ASSERT(*abiType == ToABIType(SASigMemFillShared));
994
return FuncCast(Instance::memFillShared, *abiType);
995
case SymbolicAddress::MemInit:
996
*abiType = MakeABIFunctionType(
997
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
998
ArgType_Int32, ArgType_Int32});
999
MOZ_ASSERT(*abiType == ToABIType(SASigMemInit));
1000
return FuncCast(Instance::memInit, *abiType);
1001
case SymbolicAddress::TableCopy:
1002
*abiType = MakeABIFunctionType(
1003
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
1004
ArgType_Int32, ArgType_Int32, ArgType_Int32});
1005
MOZ_ASSERT(*abiType == ToABIType(SASigTableCopy));
1006
return FuncCast(Instance::tableCopy, *abiType);
1007
case SymbolicAddress::ElemDrop:
1008
*abiType =
1009
MakeABIFunctionType(ArgType_Int32, {ArgType_General, ArgType_Int32});
1010
MOZ_ASSERT(*abiType == ToABIType(SASigElemDrop));
1011
return FuncCast(Instance::elemDrop, *abiType);
1012
case SymbolicAddress::TableFill:
1013
*abiType = MakeABIFunctionType(
1014
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_General,
1015
ArgType_Int32, ArgType_Int32});
1016
MOZ_ASSERT(*abiType == ToABIType(SASigTableFill));
1017
return FuncCast(Instance::tableFill, *abiType);
1018
case SymbolicAddress::TableInit:
1019
*abiType = MakeABIFunctionType(
1020
ArgType_Int32, {ArgType_General, ArgType_Int32, ArgType_Int32,
1021
ArgType_Int32, ArgType_Int32, ArgType_Int32});
1022
MOZ_ASSERT(*abiType == ToABIType(SASigTableInit));
1023
return FuncCast(Instance::tableInit, *abiType);
1024
case SymbolicAddress::TableGet:
1025
*abiType = MakeABIFunctionType(
1026
ArgType_General, {ArgType_General, ArgType_Int32, ArgType_Int32});
1027
MOZ_ASSERT(*abiType == ToABIType(SASigTableGet));
1028
return FuncCast(Instance::tableGet, *abiType);
1029
case SymbolicAddress::TableGrow:
1030
*abiType = MakeABIFunctionType(
1031
ArgType_Int32,
1032
{ArgType_General, ArgType_General, ArgType_Int32, ArgType_Int32});
1033
MOZ_ASSERT(*abiType == ToABIType(SASigTableGrow));
1034
return FuncCast(Instance::tableGrow, *abiType);
1035
case SymbolicAddress::TableSet:
1036
*abiType = MakeABIFunctionType(
1037
ArgType_Int32,
1038
{ArgType_General, ArgType_Int32, ArgType_General, ArgType_Int32});
1039
MOZ_ASSERT(*abiType == ToABIType(SASigTableSet));
1040
return FuncCast(Instance::tableSet, *abiType);
1041
case SymbolicAddress::TableSize:
1042
*abiType =
1043
MakeABIFunctionType(ArgType_Int32, {ArgType_General, ArgType_Int32});
1044
MOZ_ASSERT(*abiType == ToABIType(SASigTableSize));
1045
return FuncCast(Instance::tableSize, *abiType);
1046
case SymbolicAddress::FuncRef:
1047
*abiType = MakeABIFunctionType(ArgType_General,
1048
{ArgType_General, ArgType_Int32});
1049
MOZ_ASSERT(*abiType == ToABIType(SASigFuncRef));
1050
return FuncCast(Instance::funcRef, *abiType);
1051
case SymbolicAddress::PostBarrier:
1052
*abiType = MakeABIFunctionType(ArgType_Int32,
1053
{ArgType_General, ArgType_General});
1054
MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrier));
1055
return FuncCast(Instance::postBarrier, *abiType);
1056
case SymbolicAddress::PostBarrierFiltering:
1057
*abiType = MakeABIFunctionType(ArgType_Int32,
1058
{ArgType_General, ArgType_General});
1059
MOZ_ASSERT(*abiType == ToABIType(SASigPostBarrierFiltering));
1060
return FuncCast(Instance::postBarrierFiltering, *abiType);
1061
case SymbolicAddress::StructNew:
1062
*abiType = MakeABIFunctionType(ArgType_General,
1063
{ArgType_General, ArgType_Int32});
1064
MOZ_ASSERT(*abiType == ToABIType(SASigStructNew));
1065
return FuncCast(Instance::structNew, *abiType);
1066
case SymbolicAddress::StructNarrow:
1067
*abiType = MakeABIFunctionType(
1068
ArgType_General,
1069
{ArgType_General, ArgType_Int32, ArgType_Int32, ArgType_General});
1070
MOZ_ASSERT(*abiType == ToABIType(SASigStructNarrow));
1071
return FuncCast(Instance::structNarrow, *abiType);
1072
1073
#if defined(JS_CODEGEN_MIPS32)
1074
case SymbolicAddress::js_jit_gAtomic64Lock:
1075
return &js::jit::gAtomic64Lock;
1076
#endif
1077
#ifdef WASM_CODEGEN_DEBUG
1078
case SymbolicAddress::PrintI32:
1079
*abiType = Args_General1;
1080
return FuncCast(PrintI32, *abiType);
1081
case SymbolicAddress::PrintPtr:
1082
*abiType = Args_General1;
1083
return FuncCast(PrintPtr, *abiType);
1084
case SymbolicAddress::PrintF32:
1085
*abiType = Args_Int_Float32;
1086
return FuncCast(PrintF32, *abiType);
1087
case SymbolicAddress::PrintF64:
1088
*abiType = Args_Int_Double;
1089
return FuncCast(PrintF64, *abiType);
1090
case SymbolicAddress::PrintText:
1091
*abiType = Args_General1;
1092
return FuncCast(PrintText, *abiType);
1093
#endif
1094
case SymbolicAddress::Limit:
1095
break;
1096
}
1097
1098
MOZ_CRASH("Bad SymbolicAddress");
1099
}
1100
1101
// Returns whether calls from wasm code to `sym` must be routed through a
// builtin thunk (generated in EnsureBuiltinThunksInitialized) instead of
// calling the C++ function directly.
bool wasm::NeedsBuiltinThunk(SymbolicAddress sym) {
  // Some functions don't need a thunk, because they already have one or
  // they don't have frame info.
  switch (sym) {
    // Stubs below already perform their own exit bookkeeping, so a thunk
    // would be redundant.
    case SymbolicAddress::HandleDebugTrap:  // GenerateDebugTrapStub
    case SymbolicAddress::HandleThrow:      // GenerateThrowStub
    case SymbolicAddress::HandleTrap:       // GenerateTrapExit
    case SymbolicAddress::CallImport_Void:  // GenerateImportInterpExit
    case SymbolicAddress::CallImport_I32:
    case SymbolicAddress::CallImport_I64:
    case SymbolicAddress::CallImport_F64:
    case SymbolicAddress::CallImport_FuncRef:
    case SymbolicAddress::CallImport_AnyRef:
    case SymbolicAddress::CoerceInPlace_ToInt32:  // GenerateImportJitExit
    case SymbolicAddress::CoerceInPlace_ToNumber:
#if defined(ENABLE_WASM_BIGINT)
    case SymbolicAddress::CoerceInPlace_ToBigInt:
#endif
    case SymbolicAddress::BoxValue_Anyref:
#if defined(JS_CODEGEN_MIPS32)
    // Not a function but a data address (the MIPS32 atomic-64 lock).
    case SymbolicAddress::js_jit_gAtomic64Lock:
#endif
#ifdef WASM_CODEGEN_DEBUG
    case SymbolicAddress::PrintI32:
    case SymbolicAddress::PrintPtr:
    case SymbolicAddress::PrintF32:
    case SymbolicAddress::PrintF64:
    case SymbolicAddress::PrintText:  // Used only in stubs
#endif
      return false;
    // Everything else is an ordinary C++ callee and needs the exit
    // prologue/epilogue a thunk provides.
    case SymbolicAddress::ToInt32:
    case SymbolicAddress::DivI64:
    case SymbolicAddress::UDivI64:
    case SymbolicAddress::ModI64:
    case SymbolicAddress::UModI64:
    case SymbolicAddress::TruncateDoubleToUint64:
    case SymbolicAddress::TruncateDoubleToInt64:
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
    case SymbolicAddress::Uint64ToDouble:
    case SymbolicAddress::Uint64ToFloat32:
    case SymbolicAddress::Int64ToDouble:
    case SymbolicAddress::Int64ToFloat32:
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
    case SymbolicAddress::aeabi_uidivmod:
#endif
#if defined(ENABLE_WASM_BIGINT)
    case SymbolicAddress::AllocateBigInt:
#endif
    case SymbolicAddress::ModD:
    case SymbolicAddress::SinD:
    case SymbolicAddress::CosD:
    case SymbolicAddress::TanD:
    case SymbolicAddress::ASinD:
    case SymbolicAddress::ACosD:
    case SymbolicAddress::ATanD:
    case SymbolicAddress::CeilD:
    case SymbolicAddress::CeilF:
    case SymbolicAddress::FloorD:
    case SymbolicAddress::FloorF:
    case SymbolicAddress::TruncD:
    case SymbolicAddress::TruncF:
    case SymbolicAddress::NearbyIntD:
    case SymbolicAddress::NearbyIntF:
    case SymbolicAddress::ExpD:
    case SymbolicAddress::LogD:
    case SymbolicAddress::PowD:
    case SymbolicAddress::ATan2D:
    case SymbolicAddress::MemoryGrow:
    case SymbolicAddress::MemorySize:
    case SymbolicAddress::WaitI32:
    case SymbolicAddress::WaitI64:
    case SymbolicAddress::Wake:
    case SymbolicAddress::CoerceInPlace_JitEntry:
    case SymbolicAddress::ReportInt64JSCall:
    case SymbolicAddress::MemCopy:
    case SymbolicAddress::MemCopyShared:
    case SymbolicAddress::DataDrop:
    case SymbolicAddress::MemFill:
    case SymbolicAddress::MemFillShared:
    case SymbolicAddress::MemInit:
    case SymbolicAddress::TableCopy:
    case SymbolicAddress::ElemDrop:
    case SymbolicAddress::TableFill:
    case SymbolicAddress::TableGet:
    case SymbolicAddress::TableGrow:
    case SymbolicAddress::TableInit:
    case SymbolicAddress::TableSet:
    case SymbolicAddress::TableSize:
    case SymbolicAddress::FuncRef:
    case SymbolicAddress::PostBarrier:
    case SymbolicAddress::PostBarrierFiltering:
    case SymbolicAddress::StructNew:
    case SymbolicAddress::StructNarrow:
      return true;
    // Limit is a sentinel, not a real symbolic address.
    case SymbolicAddress::Limit:
      break;
  }

  MOZ_CRASH("unexpected symbolic address");
}
1203
1204
// ============================================================================
// JS builtins that can be imported by wasm modules and called efficiently
// through thunks. These thunks conform to the internal wasm ABI and thus can
// be patched in for import calls. Calling a JS builtin through a thunk is much
// faster than calling out through the generic import call trampoline which
// will end up in the slowest C++ Instance::callImport path.
//
// Each JS builtin can have several overloads. These must all be enumerated in
// PopulateTypedNatives() so they can be included in the process-wide thunk
// set.

// X-macro lists of the Math natives that get builtin thunks. Each entry pairs
// the C++ implementation's name with its InlinableNative identifier.
#define FOR_EACH_UNARY_NATIVE(_) \
  _(math_sin, MathSin)           \
  _(math_tan, MathTan)           \
  _(math_cos, MathCos)           \
  _(math_exp, MathExp)           \
  _(math_log, MathLog)           \
  _(math_asin, MathASin)         \
  _(math_atan, MathATan)         \
  _(math_acos, MathACos)         \
  _(math_log10, MathLog10)       \
  _(math_log2, MathLog2)         \
  _(math_log1p, MathLog1P)       \
  _(math_expm1, MathExpM1)       \
  _(math_sinh, MathSinH)         \
  _(math_tanh, MathTanH)         \
  _(math_cosh, MathCosH)         \
  _(math_asinh, MathASinH)       \
  _(math_atanh, MathATanH)       \
  _(math_acosh, MathACosH)       \
  _(math_sign, MathSign)         \
  _(math_trunc, MathTrunc)       \
  _(math_cbrt, MathCbrt)

#define FOR_EACH_BINARY_NATIVE(_) \
  _(ecmaAtan2, MathATan2)         \
  _(ecmaHypot, MathHypot)         \
  _(ecmaPow, MathPow)

// Generate float32 overloads that simply round-trip through the double
// implementation (the second macro argument is unused here).
#define DEFINE_UNARY_FLOAT_WRAPPER(func, _) \
  static float func##_impl_f32(float x) {   \
    return float(func##_impl(double(x)));   \
  }

#define DEFINE_BINARY_FLOAT_WRAPPER(func, _) \
  static float func##_f32(float x, float y) { \
    return float(func(double(x), double(y))); \
  }

FOR_EACH_UNARY_NATIVE(DEFINE_UNARY_FLOAT_WRAPPER)
FOR_EACH_BINARY_NATIVE(DEFINE_BINARY_FLOAT_WRAPPER)

#undef DEFINE_UNARY_FLOAT_WRAPPER
#undef DEFINE_BINARY_FLOAT_WRAPPER
1257
1258
struct TypedNative {
1259
InlinableNative native;
1260
ABIFunctionType abiType;
1261
1262
TypedNative(InlinableNative native, ABIFunctionType abiType)
1263
: native(native), abiType(abiType) {}
1264
1265
typedef TypedNative Lookup;
1266
static HashNumber hash(const Lookup& l) {
1267
return HashGeneric(uint32_t(l.native), uint32_t(l.abiType));
1268
}
1269
static bool match(const TypedNative& lhs, const Lookup& rhs) {
1270
return lhs.native == rhs.native && lhs.abiType == rhs.abiType;
1271
}
1272
};
1273
1274
using TypedNativeToFuncPtrMap =
1275
HashMap<TypedNative, void*, TypedNative, SystemAllocPolicy>;
1276
1277
// Fills `typedNatives` with every (native, ABI-signature) overload that should
// receive a builtin thunk, mapped to its C++ implementation. Returns false on
// OOM (putNew failure).
static bool PopulateTypedNatives(TypedNativeToFuncPtrMap* typedNatives) {
#define ADD_OVERLOAD(funcName, native, abiType)                            \
  if (!typedNatives->putNew(TypedNative(InlinableNative::native, abiType), \
                            FuncCast(funcName, abiType)))                  \
    return false;

// Each unary native gets a double and a float32 overload; the float32 one is
// the `_impl_f32` wrapper generated by DEFINE_UNARY_FLOAT_WRAPPER above.
#define ADD_UNARY_OVERLOADS(funcName, native)               \
  ADD_OVERLOAD(funcName##_impl, native, Args_Double_Double) \
  ADD_OVERLOAD(funcName##_impl_f32, native, Args_Float32_Float32)

#define ADD_BINARY_OVERLOADS(funcName, native)             \
  ADD_OVERLOAD(funcName, native, Args_Double_DoubleDouble) \
  ADD_OVERLOAD(funcName##_f32, native, Args_Float32_Float32Float32)

  FOR_EACH_UNARY_NATIVE(ADD_UNARY_OVERLOADS)
  FOR_EACH_BINARY_NATIVE(ADD_BINARY_OVERLOADS)

#undef ADD_UNARY_OVERLOADS
#undef ADD_BINARY_OVERLOADS

  return true;
}

#undef FOR_EACH_UNARY_NATIVE
#undef FOR_EACH_BINARY_NATIVE
1302
1303
// ============================================================================
// Process-wide builtin thunk set
//
// Thunks are inserted between wasm calls and the C++ callee and achieve two
// things:
// - bridging the few differences between the internal wasm ABI and the
//   external native ABI (viz. float returns on x86 and soft-fp ARM)
// - executing an exit prologue/epilogue which in turn allows any profiling
//   iterator to see the full stack up to the wasm operation that called out
//
// Thunks are created for two kinds of C++ callees, enumerated above:
// - SymbolicAddress: for statically compiled calls in the wasm module
// - Imported JS builtins: optimized calls to imports
//
// All thunks are created in one batch, but only when the first wasm module is
// compiled in the process. Thunks are kept alive until the JS engine shuts
// down in the process. No thunks are created at runtime after initialization.
// This simple scheme allows several simplifications:
// - no reference counting to keep thunks alive
// - no problems toggling W^X permissions which, because of multiple executing
//   threads, would require each thunk allocation to be on its own page
// The cost for creating all thunks at once is relatively low since all thunks
// fit within the smallest executable quanta (64k).

using TypedNativeToCodeRangeMap =
1328
HashMap<TypedNative, uint32_t, TypedNative, SystemAllocPolicy>;
1329
1330
using SymbolicAddressToCodeRangeArray =
1331
EnumeratedArray<SymbolicAddress, SymbolicAddress::Limit, uint32_t>;
1332
1333
struct BuiltinThunks {
1334
uint8_t* codeBase;
1335
size_t codeSize;
1336
CodeRangeVector codeRanges;
1337
TypedNativeToCodeRangeMap typedNativeToCodeRange;
1338
SymbolicAddressToCodeRangeArray symbolicAddressToCodeRange;
1339
1340
BuiltinThunks() : codeBase(nullptr), codeSize(0) {}
1341
1342
~BuiltinThunks() {
1343
if (codeBase) {
1344
DeallocateExecutableMemory(codeBase, codeSize);
1345
}
1346
}
1347
};
1348
1349
// Serializes one-time creation of the thunk set in
// EnsureBuiltinThunksInitialized.
Mutex initBuiltinThunks(mutexid::WasmInitBuiltinThunks);
// The process-wide thunk set; set once under initBuiltinThunks and read
// lock-free thereafter, cleared only by ReleaseBuiltinThunks at shutdown.
Atomic<const BuiltinThunks*> builtinThunks;
1351
1352
// Creates the process-wide builtin thunk set on first call (subsequent calls
// are cheap no-ops). Returns false on OOM; a failed attempt leaves
// builtinThunks null so a later call can retry.
bool wasm::EnsureBuiltinThunksInitialized() {
  LockGuard<Mutex> guard(initBuiltinThunks);
  if (builtinThunks) {
    // Already initialized by an earlier call.
    return true;
  }

  auto thunks = MakeUnique<BuiltinThunks>();
  if (!thunks) {
    return false;
  }

  LifoAlloc lifo(BUILTIN_THUNK_LIFO_SIZE);
  TempAllocator tempAlloc(&lifo);
  WasmMacroAssembler masm(tempAlloc);

  // Pass 1: emit one thunk per SymbolicAddress that needs one, recording its
  // CodeRange index (UINT32_MAX marks addresses that are called directly).
  for (auto sym : MakeEnumeratedRange(SymbolicAddress::Limit)) {
    if (!NeedsBuiltinThunk(sym)) {
      thunks->symbolicAddressToCodeRange[sym] = UINT32_MAX;
      continue;
    }

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    thunks->symbolicAddressToCodeRange[sym] = codeRangeIndex;

    ABIFunctionType abiType;
    void* funcPtr = AddressOf(sym, &abiType);

    ExitReason exitReason(sym);

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  // Pass 2: emit a thunk for every typed JS-builtin overload.
  TypedNativeToFuncPtrMap typedNatives;
  if (!PopulateTypedNatives(&typedNatives)) {
    return false;
  }

  for (TypedNativeToFuncPtrMap::Range r = typedNatives.all(); !r.empty();
       r.popFront()) {
    TypedNative typedNative = r.front().key();

    uint32_t codeRangeIndex = thunks->codeRanges.length();
    if (!thunks->typedNativeToCodeRange.putNew(typedNative, codeRangeIndex)) {
      return false;
    }

    ABIFunctionType abiType = typedNative.abiType;
    void* funcPtr = r.front().value();

    ExitReason exitReason = ExitReason::Fixed::BuiltinNative;

    CallableOffsets offsets;
    if (!GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr, &offsets)) {
      return false;
    }
    if (!thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offsets)) {
      return false;
    }
  }

  masm.finish();
  if (masm.oom()) {
    return false;
  }

  // Copy the generated code into writable executable memory, padded up to the
  // allocation granularity; the padding is zero-filled below.
  size_t allocSize = AlignBytes(masm.bytesNeeded(), ExecutableCodePageSize);

  thunks->codeSize = allocSize;
  thunks->codeBase = (uint8_t*)AllocateExecutableMemory(
      allocSize, ProtectionSetting::Writable, MemCheckKind::MakeUndefined);
  if (!thunks->codeBase) {
    return false;
  }

  masm.executableCopy(thunks->codeBase);
  memset(thunks->codeBase + masm.bytesNeeded(), 0,
         allocSize - masm.bytesNeeded());

  masm.processCodeLabels(thunks->codeBase);
  PatchDebugSymbolicAccesses(thunks->codeBase, masm);

  // Thunks never contain wasm-level calls or traps of their own.
  MOZ_ASSERT(masm.callSites().empty());
  MOZ_ASSERT(masm.callSiteTargets().empty());
  MOZ_ASSERT(masm.trapSites().empty());

  // Flip W -> X; the code is never written again after this point.
  if (!ExecutableAllocator::makeExecutableAndFlushICache(thunks->codeBase,
                                                         thunks->codeSize)) {
    return false;
  }

  // Publish. Readers access builtinThunks without the lock once it is set.
  builtinThunks = thunks.release();
  return true;
}
1451
1452
void wasm::ReleaseBuiltinThunks() {
1453
if (builtinThunks) {
1454
const BuiltinThunks* ptr = builtinThunks;
1455
js_delete(const_cast<BuiltinThunks*>(ptr));
1456
builtinThunks = nullptr;
1457
}
1458
}
1459
1460
// Returns the address wasm code should call for `sym`: the thunk entry when
// one exists, otherwise the raw C++ function. Requires the thunk set to have
// been initialized.
void* wasm::SymbolicAddressTarget(SymbolicAddress sym) {
  MOZ_ASSERT(builtinThunks);

  ABIFunctionType abiType;
  void* rawFuncPtr = AddressOf(sym, &abiType);

  // No thunk needed: the C++ function is called directly.
  if (!NeedsBuiltinThunk(sym)) {
    return rawFuncPtr;
  }

  const BuiltinThunks* thunks = builtinThunks;
  const CodeRange& codeRange =
      thunks->codeRanges[thunks->symbolicAddressToCodeRange[sym]];
  return thunks->codeBase + codeRange.begin();
}
1474
1475
// Packs a wasm FuncType into an ABIFunctionType encoding, or Nothing() if the
// signature cannot be expressed (no return, a non-float type anywhere, or too
// many arguments to fit in the 32-bit encoding).
static Maybe<ABIFunctionType> ToBuiltinABIFunctionType(
    const FuncType& funcType) {
  const ValTypeVector& args = funcType.args();
  if (!funcType.ret()) {
    return Nothing();
  }

  // Encode the return type in the low field.
  uint32_t encoded;
  if (funcType.ret().ref().code() == ValType::F32) {
    encoded = ArgType_Float32 << RetType_Shift;
  } else if (funcType.ret().ref().code() == ValType::F64) {
    encoded = ArgType_Float64 << RetType_Shift;
  } else {
    return Nothing();
  }

  // The encoding holds (1 + #args) fields of ArgType_Shift bits each.
  if ((args.length() + 1) > (sizeof(uint32_t) * 8 / ArgType_Shift)) {
    return Nothing();
  }

  // Encode each argument in successive fields above the return type.
  for (size_t i = 0; i < args.length(); i++) {
    uint32_t shift = ArgType_Shift * (i + 1);
    if (args[i].code() == ValType::F32) {
      encoded |= (ArgType_Float32 << shift);
    } else if (args[i].code() == ValType::F64) {
      encoded |= (ArgType_Float64 << shift);
    } else {
      return Nothing();
    }
  }

  return Some(ABIFunctionType(encoded));
}
1513
1514
// Returns the thunk entry point for calling JS builtin `f` with wasm signature
// `funcType`, or nullptr if no matching thunk exists. Requires the thunk set
// to have been initialized.
void* wasm::MaybeGetBuiltinThunk(JSFunction* f, const FuncType& funcType) {
  MOZ_ASSERT(builtinThunks);

  // Only inlinable natives have thunks.
  bool inlinableNative = f->isNative() && f->hasJitInfo() &&
                         f->jitInfo()->type() == JSJitInfo::InlinableNative;
  if (!inlinableNative) {
    return nullptr;
  }

  // The wasm signature must be expressible as a builtin ABI type.
  Maybe<ABIFunctionType> abiType = ToBuiltinABIFunctionType(funcType);
  if (abiType.isNothing()) {
    return nullptr;
  }

  const BuiltinThunks& thunks = *builtinThunks;
  TypedNative lookup(f->jitInfo()->inlinableNative, *abiType);
  auto entry = thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(lookup);
  if (!entry) {
    return nullptr;
  }

  return thunks.codeBase + thunks.codeRanges[entry->value()].begin();
}
1537
1538
bool wasm::LookupBuiltinThunk(void* pc, const CodeRange** codeRange,
1539
uint8_t** codeBase) {
1540
if (!builtinThunks) {
1541
return false;
1542
}
1543
1544
const BuiltinThunks& thunks = *builtinThunks;
1545
if (pc < thunks.codeBase || pc >= thunks.codeBase + thunks.codeSize) {
1546
return false;
1547
}
1548
1549
*codeBase = thunks.codeBase;
1550
1551
CodeRange::OffsetInCode target((uint8_t*)pc - thunks.codeBase);
1552
*codeRange = LookupInSorted(thunks.codeRanges, target);
1553
1554
return !!*codeRange;
1555
}