Source code

Revision control

Other Tools

1
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2
* vim: set ts=8 sts=2 et sw=2 tw=80:
3
*
4
* Copyright 2015 Mozilla Foundation
5
*
6
* Licensed under the Apache License, Version 2.0 (the "License");
7
* you may not use this file except in compliance with the License.
8
* You may obtain a copy of the License at
9
*
11
*
12
* Unless required by applicable law or agreed to in writing, software
13
* distributed under the License is distributed on an "AS IS" BASIS,
14
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
* See the License for the specific language governing permissions and
16
* limitations under the License.
17
*/
18
19
#ifndef wasm_types_h
20
#define wasm_types_h
21
22
#include "mozilla/Alignment.h"
23
#include "mozilla/ArrayUtils.h"
24
#include "mozilla/Atomics.h"
25
#include "mozilla/BinarySearch.h"
26
#include "mozilla/EnumeratedArray.h"
27
#include "mozilla/HashFunctions.h"
28
#include "mozilla/Maybe.h"
29
#include "mozilla/RefPtr.h"
30
#include "mozilla/Unused.h"
31
32
#include "NamespaceImports.h"
33
34
#include "ds/LifoAlloc.h"
35
#include "jit/IonTypes.h"
36
#include "js/RefCounted.h"
37
#include "js/UniquePtr.h"
38
#include "js/Utility.h"
39
#include "js/Vector.h"
40
#include "vm/JSFunction.h"
41
#include "vm/MallocProvider.h"
42
#include "vm/NativeObject.h"
43
#include "wasm/WasmConstants.h"
44
#include "wasm/WasmUtility.h"
45
46
namespace js {
47
48
namespace jit {
49
class JitScript;
50
enum class RoundingMode;
51
} // namespace jit
52
53
// This is a widespread header, so lets keep out the core wasm impl types.
54
55
typedef GCVector<JSFunction*, 0, SystemAllocPolicy> JSFunctionVector;
56
57
class WasmMemoryObject;
58
typedef GCPtr<WasmMemoryObject*> GCPtrWasmMemoryObject;
59
typedef Rooted<WasmMemoryObject*> RootedWasmMemoryObject;
60
typedef Handle<WasmMemoryObject*> HandleWasmMemoryObject;
61
typedef MutableHandle<WasmMemoryObject*> MutableHandleWasmMemoryObject;
62
63
class WasmModuleObject;
64
typedef Rooted<WasmModuleObject*> RootedWasmModuleObject;
65
typedef Handle<WasmModuleObject*> HandleWasmModuleObject;
66
typedef MutableHandle<WasmModuleObject*> MutableHandleWasmModuleObject;
67
68
class WasmInstanceObject;
69
typedef GCVector<WasmInstanceObject*> WasmInstanceObjectVector;
70
typedef Rooted<WasmInstanceObject*> RootedWasmInstanceObject;
71
typedef Handle<WasmInstanceObject*> HandleWasmInstanceObject;
72
typedef MutableHandle<WasmInstanceObject*> MutableHandleWasmInstanceObject;
73
74
class WasmTableObject;
75
typedef GCVector<WasmTableObject*, 0, SystemAllocPolicy> WasmTableObjectVector;
76
typedef Rooted<WasmTableObject*> RootedWasmTableObject;
77
typedef Handle<WasmTableObject*> HandleWasmTableObject;
78
typedef MutableHandle<WasmTableObject*> MutableHandleWasmTableObject;
79
80
class WasmGlobalObject;
81
typedef GCVector<WasmGlobalObject*, 0, SystemAllocPolicy>
82
WasmGlobalObjectVector;
83
typedef Rooted<WasmGlobalObject*> RootedWasmGlobalObject;
84
85
class StructTypeDescr;
86
typedef GCVector<HeapPtr<StructTypeDescr*>, 0, SystemAllocPolicy>
87
StructTypeDescrVector;
88
89
namespace wasm {
90
91
using mozilla::ArrayEqual;
92
using mozilla::Atomic;
93
using mozilla::DebugOnly;
94
using mozilla::EnumeratedArray;
95
using mozilla::MallocSizeOf;
96
using mozilla::Maybe;
97
using mozilla::Nothing;
98
using mozilla::PodCopy;
99
using mozilla::PodZero;
100
using mozilla::Some;
101
using mozilla::Unused;
102
103
class Code;
104
class DebugState;
105
class GeneratedSourceMap;
106
class Memory;
107
class Module;
108
class Instance;
109
class Table;
110
111
// Uint32Vector has initial size 8 on the basis that the dominant use cases
112
// (line numbers and control stacks) tend to have a small but nonzero number
113
// of elements.
114
typedef Vector<uint32_t, 8, SystemAllocPolicy> Uint32Vector;
115
116
typedef Vector<uint8_t, 0, SystemAllocPolicy> Bytes;
117
typedef UniquePtr<Bytes> UniqueBytes;
118
typedef UniquePtr<const Bytes> UniqueConstBytes;
119
typedef Vector<char, 0, SystemAllocPolicy> UTF8Bytes;
120
typedef Vector<Instance*, 0, SystemAllocPolicy> InstanceVector;
121
typedef Vector<UniqueChars, 0, SystemAllocPolicy> UniqueCharsVector;
122
123
// To call Vector::podResizeToFit, a type must specialize mozilla::IsPod
// which is pretty verbose to do within js::wasm, so factor that process out
// into a macro.
//
// NOTE: the macro temporarily closes the js::wasm namespaces (the
// specialization of mozilla::IsPod must appear inside namespace mozilla),
// then reopens them before declaring the vector alias — so it may only be
// used at js::wasm namespace scope.

#define WASM_DECLARE_POD_VECTOR(Type, VectorName) \
  }                                               \
  }                                               \
  namespace mozilla {                             \
  template <>                                     \
  struct IsPod<js::wasm::Type> : TrueType {};     \
  }                                               \
  namespace js {                                  \
  namespace wasm {                                \
  typedef Vector<Type, 0, SystemAllocPolicy> VectorName;
137
138
// A wasm Module and everything it contains must support serialization and
// deserialization. Some data can be simply copied as raw bytes and,
// as a convention, is stored in an inline CacheablePod struct. Everything else
// should implement the below methods which are called recursively by the
// containing Module.

#define WASM_DECLARE_SERIALIZABLE(Type)              \
  size_t serializedSize() const;                     \
  uint8_t* serialize(uint8_t* cursor) const;         \
  const uint8_t* deserialize(const uint8_t* cursor); \
  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
149
150
// A RefPtr<T> that additionally declares the WASM_DECLARE_SERIALIZABLE
// methods (their definitions live with the rest of the serialization code).
template <class T>
struct SerializableRefPtr : RefPtr<T> {
  using RefPtr<T>::operator=;

  SerializableRefPtr() = default;

  // Implicitly convertible from anything a RefPtr<T> can be built from.
  template <class U>
  MOZ_IMPLICIT SerializableRefPtr(U&& u) : RefPtr<T>(std::forward<U>(u)) {}

  WASM_DECLARE_SERIALIZABLE(SerializableRefPtr)
};
161
162
// This reusable base class factors out the logic for a resource that is shared
// by multiple instances/modules but should only be counted once when computing
// about:memory stats.

template <class T>
struct ShareableBase : AtomicRefCounted<T> {
  using SeenSet = HashSet<const T*, DefaultHasher<const T*>, SystemAllocPolicy>;

  // Returns this object's full footprint the first time it is called with a
  // given |seen| set; later calls with the same set return 0 so that a shared
  // resource is counted exactly once.
  size_t sizeOfIncludingThisIfNotSeen(MallocSizeOf mallocSizeOf,
                                      SeenSet* seen) const {
    const T* self = static_cast<const T*>(this);
    typename SeenSet::AddPtr p = seen->lookupForAdd(self);
    if (p) {
      return 0;
    }
    // OOM while recording |self| is deliberately ignored: the only
    // consequence is possible double counting in memory stats.
    bool ok = seen->add(p, self);
    (void)ok;  // oh well
    return mallocSizeOf(self) + self->sizeOfExcludingThis(mallocSizeOf);
  }
};
182
183
// ShareableBytes is a reference-counted Vector of bytes.
184
185
struct ShareableBytes : ShareableBase<ShareableBytes> {
186
// Vector is 'final', so instead make Vector a member and add boilerplate.
187
Bytes bytes;
188
189
ShareableBytes() = default;
190
explicit ShareableBytes(Bytes&& bytes) : bytes(std::move(bytes)) {}
191
size_t sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
192
return bytes.sizeOfExcludingThis(mallocSizeOf);
193
}
194
const uint8_t* begin() const { return bytes.begin(); }
195
const uint8_t* end() const { return bytes.end(); }
196
size_t length() const { return bytes.length(); }
197
bool append(const uint8_t* start, uint32_t len) {
198
return bytes.append(start, len);
199
}
200
};
201
202
// Reference-counted handles to a ShareableBytes, mutable and immutable.
using MutableBytes = RefPtr<ShareableBytes>;
using SharedBytes = RefPtr<const ShareableBytes>;
204
205
// A PackedTypeCode represents a TypeCode paired with a refTypeIndex (valid only
// for TypeCode::Ref). PackedTypeCode is guaranteed to be POD.
//
// PackedTypeCode is an enum class, as opposed to the more natural
// struct-with-bitfields, because bitfields would make it non-POD.
//
// DO NOT use PackedTypeCode as a cast. ALWAYS go via PackTypeCode().

enum class PackedTypeCode : uint32_t {};

static_assert(std::is_pod<PackedTypeCode>::value,
              "must be POD to be simply serialized/deserialized");

// Sentinel field values: the low 8 bits of a PackedTypeCode hold the type
// code, the remaining bits hold the ref type index (see PackTypeCode).
const uint32_t NoTypeCode = 0xFF;          // Only use these
const uint32_t NoRefTypeIndex = 0x3FFFFF;  // with PackedTypeCode
220
221
// Pack a TypeCode plus ref-type index into one word.  |refTypeIndex| must be
// NoRefTypeIndex unless |tc| is TypeCode::Ref (asserted in debug builds).
static inline PackedTypeCode PackTypeCode(TypeCode tc, uint32_t refTypeIndex) {
  MOZ_ASSERT(uint32_t(tc) <= 0xFF);
  MOZ_ASSERT_IF(tc != TypeCode::Ref, refTypeIndex == NoRefTypeIndex);
  MOZ_ASSERT_IF(tc == TypeCode::Ref, refTypeIndex <= MaxTypes);
  // A PackedTypeCode should be representable in a single word, so in the
  // smallest case, 32 bits. However sometimes 2 bits of the word may be taken
  // by a pointer tag; for that reason, limit to 30 bits; and then there's the
  // 8-bit typecode, so 22 bits left for the type index.
  static_assert(MaxTypes < (1 << (30 - 8)), "enough bits");
  return PackedTypeCode((refTypeIndex << 8) | uint32_t(tc));
}
232
233
// Pack a non-Ref TypeCode; the index field gets the NoRefTypeIndex sentinel.
static inline PackedTypeCode PackTypeCode(TypeCode tc) {
  return PackTypeCode(tc, NoRefTypeIndex);
}
236
237
// A sentinel PackedTypeCode for which IsValid() returns false.
static inline PackedTypeCode InvalidPackedTypeCode() {
  return PackedTypeCode(NoTypeCode);
}
240
241
// Rebuild a PackedTypeCode from its raw bits (inverse of
// PackedTypeCodeToBits); going through PackTypeCode re-asserts the packing
// invariants in debug builds.
static inline PackedTypeCode PackedTypeCodeFromBits(uint32_t bits) {
  TypeCode tc = TypeCode(bits & 0xFF);
  uint32_t refTypeIndex = bits >> 8;
  return PackTypeCode(tc, refTypeIndex);
}
244
245
// A PackedTypeCode is valid unless its type-code field is the NoTypeCode
// sentinel.
static inline bool IsValid(PackedTypeCode ptc) {
  uint32_t typeCodeField = uint32_t(ptc) & 0xFF;
  return typeCodeField != NoTypeCode;
}
248
249
// Expose the raw bit representation, e.g. for serialization.
static inline uint32_t PackedTypeCodeToBits(PackedTypeCode ptc) {
  return uint32_t(ptc);
}
252
253
// Extract the TypeCode stored in the low 8 bits.
static inline TypeCode UnpackTypeCodeType(PackedTypeCode ptc) {
  MOZ_ASSERT(IsValid(ptc));
  return TypeCode(uint32_t(ptc) & 255);
}
257
258
// Extract the ref-type index; only meaningful (and only allowed, in debug
// builds) when the packed code is TypeCode::Ref.
static inline uint32_t UnpackTypeCodeIndex(PackedTypeCode ptc) {
  MOZ_ASSERT(UnpackTypeCodeType(ptc) == TypeCode::Ref);
  return uint32_t(ptc) >> 8;
}
262
263
// True for every reference type: (ref T), anyref, funcref, and nullref.
static inline bool IsReferenceType(PackedTypeCode ptc) {
  switch (UnpackTypeCodeType(ptc)) {
    case TypeCode::Ref:
    case TypeCode::AnyRef:
    case TypeCode::FuncRef:
    case TypeCode::NullRef:
      return true;
    default:
      return false;
  }
}
268
269
// The ValType represents the storage type of a WebAssembly location, whether
// parameter, local, or global.

class ValType {
  PackedTypeCode tc_;

#ifdef DEBUG
  // Debug-only: does tc_ hold one of the codes enumerated in Code below?
  bool isValidCode() {
    switch (UnpackTypeCodeType(tc_)) {
      case TypeCode::I32:
      case TypeCode::I64:
      case TypeCode::F32:
      case TypeCode::F64:
      case TypeCode::AnyRef:
      case TypeCode::FuncRef:
      case TypeCode::NullRef:
      case TypeCode::Ref:
        return true;
      default:
        return false;
    }
  }
#endif

 public:
  // The value-type subset of TypeCode, re-exported so clients can write
  // ValType::I32 etc.
  enum Code {
    I32 = uint8_t(TypeCode::I32),
    I64 = uint8_t(TypeCode::I64),
    F32 = uint8_t(TypeCode::F32),
    F64 = uint8_t(TypeCode::F64),

    AnyRef = uint8_t(TypeCode::AnyRef),
    FuncRef = uint8_t(TypeCode::FuncRef),
    NullRef = uint8_t(TypeCode::NullRef),
    Ref = uint8_t(TypeCode::Ref),
  };

  // Default-constructed ValTypes are invalid: isValid() returns false.
  ValType() : tc_(InvalidPackedTypeCode()) {}

  // Implicit, so plain ValType::I32 etc. convert to ValType.  Not usable for
  // Code::Ref (PackTypeCode asserts in debug builds); use the two-argument
  // constructor for (ref T).
  MOZ_IMPLICIT ValType(Code c) : tc_(PackTypeCode(TypeCode(c))) {
    MOZ_ASSERT(isValidCode());
  }

  // For Code::Ref: pairs the code with the index of the referenced type.
  ValType(Code c, uint32_t refTypeIndex)
      : tc_(PackTypeCode(TypeCode(c), refTypeIndex)) {
    MOZ_ASSERT(isValidCode());
  }

  explicit ValType(PackedTypeCode ptc) : tc_(ptc) { MOZ_ASSERT(isValidCode()); }

  // Inverse of ToMIRType() for the numeric types only; any other MIRType
  // crashes.
  explicit ValType(jit::MIRType mty) {
    switch (mty) {
      case jit::MIRType::Int32:
        tc_ = PackTypeCode(TypeCode::I32);
        break;
      case jit::MIRType::Int64:
        tc_ = PackTypeCode(TypeCode::I64);
        break;
      case jit::MIRType::Float32:
        tc_ = PackTypeCode(TypeCode::F32);
        break;
      case jit::MIRType::Double:
        tc_ = PackTypeCode(TypeCode::F64);
        break;
      default:
        MOZ_CRASH("ValType(MIRType): unexpected type");
    }
  }

  // "Unsafe": |bits| is not validated beyond the debug asserts performed by
  // the packing helpers.  Pair with bitsUnsafe() for (de)serialization.
  static ValType fromBitsUnsafe(uint32_t bits) {
    return ValType(PackedTypeCodeFromBits(bits));
  }

  PackedTypeCode packed() const { return tc_; }

  // Raw bit representation; see fromBitsUnsafe().
  uint32_t bitsUnsafe() const { return PackedTypeCodeToBits(tc_); }

  Code code() const { return Code(UnpackTypeCodeType(tc_)); }

  bool isValid() const { return IsValid(tc_); }

  // Only meaningful when isRef(); asserts otherwise (in debug builds).
  uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); }
  // True for (ref T) only, not for anyref/funcref/nullref.
  bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; }

  // True for all reference types: (ref T), anyref, funcref, nullref.
  bool isReference() const { return IsReferenceType(tc_); }

  // Some types are encoded as JS::Value when they escape from Wasm (when passed
  // as parameters to imports or returned from exports). For AnyRef the Value
  // encoding is pretty much a requirement. For other types it's a choice that
  // may (temporarily) simplify some code.
  bool isEncodedAsJSValueOnEscape() const {
    return code() == Code::AnyRef || code() == Code::FuncRef;
  }

  bool operator==(const ValType& that) const { return tc_ == that.tc_; }
  bool operator!=(const ValType& that) const { return tc_ != that.tc_; }
  // Comparing directly against Code::Ref is disallowed: equality for (ref T)
  // must also consider the type index, so compare whole ValTypes instead.
  bool operator==(Code that) const {
    MOZ_ASSERT(that != Code::Ref);
    return code() == that;
  }
  bool operator!=(Code that) const { return !(*this == that); }
};
371
372
// The dominant use of this data type is for locals and args, and profiling
373
// with ZenGarden and Tanks suggests an initial size of 16 minimises heap
374
// allocation, both in terms of blocks and bytes.
375
typedef Vector<ValType, 16, SystemAllocPolicy> ValTypeVector;
376
377
// ValType utilities

// Storage size in bytes of a value of the given type; all reference types
// are stored as pointers.
static inline unsigned SizeOf(ValType vt) {
  switch (vt.code()) {
    case ValType::I32:
    case ValType::F32:
      return 4;
    case ValType::I64:
    case ValType::F64:
      return 8;
    case ValType::AnyRef:
    case ValType::FuncRef:
    case ValType::NullRef:
    case ValType::Ref:
      return sizeof(intptr_t);
  }
  MOZ_CRASH("Invalid ValType");
}
395
396
// Note, ToMIRType is only correct within Wasm, where an AnyRef is represented
// as a pointer. At the JS/wasm boundary, an AnyRef can be represented as a
// JS::Value, and the type translation may have to be handled specially and on a
// case-by-case basis.

static inline jit::MIRType ToMIRType(ValType vt) {
  switch (vt.code()) {
    case ValType::I32:
      return jit::MIRType::Int32;
    case ValType::I64:
      return jit::MIRType::Int64;
    case ValType::F32:
      return jit::MIRType::Float32;
    case ValType::F64:
      return jit::MIRType::Double;
    // All reference types lower to the same pointer-shaped MIR type.
    case ValType::Ref:
    case ValType::AnyRef:
    case ValType::FuncRef:
    case ValType::NullRef:
      return jit::MIRType::RefOrNull;
  }
  MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("bad type");
}
419
420
// A "number" type is any non-reference value type (i32/i64/f32/f64).
static inline bool IsNumberType(ValType vt) { return !vt.isReference(); }
421
422
// As ToMIRType(ValType), with Nothing mapping to MIRType::None (no value).
static inline jit::MIRType ToMIRType(const Maybe<ValType>& t) {
  if (t.isNothing()) {
    return jit::MIRType::None;
  }
  return ToMIRType(ValType(t.ref()));
}
425
426
// Static, human-readable name for the given type (for errors/diagnostics).
static inline const char* ToCString(ValType type) {
  switch (type.code()) {
    case ValType::I32:
      return "i32";
    case ValType::I64:
      return "i64";
    case ValType::F32:
      return "f32";
    case ValType::F64:
      return "f64";
    case ValType::AnyRef:
      return "anyref";
    case ValType::FuncRef:
      return "funcref";
    case ValType::NullRef:
      return "nullref";
    case ValType::Ref:
      return "ref";
    default:
      MOZ_CRASH("bad value type");
  }
}
448
449
static inline const char* ToCString(const Maybe<ValType>& type) {
450
return type ? ToCString(type.ref()) : "void";
451
}
452
453
// An AnyRef is a boxed value that can represent any wasm reference type and any
454
// host type that the host system allows to flow into and out of wasm
455
// transparently. It is a pointer-sized datum that has the same representation
456
// as all its subtypes (funcref, eqref, (ref T), et al) due to the non-coercive
457
// subtyping of the wasm type system. Its current representation is a plain
458
// JSObject*, and the private JSObject subtype WasmValueBox is used to box
459
// non-object non-null JS values.
460
//
461
// The C++/wasm boundary always uses a 'void*' type to express AnyRef values, to
462
// emphasize the pointer-ness of the value. The C++ code must transform the
463
// void* into an AnyRef by calling AnyRef::fromCompiledCode(), and transform an
464
// AnyRef into a void* by calling AnyRef::toCompiledCode(). Once in C++, we use
465
// AnyRef everywhere. A JS Value is transformed into an AnyRef by calling
466
// AnyRef::box(), and the AnyRef is transformed into a JS Value by calling
467
// AnyRef::unbox().
468
//
469
// NOTE that AnyRef values may point to GC'd storage and as such need to be
470
// rooted if they are kept live in boxed form across code that may cause GC!
471
// Use RootedAnyRef / HandleAnyRef / MutableHandleAnyRef where necessary.
472
//
473
// The lowest bits of the pointer value are used for tagging, to allow for some
474
// representation optimizations and to distinguish various types.
475
476
// For version 0, we simply equate AnyRef and JSObject* (this means that there
477
// are technically no tags at all yet). We use a simple boxing scheme that
478
// wraps a JS value that is not already JSObject in a distinguishable JSObject
479
// that holds the value, see WasmTypes.cpp for details. Knowledge of this
480
// mapping is embedded in CodeGenerator.cpp (in WasmBoxValue and
481
// WasmAnyRefFromJSObject) and in WasmStubs.cpp (in functions Box* and Unbox*).
482
483
class AnyRef {
  JSObject* value_;

  // The invalid representation; distinguishable from every valid pointer.
  explicit AnyRef() : value_((JSObject*)-1) {}
  explicit AnyRef(JSObject* p) : value_(p) {
    // The low two bits are reserved for tagging (see AnyRefTagMask below),
    // so the pointer must be at least 4-byte aligned.
    MOZ_ASSERT(((uintptr_t)p & 0x03) == 0);
  }

 public:
  // An invalid AnyRef cannot arise naturally from wasm and so can be used as
  // a sentinel value to indicate failure from an AnyRef-returning function.
  static AnyRef invalid() { return AnyRef(); }

  // Given a void* that comes from compiled wasm code, turn it into AnyRef.
  static AnyRef fromCompiledCode(void* p) { return AnyRef((JSObject*)p); }

  // Given a JSObject* that comes from JS, turn it into AnyRef.
  static AnyRef fromJSObject(JSObject* p) { return AnyRef(p); }

  // Generate an AnyRef null pointer.
  static AnyRef null() { return AnyRef(nullptr); }

  bool isNull() { return value_ == nullptr; }

  // View the value as an untyped pointer, for handing to compiled wasm code.
  void* forCompiledCode() const { return value_; }

  JSObject* asJSObject() { return value_; }

  // Address of the stored pointer, for in-place updating during tracing.
  JSObject** asJSObjectAddress() { return &value_; }

  void trace(JSTracer* trc);

  // Tags (to be developed further)
  static constexpr uintptr_t AnyRefTagMask = 1;
  static constexpr uintptr_t AnyRefObjTag = 0;
};
519
520
typedef Rooted<AnyRef> RootedAnyRef;
521
typedef Handle<AnyRef> HandleAnyRef;
522
typedef MutableHandle<AnyRef> MutableHandleAnyRef;
523
524
// TODO/AnyRef-boxing: With boxed immediates and strings, these will be defined
// as MOZ_CRASH or similar so that we can find all locations that need to be
// fixed.

// For now these are no-ops, since AnyRef is a plain JSObject* (see above).
#define ASSERT_ANYREF_IS_JSOBJECT (void)(0)
#define STATIC_ASSERT_ANYREF_IS_JSOBJECT static_assert(1, "AnyRef is JSObject")
530
531
// Given any JS value, box it as an AnyRef and store it in *result. Returns
// false on OOM.
// (Definitions of these functions live in the implementation file; the
// boxing scheme is described in the AnyRef comment block above.)

bool BoxAnyRef(JSContext* cx, HandleValue val, MutableHandleAnyRef result);

// Given a JS value that requires an object box, box it as an AnyRef and return
// it, returning nullptr on OOM.
//
// Currently the values requiring a box are those other than JSObject* or
// nullptr, but in the future more values will be represented without an
// allocation.
JSObject* BoxBoxableValue(JSContext* cx, HandleValue val);

// Given any AnyRef, unbox it as a JS Value. If it is a reference to a wasm
// object it will be reflected as a JSObject* representing some TypedObject
// instance.

Value UnboxAnyRef(AnyRef val);
549
550
// The private JSObject subtype used to box non-object, non-null JS values
// that flow into wasm as AnyRef (see the AnyRef comments above).  The boxed
// value lives in a single reserved slot.
class WasmValueBox : public NativeObject {
  static const unsigned VALUE_SLOT = 0;

 public:
  static const unsigned RESERVED_SLOTS = 1;
  static const JSClass class_;

  // Allocate a box holding |val|; presumably returns nullptr on OOM —
  // confirm against the definition.
  static WasmValueBox* create(JSContext* cx, HandleValue val);
  // The boxed value.
  Value value() const { return getFixedSlot(VALUE_SLOT); }
  // Offset of the value slot, for direct access from jitted code.
  static size_t offsetOfValue() {
    return NativeObject::getFixedSlotOffset(VALUE_SLOT);
  }
};
563
564
// A FuncRef is a JSFunction* and is hence also an AnyRef, and the remarks above
565
// about AnyRef apply also to FuncRef. When 'funcref' is used as a value type
566
// in wasm code, the value that is held is "the canonical function value", which
567
// is a function for which IsWasmExportedFunction() is true, and which has the
568
// correct identity wrt reference equality of functions. Notably, if a function
569
// is imported then its ref.func value compares === in JS to the function that
570
// was passed as an import when the instance was created.
571
//
572
// These rules ensure that casts from funcref to anyref are non-converting
573
// (generate no code), and that no wrapping or unwrapping needs to happen when a
574
// funcref or anyref flows across the JS/wasm boundary, and that functions have
575
// the necessary identity when observed from JS, and in the future, from wasm.
576
//
577
// Functions stored in tables, whether wasm tables or internal tables, can be
578
// stored in a form that optimizes for eg call speed, however.
579
//
580
// Reading a funcref from a funcref table, writing a funcref to a funcref table,
581
// and generating the value for a ref.func instruction are therefore nontrivial
582
// operations that require mapping between the canonical JSFunction and the
583
// optimized table representation. Once we get an instruction to call a
584
// ref.func directly it too will require such a mapping.
585
586
// In many cases, a FuncRef is exactly the same as AnyRef and we can use AnyRef
587
// functionality on funcref values. The FuncRef class exists mostly to add more
588
// checks and to make it clear, when we need to, that we're manipulating funcref
589
// values. FuncRef does not currently subclass AnyRef because there's been no
590
// need to, but it probably could.
591
592
class FuncRef {
  JSFunction* value_;

  // The invalid representation; distinguishable from every valid pointer.
  explicit FuncRef() : value_((JSFunction*)-1) {}
  explicit FuncRef(JSFunction* p) : value_(p) {
    // Low two bits are reserved for tagging, as for AnyRef.
    MOZ_ASSERT(((uintptr_t)p & 0x03) == 0);
  }

 public:
  // Given a void* that comes from compiled wasm code, turn it into FuncRef.
  static FuncRef fromCompiledCode(void* p) { return FuncRef((JSFunction*)p); }

  // Given a JSFunction* that comes from JS, turn it into FuncRef.
  static FuncRef fromJSFunction(JSFunction* p) { return FuncRef(p); }

  // Given an AnyRef that represents a possibly-null funcref, turn it into a
  // FuncRef.  Debug builds verify the dynamic type (crashing on non-function
  // objects); release builds cast unchecked.
  static FuncRef fromAnyRefUnchecked(AnyRef p) {
#ifdef DEBUG
    Value v = UnboxAnyRef(p);
    if (v.isNull()) {
      return FuncRef(nullptr);
    }
    if (v.toObject().is<JSFunction>()) {
      return FuncRef(&v.toObject().as<JSFunction>());
    }
    MOZ_CRASH("Bad value");
#else
    return FuncRef(&p.asJSObject()->as<JSFunction>());
#endif
  }

  // Every FuncRef is also an AnyRef; the cast is non-converting.
  AnyRef asAnyRef() { return AnyRef::fromJSObject((JSObject*)value_); }

  void* forCompiledCode() const { return value_; }

  JSFunction* asJSFunction() { return value_; }

  bool isNull() { return value_ == nullptr; }
};
632
633
typedef Rooted<FuncRef> RootedFuncRef;
634
typedef Handle<FuncRef> HandleFuncRef;
635
typedef MutableHandle<FuncRef> MutableHandleFuncRef;
636
637
// Given any FuncRef, unbox it as a JS Value -- always a JSFunction*.
638
639
Value UnboxFuncRef(FuncRef val);
640
641
// Code can be compiled either with the Baseline compiler or the Ion compiler,
// and tier-variant data are tagged with the Tier value.
//
// A tier value is used to request tier-variant aspects of code, metadata, or
// linkdata. The tiers are normally explicit (Baseline and Ion); implicit tiers
// can be obtained through accessors on Code objects (eg, stableTier).

enum class Tier {
  Baseline,
  Debug = Baseline,       // Debug-mode code is the baseline tier
  Optimized,
  Serialized = Optimized  // Serialized code is the optimized tier
};
654
655
// Which backend to use in the case of the optimized tier.

enum class OptimizedBackend {
  Ion,
  Cranelift,
};

// The CompileMode controls how compilation of a module is performed (notably,
// how many times we compile it).

enum class CompileMode { Once, Tier1, Tier2 };

// Typed enum for whether debugging is enabled.

enum class DebugEnabled { False, True };
670
671
// A wasm module can either use no memory, an unshared memory (ArrayBuffer) or
672
// shared memory (SharedArrayBuffer).
673
674
// None (0) for no memory, Unshared/Shared for the two buffer kinds noted in
// the comment above.  (Was `None = false`; spelled as the integer 0 here
// since the enumerators are numeric, not boolean.)
enum class MemoryUsage { None = 0, Unshared = 1, Shared = 2 };
675
676
// Iterator over tiers present in a tiered data structure.
677
678
class Tiers {
679
Tier t_[2];
680
uint32_t n_;
681
682
public:
683
explicit Tiers() { n_ = 0; }
684
explicit Tiers(Tier t) {
685
t_[0] = t;
686
n_ = 1;
687
}
688
explicit Tiers(Tier t, Tier u) {
689
MOZ_ASSERT(t != u);
690
t_[0] = t;
691
t_[1] = u;
692
n_ = 2;
693
}
694
695
Tier* begin() { return t_; }
696
Tier* end() { return t_ + n_; }
697
};
698
699
// A Module can either be asm.js or wasm.

enum ModuleKind { Wasm, AsmJS };

// Typed boolean for shareability.
enum class Shareable { False, True };
704
705
// The LitVal class represents a single WebAssembly value of a given value
// type, mostly for the purpose of numeric literals and initializers. A LitVal
// does not directly map to a JS value since there is not (currently) a precise
// representation of i64 values. A LitVal may contain non-canonical NaNs since,
// within WebAssembly, floats are not canonicalized. Canonicalization must
// happen at the JS boundary.

class LitVal {
 protected:
  ValType type_;
  // Untagged payload union; type_ selects which member is active.
  union U {
    U() : i32_(0) {}
    uint32_t i32_;
    uint64_t i64_;
    float f32_;
    double f64_;
    AnyRef ref_;
  } u;

 public:
  // Default: invalid type, zeroed payload.
  LitVal() : type_(), u{} {}

  explicit LitVal(uint32_t i32) : type_(ValType::I32) { u.i32_ = i32; }
  explicit LitVal(uint64_t i64) : type_(ValType::I64) { u.i64_ = i64; }

  explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; }
  explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; }

  // Only null references are accepted here: LitVal has no trace() hook, so a
  // non-null GC pointer stored in a bare LitVal could go stale.
  explicit LitVal(ValType type, AnyRef any) : type_(type) {
    MOZ_ASSERT(type.isReference());
    MOZ_ASSERT(any.isNull(),
               "use Val for non-nullptr ref types to get tracing");
    u.ref_ = any;
  }

  ValType type() const { return type_; }
  static constexpr size_t sizeofLargestValue() { return sizeof(u); }

  // Typed accessors; each asserts that the matching type is active.
  uint32_t i32() const {
    MOZ_ASSERT(type_ == ValType::I32);
    return u.i32_;
  }
  uint64_t i64() const {
    MOZ_ASSERT(type_ == ValType::I64);
    return u.i64_;
  }
  const float& f32() const {
    MOZ_ASSERT(type_ == ValType::F32);
    return u.f32_;
  }
  const double& f64() const {
    MOZ_ASSERT(type_ == ValType::F64);
    return u.f64_;
  }
  AnyRef ref() const {
    MOZ_ASSERT(type_.isReference());
    return u.ref_;
  }
};
764
765
// A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals
// must be stored in Rooteds so that their trace() methods are called during
// stack marking. Vals do not implement barriers and thus may not be stored on
// the heap.

class MOZ_NON_PARAM Val : public LitVal {
 public:
  Val() : LitVal() {}
  explicit Val(const LitVal& val);
  explicit Val(uint32_t i32) : LitVal(i32) {}
  explicit Val(uint64_t i64) : LitVal(i64) {}
  explicit Val(float f32) : LitVal(f32) {}
  explicit Val(double f64) : LitVal(f64) {}
  // Construct through the null-ref LitVal constructor (which rejects non-null
  // refs), then store the possibly non-null reference directly; tracing via
  // trace() below keeps it alive.
  explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) {
    MOZ_ASSERT(type.isReference());
    u.ref_ = val;
  }
  explicit Val(ValType type, FuncRef val) : LitVal(type, AnyRef::null()) {
    MOZ_ASSERT(type == ValType::FuncRef);
    u.ref_ = val.asAnyRef();
  }
  void trace(JSTracer* trc);
};
788
789
typedef Rooted<Val> RootedVal;
790
typedef Handle<Val> HandleVal;
791
typedef MutableHandle<Val> MutableHandleVal;
792
793
typedef GCVector<Val, 0, SystemAllocPolicy> ValVector;
794
typedef Rooted<ValVector> RootedValVector;
795
typedef Handle<ValVector> HandleValVector;
796
typedef MutableHandle<ValVector> MutableHandleValVector;
797
798
// The FuncType class represents a WebAssembly function signature which takes a
799
// list of value types and returns an expression type. The engine uses two
800
// in-memory representations of the argument Vector's memory (when elements do
801
// not fit inline): normal malloc allocation (via SystemAllocPolicy) and
802
// allocation in a LifoAlloc (via LifoAllocPolicy). The former FuncType objects
803
// can have any lifetime since they own the memory. The latter FuncType objects
804
// must not outlive the associated LifoAlloc mark/release interval (which is
805
// currently the duration of module validation+compilation). Thus, long-lived
806
// objects like WasmModule must use malloced allocation.
807
808
class FuncType {
809
ValTypeVector args_;
810
ValTypeVector results_;
811
812
public:
813
FuncType() : args_(), results_() {}
814
FuncType(ValTypeVector&& args, ValTypeVector&& results)
815
: args_(std::move(args)), results_(std::move(results)) {}
816
817
MOZ_MUST_USE bool clone(const FuncType& rhs) {
818
MOZ_ASSERT(args_.empty());
819
MOZ_ASSERT(results_.empty());
820
return args_.appendAll(rhs.args_) && results_.appendAll(rhs.results_);
821
}
822
823
ValType arg(unsigned i) const { return args_[i]; }
824
const ValTypeVector& args() const { return args_; }
825
ValType result(unsigned i) const { return results_[i]; }
826
const ValTypeVector& results() const { return results_; }
827
828
// Transitional method, to be removed after multi-values (1401675).
829
Maybe<ValType> ret() const {
830
if (results_.length() == 0) {
831
return Nothing();
832
}
833
MOZ_ASSERT(results_.length() == 1);
834
return Some(result(0));
835
}
836
837
HashNumber hash() const {
838
HashNumber hn = 0;
839
for (const ValType& vt : args_) {
840
hn = mozilla::AddToHash(hn, HashNumber(vt.code()));
841
}
842
for (const ValType& vt : results_) {
843
hn = mozilla::AddToHash(hn, HashNumber(vt.code()));
844
}
845
return hn;
846
}
847
bool operator==(const FuncType& rhs) const {
848
return EqualContainers(args(), rhs.args()) &&
849
EqualContainers(results(), rhs.results());
850
}
851
bool operator!=(const FuncType& rhs) const { return !(*this == rhs); }
852
853
bool hasI64ArgOrRet() const {
854
for (ValType arg : args()) {
855
if (arg == ValType::I64) {
856
return true;
857
}
858
}
859
for (ValType result : results()) {
860
if (result == ValType::I64) {
861
return true;
862
}
863
}
864
return false;
865
}
866
// For JS->wasm jit entries, AnyRef parameters and returns are allowed,
867
// as are FuncRef returns.
868
bool temporarilyUnsupportedReftypeForEntry() const {
869
for (ValType arg : args()) {
870
if (arg.isReference() && arg.code() != ValType::AnyRef) {
871
return true;
872
}
873
}
874
for (ValType result : results()) {
875
if (result.isReference() && result.code() != ValType::AnyRef &&
876
result.code() != ValType::FuncRef) {
877
return true;
878
}
879
}
880
return false;
881
}
882
// For inlined JS->wasm jit entries, AnyRef parameters and returns are
883
// allowed, as are FuncRef returns.
884
bool temporarilyUnsupportedReftypeForInlineEntry() const {
885
for (ValType arg : args()) {
886
if (arg.isReference() && arg.code() != ValType::AnyRef) {
887
return true;
888
}
889
}
890
for (ValType result : results()) {
891
if (result.isReference() && result.code() != ValType::AnyRef &&
892
result.code() != ValType::FuncRef) {
893
return true;
894
}
895
}
896
return false;
897
}
898
// For wasm->JS jit exits, AnyRef parameters and returns are allowed, as are
899
// FuncRef parameters.
900
bool temporarilyUnsupportedReftypeForExit() const {
901
for (ValType arg : args()) {
902
if (arg.isReference() && arg.code() != ValType::AnyRef &&
903
arg.code() != ValType::FuncRef) {
904
return true;
905
}
906
}
907
for (ValType result : results()) {
908
if (result.isReference() && result.code() != ValType::AnyRef) {
909
return true;
910
}
911
}
912
return false;
913
}
914
bool jitExitRequiresArgCheck() const {
915
for (ValType arg : args()) {
916
if (arg.isEncodedAsJSValueOnEscape()) {
917
return true;
918
}
919
}
920
return false;
921
}
922
#ifdef WASM_PRIVATE_REFTYPES
923
bool exposesRef() const {
924
for (const ValType& arg : args()) {
925
if (arg.isRef()) {
926
return true;
927
}
928
}
929
for (const ValType& result : results()) {
930
if (result.isRef()) {
931
return true;
932
}
933
}
934
return false;
935
}
936
#endif
937
938
WASM_DECLARE_SERIALIZABLE(FuncType)
939
};
940
941
struct FuncTypeHashPolicy {
942
typedef const FuncType& Lookup;
943
static HashNumber hash(Lookup ft) { return ft.hash(); }
944
static bool match(const FuncType* lhs, Lookup rhs) { return *lhs == rhs; }
945
};
946
947
// Structure type.
948
//
949
// The Module owns a dense array of StructType values that represent the
950
// structure types that the module knows about. It is created from the sparse
951
// array of types in the ModuleEnvironment when the Module is created.
952
953
// One field of a struct type: its value type, its offset within the struct's
// storage (interpreted per StructType::isInline_ — see StructType), and
// whether the field may be written after initialization.
struct StructField {
  ValType type;     // Value type of the field
  uint32_t offset;  // Offset of the field within the struct's storage
  bool isMutable;   // True if the field can be mutated
};

typedef Vector<StructField, 0, SystemAllocPolicy> StructFieldVector;
960
961
class StructType {
 public:
  // NOTE: both sections of this class are 'public:'; the data members below
  // are directly accessible in addition to the member functions.
  StructFieldVector fields_;  // Field type, offset, and mutability
  uint32_t moduleIndex_;      // Index in a dense array of structs in the module
  bool isInline_;             // True if this is an InlineTypedObject and we
                              // interpret the offsets from the object pointer;
                              // if false this is an OutlineTypedObject and we
                              // interpret everything relative to the pointer to
                              // the attached storage.
 public:
  StructType() : fields_(), moduleIndex_(0), isInline_(true) {}

  StructType(StructFieldVector&& fields, uint32_t index, bool isInline)
      : fields_(std::move(fields)), moduleIndex_(index), isInline_(isInline) {}

  // Fallible copy: appends src's fields to this->fields_ and copies the
  // scalar members. Returns false on OOM. Intended for use on a
  // default-constructed (empty) StructType.
  bool copyFrom(const StructType& src) {
    if (!fields_.appendAll(src.fields_)) {
      return false;
    }
    moduleIndex_ = src.moduleIndex_;
    isInline_ = src.isInline_;
    return true;
  }

  // Defined out-of-line; true if 'other' matches a leading subsequence of
  // this struct's layout (exact semantics in the implementation).
  bool hasPrefix(const StructType& other) const;

  WASM_DECLARE_SERIALIZABLE(StructType)
};

typedef Vector<StructType, 0, SystemAllocPolicy> StructTypeVector;
991
992
// An InitExpr describes a deferred initializer expression, used to initialize
993
// a global or a table element offset. Such expressions are created during
994
// decoding and actually executed on module instantiation.
995
996
class InitExpr {
 public:
  // A deferred initializer is either a literal constant or a get_global of an
  // (imported) global.
  enum class Kind { Constant, GetGlobal };

 private:
  // Note: all this private data is currently (de)serialized via memcpy().
  Kind kind_;
  union U {
    LitVal val_;  // Valid iff kind_ == Kind::Constant
    struct {
      uint32_t index_;  // Index of the referenced global
      ValType type_;    // Type of the referenced global
    } global;           // Valid iff kind_ == Kind::GetGlobal
    U() : global{} {}
  } u;

 public:
  // Default construction leaves kind_/u unset; only used as a target for
  // assignment or deserialization.
  InitExpr() = default;

  // Constant-expression initializer.
  explicit InitExpr(LitVal val) : kind_(Kind::Constant) { u.val_ = val; }

  // get_global initializer referencing global 'globalIndex' of type 'type'.
  explicit InitExpr(uint32_t globalIndex, ValType type)
      : kind_(Kind::GetGlobal) {
    u.global.index_ = globalIndex;
    u.global.type_ = type;
  }

  Kind kind() const { return kind_; }

  bool isVal() const { return kind() == Kind::Constant; }
  // The constant value; asserts isVal().
  LitVal val() const {
    MOZ_ASSERT(isVal());
    return u.val_;
  }

  // The referenced global's index; asserts kind() == Kind::GetGlobal.
  uint32_t globalIndex() const {
    MOZ_ASSERT(kind() == Kind::GetGlobal);
    return u.global.index_;
  }

  // The value type this expression evaluates to, for either kind.
  ValType type() const {
    switch (kind()) {
      case Kind::Constant:
        return u.val_.type();
      case Kind::GetGlobal:
        return u.global.type_;
    }
    MOZ_CRASH("unexpected initExpr type");
  }
};
1046
1047
// CacheableChars is used to cacheably store UniqueChars.
1048
1049
// A UniqueChars whose contents can be (de)serialized via the WASM_DECLARE_*
// machinery; implicitly convertible from UniqueChars for convenience.
struct CacheableChars : UniqueChars {
  CacheableChars() = default;
  // Takes ownership of 'ptr'.
  explicit CacheableChars(char* ptr) : UniqueChars(ptr) {}
  // Implicit by design (MOZ_IMPLICIT documents the intent) so UniqueChars
  // rvalues can be passed where CacheableChars is expected.
  MOZ_IMPLICIT CacheableChars(UniqueChars&& rhs)
      : UniqueChars(std::move(rhs)) {}
  WASM_DECLARE_SERIALIZABLE(CacheableChars)
};

typedef Vector<CacheableChars, 0, SystemAllocPolicy> CacheableCharsVector;
1058
1059
// Import describes a single wasm import. An ImportVector describes all
1060
// of a single module's imports.
1061
//
1062
// ImportVector is built incrementally by ModuleGenerator and then stored
1063
// immutably by Module.
1064
1065
struct Import {
  CacheableChars module;  // Import's module name
  CacheableChars field;   // Import's field name within that module
  DefinitionKind kind;    // What is being imported (function/table/etc.)

  Import() = default;
  // Takes ownership of both name strings.
  Import(UniqueChars&& module, UniqueChars&& field, DefinitionKind kind)
      : module(std::move(module)), field(std::move(field)), kind(kind) {}

  WASM_DECLARE_SERIALIZABLE(Import)
};

typedef Vector<Import, 0, SystemAllocPolicy> ImportVector;
1078
1079
// Export describes the export of a definition in a Module to a field in the
1080
// export object. For functions, Export stores an index into the
1081
// FuncExportVector in Metadata. For memory and table exports, there is
1082
// at most one (default) memory/table so no index is needed. Note: a single
1083
// definition can be exported by multiple Exports in the ExportVector.
1084
//
1085
// ExportVector is built incrementally by ModuleGenerator and then stored
1086
// immutably by Module.
1087
1088
class Export {
  CacheableChars fieldName_;  // Name of the field on the export object
  // POD part of the Export, (de)serializable by the cacheable-POD machinery.
  struct CacheablePod {
    DefinitionKind kind_;  // What kind of definition is exported
    uint32_t index_;       // Index of the definition (meaning depends on kind_)
  } pod;

 public:
  Export() = default;
  // Constructors are defined out-of-line; the two-argument form is for the
  // kinds (e.g. memory/table) where no index is needed.
  explicit Export(UniqueChars fieldName, uint32_t index, DefinitionKind kind);
  explicit Export(UniqueChars fieldName, DefinitionKind kind);

  const char* fieldName() const { return fieldName_.get(); }

  DefinitionKind kind() const { return pod.kind_; }
  // Kind-specific accessors for pod.index_, defined out-of-line.
  uint32_t funcIndex() const;
  uint32_t globalIndex() const;
  uint32_t tableIndex() const;

  WASM_DECLARE_SERIALIZABLE(Export)
};

typedef Vector<Export, 0, SystemAllocPolicy> ExportVector;
1111
1112
// A GlobalDesc describes a single global variable.
1113
//
1114
// wasm can import and export mutable and immutable globals.
1115
//
1116
// asm.js can import mutable and immutable globals, but a mutable global has a
1117
// location that is private to the module, and its initial value is copied into
1118
// that cell from the environment. asm.js cannot export globals.
1119
1120
enum class GlobalKind { Import, Constant, Variable };

class GlobalDesc {
  // Tagged storage: 'var' is valid for Variable and Import kinds, 'cst_' for
  // Constant. The tag is kind_ below.
  union V {
    struct {
      union U {
        InitExpr initial_;  // Valid for GlobalKind::Variable
        struct {
          ValType type_;    // Type of the imported global
          uint32_t index_;  // Import index
        } import;           // Valid for GlobalKind::Import
        U() : import{} {}
      } val;
      unsigned offset_;  // Offset into the instance's global area; set once
                         // via setOffset() (UINT32_MAX until then)
      bool isMutable_;
      bool isWasm_;    // True unless the module is asm.js
      bool isExport_;  // Set (irreversibly) by setIsExport()
    } var;
    LitVal cst_;  // Valid for GlobalKind::Constant
    V() {}
  } u;
  GlobalKind kind_;

  // Private, as they have unusual semantics.

  bool isExport() const { return !isConstant() && u.var.isExport_; }
  bool isWasm() const { return !isConstant() && u.var.isWasm_; }

 public:
  GlobalDesc() = default;

  // A global defined by an initializer expression. It is a Constant only if
  // it is immutable and the initializer is a literal value; otherwise it is
  // a Variable.
  explicit GlobalDesc(InitExpr initial, bool isMutable,
                      ModuleKind kind = ModuleKind::Wasm)
      : kind_((isMutable || !initial.isVal()) ? GlobalKind::Variable
                                              : GlobalKind::Constant) {
    MOZ_ASSERT(initial.type() != ValType::NullRef);
    if (isVariable()) {
      u.var.val.initial_ = initial;
      u.var.isMutable_ = isMutable;
      u.var.isWasm_ = kind == Wasm;
      u.var.isExport_ = false;
      u.var.offset_ = UINT32_MAX;
    } else {
      u.cst_ = initial.val();
    }
  }

  // An imported global of the given type and import index.
  explicit GlobalDesc(ValType type, bool isMutable, uint32_t importIndex,
                      ModuleKind kind = ModuleKind::Wasm)
      : kind_(GlobalKind::Import) {
    MOZ_ASSERT(type != ValType::NullRef);
    u.var.val.import.type_ = type;
    u.var.val.import.index_ = importIndex;
    u.var.isMutable_ = isMutable;
    u.var.isWasm_ = kind == Wasm;
    u.var.isExport_ = false;
    u.var.offset_ = UINT32_MAX;
  }

  // Assigns the global's offset in the instance's global area; may be called
  // exactly once (asserts the offset is still the UINT32_MAX sentinel).
  void setOffset(unsigned offset) {
    MOZ_ASSERT(!isConstant());
    MOZ_ASSERT(u.var.offset_ == UINT32_MAX);
    u.var.offset_ = offset;
  }
  unsigned offset() const {
    MOZ_ASSERT(!isConstant());
    MOZ_ASSERT(u.var.offset_ != UINT32_MAX);
    return u.var.offset_;
  }

  // Marks a non-constant global as exported; a no-op for constants.
  void setIsExport() {
    if (!isConstant()) {
      u.var.isExport_ = true;
    }
  }

  GlobalKind kind() const { return kind_; }
  bool isVariable() const { return kind_ == GlobalKind::Variable; }
  bool isConstant() const { return kind_ == GlobalKind::Constant; }
  bool isImport() const { return kind_ == GlobalKind::Import; }

  bool isMutable() const { return !isConstant() && u.var.isMutable_; }
  LitVal constantValue() const {
    MOZ_ASSERT(isConstant());
    return u.cst_;
  }
  const InitExpr& initExpr() const {
    MOZ_ASSERT(isVariable());
    return u.var.val.initial_;
  }
  uint32_t importIndex() const {
    MOZ_ASSERT(isImport());
    return u.var.val.import.index_;
  }

  // If isIndirect() is true then storage for the value is not in the
  // instance's global area, but in a WasmGlobalObject::Cell hanging off a
  // WasmGlobalObject; the global area contains a pointer to the Cell.
  //
  // We don't want to indirect unless we must, so only mutable, exposed
  // globals are indirected - in all other cases we copy values into and out
  // of their module.
  //
  // Note that isIndirect() isn't equivalent to getting a WasmGlobalObject:
  // an immutable exported global will still get an object, but will not be
  // indirect.
  bool isIndirect() const {
    return isMutable() && isWasm() && (isImport() || isExport());
  }

  // The value type of the global, for any kind.
  ValType type() const {
    switch (kind_) {
      case GlobalKind::Import:
        return u.var.val.import.type_;
      case GlobalKind::Variable:
        return u.var.val.initial_.type();
      case GlobalKind::Constant:
        return u.cst_.type();
    }
    MOZ_CRASH("unexpected global kind");
  }
};

typedef Vector<GlobalDesc, 0, SystemAllocPolicy> GlobalDescVector;
1244
1245
// When a ElemSegment is "passive" it is shared between a wasm::Module and its
1246
// wasm::Instances. To allow each segment to be released as soon as the last
1247
// Instance elem.drops it and the Module is destroyed, each ElemSegment is
1248
// individually atomically ref-counted.
1249
1250
struct ElemSegment : AtomicRefCounted<ElemSegment> {
  enum class Kind {
    Active,    // Has an offset; written into a table at instantiation
    Passive,   // No offset; applied later (e.g. by table.init)
    Declared,  // Declares its functions but is never applied to a table
  };

  Kind kind;
  uint32_t tableIndex;  // Target table (meaningful for active segments)
  ValType elementType;
  Maybe<InitExpr> offsetIfActive;  // Present iff kind == Kind::Active
  Uint32Vector elemFuncIndices;    // Element may be NullFuncIndex

  bool active() const { return kind == Kind::Active; }

  // The table offset expression; only valid when active() (otherwise
  // offsetIfActive is Nothing and dereferencing it is invalid).
  InitExpr offset() const { return *offsetIfActive; }

  // Number of elements in the segment.
  size_t length() const { return elemFuncIndices.length(); }

  ValType elemType() const { return elementType; }

  WASM_DECLARE_SERIALIZABLE(ElemSegment)
};

// NullFuncIndex represents the case when an element segment (of type funcref)
// contains a null element.
constexpr uint32_t NullFuncIndex = UINT32_MAX;
static_assert(NullFuncIndex > MaxFuncs, "Invariant");

typedef RefPtr<ElemSegment> MutableElemSegment;
typedef SerializableRefPtr<const ElemSegment> SharedElemSegment;
typedef Vector<SharedElemSegment, 0, SystemAllocPolicy> ElemSegmentVector;
1282
1283
// DataSegmentEnv holds the initial results of decoding a data segment from the
1284
// bytecode and is stored in the ModuleEnvironment during compilation. When
1285
// compilation completes, (non-Env) DataSegments are created and stored in
1286
// the wasm::Module which contain copies of the data segment payload. This
1287
// allows non-compilation uses of wasm validation to avoid expensive copies.
1288
//
1289
// When a DataSegment is "passive" it is shared between a wasm::Module and its
1290
// wasm::Instances. To allow each segment to be released as soon as the last
1291
// Instance mem.drops it and the Module is destroyed, each DataSegment is
1292
// individually atomically ref-counted.
1293
1294
// Decode-time view of a data segment: records where the payload lives in the
// bytecode rather than copying it (see the comment preceding this struct).
struct DataSegmentEnv {
  Maybe<InitExpr> offsetIfActive;  // Present iff the segment is active
  uint32_t bytecodeOffset;         // Offset of the payload in the bytecode
  uint32_t length;                 // Length of the payload in bytes
};

typedef Vector<DataSegmentEnv, 0, SystemAllocPolicy> DataSegmentEnvVector;
1301
1302
struct DataSegment : AtomicRefCounted<DataSegment> {
  Maybe<InitExpr> offsetIfActive;  // Present iff the segment is active
  Bytes bytes;                     // Owned copy of the segment payload

  DataSegment() = default;
  // Copies only the offset from the decode-time env; 'bytes' is filled in
  // separately by the caller.
  explicit DataSegment(const DataSegmentEnv& src)
      : offsetIfActive(src.offsetIfActive) {}

  bool active() const { return !!offsetIfActive; }

  // The memory offset expression; only valid when active().
  InitExpr offset() const { return *offsetIfActive; }

  WASM_DECLARE_SERIALIZABLE(DataSegment)
};

typedef RefPtr<DataSegment> MutableDataSegment;
typedef SerializableRefPtr<const DataSegment> SharedDataSegment;
typedef Vector<SharedDataSegment, 0, SystemAllocPolicy> DataSegmentVector;
1320
1321
// The CustomSection(Env) structs are like DataSegment(Env): CustomSectionEnv is
1322
// stored in the ModuleEnvironment and CustomSection holds a copy of the payload
1323
// and is stored in the wasm::Module.
1324
1325
// Decode-time view of a custom section: name and payload are recorded as
// (offset, length) pairs into the bytecode rather than copied.
struct CustomSectionEnv {
  uint32_t nameOffset;
  uint32_t nameLength;
  uint32_t payloadOffset;
  uint32_t payloadLength;
};

typedef Vector<CustomSectionEnv, 0, SystemAllocPolicy> CustomSectionEnvVector;

// Post-compilation form: owns the name bytes and shares the payload.
struct CustomSection {
  Bytes name;
  SharedBytes payload;

  WASM_DECLARE_SERIALIZABLE(CustomSection)
};

typedef Vector<CustomSection, 0, SystemAllocPolicy> CustomSectionVector;
1342
1343
// A Name represents a string of utf8 chars embedded within the name custom
1344
// section. The offset of a name is expressed relative to the beginning of the
1345
// name section's payload so that Names can be stored in wasm::Code, which only
1346
// holds the name section's bytes, not the whole bytecode.
1347
1348
// A (offset, length) view into the name section's payload; see the comment
// preceding this struct. A default-constructed Name has an out-of-range
// offset (UINT32_MAX) and zero length.
struct Name {
  // All fields are treated as cacheable POD:
  uint32_t offsetInNamePayload = UINT32_MAX;
  uint32_t length = 0;

  Name() = default;
};
1355
1356
using NameVector = Vector<Name, 0, SystemAllocPolicy>;
1357
1358
// FuncTypeIdDesc describes a function type that can be used by call_indirect
1359
// and table-entry prologues to structurally compare whether the caller and
1360
// callee's signatures *structurally* match. To handle the general case, a
1361
// FuncType is allocated and stored in a process-wide hash table, so that
1362
// pointer equality implies structural equality. As an optimization for the 99%
1363
// case where the FuncType has a small number of parameters, the FuncType is
1364
// bit-packed into a uint32 immediate value so that integer equality implies
1365
// structural equality. Both cases can be handled with a single comparison by
1366
// always setting the LSB for the immediates (the LSB is necessarily 0 for
1367
// allocated FuncType pointers due to alignment).
1368
1369
class FuncTypeIdDesc {
 public:
  // LSB tag distinguishing bit-packed immediates from allocated FuncType
  // pointers (see the comment preceding this class).
  static const uintptr_t ImmediateBit = 0x1;

 private:
  FuncTypeIdDescKind kind_;
  size_t bits_;  // Meaning depends on kind_: packed immediate or global-data
                 // offset

  FuncTypeIdDesc(FuncTypeIdDescKind kind, size_t bits)
      : kind_(kind), bits_(bits) {}

 public:
  FuncTypeIdDescKind kind() const { return kind_; }
  // Defined out-of-line: whether 'funcType' requires the Global (allocated)
  // representation.
  static bool isGlobal(const FuncType& funcType);

  FuncTypeIdDesc() : kind_(FuncTypeIdDescKind::None), bits_(0) {}
  // Factories, defined out-of-line.
  static FuncTypeIdDesc global(const FuncType& funcType,
                               uint32_t globalDataOffset);
  static FuncTypeIdDesc immediate(const FuncType& funcType);

  bool isGlobal() const { return kind_ == FuncTypeIdDescKind::Global; }

  // The bit-packed id; asserts the Immediate kind.
  size_t immediate() const {
    MOZ_ASSERT(kind_ == FuncTypeIdDescKind::Immediate);
    return bits_;
  }
  // The global-data offset; asserts the Global kind.
  uint32_t globalDataOffset() const {
    MOZ_ASSERT(kind_ == FuncTypeIdDescKind::Global);
    return bits_;
  }
};
1400
1401
// FuncTypeWithId pairs a FuncType with FuncTypeIdDesc, describing either how to
1402
// compile code that compares this signature's id or, at instantiation what
1403
// signature ids to allocate in the global hash and where to put them.
1404
1405
struct FuncTypeWithId : FuncType {
  FuncTypeIdDesc id;

  FuncTypeWithId() = default;
  explicit FuncTypeWithId(FuncType&& funcType)
      : FuncType(std::move(funcType)), id() {}
  FuncTypeWithId(FuncType&& funcType, FuncTypeIdDesc id)
      : FuncType(std::move(funcType)), id(id) {}
  // Assigning from a plain FuncType replaces the signature but deliberately
  // leaves 'id' untouched.
  void operator=(FuncType&& rhs) { FuncType::operator=(std::move(rhs)); }

  WASM_DECLARE_SERIALIZABLE(FuncTypeWithId)
};

typedef Vector<FuncTypeWithId, 0, SystemAllocPolicy> FuncTypeWithIdVector;
typedef Vector<const FuncTypeWithId*, 0, SystemAllocPolicy>
    FuncTypeWithIdPtrVector;
1421
1422
// A tagged container for the various types that can be present in a wasm
1423
// module's type section.
1424
1425
class TypeDef {
  // Manually-managed tagged union: tag_ selects which union member is live,
  // and the special member functions below do the placement-new/explicit-
  // destructor bookkeeping.
  enum { IsFuncType, IsStructType, IsNone } tag_;
  union {
    FuncTypeWithId funcType_;
    StructType structType_;
  };

 public:
  TypeDef() : tag_(IsNone) {}

  explicit TypeDef(FuncType&& funcType)
      : tag_(IsFuncType), funcType_(FuncTypeWithId(std::move(funcType))) {}

  explicit TypeDef(StructType&& structType)
      : tag_(IsStructType), structType_(std::move(structType)) {}

  // Move construction; the source keeps its tag and is left holding a
  // moved-from (but still destructible) value.
  TypeDef(TypeDef&& td) : tag_(td.tag_) {
    switch (tag_) {
      case IsFuncType:
        new (&funcType_) FuncTypeWithId(std::move(td.funcType_));
        break;
      case IsStructType:
        new (&structType_) StructType(std::move(td.structType_));
        break;
      case IsNone:
        break;
    }
  }

  // Explicitly destroy whichever union member is live.
  ~TypeDef() {
    switch (tag_) {
      case IsFuncType:
        funcType_.~FuncTypeWithId();
        break;
      case IsStructType:
        structType_.~StructType();
        break;
      case IsNone:
        break;
    }
  }

  // Move assignment is only permitted into an empty (IsNone) TypeDef, so no
  // destruction of a previously-live member is needed.
  TypeDef& operator=(TypeDef&& that) {
    MOZ_ASSERT(isNone());
    switch (that.tag_) {
      case IsFuncType:
        new (&funcType_) FuncTypeWithId(std::move(that.funcType_));
        break;
      case IsStructType:
        new (&structType_) StructType(std::move(that.structType_));
        break;
      case IsNone:
        break;
    }
    tag_ = that.tag_;
    return *this;
  }

  bool isFuncType() const { return tag_ == IsFuncType; }

  bool isNone() const { return tag_ == IsNone; }

  bool isStructType() const { return tag_ == IsStructType; }

  const FuncTypeWithId& funcType() const {
    MOZ_ASSERT(isFuncType());
    return funcType_;
  }

  FuncTypeWithId& funcType() {
    MOZ_ASSERT(isFuncType());
    return funcType_;
  }

  // p has to point to the funcType_ embedded within a TypeDef for this to be
  // valid. Recovers the enclosing TypeDef by subtracting the member offset.
  static const TypeDef* fromFuncTypeWithIdPtr(const FuncTypeWithId* p) {
    const TypeDef* q =
        (const TypeDef*)((char*)p - offsetof(TypeDef, funcType_));
    MOZ_ASSERT(q->tag_ == IsFuncType);
    return q;
  }

  const StructType& structType() const {
    MOZ_ASSERT(isStructType());
    return structType_;
  }

  StructType& structType() {
    MOZ_ASSERT(isStructType());
    return structType_;
  }

  // p has to point to the struct_ embedded within a TypeDef for this to be
  // valid. Recovers the enclosing TypeDef by subtracting the member offset.
  static const TypeDef* fromStructPtr(const StructType* p) {
    const TypeDef* q =
        (const TypeDef*)((char*)p - offsetof(TypeDef, structType_));
    MOZ_ASSERT(q->tag_ == IsStructType);
    return q;
  }
};

typedef Vector<TypeDef, 0, SystemAllocPolicy> TypeDefVector;
1529
1530
// A wrapper around the bytecode offset of a wasm instruction within a whole
1531
// module, used for trap offsets or call offsets. These offsets should refer to
1532
// the first byte of the instruction that triggered the trap / did the call and
1533
// should ultimately derive from OpIter::bytecodeOffset.
1534
1535
class BytecodeOffset {
1536
static const uint32_t INVALID = -1;
1537
uint32_t offset_;
1538
1539
public:
1540
BytecodeOffset() : offset_(INVALID) {}
1541
explicit BytecodeOffset(uint32_t offset) : offset_(offset) {}
1542
1543
bool isValid() const { return offset_ != INVALID; }
1544
uint32_t offset() const {
1545
MOZ_ASSERT(isValid());
1546
return offset_;
1547
}
1548
};
1549
1550
// A TrapSite (in the TrapSiteVector for a given Trap code) represents a wasm
1551
// instruction at a given bytecode offset that can fault at the given pc offset.
1552
// When such a fault occurs, a signal/exception handler looks up the TrapSite to
1553
// confirm the fault is intended/safe and redirects pc to the trap stub.
1554
1555
struct TrapSite {
  uint32_t pcOffset;        // Code offset of the potentially-faulting pc
  BytecodeOffset bytecode;  // Bytecode offset to report for the trap

  // Default pcOffset is uint32_t(-1), an out-of-range sentinel.
  TrapSite() : pcOffset(-1), bytecode() {}
  TrapSite(uint32_t pcOffset, BytecodeOffset bytecode)
      : pcOffset(pcOffset), bytecode(bytecode) {}

  // Relocates the pc offset (e.g. when code is placed at its final address).
  void offsetBy(uint32_t offset) { pcOffset += offset; }
};

WASM_DECLARE_POD_VECTOR(TrapSite, TrapSiteVector)
1567
1568
// One TrapSiteVector per Trap kind, indexed by Trap.
struct TrapSiteVectorArray
    : EnumeratedArray<Trap, Trap::Limit, TrapSiteVector> {
  // All defined out-of-line; they apply the named operation across every
  // per-Trap vector.
  bool empty() const;
  void clear();
  void swap(TrapSiteVectorArray& rhs);
  void podResizeToFit();

  WASM_DECLARE_SERIALIZABLE(TrapSiteVectorArray)
};
1577
1578
// On trap, the bytecode offset to be reported in callstacks is saved.
1579
1580
struct TrapData {
  // The resumePC indicates where, if the trap doesn't throw, the trap stub
  // should jump to after restoring all register state.
  void* resumePC;

  // The unwoundPC is the PC after adjustment by wasm::StartUnwinding(), which
  // basically unwinds partially-constructed wasm::Frames when pc is in the
  // prologue/epilogue. Stack traces during a trap should use this PC since
  // it corresponds to the JitActivation::wasmExitFP.
  void* unwoundPC;

  Trap trap;               // Which kind of trap occurred
  uint32_t bytecodeOffset; // Bytecode offset to report in callstacks
};
1594
1595
// The (,Callable,Func)Offsets classes are used to record the offsets of
1596
// different key points in a CodeRange during compilation.
1597
1598
struct Offsets {
  explicit Offsets(uint32_t begin = 0, uint32_t end = 0)
      : begin(begin), end(end) {}

  // These define a [begin, end) contiguous range of instructions compiled
  // into a CodeRange.
  uint32_t begin;
  uint32_t end;
};
1607
1608
// Offsets for code with a standard callable prologue/epilogue; additionally
// records where the return instruction is.
struct CallableOffsets : Offsets {
  MOZ_IMPLICIT CallableOffsets(uint32_t ret = 0) : Offsets(), ret(ret) {}

  // The offset of the return instruction precedes 'end' by a variable number
  // of instructions due to out-of-line codegen.
  uint32_t ret;
};
1615
1616
// CallableOffsets for a wasm->JS jit exit, which additionally has a region
// where FP cannot be trusted.
struct JitExitOffsets : CallableOffsets {
  MOZ_IMPLICIT JitExitOffsets()
      : CallableOffsets(), untrustedFPStart(0), untrustedFPEnd(0) {}

  // There are a few instructions in the Jit exit where FP may be trash
  // (because it may have been clobbered by the JS Jit), known as the
  // untrusted FP zone.
  uint32_t untrustedFPStart;
  uint32_t untrustedFPEnd;
};
1626
1627
// CallableOffsets for a function body, which additionally has a normal entry
// (after the table entry) and a tier-transition entry point.
struct FuncOffsets : CallableOffsets {
  MOZ_IMPLICIT FuncOffsets()
      : CallableOffsets(), normalEntry(0), tierEntry(0) {}

  // Function CodeRanges have a table entry which takes an extra signature
  // argument which is checked against the callee's signature before falling
  // through to the normal prologue. The table entry is thus at the beginning
  // of the CodeRange and the normal entry is at some offset after the table
  // entry.
  uint32_t normalEntry;

  // The tierEntry is the point within a function to which the patching code
  // within a Tier-1 function jumps. It could be the instruction following
  // the jump in the Tier-1 function, or the point following the standard
  // prologue within a Tier-2 function.
  uint32_t tierEntry;
};

typedef Vector<FuncOffsets, 0, SystemAllocPolicy> FuncOffsetsVector;
1646
1647
// A CodeRange describes a single contiguous range of code within a wasm
1648
// module's code segment. A CodeRange describes what the code does and, for
1649
// function bodies, the name and source coordinates of the function.
1650
1651
class CodeRange {
1652
public:
1653
enum Kind {
1654
Function, // function definition
1655
InterpEntry, // calls into wasm from C++
1656
JitEntry, // calls into wasm from jit code
1657
ImportInterpExit, // slow-path calling from wasm into C++ interp
1658
ImportJitExit, // fast-path calling from wasm into jit code
1659
BuiltinThunk, // fast-path calling from wasm into a C++ native
1660
TrapExit, // calls C++ to report and jumps to throw stub
1661
DebugTrap, // calls C++ to handle debug event
1662
FarJumpIsland, // inserted to connect otherwise out-of-range insns
1663
Throw // special stack-unwinding stub jumped to by other stubs
1664
};
1665
1666
private:
1667
// All fields are treated as cacheable POD:
1668
uint32_t begin_;
1669
uint32_t ret_;
1670
uint32_t end_;
1671
union {
1672
struct {
1673
uint32_t funcIndex_;
1674
union {
1675
struct {
1676
uint32_t lineOrBytecode_;
1677
uint8_t beginToNormalEntry_;
1678
uint8_t beginToTierEntry_;
1679
} func;
1680
struct {
1681
uint16_t beginToUntrustedFPStart_;
1682
uint16_t beginToUntrustedFPEnd_;
1683
} jitExit;
1684
};
1685
};
1686
Trap trap_;
1687
} u;
1688
Kind kind_ : 8;
1689
1690
public:
1691
CodeRange() = default;
1692
CodeRange(Kind kind, Offsets offsets);
1693
CodeRange(Kind kind, uint32_t funcIndex, Offsets offsets);
1694
CodeRange(Kind kind, CallableOffsets offsets);
1695
CodeRange(Kind kind, uint32_t funcIndex, CallableOffsets);
1696
CodeRange(uint32_t funcIndex, JitExitOffsets offsets);
1697
CodeRange(uint32_t funcIndex, uint32_t lineOrBytecode, FuncOffsets offsets);
1698
1699
void offsetBy(uint32_t offset) {
1700
begin_ += offset;
1701
end_ += offset;
1702
if (hasReturn()) {
1703
ret_ += offset;
1704
}
1705
}
1706
1707
// All CodeRanges have a begin and end.
1708
1709
uint32_t begin() const { return begin_; }
1710
uint32_t end() const { return end_; }
1711
1712
// Other fields are only available for certain CodeRange::Kinds.
1713
1714
Kind kind() const { return kind_; }
1715
1716
bool isFunction() const { return kind() == Function; }
1717
bool isImportExit() const {
1718
return kind() == ImportJitExit || kind() == ImportInterpExit ||
1719
kind() == BuiltinThunk;
1720
}
1721
bool isImportInterpExit() const { return kind() == ImportInterpExit; }
1722
bool isImportJitExit() const { return kind() == ImportJitExit; }
1723
bool isTrapExit() const { return kind() == TrapExit; }
1724
bool isDebugTrap() const { return kind() == DebugTrap; }
1725
bool isThunk() const { return kind() == FarJumpIsland; }
1726
1727
// Function, import exits and trap exits have standard callable prologues
1728
// and epilogues. Asynchronous frame iteration needs to know the offset of
1729
// the return instruction to calculate the frame pointer.
1730
1731
bool hasReturn() const {
1732
return isFunction() || isImportExit() || isDebugTrap();
1733
}
1734
uint32_t ret() const {
1735
MOZ_ASSERT(hasReturn());
1736
return ret_;
1737
}
1738
1739
// Functions, export stubs and import stubs all have an associated function
1740
// index.
1741
1742
bool isJitEntry() const { return kind() == JitEntry; }
1743
bool isInterpEntry() const { return kind() == InterpEntry; }
1744
bool isEntry() const { return isInterpEntry() || isJitEntry(); }
1745
bool hasFuncIndex() const {
1746
return isFunction() || isImportExit() || isEntry();
1747
}
1748
uint32_t funcIndex() const {
1749
MOZ_ASSERT(hasFuncIndex());
1750
return u.funcIndex_;
1751
}
1752
1753
// TrapExit CodeRanges have a Trap field.
1754
1755
Trap trap() const {
1756
MOZ_ASSERT(isTrapExit());
1757
return u.trap_;
1758
}
1759
1760
// Function CodeRanges have two entry points: one for normal calls (with a
1761
// known signature) and one for table calls (which involves dynamic
1762
// signature checking).
1763
1764
uint32_t funcTableEntry() const {
1765
MOZ_ASSERT(isFunction());
1766
return begin_;
1767
}
1768
uint32_t funcNormalEntry() const {
1769
MOZ_ASSERT(isFunction());
1770
return begin_ + u.func.beginToNormalEntry_;
1771
}
1772
uint32_t funcTierEntry() const {
1773
MOZ_ASSERT(isFunction());
1774
return begin_ + u.func.beginToTierEntry_;
1775
}
1776
uint32_t funcLineOrBytecode() const {
1777
MOZ_ASSERT(isFunction());
1778
return u.func.lineOrBytecode_;
1779
}
1780
1781
// ImportJitExit have a particular range where the value of FP can't be
1782
// trusted for profiling and thus must be ignored.
1783
1784
uint32_t jitExitUntrustedFPStart() const {
1785
MOZ_ASSERT(isImportJitExit());
1786
return begin_ + u.jitExit.beginToUntrustedFPStart_;
1787
}
1788
uint32_t jitExitUntrustedFPEnd() const {
1789
MOZ_ASSERT(isImportJitExit());