Source code

Revision control

Other Tools

1
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2
* vim: set ts=8 sts=2 et sw=2 tw=80:
3
*
4
* Copyright 2015 Mozilla Foundation
5
*
6
* Licensed under the Apache License, Version 2.0 (the "License");
7
* you may not use this file except in compliance with the License.
8
* You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
12
* Unless required by applicable law or agreed to in writing, software
13
* distributed under the License is distributed on an "AS IS" BASIS,
14
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
* See the License for the specific language governing permissions and
16
* limitations under the License.
17
*/
18
19
#include "wasm/WasmModule.h"
20
21
#include <chrono>
22
#include <thread>
23
24
#include "builtin/TypedObject.h"
25
#include "jit/JitOptions.h"
26
#include "js/BuildId.h" // JS::BuildIdCharVector
27
#include "threading/LockGuard.h"
28
#include "wasm/WasmBaselineCompile.h"
29
#include "wasm/WasmCompile.h"
30
#include "wasm/WasmInstance.h"
31
#include "wasm/WasmIonCompile.h"
32
#include "wasm/WasmJS.h"
33
#include "wasm/WasmSerialize.h"
34
#include "wasm/WasmUtility.h"
35
36
#include "debugger/DebugAPI-inl.h"
37
#include "vm/ArrayBufferObject-inl.h"
38
#include "vm/JSAtom-inl.h"
39
40
using namespace js;
41
using namespace js::jit;
42
using namespace js::wasm;
43
44
// Helper-thread task that compiles the optimized (tier-2) code for a module
// whose tier-1 (baseline) code is already running. Created by
// Module::startTier2(); its destructor clears the module's tier-2
// bookkeeping whether or not compilation completed.
class Module::Tier2GeneratorTaskImpl : public Tier2GeneratorTask {
  SharedCompileArgs compileArgs_;
  SharedBytes bytecode_;
  SharedModule module_;
  // Set by cancel() (any thread); polled by CompileTier2 on the helper
  // thread to abort compilation early.
  Atomic<bool> cancelled_;

 public:
  Tier2GeneratorTaskImpl(const CompileArgs& compileArgs,
                         const ShareableBytes& bytecode, Module& module)
      : compileArgs_(&compileArgs),
        bytecode_(&bytecode),
        module_(&module),
        cancelled_(false) {}

  ~Tier2GeneratorTaskImpl() override {
    // Clearing these lets Module::testingBlockOnTier2Complete() terminate
    // and drops the listener reference even on cancellation/failure.
    module_->tier2Listener_ = nullptr;
    module_->testingTier2Active_ = false;
  }

  void cancel() override { cancelled_ = true; }

  void runTask() override {
    CompileTier2(*compileArgs_, bytecode_->bytes, *module_, &cancelled_);
  }
  ThreadType threadType() override {
    return ThreadType::THREAD_TYPE_WASM_TIER2;
  }
};
72
73
Module::~Module() {
  // Note: Modules can be destroyed on any thread.
  //
  // The tier-2 generator task (its destructor) or finishTier2() must have
  // cleared these fields before the last reference to the module dies.
  MOZ_ASSERT(!tier2Listener_);
  MOZ_ASSERT(!testingTier2Active_);
}
78
79
// Kick off background tier-2 compilation of this module, to be performed on
// a helper thread while tier-1 code keeps executing. |listener|, if
// non-null, will receive the serialized optimized encoding when tier-2
// completes (see finishTier2()). OOM here is benign: the module simply
// stays at tier 1.
void Module::startTier2(const CompileArgs& args, const ShareableBytes& bytecode,
                        JS::OptimizedEncodingListener* listener) {
  MOZ_ASSERT(!testingTier2Active_);

  auto task = MakeUnique<Tier2GeneratorTaskImpl>(args, bytecode, *this);
  if (!task) {
    return;
  }

  // These will be cleared asynchronously by ~Tier2GeneratorTaskImpl() if not
  // sooner by finishTier2().
  tier2Listener_ = listener;
  testingTier2Active_ = true;

  StartOffThreadWasmTier2Generator(std::move(task));
}
95
96
// Called on the helper thread when tier-2 compilation succeeds: installs the
// optimized code tier, replicates any tier-1 lazy entry stubs at tier 2,
// atomically publishes tier-2 entry points, and notifies the caching
// listener. Returns false only on OOM, in which case the module stays at
// tier 1.
bool Module::finishTier2(const LinkData& linkData2,
                         UniqueCodeTier code2) const {
  MOZ_ASSERT(code().bestTier() == Tier::Baseline &&
             code2->tier() == Tier::Optimized);

  // Install the data in the data structures. They will not be visible
  // until commitTier2().

  if (!code().setTier2(std::move(code2), linkData2)) {
    return false;
  }

  // Before we can make tier-2 live, we need to compile tier2 versions of any
  // extant tier1 lazy stubs (otherwise, tiering would break the assumption
  // that any extant exported wasm function has had a lazy entry stub already
  // compiled for it).
  {
    // We need to prevent new tier1 stubs generation until we've committed
    // the newer tier2 stubs, otherwise we might not generate one tier2
    // stub that has been generated for tier1 before we committed.
    // Holding both stub locks for the whole scope provides that exclusion.

    const MetadataTier& metadataTier1 = metadata(Tier::Baseline);

    auto stubs1 = code().codeTier(Tier::Baseline).lazyStubs().lock();
    auto stubs2 = code().codeTier(Tier::Optimized).lazyStubs().lock();

    MOZ_ASSERT(stubs2->empty());

    // Collect the function exports that (a) have no eager stubs and (b)
    // already got a lazy tier-1 stub; exactly those need tier-2 stubs now.
    Uint32Vector funcExportIndices;
    for (size_t i = 0; i < metadataTier1.funcExports.length(); i++) {
      const FuncExport& fe = metadataTier1.funcExports[i];
      if (fe.hasEagerStubs()) {
        continue;
      }
      if (!stubs1->hasStub(fe.funcIndex())) {
        continue;
      }
      if (!funcExportIndices.emplaceBack(i)) {
        return false;
      }
    }

    const CodeTier& tier2 = code().codeTier(Tier::Optimized);

    Maybe<size_t> stub2Index;
    if (!stubs2->createTier2(funcExportIndices, tier2, &stub2Index)) {
      return false;
    }

    // Now that we can't fail or otherwise abort tier2, make it live.

    MOZ_ASSERT(!code().hasTier2());
    code().commitTier2();

    stubs2->setJitEntries(stub2Index, code());
  }

  // And we update the jump vector.

  uint8_t* base = code().segment(Tier::Optimized).base();
  for (const CodeRange& cr : metadata(Tier::Optimized).codeRanges) {
    // These are racy writes that we just want to be visible, atomically,
    // eventually. All hardware we care about will do this right. But
    // we depend on the compiler not splitting the stores hidden inside the
    // set*Entry functions.
    if (cr.isFunction()) {
      code().setTieringEntry(cr.funcIndex(), base + cr.funcTierEntry());
    } else if (cr.isJitEntry()) {
      code().setJitEntry(cr.funcIndex(), base + cr.begin());
    }
  }

  // Tier-2 is done; let everyone know. Mark tier-2 active for testing
  // purposes so that wasmHasTier2CompilationCompleted() only returns true
  // after tier-2 has been fully cached.

  if (tier2Listener_) {
    serialize(linkData2, *tier2Listener_);
    tier2Listener_ = nullptr;
  }
  testingTier2Active_ = false;

  return true;
}
180
181
void Module::testingBlockOnTier2Complete() const {
182
while (testingTier2Active_) {
183
std::this_thread::sleep_for(std::chrono::milliseconds(1));
184
}
185
}
186
187
/* virtual */
// Number of bytes serialize() will write for this module: build id, link
// data, then each of the module's vectors, then the code. Must be kept in
// exact agreement with serialize() below.
size_t Module::serializedSize(const LinkData& linkData) const {
  JS::BuildIdCharVector buildId;
  {
    // Serialization has already been committed to at this point, so a
    // build-id failure (OOM) is treated as fatal.
    AutoEnterOOMUnsafeRegion oom;
    if (!GetOptimizedEncodingBuildId(&buildId)) {
      oom.crash("getting build id");
    }
  }

  return SerializedPodVectorSize(buildId) + linkData.serializedSize() +
         SerializedVectorSize(imports_) + SerializedVectorSize(exports_) +
         SerializedVectorSize(dataSegments_) +
         SerializedVectorSize(elemSegments_) +
         SerializedVectorSize(customSections_) + code_->serializedSize();
}
203
204
/* virtual */
// Write this module's optimized encoding into [begin, begin + size). The
// field order must mirror serializedSize() and deserialize() exactly. Only
// non-debug modules that actually have a serialized (optimized) tier may be
// serialized.
void Module::serialize(const LinkData& linkData, uint8_t* begin,
                       size_t size) const {
  MOZ_RELEASE_ASSERT(!metadata().debugEnabled);
  MOZ_RELEASE_ASSERT(code_->hasTier(Tier::Serialized));

  JS::BuildIdCharVector buildId;
  {
    AutoEnterOOMUnsafeRegion oom;
    if (!GetOptimizedEncodingBuildId(&buildId)) {
      oom.crash("getting build id");
    }
  }

  uint8_t* cursor = begin;
  cursor = SerializePodVector(cursor, buildId);
  cursor = linkData.serialize(cursor);
  cursor = SerializeVector(cursor, imports_);
  cursor = SerializeVector(cursor, exports_);
  cursor = SerializeVector(cursor, dataSegments_);
  cursor = SerializeVector(cursor, elemSegments_);
  cursor = SerializeVector(cursor, customSections_);
  cursor = code_->serialize(cursor, linkData);
  // Catch any drift between serializedSize() and the writes above.
  MOZ_RELEASE_ASSERT(cursor == begin + size);
}
229
230
/* static */
// Reconstruct a Module from bytes previously produced by serialize(). The
// read order must mirror serialize() exactly. Returns null on OOM or on a
// malformed/truncated cursor. The build id is checked with a release assert
// because a cached encoding from a different build or CPU must never be
// linked into this process.
MutableModule Module::deserialize(const uint8_t* begin, size_t size,
                                  Metadata* maybeMetadata) {
  // |maybeMetadata| is supplied (only) for asm.js modules, which carry
  // their metadata externally; otherwise allocate fresh metadata here.
  MutableMetadata metadata(maybeMetadata);
  if (!metadata) {
    metadata = js_new<Metadata>();
    if (!metadata) {
      return nullptr;
    }
  }

  const uint8_t* cursor = begin;

  JS::BuildIdCharVector currentBuildId;
  if (!GetOptimizedEncodingBuildId(&currentBuildId)) {
    return nullptr;
  }

  JS::BuildIdCharVector deserializedBuildId;
  cursor = DeserializePodVector(cursor, &deserializedBuildId);
  if (!cursor) {
    return nullptr;
  }

  MOZ_RELEASE_ASSERT(EqualContainers(currentBuildId, deserializedBuildId));

  LinkData linkData(Tier::Serialized);
  cursor = linkData.deserialize(cursor);
  if (!cursor) {
    return nullptr;
  }

  ImportVector imports;
  cursor = DeserializeVector(cursor, &imports);
  if (!cursor) {
    return nullptr;
  }

  ExportVector exports;
  cursor = DeserializeVector(cursor, &exports);
  if (!cursor) {
    return nullptr;
  }

  DataSegmentVector dataSegments;
  cursor = DeserializeVector(cursor, &dataSegments);
  if (!cursor) {
    return nullptr;
  }

  ElemSegmentVector elemSegments;
  cursor = DeserializeVector(cursor, &elemSegments);
  if (!cursor) {
    return nullptr;
  }

  CustomSectionVector customSections;
  cursor = DeserializeVector(cursor, &customSections);
  if (!cursor) {
    return nullptr;
  }

  SharedCode code;
  cursor = Code::deserialize(cursor, linkData, *metadata, &code);
  if (!cursor) {
    return nullptr;
  }

  // The whole buffer must have been consumed, and external metadata is
  // supplied exactly when the module is asm.js.
  MOZ_RELEASE_ASSERT(cursor == begin + size);
  MOZ_RELEASE_ASSERT(!!maybeMetadata == code->metadata().isAsmJS());

  // Re-link the name payload to its custom section, which is not itself
  // serialized as part of the metadata.
  if (metadata->nameCustomSectionIndex) {
    metadata->namePayload =
        customSections[*metadata->nameCustomSectionIndex].payload;
  } else {
    MOZ_RELEASE_ASSERT(!metadata->moduleName);
    MOZ_RELEASE_ASSERT(metadata->funcNames.empty());
  }

  return js_new<Module>(*code, std::move(imports), std::move(exports),
                        std::move(dataSegments), std::move(elemSegments),
                        std::move(customSections), nullptr, nullptr, nullptr,
                        /* loggingDeserialized = */ true);
}
314
315
void Module::serialize(const LinkData& linkData,
316
JS::OptimizedEncodingListener& listener) const {
317
auto bytes = MakeUnique<JS::OptimizedEncodingBytes>();
318
if (!bytes || !bytes->resize(serializedSize(linkData))) {
319
return;
320
}
321
322
serialize(linkData, bytes->begin(), bytes->length());
323
324
listener.storeOptimizedEncoding(std::move(bytes));
325
}
326
327
/* virtual */
// Wrap this module in a WebAssembly.Module JS object, lazily initializing
// the WebAssembly global constructors first. Returns null on failure.
JSObject* Module::createObject(JSContext* cx) {
  if (!GlobalObject::ensureConstructor(cx, cx->global(), JSProto_WebAssembly)) {
    return nullptr;
  }

  RootedObject proto(
      cx, &cx->global()->getPrototype(JSProto_WasmModule).toObject());
  return WasmModuleObject::create(cx, *this, proto);
}
337
338
// Compute the build id used to validate cached optimized encodings.
// Returns false on OOM or if the embedding supplied no build-id callback.
bool wasm::GetOptimizedEncodingBuildId(JS::BuildIdCharVector* buildId) {
  // From a JS API perspective, the "build id" covers everything that can
  // cause machine code to become invalid, so include both the actual build-id
  // and cpu-id.

  if (!GetBuildId || !GetBuildId(buildId)) {
    return false;
  }

  uint32_t cpu = ObservedCPUFeatures();

  if (!buildId->reserve(buildId->length() +
                        12 /* "()" + 8 nibbles + "m[+-]" */)) {
    return false;
  }

  // Append the CPU feature bits, low nibble first, each encoded as
  // '0' + nibble. (Nibbles above 9 map to ':'..'?', not hex letters; the
  // id is opaque, so that's fine.) Leading zero nibbles are dropped by the
  // loop condition.
  buildId->infallibleAppend('(');
  while (cpu) {
    buildId->infallibleAppend('0' + (cpu & 0xf));
    cpu >>= 4;
  }
  buildId->infallibleAppend(')');

  // Huge-memory mode changes generated bounds checks, so it must be part of
  // the id as well.
  buildId->infallibleAppend('m');
  buildId->infallibleAppend(wasm::IsHugeMemoryEnabled() ? '+' : '-');

  return true;
}
366
367
/* virtual */
// Accumulate this module's malloc'd memory into *code/*data for memory
// reporting. The seen-sets prevent double-counting metadata and code shared
// across modules/instances.
void Module::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                           Metadata::SeenSet* seenMetadata,
                           Code::SeenSet* seenCode, size_t* code,
                           size_t* data) const {
  code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenCode, code,
                                data);
  *data += mallocSizeOf(this) +
           SizeOfVectorExcludingThis(imports_, mallocSizeOf) +
           SizeOfVectorExcludingThis(exports_, mallocSizeOf) +
           SizeOfVectorExcludingThis(dataSegments_, mallocSizeOf) +
           SizeOfVectorExcludingThis(elemSegments_, mallocSizeOf) +
           SizeOfVectorExcludingThis(customSections_, mallocSizeOf);

  // Unlinked debug code is kept alongside the module only when debugging is
  // enabled; count it when present.
  if (debugUnlinkedCode_) {
    *data += debugUnlinkedCode_->sizeOfExcludingThis(mallocSizeOf);
  }
}
385
386
// Estimate the non-code malloc footprint of this module for GC accounting.
void Module::initGCMallocBytesExcludingCode() {
  // The size doesn't have to be exact so use the serialization framework to
  // calculate a value.
  gcMallocBytesExcludingCode_ = sizeof(*this) + SerializedVectorSize(imports_) +
                                SerializedVectorSize(exports_) +
                                SerializedVectorSize(dataSegments_) +
                                SerializedVectorSize(elemSegments_) +
                                SerializedVectorSize(customSections_);
}
395
396
// Extracting machine code as JS object. The result has the "code" property, as
// a Uint8Array, and the "segments" property as array objects. The objects
// contain offsets in the "code" array and basic information about a code
// segment/function body. Testing-only; sets |vp| to null when the requested
// tier does not exist.
bool Module::extractCode(JSContext* cx, Tier tier,
                         MutableHandleValue vp) const {
  RootedPlainObject result(cx, NewBuiltinClassInstance<PlainObject>(cx));
  if (!result) {
    return false;
  }

  // This function is only used for testing purposes so we can simply
  // block on tiered compilation to complete.
  testingBlockOnTier2Complete();

  if (!code_->hasTier(tier)) {
    vp.setNull();
    return true;
  }

  // Copy the whole machine-code segment into a fresh Uint8Array.
  const ModuleSegment& moduleSegment = code_->segment(tier);
  RootedObject code(cx, JS_NewUint8Array(cx, moduleSegment.length()));
  if (!code) {
    return false;
  }

  memcpy(code->as<TypedArrayObject>().dataPointerUnshared(),
         moduleSegment.base(), moduleSegment.length());

  RootedValue value(cx, ObjectValue(*code));
  if (!JS_DefineProperty(cx, result, "code", value, JSPROP_ENUMERATE)) {
    return false;
  }

  RootedObject segments(cx, NewDenseEmptyArray(cx));
  if (!segments) {
    return false;
  }

  // One descriptor object per code range: begin/end/kind, plus function
  // body bounds for function ranges.
  for (const CodeRange& p : metadata(tier).codeRanges) {
    RootedObject segment(cx, NewObjectWithGivenProto<PlainObject>(cx, nullptr));
    if (!segment) {
      return false;
    }

    value.setNumber((uint32_t)p.begin());
    if (!JS_DefineProperty(cx, segment, "begin", value, JSPROP_ENUMERATE)) {
      return false;
    }

    value.setNumber((uint32_t)p.end());
    if (!JS_DefineProperty(cx, segment, "end", value, JSPROP_ENUMERATE)) {
      return false;
    }

    value.setNumber((uint32_t)p.kind());
    if (!JS_DefineProperty(cx, segment, "kind", value, JSPROP_ENUMERATE)) {
      return false;
    }

    if (p.isFunction()) {
      value.setNumber((uint32_t)p.funcIndex());
      if (!JS_DefineProperty(cx, segment, "funcIndex", value,
                             JSPROP_ENUMERATE)) {
        return false;
      }

      value.setNumber((uint32_t)p.funcNormalEntry());
      if (!JS_DefineProperty(cx, segment, "funcBodyBegin", value,
                             JSPROP_ENUMERATE)) {
        return false;
      }

      value.setNumber((uint32_t)p.end());
      if (!JS_DefineProperty(cx, segment, "funcBodyEnd", value,
                             JSPROP_ENUMERATE)) {
        return false;
      }
    }

    if (!NewbornArrayPush(cx, segments, ObjectValue(*segment))) {
      return false;
    }
  }

  value.setObject(*segments);
  if (!JS_DefineProperty(cx, result, "segments", value, JSPROP_ENUMERATE)) {
    return false;
  }

  vp.setObject(*result);
  return true;
}
489
490
// Compute the concrete i32 value of an (already-validated) segment offset
// initializer: either an immediate constant or the current value of an
// imported global.
static uint32_t EvaluateInitExpr(const ValVector& globalImportValues,
                                 InitExpr initExpr) {
  if (initExpr.kind() == InitExpr::Kind::Constant) {
    return initExpr.val().i32();
  }
  if (initExpr.kind() == InitExpr::Kind::GetGlobal) {
    return globalImportValues[initExpr.globalIndex()].i32();
  }

  MOZ_CRASH("bad initializer expression");
}
501
502
#ifdef DEBUG
// Debug-only predicate: true iff no data segment in |vec| is active, i.e.
// none requires a memory to be initialized at instantiation time.
static bool AllSegmentsArePassive(const DataSegmentVector& vec) {
  for (const DataSegment* seg : vec) {
    if (!seg->active()) {
      continue;
    }
    return false;
  }
  return true;
}
#endif
512
513
// Apply this module's active element and data segments to the instance's
// tables and memory. Under eager bounds checking all ranges are validated
// up front so no partial initialization is observable on error; otherwise
// (bulk-memory semantics) segments are applied in order and the first
// out-of-bounds segment aborts with a runtime error, leaving earlier writes
// in place.
bool Module::initSegments(JSContext* cx, HandleWasmInstanceObject instanceObj,
                          HandleWasmMemoryObject memoryObj,
                          const ValVector& globalImportValues) const {
  MOZ_ASSERT_IF(!memoryObj, AllSegmentsArePassive(dataSegments_));

  Instance& instance = instanceObj->instance();
  const SharedTableVector& tables = instance.tables();

  // Bulk memory changes the error checking behavior: we apply segments
  // in-order and terminate if one has an out-of-bounds range.
  // We enable bulk memory semantics if shared memory is enabled.
#ifdef ENABLE_WASM_BULKMEM_OPS
  // NOTE(review): keying eager bounds checks on the Cranelift option looks
  // surprising here — confirm this is intended rather than a constant
  // |false| under bulk-memory builds.
  const bool eagerBoundsCheck = cx->options().wasmCranelift();
#else
  // Bulk memory must be available if shared memory is enabled.
  const bool eagerBoundsCheck =
      !cx->realm()->creationOptions().getSharedMemoryAndAtomicsEnabled();
#endif

  if (eagerBoundsCheck) {
    // Perform all error checks up front so that this function does not perform
    // partial initialization if an error is reported. In addition, we need to
    // to report OOBs as a link error when bulk-memory is disabled.

    for (const ElemSegment* seg : elemSegments_) {
      if (!seg->active()) {
        continue;
      }

      uint32_t tableLength = tables[seg->tableIndex]->length();
      uint32_t offset = EvaluateInitExpr(globalImportValues, seg->offset());

      // Overflow-safe form of |offset + seg->length() > tableLength|.
      if (offset > tableLength || tableLength - offset < seg->length()) {
        JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                 JSMSG_WASM_BAD_FIT, "elem", "table");
        return false;
      }
    }

    if (memoryObj) {
      uint32_t memoryLength = memoryObj->volatileMemoryLength();
      for (const DataSegment* seg : dataSegments_) {
        if (!seg->active()) {
          continue;
        }

        uint32_t offset = EvaluateInitExpr(globalImportValues, seg->offset());

        if (offset > memoryLength ||
            memoryLength - offset < seg->bytes.length()) {
          JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                   JSMSG_WASM_BAD_FIT, "data", "memory");
          return false;
        }
      }
    }
  }

  // Write data/elem segments into memories/tables.

  for (const ElemSegment* seg : elemSegments_) {
    if (seg->active()) {
      uint32_t offset = EvaluateInitExpr(globalImportValues, seg->offset());
      uint32_t count = seg->length();

      if (!eagerBoundsCheck) {
        // Lazy (bulk-memory) checking: report a runtime trap-style error at
        // the first segment that doesn't fit.
        uint32_t tableLength = tables[seg->tableIndex]->length();
        if (offset > tableLength || tableLength - offset < count) {
          JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                   JSMSG_WASM_OUT_OF_BOUNDS);
          return false;
        }
      }

      if (!instance.initElems(seg->tableIndex, *seg, offset, 0, count)) {
        return false;  // OOM
      }
    }
  }

  if (memoryObj) {
    uint32_t memoryLength = memoryObj->volatileMemoryLength();
    uint8_t* memoryBase =
        memoryObj->buffer().dataPointerEither().unwrap(/* memcpy */);

    for (const DataSegment* seg : dataSegments_) {
      if (!seg->active()) {
        continue;
      }

      uint32_t offset = EvaluateInitExpr(globalImportValues, seg->offset());
      uint32_t count = seg->bytes.length();

      if (!eagerBoundsCheck) {
        if (offset > memoryLength || memoryLength - offset < count) {
          JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                                   JSMSG_WASM_OUT_OF_BOUNDS);
          return false;
        }
      }
      memcpy(memoryBase + offset, seg->bytes.begin(), count);
    }
  }

  return true;
}
619
620
// Map the N'th *function* import to its Import record by skipping over
// imports of other kinds. |funcImportIndex| must be in range; running off
// the end indicates a validation bug upstream.
static const Import& FindImportForFuncImport(const ImportVector& imports,
                                             uint32_t funcImportIndex) {
  uint32_t remaining = funcImportIndex;
  for (const Import& import : imports) {
    if (import.kind == DefinitionKind::Function) {
      if (remaining == 0) {
        return import;
      }
      remaining--;
    }
  }
  MOZ_CRASH("ran out of imports");
}
633
634
// Type-check imported functions: any import that is itself an exported wasm
// function must have a signature identical to the one this module declares
// for that import slot. asm.js skips this (its imports are plain JS
// functions, checked elsewhere).
bool Module::instantiateFunctions(JSContext* cx,
                                  const JSFunctionVector& funcImports) const {
#ifdef DEBUG
  for (auto t : code().tiers()) {
    MOZ_ASSERT(funcImports.length() == metadata(t).funcImports.length());
  }
#endif

  if (metadata().isAsmJS()) {
    return true;
  }

  Tier tier = code().stableTier();

  for (size_t i = 0; i < metadata(tier).funcImports.length(); i++) {
    JSFunction* f = funcImports[i];
    if (!IsWasmExportedFunction(f)) {
      continue;
    }

    // Look the function up in its *own* instance's metadata, which may be
    // at a different tier than this module.
    uint32_t funcIndex = ExportedFunctionToFuncIndex(f);
    Instance& instance = ExportedFunctionToInstance(f);
    Tier otherTier = instance.code().stableTier();

    const FuncExport& funcExport =
        instance.metadata(otherTier).lookupFuncExport(funcIndex);

    if (funcExport.funcType() != metadata(tier).funcImports[i].funcType()) {
      const Import& import = FindImportForFuncImport(imports_, i);
      JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
                               JSMSG_WASM_BAD_IMPORT_SIG, import.module.get(),
                               import.field.get());
      return false;
    }
  }

  return true;
}
672
673
static bool CheckLimits(JSContext* cx, uint32_t declaredMin,
674
const Maybe<uint32_t>& declaredMax,
675
uint32_t actualLength, const Maybe<uint32_t>& actualMax,
676
bool isAsmJS, const char* kind) {
677
if (isAsmJS) {
678
MOZ_ASSERT(actualLength >= declaredMin);
679
MOZ_ASSERT(!declaredMax);
680
MOZ_ASSERT(actualLength == actualMax.value());
681
return true;
682
}
683
684
if (actualLength < declaredMin ||
685
actualLength > declaredMax.valueOr(UINT32_MAX)) {
686
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
687
JSMSG_WASM_BAD_IMP_SIZE, kind);
688
return false;
689
}
690
691
if ((actualMax && declaredMax && *actualMax > *declaredMax) ||
692
(!actualMax && declaredMax)) {
693
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
694
JSMSG_WASM_BAD_IMP_MAX, kind);
695
return false;
696
}
697
698
return true;
699
}
700
701
// Validate the shared-ness of an imported memory: a shared import requires
// the realm to permit shared memory, and the import's shared-ness must
// match the module's declaration exactly.
static bool CheckSharing(JSContext* cx, bool declaredShared, bool isShared) {
  if (isShared &&
      !cx->realm()->creationOptions().getSharedMemoryAndAtomicsEnabled()) {
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              JSMSG_WASM_NO_SHMEM_LINK);
    return false;
  }

  if (declaredShared != isShared) {
    // Declared shared but got unshared, or vice versa.
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
                              declaredShared ? JSMSG_WASM_IMP_SHARED_REQD
                                             : JSMSG_WASM_IMP_SHARED_BANNED);
    return false;
  }

  return true;
}
723
724
// asm.js module instantiation supplies its own buffer, but for wasm, create and
// initialize the buffer if one is requested. Either way, the buffer is wrapped
// in a WebAssembly.Memory object which is what the Instance stores.
bool Module::instantiateMemory(JSContext* cx,
                               MutableHandleWasmMemoryObject memory) const {
  if (!metadata().usesMemory()) {
    // Without a memory there must be no imported memory and no active data
    // segments to initialize.
    MOZ_ASSERT(!memory);
    MOZ_ASSERT(AllSegmentsArePassive(dataSegments_));
    return true;
  }

  uint32_t declaredMin = metadata().minMemoryLength;
  Maybe<uint32_t> declaredMax = metadata().maxMemoryLength;
  bool declaredShared = metadata().memoryUsage == MemoryUsage::Shared;

  if (memory) {
    // Imported memory: validate its limits and shared-ness against the
    // module's declaration.
    MOZ_ASSERT_IF(metadata().isAsmJS(), memory->buffer().isPreparedForAsmJS());
    MOZ_ASSERT_IF(!metadata().isAsmJS(), memory->buffer().isWasm());

    if (!CheckLimits(
            cx, declaredMin, declaredMax, memory->volatileMemoryLength(),
            memory->buffer().wasmMaxSize(), metadata().isAsmJS(), "Memory")) {
      return false;
    }

    if (!CheckSharing(cx, declaredShared, memory->isShared())) {
      return false;
    }
  } else {
    // No import: create a fresh buffer per the declared limits (wasm only;
    // asm.js always supplies a memory when one is used).
    MOZ_ASSERT(!metadata().isAsmJS());

    RootedArrayBufferObjectMaybeShared buffer(cx);
    Limits l(declaredMin, declaredMax,
             declaredShared ? Shareable::True : Shareable::False);
    if (!CreateWasmBuffer(cx, l, &buffer)) {
      return false;
    }

    RootedObject proto(
        cx, &cx->global()->getPrototype(JSProto_WasmMemory).toObject());
    memory.set(WasmMemoryObject::create(cx, buffer, proto));
    if (!memory) {
      return false;
    }
  }

  // The code was compiled assuming a particular bounds-check strategy; the
  // memory we ended up with must agree.
  MOZ_RELEASE_ASSERT(memory->isHuge() == metadata().omitsBoundsChecks);

  return true;
}
774
775
// Validate an imported table's limits against the declaration |td| and
// record it in both the table vector and the table-object vector (the
// latter is what gets re-exported).
bool Module::instantiateImportedTable(JSContext* cx, const TableDesc& td,
                                      Handle<WasmTableObject*> tableObj,
                                      WasmTableObjectVector* tableObjs,
                                      SharedTableVector* tables) const {
  MOZ_ASSERT(tableObj);
  MOZ_ASSERT(!metadata().isAsmJS());

  Table& table = tableObj->table();
  if (!CheckLimits(cx, td.limits.initial, td.limits.maximum, table.length(),
                   table.maximum(), metadata().isAsmJS(), "Table")) {
    return false;
  }

  if (!tables->append(&table)) {
    ReportOutOfMemory(cx);
    return false;
  }

  if (!tableObjs->append(tableObj)) {
    ReportOutOfMemory(cx);
    return false;
  }

  return true;
}
800
801
// Create a table defined by this module itself. Tables that are imported or
// exported get a reflecting WasmTableObject; purely internal tables get a
// bare Table and a null entry in the object vector.
bool Module::instantiateLocalTable(JSContext* cx, const TableDesc& td,
                                   WasmTableObjectVector* tableObjs,
                                   SharedTableVector* tables) const {
  SharedTable table;
  Rooted<WasmTableObject*> tableObj(cx);
  if (td.importedOrExported) {
    tableObj.set(WasmTableObject::create(cx, td.limits, td.kind));
    if (!tableObj) {
      return false;
    }
    table = &tableObj->table();
  } else {
    table = Table::create(cx, td, /* HandleWasmTableObject = */ nullptr);
    if (!table) {
      return false;
    }
  }

  // Note, appending a null pointer for non-exported local tables.
  if (!tableObjs->append(tableObj.get())) {
    ReportOutOfMemory(cx);
    return false;
  }

  if (!tables->emplaceBack(table)) {
    ReportOutOfMemory(cx);
    return false;
  }

  return true;
}
832
833
// Build the instance's table vectors: the first |tableImports.length()|
// declared tables are satisfied by the imports (in order); any remaining
// declared tables are created locally.
bool Module::instantiateTables(JSContext* cx,
                               const WasmTableObjectVector& tableImports,
                               MutableHandle<WasmTableObjectVector> tableObjs,
                               SharedTableVector* tables) const {
  uint32_t tableIndex = 0;
  for (const TableDesc& td : metadata().tables) {
    if (tableIndex < tableImports.length()) {
      Rooted<WasmTableObject*> tableObj(cx, tableImports[tableIndex]);
      if (!instantiateImportedTable(cx, td, tableObj, &tableObjs.get(),
                                    tables)) {
        return false;
      }
    } else {
      if (!instantiateLocalTable(cx, td, &tableObjs.get(), tables)) {
        return false;
      }
    }
    tableIndex++;
  }
  return true;
}
854
855
// Compute the initial value of global |globalIndex| into |result|: imported
// globals take the supplied import value, variables evaluate their
// initializer (constant or get_global of an import), and constants use
// their fixed value. Crashes on a global kind with no value (validation
// rules this out).
static void ExtractGlobalValue(const ValVector& globalImportValues,
                               uint32_t globalIndex, const GlobalDesc& global,
                               MutableHandleVal result) {
  switch (global.kind()) {
    case GlobalKind::Import: {
      result.set(Val(globalImportValues[globalIndex]));
      return;
    }
    case GlobalKind::Variable: {
      const InitExpr& init = global.initExpr();
      switch (init.kind()) {
        case InitExpr::Kind::Constant:
          result.set(Val(init.val()));
          return;
        case InitExpr::Kind::GetGlobal:
          // Validation guarantees the referenced global is an import, so
          // its value is available in globalImportValues.
          result.set(Val(globalImportValues[init.globalIndex()]));
          return;
      }
      break;
    }
    case GlobalKind::Constant: {
      result.set(Val(global.constantValue()));
      return;
    }
  }
  MOZ_CRASH("Not a global value");
}
882
883
// Ensure globalObjs[globalIndex] holds a WasmGlobalObject cell for |global|,
// creating one (with the global's initial value) if the slot is empty or the
// vector is too short. Returns false on OOM.
static bool EnsureGlobalObject(JSContext* cx,
                               const ValVector& globalImportValues,
                               size_t globalIndex, const GlobalDesc& global,
                               WasmGlobalObjectVector& globalObjs) {
  if (globalIndex < globalObjs.length() && globalObjs[globalIndex]) {
    return true;  // cell already exists (e.g. passed in as an import)
  }

  RootedVal val(cx);
  ExtractGlobalValue(globalImportValues, globalIndex, global, &val);
  RootedWasmGlobalObject go(
      cx, WasmGlobalObject::create(cx, val, global.isMutable()));
  if (!go) {
    return false;
  }

  // Grow the vector on demand; intermediate slots stay null.
  if (globalObjs.length() <= globalIndex &&
      !globalObjs.resize(globalIndex + 1)) {
    ReportOutOfMemory(cx);
    return false;
  }

  globalObjs[globalIndex] = go;
  return true;
}
908
909
bool Module::instantiateGlobals(JSContext* cx,
910
const ValVector& globalImportValues,
911
WasmGlobalObjectVector& globalObjs) const {
912
// If there are exported globals that aren't in globalObjs because they
913
// originate in this module or because they were immutable imports that came
914
// in as primitive values then we must create cells in the globalObjs for
915
// them here, as WasmInstanceObject::create() and CreateExportObject() will
916
// need the cells to exist.
917
918
const GlobalDescVector& globals = metadata().globals;
919
920
for (const Export& exp : exports_) {
921
if (exp.kind() != DefinitionKind::Global) {
922
continue;
923
}
924
unsigned globalIndex = exp.globalIndex();
925
const GlobalDesc& global = globals[globalIndex];
926
if (!EnsureGlobalObject(cx, globalImportValues, globalIndex, global,
927
globalObjs)) {
928
return false;
929
}
930
}
931
932
// Imported globals that are not re-exported may also have received only a
933
// primitive value; these globals are always immutable. Assert that we do
934
// not need to create any additional Global objects for such imports.
935
936
#ifdef DEBUG
937
size_t numGlobalImports = 0;
938
for (const Import& import : imports_) {
939
if (import.kind != DefinitionKind::Global) {
940
continue;
941
}
942
size_t globalIndex = numGlobalImports++;
943
const GlobalDesc& global = globals[globalIndex];
944
MOZ_ASSERT(global.importIndex() == globalIndex);
945
MOZ_ASSERT_IF(global.isIndirect(),
946
globalIndex < globalObjs.length() || globalObjs[globalIndex]);
947
}
948
MOZ_ASSERT_IF(!metadata().isAsmJS(),
949
numGlobalImports == globals.length() ||
950
!globals[numGlobalImports].isImport());
951
#endif
952
return true;
953
}
954
955
// Return a Code object suitable for a debug-enabled instance. Returns null
// on OOM.
SharedCode Module::getDebugEnabledCode() const {
  MOZ_ASSERT(metadata().debugEnabled);
  MOZ_ASSERT(debugUnlinkedCode_);
  MOZ_ASSERT(debugLinkData_);

  // The first time through, use the pre-linked code in the module but
  // mark it as having been claimed. Subsequently, instantiate the copy of the
  // code bytes that we keep around for debugging instead, because the
  // debugger may patch the pre-linked code at any time.
  if (debugCodeClaimed_.compareExchange(false, true)) {
    return code_;
  }

  // Build a fresh baseline-tier Code from the retained unlinked bytes:
  // segment, cloned per-tier metadata, code tier, jump tables, and copied
  // struct types, then link it.
  Tier tier = Tier::Baseline;
  auto segment =
      ModuleSegment::create(tier, *debugUnlinkedCode_, *debugLinkData_);
  if (!segment) {
    return nullptr;
  }

  UniqueMetadataTier metadataTier = js::MakeUnique<MetadataTier>(tier);
  if (!metadataTier || !metadataTier->clone(metadata(tier))) {
    return nullptr;
  }

  auto codeTier =
      js::MakeUnique<CodeTier>(std::move(metadataTier), std::move(segment));
  if (!codeTier) {
    return nullptr;
  }

  JumpTables jumpTables;
  if (!jumpTables.init(CompileMode::Once, codeTier->segment(),
                       metadata(tier).codeRanges)) {
    return nullptr;
  }

  StructTypeVector structTypes;
  if (!structTypes.resize(code_->structTypes().length())) {
    return nullptr;
  }
  for (uint32_t i = 0; i < code_->structTypes().length(); i++) {
    if (!structTypes[i].copyFrom(code_->structTypes()[i])) {
      return nullptr;
    }
  }
  MutableCode debugCode =
      js_new<Code>(std::move(codeTier), metadata(), std::move(jumpTables),
                   std::move(structTypes));
  if (!debugCode || !debugCode->initialize(*debugLinkData_)) {
    return nullptr;
  }

  return debugCode;
}
1010
1011
static bool GetFunctionExport(JSContext* cx,
1012
HandleWasmInstanceObject instanceObj,
1013
const JSFunctionVector& funcImports,
1014
const Export& exp, MutableHandleValue val) {
1015
if (exp.funcIndex() < funcImports.length() &&
1016
IsWasmExportedFunction(funcImports[exp.funcIndex()])) {
1017
val.setObject(*funcImports[exp.funcIndex()]);
1018
return true;
1019
}
1020
1021
RootedFunction fun(cx);
1022
if (!instanceObj->getExportedFunction(cx, instanceObj, exp.funcIndex(),
1023
&fun)) {
1024
return false;
1025
}
1026
1027
val.setObject(*fun);
1028
return true;
1029
}
1030
1031
// Build the instance's exports object: one enumerable property per export
// (function, table, memory, or global). For wasm (not asm.js) the object is
// a null-prototype object and is frozen per spec. asm.js modules with a
// single unnamed export use that export's function directly as the exports
// value.
static bool CreateExportObject(JSContext* cx,
                               HandleWasmInstanceObject instanceObj,
                               const JSFunctionVector& funcImports,
                               const WasmTableObjectVector& tableObjs,
                               HandleWasmMemoryObject memoryObj,
                               const WasmGlobalObjectVector& globalObjs,
                               const ExportVector& exports) {
  const Instance& instance = instanceObj->instance();
  const Metadata& metadata = instance.metadata();

  // asm.js special case: a single export with an empty field name means the
  // module's export *is* that function.
  if (metadata.isAsmJS() && exports.length() == 1 &&
      strlen(exports[0].fieldName()) == 0) {
    RootedValue val(cx);
    if (!GetFunctionExport(cx, instanceObj, funcImports, exports[0], &val)) {
      return false;
    }
    instanceObj->initExportsObj(val.toObject());
    return true;
  }

  RootedObject exportObj(cx);
  if (metadata.isAsmJS()) {
    exportObj = NewBuiltinClassInstance<PlainObject>(cx);
  } else {
    // Per spec, wasm exports objects have a null prototype.
    exportObj = NewObjectWithGivenProto<PlainObject>(cx, nullptr);
  }
  if (!exportObj) {
    return false;
  }

  for (const Export& exp : exports) {
    JSAtom* atom =
        AtomizeUTF8Chars(cx, exp.fieldName(), strlen(exp.fieldName()));
    if (!atom) {
      return false;
    }

    RootedId id(cx, AtomToId(atom));
    RootedValue val(cx);
    switch (exp.kind()) {
      case DefinitionKind::Function:
        if (!GetFunctionExport(cx, instanceObj, funcImports, exp, &val)) {
          return false;
        }
        break;
      case DefinitionKind::Table:
        val = ObjectValue(*tableObjs[exp.tableIndex()]);
        break;
      case DefinitionKind::Memory:
        val = ObjectValue(*memoryObj);
        break;
      case DefinitionKind::Global:
        val.setObject(*globalObjs[exp.globalIndex()]);
        break;
    }

    if (!JS_DefinePropertyById(cx, exportObj, id, val, JSPROP_ENUMERATE)) {
      return false;
    }
  }

  // wasm exports objects are frozen per spec; asm.js ones stay mutable.
  if (!metadata.isAsmJS()) {
    if (!JS_FreezeObject(cx, exportObj)) {
      return false;
    }
  }

  instanceObj->initExportsObj(*exportObj);
  return true;
}
1101
1102
#ifdef ENABLE_WASM_GC
// Append the TypedObject machinery for one wasm struct field: its JS-visible
// id (formed by substituting |fieldNo| into |format|, e.g. "_0" or "_0_low"),
// its field type descriptor, and its layout properties.  Returns false on
// OOM/failure.  |format| must contain exactly one %d conversion.
static bool MakeStructField(JSContext* cx, const ValType& v, bool isMutable,
                            const char* format, uint32_t fieldNo,
                            MutableHandleIdVector ids,
                            MutableHandleValueVector fieldTypeObjs,
                            Vector<StructFieldProps>* fieldProps) {
  char buf[20];
  // snprintf (not sprintf) so a longer-than-expected format can never overrun
  // |buf|; the longest name current callers produce ("_4294967295_high") fits.
  snprintf(buf, sizeof(buf), format, fieldNo);

  JSAtom* atom = Atomize(cx, buf, strlen(buf));
  if (!atom) {
    return false;
  }
  RootedId id(cx, AtomToId(atom));

  StructFieldProps props;
  props.isMutable = isMutable;

  // Map the wasm value type onto a TypedObject type descriptor.
  Rooted<TypeDescr*> t(cx);
  switch (v.kind()) {
    case ValType::I32:
      t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
                                                   Scalar::Int32);
      break;
    case ValType::I64:
      // Align for int64 but allocate only an int32, another int32 allocation
      // will follow immediately. JS will see two immutable int32 values but
      // wasm knows it's a single int64. See makeStructTypeDescrs(), below.
      props.alignAsInt64 = true;
      t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
                                                   Scalar::Int32);
      break;
    case ValType::F32:
      t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
                                                   Scalar::Float32);
      break;
    case ValType::F64:
      t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(),
                                                   Scalar::Float64);
      break;
    case ValType::Ref:
      switch (v.refTypeKind()) {
        case RefType::TypeIndex:
          t = GlobalObject::getOrCreateReferenceTypeDescr(
              cx, cx->global(), ReferenceType::TYPE_OBJECT);
          break;
        case RefType::Func:
        case RefType::Any:
        case RefType::Null:
          // All non-typeindex refs are surfaced to JS as anyref.
          t = GlobalObject::getOrCreateReferenceTypeDescr(
              cx, cx->global(), ReferenceType::TYPE_WASM_ANYREF);
          break;
      }
      break;
  }
  MOZ_ASSERT(t != nullptr);

  if (!ids.append(id)) {
    return false;
  }

  if (!fieldTypeObjs.append(ObjectValue(*t))) {
    return false;
  }

  if (!fieldProps->append(props)) {
    return false;
  }

  return true;
}
#endif
1174
1175
// Create one opaque StructTypeDescr per struct type in the module, appending
// them to |structTypeDescrs| in module order.  i64 and (ref T) fields have no
// JS-side representation, so such structs are rendered non-constructible from
// JS (see comments in the loop).  No-op (returns true, appends nothing) when
// the module has no struct types.
bool Module::makeStructTypeDescrs(
    JSContext* cx,
    MutableHandle<StructTypeDescrVector> structTypeDescrs) const {
  // This method must be a no-op if there are no structs.
  if (structTypes().length() == 0) {
    return true;
  }

#ifndef ENABLE_WASM_GC
  // Struct types cannot have been validated without GC support compiled in.
  MOZ_CRASH("Should not have seen any struct types");
#else

#  ifndef JS_HAS_TYPED_OBJECTS
#    error "GC types require TypedObject"
#  endif

  // Not just any prototype object will do, we must have the actual
  // StructTypePrototype.
  RootedObject typedObjectModule(
      cx, GlobalObject::getOrCreateTypedObjectModule(cx, cx->global()));
  if (!typedObjectModule) {
    return false;
  }

  RootedNativeObject toModule(cx, &typedObjectModule->as<NativeObject>());
  RootedObject prototype(
      cx,
      &toModule->getReservedSlot(TypedObjectModuleObject::StructTypePrototype)
           .toObject());

  for (const StructType& structType : structTypes()) {
    RootedIdVector ids(cx);
    RootedValueVector fieldTypeObjs(cx);
    Vector<StructFieldProps> fieldProps(cx);
    bool allowConstruct = true;

    // |k| numbers the wasm-level fields; an i64 field consumes one |k| but
    // produces two JS-level fields ("_k_low"/"_k_high").
    uint32_t k = 0;
    // NOTE: |sf| is deliberately a copy so the isMutable overrides below do
    // not modify the module's StructType.
    for (StructField sf : structType.fields_) {
      const ValType& v = sf.type;
      if (v.kind() == ValType::I64) {
        // TypedObjects don't yet have a notion of int64 fields. Thus
        // we handle int64 by allocating two adjacent int32 fields, the
        // first of them aligned as for int64. We mark these fields as
        // immutable for JS and render the object non-constructible
        // from JS. Wasm however sees one i64 field with appropriate
        // mutability.
        sf.isMutable = false;
        allowConstruct = false;

        // Passing ValType::I64 here sets alignAsInt64 on the low word; the
        // high word is an ordinary int32 (see MakeStructField, above).
        if (!MakeStructField(cx, ValType::I64, sf.isMutable, "_%d_low", k, &ids,
                             &fieldTypeObjs, &fieldProps)) {
          return false;
        }
        if (!MakeStructField(cx, ValType::I32, sf.isMutable, "_%d_high", k++,
                             &ids, &fieldTypeObjs, &fieldProps)) {
          return false;
        }
      } else {
        // TypedObjects don't yet have a sufficient notion of type
        // constraints on TypedObject properties. Thus we handle fields
        // of type (ref T) by marking them as immutable for JS and by
        // rendering the objects non-constructible from JS. Wasm
        // however sees properly-typed (ref T) fields with appropriate
        // mutability.
        if (v.isRef()) {
          sf.isMutable = false;
          allowConstruct = false;
        }

        if (!MakeStructField(cx, v, sf.isMutable, "_%d", k++, &ids,
                             &fieldTypeObjs, &fieldProps)) {
          return false;
        }
      }
    }

    // Types must be opaque, which we ensure here, and sealed, which is true
    // for every TypedObject. If they contain fields of type Ref T then we
    // prevent JS from constructing instances of them.

    Rooted<StructTypeDescr*> structTypeDescr(
        cx, StructMetaTypeDescr::createFromArrays(cx, prototype,
                                                  /* opaque= */ true,
                                                  allowConstruct, ids,
                                                  fieldTypeObjs, fieldProps));

    if (!structTypeDescr || !structTypeDescrs.append(structTypeDescr)) {
      return false;
    }
  }

  return true;
#endif
}
1269
1270
// Instantiate this module: create and initialize a WasmInstanceObject from
// the given import values, build its exports object, run data/elem segment
// initialization, and finally invoke the start function if the module has
// one.  The ordering of these steps is load-bearing — see the inline
// comments.  On failure, returns false with a pending exception or OOM
// report; note the instance may already be visible to content at that point.
bool Module::instantiate(JSContext* cx, ImportValues& imports,
                         HandleObject instanceProto,
                         MutableHandleWasmInstanceObject instance) const {
  // Wasm execution relies on signal handlers for bounds checks / traps.
  MOZ_RELEASE_ASSERT(cx->wasmHaveSignalHandlers);

  if (!instantiateFunctions(cx, imports.funcs)) {
    return false;
  }

  // Use the imported memory if one was supplied; instantiateMemory may
  // create one (or reject a mismatched import).
  RootedWasmMemoryObject memory(cx, imports.memory);
  if (!instantiateMemory(cx, &memory)) {
    return false;
  }

  // Note that tableObjs is sparse: it will be null in slots that contain
  // tables that are neither exported nor imported.

  Rooted<WasmTableObjectVector> tableObjs(cx);
  SharedTableVector tables;
  if (!instantiateTables(cx, imports.tables, &tableObjs, &tables)) {
    return false;
  }

  if (!instantiateGlobals(cx, imports.globalValues, imports.globalObjs)) {
    return false;
  }

  // CreateTlsData reports no error itself, so report OOM here on failure.
  UniqueTlsData tlsData = CreateTlsData(metadata().globalDataLength);
  if (!tlsData) {
    ReportOutOfMemory(cx);
    return false;
  }

  // When debugging is enabled, the instance runs a separate debug-enabled
  // copy of the code together with a DebugState; otherwise it shares the
  // module's code.
  SharedCode code;
  UniqueDebugState maybeDebug;
  if (metadata().debugEnabled) {
    code = getDebugEnabledCode();
    if (!code) {
      ReportOutOfMemory(cx);
      return false;
    }

    // make_unique reports OOM on the context itself, so no report here.
    maybeDebug = cx->make_unique<DebugState>(*code, *this);
    if (!maybeDebug) {
      return false;
    }
  } else {
    code = code_;
  }

  // Create type descriptors for any struct types that the module has.

  Rooted<StructTypeDescrVector> structTypeDescrs(cx);
  if (!makeStructTypeDescrs(cx, &structTypeDescrs)) {
    return false;
  }

  instance.set(WasmInstanceObject::create(
      cx, code, dataSegments_, elemSegments_, std::move(tlsData), memory,
      std::move(tables), std::move(structTypeDescrs.get()), imports.funcs,
      metadata().globals, imports.globalValues, imports.globalObjs,
      instanceProto, std::move(maybeDebug)));
  if (!instance) {
    return false;
  }

  if (!CreateExportObject(cx, instance, imports.funcs, tableObjs.get(), memory,
                          imports.globalObjs, exports_)) {
    return false;
  }

  // Register the instance with the Realm so that it can find out about global
  // events like profiling being enabled in the realm. Registration does not
  // require a fully-initialized instance and must precede initSegments as the
  // final pre-requisite for a live instance.

  if (!cx->realm()->wasm.registerInstance(cx, instance)) {
    return false;
  }

  // Perform initialization as the final step after the instance is fully
  // constructed since this can make the instance live to content (even if the
  // start function fails).

  if (!initSegments(cx, instance, memory, imports.globalValues)) {
    return false;
  }

  // Now that the instance is fully live and initialized, the start function.
  // Note that failure may cause instantiation to throw, but the instance may
  // still be live via edges created by initSegments or the start function.

  if (metadata().startFuncIndex) {
    FixedInvokeArgs<0> args(cx);
    if (!instance->instance().callExport(cx, *metadata().startFuncIndex,
                                         args)) {
      return false;
    }
  }

  // Telemetry: record whether this was asm.js or real wasm.
  JSUseCounter useCounter =
      metadata().isAsmJS() ? JSUseCounter::ASMJS : JSUseCounter::WASM;
  cx->runtime()->setUseCounter(instance, useCounter);

  // Test mode only: block until background tier-2 compilation completes.
  if (cx->options().testWasmAwaitTier2()) {
    testingBlockOnTier2Complete();
  }

  return true;
}