/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/IonBuilder.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/ScopeExit.h"

#include "builtin/Eval.h"
#include "builtin/TypedObject.h"
#include "frontend/SourceNotes.h"
#include "jit/BaselineFrame.h"
#include "jit/BaselineInspector.h"
#include "jit/CacheIR.h"
#include "jit/Ion.h"
#include "jit/IonControlFlow.h"
#include "jit/IonOptimizationLevels.h"
#include "jit/JitSpewer.h"
#include "jit/Lowering.h"
#include "jit/MIRGraph.h"
#include "vm/ArgumentsObject.h"
#include "vm/EnvironmentObject.h"
#include "vm/Opcodes.h"
#include "vm/RegExpStatics.h"
#include "vm/SelfHosting.h"
#include "vm/TraceLogging.h"

#include "gc/Nursery-inl.h"
#include "jit/CompileInfo-inl.h"
#include "jit/shared/Lowering-shared-inl.h"
#include "vm/BytecodeUtil-inl.h"
#include "vm/EnvironmentObject-inl.h"
#include "vm/JSScript-inl.h"
#include "vm/NativeObject-inl.h"
#include "vm/ObjectGroup-inl.h"

using namespace js;
using namespace js::jit;

using mozilla::AssertedCast;
using mozilla::DebugOnly;
using mozilla::Maybe;
using mozilla::Nothing;

using JS::TrackedOutcome;
using JS::TrackedStrategy;
using JS::TrackedTypeSite;

class jit::BaselineFrameInspector {
 public:
  TypeSet::Type thisType;
  JSObject* singletonEnvChain;

  Vector<TypeSet::Type, 4, JitAllocPolicy> argTypes;
  Vector<TypeSet::Type, 4, JitAllocPolicy> varTypes;

  explicit BaselineFrameInspector(TempAllocator* temp)
      : thisType(TypeSet::UndefinedType()),
        singletonEnvChain(nullptr),
        argTypes(*temp),
        varTypes(*temp) {}
};

BaselineFrameInspector* jit::NewBaselineFrameInspector(TempAllocator* temp,
                                                       BaselineFrame* frame) {
  MOZ_ASSERT(frame);

  BaselineFrameInspector* inspector =
      temp->lifoAlloc()->new_<BaselineFrameInspector>(temp);
  if (!inspector) {
    return nullptr;
  }

  // Note: copying the actual values into a temporary structure for use
  // during compilation could capture nursery pointers, so the values' types
  // are recorded instead.

  if (frame->isFunctionFrame()) {
    inspector->thisType =
        TypeSet::GetMaybeUntrackedValueType(frame->thisArgument());
  }

  if (frame->environmentChain()->isSingleton()) {
    inspector->singletonEnvChain = frame->environmentChain();
  }

  JSScript* script = frame->script();

  if (script->functionNonDelazifying()) {
    if (!inspector->argTypes.reserve(frame->numFormalArgs())) {
      return nullptr;
    }
    for (size_t i = 0; i < frame->numFormalArgs(); i++) {
      if (script->formalIsAliased(i)) {
        inspector->argTypes.infallibleAppend(TypeSet::UndefinedType());
      } else if (!script->argsObjAliasesFormals()) {
        TypeSet::Type type =
            TypeSet::GetMaybeUntrackedValueType(frame->unaliasedFormal(i));
        inspector->argTypes.infallibleAppend(type);
      } else if (frame->hasArgsObj()) {
        TypeSet::Type type =
            TypeSet::GetMaybeUntrackedValueType(frame->argsObj().arg(i));
        inspector->argTypes.infallibleAppend(type);
      } else {
        inspector->argTypes.infallibleAppend(TypeSet::UndefinedType());
      }
    }
  }

  if (!inspector->varTypes.reserve(frame->numValueSlots())) {
    return nullptr;
  }
  for (size_t i = 0; i < frame->numValueSlots(); i++) {
    TypeSet::Type type =
        TypeSet::GetMaybeUntrackedValueType(*frame->valueSlot(i));
    inspector->varTypes.infallibleAppend(type);
  }

  return inspector;
}

IonBuilder::IonBuilder(JSContext* analysisContext, CompileRealm* realm,
                       const JitCompileOptions& options, TempAllocator* temp,
                       MIRGraph* graph, CompilerConstraintList* constraints,
                       BaselineInspector* inspector, CompileInfo* info,
                       const OptimizationInfo* optimizationInfo,
                       BaselineFrameInspector* baselineFrame,
                       size_t inliningDepth, uint32_t loopDepth)
    : MIRGenerator(realm, options, temp, graph, info, optimizationInfo),
      backgroundCodegen_(nullptr),
      actionableAbortScript_(nullptr),
      actionableAbortPc_(nullptr),
      actionableAbortMessage_(nullptr),
      rootList_(nullptr),
      analysisContext(analysisContext),
      baselineFrame_(baselineFrame),
      constraints_(constraints),
      thisTypes(nullptr),
      argTypes(nullptr),
      typeArray(nullptr),
      typeArrayHint(0),
      bytecodeTypeMap(nullptr),
      current(nullptr),
      loopDepth_(loopDepth),
      blockWorklist(*temp),
      cfgCurrent(nullptr),
      cfg(nullptr),
      trackedOptimizationSites_(*temp),
      lexicalCheck_(nullptr),
      callerResumePoint_(nullptr),
      callerBuilder_(nullptr),
      iterators_(*temp),
      loopHeaders_(*temp),
      loopHeaderStack_(*temp),
#ifdef DEBUG
      cfgLoopHeaderStack_(*temp),
#endif
      inspector(inspector),
      inliningDepth_(inliningDepth),
      inlinedBytecodeLength_(0),
      numLoopRestarts_(0),
      failedBoundsCheck_(info->script()->failedBoundsCheck()),
      failedShapeGuard_(info->script()->failedShapeGuard()),
      failedLexicalCheck_(info->script()->failedLexicalCheck()),
#ifdef DEBUG
      hasLazyArguments_(false),
#endif
      inlineCallInfo_(nullptr),
      maybeFallbackFunctionGetter_(nullptr) {
  script_ = info->script();
  scriptHasIonScript_ = script_->hasIonScript();
  pc = info->startPC();

  MOZ_ASSERT(script()->hasBaselineScript() ==
             (info->analysisMode() != Analysis_ArgumentsUsage));
  MOZ_ASSERT(!!analysisContext ==
             (info->analysisMode() == Analysis_DefiniteProperties));
  MOZ_ASSERT(script_->numBytecodeTypeSets() < JSScript::MaxBytecodeTypeSets);

  if (!info->isAnalysis()) {
    script()->baselineScript()->setIonCompiledOrInlined();
  }
}

void IonBuilder::clearForBackEnd() {
  MOZ_ASSERT(!analysisContext);
  baselineFrame_ = nullptr;
}

mozilla::GenericErrorResult<AbortReason> IonBuilder::abort(AbortReason r) {
  auto res = this->MIRGenerator::abort(r);
#ifdef DEBUG
  JitSpew(JitSpew_IonAbort, "aborted @ %s:%d", script()->filename(),
          PCToLineNumber(script(), pc));
#else
  JitSpew(JitSpew_IonAbort, "aborted @ %s", script()->filename());
#endif
  return res;
}

mozilla::GenericErrorResult<AbortReason> IonBuilder::abort(AbortReason r,
                                                           const char* message,
                                                           ...) {
  // Don't call PCToLineNumber in release builds.
  va_list ap;
  va_start(ap, message);
  auto res = this->MIRGenerator::abortFmt(r, message, ap);
  va_end(ap);
#ifdef DEBUG
  JitSpew(JitSpew_IonAbort, "aborted @ %s:%d", script()->filename(),
          PCToLineNumber(script(), pc));
#else
  JitSpew(JitSpew_IonAbort, "aborted @ %s", script()->filename());
#endif
  trackActionableAbort(message);
  return res;
}

IonBuilder* IonBuilder::outermostBuilder() {
  IonBuilder* builder = this;
  while (builder->callerBuilder_) {
    builder = builder->callerBuilder_;
  }
  return builder;
}

void IonBuilder::trackActionableAbort(const char* message) {
  if (!isOptimizationTrackingEnabled()) {
    return;
  }

  IonBuilder* topBuilder = outermostBuilder();
  if (topBuilder->hadActionableAbort()) {
    return;
  }

  topBuilder->actionableAbortScript_ = script();
  topBuilder->actionableAbortPc_ = pc;
  topBuilder->actionableAbortMessage_ = message;
}

void IonBuilder::spew(const char* message) {
  // Don't call PCToLineNumber in release builds.
#ifdef DEBUG
  JitSpew(JitSpew_IonMIR, "%s @ %s:%d", message, script()->filename(),
          PCToLineNumber(script(), pc));
#endif
}

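// Returns the unique JSFunction callee implied by |calleeTypes|, or nullptr
// when the callee is unknown or could be more than one function.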
JSFunction* IonBuilder::getSingleCallTarget(TemporaryTypeSet* calleeTypes) {
  if (!calleeTypes) {
    return nullptr;
  }

  TemporaryTypeSet::ObjectKey* key = calleeTypes->maybeSingleObject();
  if (!key || key->clasp() != &JSFunction::class_) {
    return nullptr;
  }

  if (key->isSingleton()) {
    return &key->singleton()->as<JSFunction>();
  }

  if (JSFunction* fun = key->group()->maybeInterpretedFunction()) {
    return fun;
  }

  return nullptr;
}

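// Collects up to |maxTargets| potential callees from |calleeTypes| into
// |targets|; the list is left empty when the callee set is unknown or not
// suitable for inlining.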
AbortReasonOr<Ok> IonBuilder::getPolyCallTargets(TemporaryTypeSet* calleeTypes,
                                                 bool constructing,
                                                 InliningTargets& targets,
                                                 uint32_t maxTargets) {
  MOZ_ASSERT(targets.empty());

  if (!calleeTypes) {
    return Ok();
  }

  if (calleeTypes->baseFlags() != 0) {
    return Ok();
  }

  unsigned objCount = calleeTypes->getObjectCount();

  if (objCount == 0 || objCount > maxTargets) {
    return Ok();
  }

  if (!targets.reserve(objCount)) {
    return abort(AbortReason::Alloc);
  }
  for (unsigned i = 0; i < objCount; i++) {
    JSObject* obj = calleeTypes->getSingleton(i);
    ObjectGroup* group = nullptr;
    if (obj) {
      MOZ_ASSERT(obj->isSingleton());
    } else {
      group = calleeTypes->getGroup(i);
      if (!group) {
        continue;
      }

      obj = group->maybeInterpretedFunction();
      if (!obj) {
        targets.clear();
        return Ok();
      }

      MOZ_ASSERT(!obj->isSingleton());
    }

    // Don't optimize if the callee is not callable or constructable per
    // the manner it is being invoked, so that CallKnown does not have to
    // handle these cases (they will always throw).
    if (constructing ? !obj->isConstructor() : !obj->isCallable()) {
      targets.clear();
      return Ok();
    }

    targets.infallibleAppend(InliningTarget(obj, group));
  }

  return Ok();
}

IonBuilder::InliningDecision IonBuilder::DontInline(JSScript* targetScript,
                                                    const char* reason) {
  if (targetScript) {
    JitSpew(JitSpew_Inlining, "Cannot inline %s:%u:%u %s",
            targetScript->filename(), targetScript->lineno(),
            targetScript->column(), reason);
  } else {
    JitSpew(JitSpew_Inlining, "Cannot inline: %s", reason);
  }

  return InliningDecision_DontInline;
}

/*
 * |hasCommonInliningPath| determines whether the current inlining path has been
 * seen before based on the sequence of scripts in the chain of |IonBuilder|s.
 *
 * An inlining path for a function |f| is the sequence of functions whose
 * inlinings precede |f| up to any previous occurrences of |f|.
 * So, if we have the chain of inlinings
 *
 *   f1 -> f2 -> f -> f3 -> f4 -> f5 -> f
 *   --------         --------------
 *
 * the inlining paths for |f| are [f2, f1] and [f5, f4, f3].
 * When attempting to inline |f|, we find all existing inlining paths for |f|
 * and check whether they share a common prefix with the path created were |f|
 * inlined.
 *
 * For example, given mutually recursive functions |f| and |g|, a possible
 * inlining is
 *
 *                      +---- Inlining stopped here...
 *                      |
 *                      v
 *   a -> f -> g -> f \ -> g -> f -> g -> ...
 *
 * where the vertical bar denotes the termination of inlining.
 * Inlining is terminated because we have already observed the inlining path
 * [f] when inlining function |g|. Note that this will inline recursive
 * functions such as |fib| only one level, as |fib| has a zero length inlining
 * path which trivially prefixes all inlining paths.
 *
 */
bool IonBuilder::hasCommonInliningPath(const JSScript* scriptToInline) {
  // Find all previous inlinings of the |scriptToInline| and check for common
  // inlining paths with the top of the inlining stack.
  for (IonBuilder* it = this->callerBuilder_; it; it = it->callerBuilder_) {
    if (it->script() != scriptToInline) {
      continue;
    }

    // This only needs to check the top of each stack for a match,
    // as a match of length one ensures a common prefix.
    IonBuilder* path = it->callerBuilder_;
    if (!path || this->script() == path->script()) {
      return true;
    }
  }

  return false;
}

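// Decides whether |target| may be inlined at this call site, recording a
// tracked outcome and spewing the reason whenever inlining is refused.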
IonBuilder::InliningDecision IonBuilder::canInlineTarget(JSFunction* target,
                                                         CallInfo& callInfo) {
  if (!optimizationInfo().inlineInterpreted()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineGeneric);
    return InliningDecision_DontInline;
  }

  if (TraceLogTextIdEnabled(TraceLogger_InlinedScripts)) {
    return DontInline(nullptr,
                      "Tracelogging of inlined scripts is enabled "
                      "but Tracelogger cannot do that yet.");
  }

  if (!target->isInterpreted()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineNotInterpreted);
    return DontInline(nullptr, "Non-interpreted target");
  }

  // Never inline scripted cross-realm calls.
  if (target->realm() != script()->realm()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineCrossRealm);
    return DontInline(nullptr, "Cross-realm call");
  }

  if (info().analysisMode() != Analysis_DefiniteProperties) {
    // If |this| or an argument has an empty resultTypeSet, don't bother
    // inlining, as the call is currently unreachable due to incomplete type
    // information. This does not apply to the definite properties analysis,
    // in that case we want to inline anyway.

    if (callInfo.thisArg()->emptyResultTypeSet()) {
      trackOptimizationOutcome(TrackedOutcome::CantInlineUnreachable);
      return DontInline(nullptr, "Empty TypeSet for |this|");
    }

    for (size_t i = 0; i < callInfo.argc(); i++) {
      if (callInfo.getArg(i)->emptyResultTypeSet()) {
        trackOptimizationOutcome(TrackedOutcome::CantInlineUnreachable);
        return DontInline(nullptr, "Empty TypeSet for argument");
      }
    }
  }

  // Allow constructing lazy scripts when performing the definite properties
  // analysis, as baseline has not been used to warm the caller up yet.
  if (target->isInterpreted() &&
      info().analysisMode() == Analysis_DefiniteProperties) {
    RootedFunction fun(analysisContext, target);
    RootedScript script(analysisContext,
                        JSFunction::getOrCreateScript(analysisContext, fun));
    if (!script) {
      return InliningDecision_Error;
    }

    if (!script->hasBaselineScript() && script->canBaselineCompile()) {
      MethodStatus status = BaselineCompile(analysisContext, script);
      if (status == Method_Error) {
        return InliningDecision_Error;
      }
      if (status != Method_Compiled) {
        trackOptimizationOutcome(TrackedOutcome::CantInlineNoBaseline);
        return InliningDecision_DontInline;
      }
    }
  }

  if (!target->hasScript()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineLazy);
    return DontInline(nullptr, "Lazy script");
  }

  JSScript* inlineScript = target->nonLazyScript();
  if (callInfo.constructing() && !target->isConstructor()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineNotConstructor);
    return DontInline(inlineScript, "Callee is not a constructor");
  }

  if (!callInfo.constructing() && target->isClassConstructor()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineClassConstructor);
    return DontInline(inlineScript, "Not constructing class constructor");
  }

  if (!CanIonInlineScript(inlineScript)) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineDisabledIon);
    return DontInline(inlineScript, "Disabled Ion compilation");
  }

  // Don't inline functions which don't have baseline scripts.
  if (!inlineScript->hasBaselineScript()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineNoBaseline);
    return DontInline(inlineScript, "No baseline jitcode");
  }

  // Don't inline functions with a higher optimization level.
  if (!isHighestOptimizationLevel()) {
    OptimizationLevel level = optimizationLevel();
    if (inlineScript->hasIonScript() &&
        (inlineScript->ionScript()->isRecompiling() ||
         inlineScript->ionScript()->optimizationLevel() > level)) {
      return DontInline(inlineScript, "More optimized");
    }
    if (IonOptimizations.levelForScript(inlineScript, nullptr) > level) {
      return DontInline(inlineScript, "Should be more optimized");
    }
  }

  if (TooManyFormalArguments(target->nargs())) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineTooManyArgs);
    return DontInline(inlineScript, "Too many args");
  }

  // We check the number of actual arguments against the maximum number of
  // formal arguments as we do not want to encode all actual arguments in the
  // callerResumePoint.
  if (TooManyFormalArguments(callInfo.argc())) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineTooManyArgs);
    return DontInline(inlineScript, "Too many actual args");
  }

  if (hasCommonInliningPath(inlineScript)) {
    trackOptimizationOutcome(TrackedOutcome::HasCommonInliningPath);
    return DontInline(inlineScript, "Common inlining path");
  }

  if (inlineScript->uninlineable()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineGeneric);
    return DontInline(inlineScript, "Uninlineable script");
  }

  if (inlineScript->needsArgsObj()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineNeedsArgsObj);
    return DontInline(inlineScript, "Script that needs an arguments object");
  }

  if (inlineScript->isDebuggee()) {
    trackOptimizationOutcome(TrackedOutcome::CantInlineDebuggee);
    return DontInline(inlineScript, "Script is debuggee");
  }

  return InliningDecision_Inline;
}

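// Seeds the loop-header phis with the types that may flow around the
// backedge before the loop body itself has been built; see the comments in
// the body below.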
AbortReasonOr<Ok> IonBuilder::analyzeNewLoopTypes(
    const CFGBlock* loopEntryBlock) {
  CFGLoopEntry* loopEntry = loopEntryBlock->stopIns()->toLoopEntry();
  CFGBlock* cfgBlock = loopEntry->successor();
  MBasicBlock* entry = blockWorklist[cfgBlock->id()];
  MOZ_ASSERT(!entry->isDead());

  // The phi inputs at the loop head only reflect types for variables that
  // were present at the start of the loop. If the variable changes to a new
  // type within the loop body, and that type is carried around to the loop
  // head, then we need to know about the new type up front.
  //
  // Since SSA information hasn't been constructed for the loop body yet, we
  // need a separate analysis to pick out the types that might flow around
  // the loop header. This is a best-effort analysis that may either over-
  // or under-approximate the set of such types.
  //
  // Over-approximating the types may lead to inefficient generated code, and
  // under-approximating the types will cause the loop body to be analyzed
  // multiple times as the correct types are deduced (see finishLoop).

  // If we restarted processing of an outer loop then get loop header types
  // directly from the last time we have previously processed this loop. This
  // both avoids repeated work from the bytecode traverse below, and will
  // also pick up types discovered while previously building the loop body.
  bool foundEntry = false;
  for (size_t i = 0; i < loopHeaders_.length(); i++) {
    if (loopHeaders_[i].pc == cfgBlock->startPc()) {
      MBasicBlock* oldEntry = loopHeaders_[i].header;

      // If this block has been discarded, its resume points will have
      // already discarded their operands.
      if (oldEntry->isDead()) {
        loopHeaders_[i].header = entry;
        foundEntry = true;
        break;
      }

      MResumePoint* oldEntryRp = oldEntry->entryResumePoint();
      size_t stackDepth = oldEntryRp->stackDepth();
      for (size_t slot = 0; slot < stackDepth; slot++) {
        MDefinition* oldDef = oldEntryRp->getOperand(slot);
        if (!oldDef->isPhi()) {
          MOZ_ASSERT(oldDef->block()->id() < oldEntry->id());
          MOZ_ASSERT(oldDef == entry->getSlot(slot));
          continue;
        }
        MPhi* oldPhi = oldDef->toPhi();
        MPhi* newPhi = entry->getSlot(slot)->toPhi();
        if (!newPhi->addBackedgeType(alloc(), oldPhi->type(),
                                     oldPhi->resultTypeSet())) {
          return abort(AbortReason::Alloc);
        }
      }

      // Update the most recent header for this loop encountered, in case
      // new types flow to the phis and the loop is processed at least
      // three times.
      loopHeaders_[i].header = entry;
      return Ok();
    }
  }
  if (!foundEntry) {
    if (!loopHeaders_.append(LoopHeader(cfgBlock->startPc(), entry))) {
      return abort(AbortReason::Alloc);
    }
  }

  if (loopEntry->isForIn()) {
    // The backedge will have MIteratorMore with MIRType::Value. This slot
    // is initialized to MIRType::Undefined before the loop. Add
    // MIRType::Value to avoid unnecessary loop restarts.

    MPhi* phi = entry->getSlot(entry->stackDepth() - 1)->toPhi();
    MOZ_ASSERT(phi->getOperand(0)->type() == MIRType::Undefined);

    if (!phi->addBackedgeType(alloc(), MIRType::Value, nullptr)) {
      return abort(AbortReason::Alloc);
    }
  }

  // Get the start and end pc of this loop.
  jsbytecode* start = loopEntryBlock->stopPc();
  start += GetBytecodeLength(start);
  jsbytecode* end = loopEntry->loopStopPc();

  // Iterate the bytecode quickly to seed possible types in the loop header.
  jsbytecode* last = nullptr;
  jsbytecode* earlier = nullptr;
  for (jsbytecode* pc = start; pc != end;
       earlier = last, last = pc, pc += GetBytecodeLength(pc)) {
    uint32_t slot;
    if (*pc == JSOP_SETLOCAL) {
      slot = info().localSlot(GET_LOCALNO(pc));
    } else if (*pc == JSOP_SETARG) {
      slot = info().argSlotUnchecked(GET_ARGNO(pc));
    } else {
      continue;
    }
    if (slot >= info().firstStackSlot()) {
      continue;
    }
    if (!last) {
      continue;
    }

    MPhi* phi = entry->getSlot(slot)->toPhi();

    if (*last == JSOP_POS || *last == JSOP_TONUMERIC) {
      last = earlier;
    }

    if (CodeSpec[*last].format & JOF_TYPESET) {
      TemporaryTypeSet* typeSet = bytecodeTypes(last);
      if (!typeSet->empty()) {
        MIRType type = typeSet->getKnownMIRType();
        if (!phi->addBackedgeType(alloc(), type, typeSet)) {
          return abort(AbortReason::Alloc);
        }
      }
    } else if (*last == JSOP_GETLOCAL || *last == JSOP_GETARG) {
      uint32_t slot = (*last == JSOP_GETLOCAL)
                          ? info().localSlot(GET_LOCALNO(last))
                          : info().argSlotUnchecked(GET_ARGNO(last));
      if (slot < info().firstStackSlot()) {
        MPhi* otherPhi = entry->getSlot(slot)->toPhi();
        if (otherPhi->hasBackedgeType()) {
          if (!phi->addBackedgeType(alloc(), otherPhi->type(),
                                    otherPhi->resultTypeSet())) {
            return abort(AbortReason::Alloc);
          }
        }
      }
    } else {
      MIRType type = MIRType::None;
      switch (*last) {
        case JSOP_VOID:
        case JSOP_UNDEFINED:
          type = MIRType::Undefined;
          break;
        case JSOP_GIMPLICITTHIS:
          if (!script()->hasNonSyntacticScope()) {
            type = MIRType::Undefined;
          }
          break;
        case JSOP_NULL:
          type = MIRType::Null;
          break;
        case JSOP_ZERO:
        case JSOP_ONE:
        case JSOP_INT8:
        case JSOP_INT32:
        case JSOP_UINT16:
        case JSOP_UINT24:
        case JSOP_RESUMEINDEX:
        case JSOP_BITAND:
        case JSOP_BITOR:
        case JSOP_BITXOR:
        case JSOP_BITNOT:
        case JSOP_RSH:
        case JSOP_LSH:
        case JSOP_URSH:
          type = MIRType::Int32;
          break;
        case JSOP_FALSE:
        case JSOP_TRUE:
        case JSOP_EQ:
        case JSOP_NE:
        case JSOP_LT:
        case JSOP_LE:
        case JSOP_GT:
        case JSOP_GE:
        case JSOP_NOT:
        case JSOP_STRICTEQ:
        case JSOP_STRICTNE:
        case JSOP_IN:
        case JSOP_INSTANCEOF:
        case JSOP_HASOWN:
          type = MIRType::Boolean;
          break;
        case JSOP_DOUBLE:
          type = MIRType::Double;
          break;
        case JSOP_ITERNEXT:
        case JSOP_STRING:
        case JSOP_TOSTRING:
        case JSOP_TYPEOF:
        case JSOP_TYPEOFEXPR:
          type = MIRType::String;
          break;
        case JSOP_SYMBOL:
          type = MIRType::Symbol;
          break;
        case JSOP_ADD:
        case JSOP_SUB:
        case JSOP_MUL:
        case JSOP_DIV:
        case JSOP_MOD:
        case JSOP_NEG:
        case JSOP_INC:
        case JSOP_DEC:
          type = inspector->expectedResultType(last);
          break;
        case JSOP_BIGINT:
          type = MIRType::BigInt;
          break;
        default:
          break;
      }
      if (type != MIRType::None) {
        if (!phi->addBackedgeType(alloc(), type, nullptr)) {
          return abort(AbortReason::Alloc);
        }
      }
    }
  }
  return Ok();
}

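// Per-compilation setup: freeze the type sets the builder will consult (via
// JitScript::FreezeTypeSets) and fetch the script's bytecode type map.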
AbortReasonOr<Ok> IonBuilder::init() {
  {
    LifoAlloc::AutoFallibleScope fallibleAllocator(alloc().lifoAlloc());
    if (!JitScript::FreezeTypeSets(constraints(), script(), &thisTypes,
                                   &argTypes, &typeArray)) {
      return abort(AbortReason::Alloc);
    }
  }

  if (!alloc().ensureBallast()) {
    return abort(AbortReason::Alloc);
  }

  if (inlineCallInfo_) {
    // If we're inlining, the actual this/argument types are not necessarily
    // a subset of the script's observed types. |argTypes| is never accessed
    // for inlined scripts, so we just null it.
    thisTypes = inlineCallInfo_->thisArg()->resultTypeSet();
    argTypes = nullptr;
  }

  bytecodeTypeMap = script()->jitScript()->bytecodeTypeMap();

  return Ok();
}

AbortReasonOr<Ok> IonBuilder::build() {
  // Spew IC info for inlined script, but only when actually compiling,
  // not when analyzing it.
#ifdef JS_STRUCTURED_SPEW
  if (!info().isAnalysis()) {
    JitSpewBaselineICStats(script(), "To-Be-Compiled");
  }
#endif

  MOZ_TRY(init());

  // The BaselineScript-based inlining heuristics only affect the highest
  // optimization level. Other levels do almost no inlining and we don't want to
  // overwrite data from the highest optimization tier.
  if (script()->hasBaselineScript() && isHighestOptimizationLevel()) {
    script()->baselineScript()->resetMaxInliningDepth();
  }

  MBasicBlock* entry;
  MOZ_TRY_VAR(entry, newBlock(info().firstStackSlot(), pc));
  MOZ_TRY(setCurrentAndSpecializePhis(entry));

#ifdef JS_JITSPEW
  if (info().isAnalysis()) {
    JitSpew(JitSpew_IonScripts, "Analyzing script %s:%u:%u (%p) %s",
            script()->filename(), script()->lineno(), script()->column(),
            (void*)script(), AnalysisModeString(info().analysisMode()));
  } else {
    JitSpew(JitSpew_IonScripts,
            "%sompiling script %s:%u:%u (%p) (warmup-counter=%" PRIu32
            ", level=%s)",
            (script()->hasIonScript() ? "Rec" : "C"), script()->filename(),
            script()->lineno(), script()->column(), (void*)script(),
            script()->getWarmUpCount(),
            OptimizationLevelString(optimizationLevel()));
  }
#endif

  MOZ_TRY(initParameters());
  initLocals();

  // Initialize something for the env chain. We can bail out before the
  // start instruction, but the snapshot is encoded *at* the start
  // instruction, which means generating any code that could load into
  // registers is illegal.
  MInstruction* env = MConstant::New(alloc(), UndefinedValue());
  current->add(env);
  current->initSlot(info().environmentChainSlot(), env);

  // Initialize the return value.
  MInstruction* returnValue = MConstant::New(alloc(), UndefinedValue());
  current->add(returnValue);
  current->initSlot(info().returnValueSlot(), returnValue);

  // Initialize the arguments object slot to undefined if necessary.
  if (info().hasArguments()) {
    MInstruction* argsObj = MConstant::New(alloc(), UndefinedValue());
    current->add(argsObj);
    current->initSlot(info().argsObjSlot(), argsObj);
  }

  // Emit the start instruction, so we can begin real instructions.
  current->add(MStart::New(alloc()));

  // Guard against over-recursion. Do this before we start unboxing, since
  // this will create an OSI point that will read the incoming argument
  // values, which is nice to do before their last real use, to minimize
  // register/stack pressure.
  MCheckOverRecursed* check = MCheckOverRecursed::New(alloc());
  current->add(check);
  MResumePoint* entryRpCopy =
      MResumePoint::Copy(alloc(), current->entryResumePoint());
  if (!entryRpCopy) {
    return abort(AbortReason::Alloc);
  }
  check->setResumePoint(entryRpCopy);

  // Parameters have been checked to correspond to the typeset, now we unbox
  // what we can in an infallible manner.
  MOZ_TRY(rewriteParameters());

  // Check for redeclaration errors for global scripts.
  if (!info().funMaybeLazy() && !info().module() &&
      script()->bodyScope()->is<GlobalScope>() &&
      script()->bodyScope()->as<GlobalScope>().hasBindings()) {
    MGlobalNameConflictsCheck* redeclCheck =
        MGlobalNameConflictsCheck::New(alloc());
    current->add(redeclCheck);
    MResumePoint* entryRpCopy =
        MResumePoint::Copy(alloc(), current->entryResumePoint());
    if (!entryRpCopy) {
      return abort(AbortReason::Alloc);
    }
    redeclCheck->setResumePoint(entryRpCopy);
  }

  // It's safe to start emitting actual IR, so now build the env chain.
  MOZ_TRY(initEnvironmentChain());
  if (info().needsArgsObj()) {
    initArgumentsObject();
  }

  // The type analysis phase attempts to insert unbox operations near
  // definitions of values. It also attempts to replace uses in resume points
  // with the narrower, unboxed variants. However, we must prevent this
  // replacement from happening on values in the entry snapshot. Otherwise we
  // could get this:
  //
  //     v0 = MParameter(0)
  //     v1 = MParameter(1)
  //     --   ResumePoint(v2, v3)
  //     v2 = Unbox(v0, INT32)
  //     v3 = Unbox(v1, INT32)
  //
  // So we attach the initial resume point to each parameter, which the type
  // analysis explicitly checks (this is the same mechanism used for
  // effectful operations).
  for (uint32_t i = 0; i < info().endArgSlot(); i++) {
    MInstruction* ins = current->getEntrySlot(i)->toInstruction();
    if (ins->type() != MIRType::Value) {
      continue;
    }

    MResumePoint* entryRpCopy =
        MResumePoint::Copy(alloc(), current->entryResumePoint());
    if (!entryRpCopy) {
      return abort(AbortReason::Alloc);
    }
    ins->setResumePoint(entryRpCopy);
  }

#ifdef DEBUG
  // lazyArguments should never be accessed in |argsObjAliasesFormals| scripts.
  if (info().hasArguments() && !info().argsObjAliasesFormals()) {
    hasLazyArguments_ = true;
  }
#endif

  insertRecompileCheck();

  auto clearLastPriorResumePoint = mozilla::MakeScopeExit([&] {
    // Discard unreferenced & pre-allocated resume points.
    replaceMaybeFallbackFunctionGetter(nullptr);
  });

  MOZ_TRY(traverseBytecode());

  if (isHighestOptimizationLevel() && script_->hasBaselineScript() &&
      inlinedBytecodeLength_ >
          script_->baselineScript()->inlinedBytecodeLength()) {
    script_->baselineScript()->setInlinedBytecodeLength(inlinedBytecodeLength_);
  }

  MOZ_TRY(maybeAddOsrTypeBarriers());
  MOZ_TRY(processIterators());

  if (!info().isAnalysis() && !abortedPreliminaryGroups().empty()) {
    return abort(AbortReason::PreliminaryObjects);
  }

  MOZ_ASSERT(loopDepth_ == 0);
  return Ok();
}

AbortReasonOr<Ok> IonBuilder::processIterators() {
  // Find and mark phis that must transitively hold an iterator live.

  Vector<MDefinition*, 8, SystemAllocPolicy> worklist;

  for (size_t i = 0; i < iterators_.length(); i++) {
    MDefinition* iter = iterators_[i];
    if (!iter->isInWorklist()) {
      if (!worklist.append(iter)) {
        return abort(AbortReason::Alloc);
      }
      iter->setInWorklist();
    }
  }

  while (!worklist.empty()) {
    MDefinition* def = worklist.popCopy();
    def->setNotInWorklist();

    if (def->isPhi()) {
      MPhi* phi = def->toPhi();
      phi->setIterator();
      phi->setImplicitlyUsedUnchecked();
    }

    for (MUseDefIterator iter(def); iter; iter++) {
      MDefinition* use = iter.def();
      if (!use->isInWorklist() &&
          (!use->isPhi() || !use->toPhi()->isIterator())) {
        if (!worklist.append(use)) {
          return abort(AbortReason::Alloc);
        }
        use->setInWorklist();
      }
    }
  }

  return Ok();
}

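// Like build(), but for an inlined callee: the entry block is connected to
// the caller's current block, and the argument slots are seeded from
// |callInfo| rather than from MParameter instructions.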
AbortReasonOr<Ok> IonBuilder::buildInline(IonBuilder* callerBuilder,
                                          MResumePoint* callerResumePoint,
                                          CallInfo& callInfo) {
  inlineCallInfo_ = &callInfo;

  // Spew IC info for inlined script, but only when actually compiling,
  // not when analyzing it.
#ifdef JS_STRUCTURED_SPEW
  if (!info().isAnalysis()) {
    JitSpewBaselineICStats(script(), "To-Be-Inlined");
  }
#endif

  MOZ_TRY(init());

  JitSpew(JitSpew_IonScripts, "Inlining script %s:%u:%u (%p)",
          script()->filename(), script()->lineno(), script()->column(),
          (void*)script());

  callerBuilder_ = callerBuilder;
  callerResumePoint_ = callerResumePoint;

  if (callerBuilder->failedBoundsCheck_) {
    failedBoundsCheck_ = true;
  }

  if (callerBuilder->failedShapeGuard_) {
    failedShapeGuard_ = true;
  }

  if (callerBuilder->failedLexicalCheck_) {
    failedLexicalCheck_ = true;
  }

  safeForMinorGC_ = callerBuilder->safeForMinorGC_;

  // Generate single entrance block.
  MBasicBlock* entry;
  MOZ_TRY_VAR(entry, newBlock(info().firstStackSlot(), pc));
  MOZ_TRY(setCurrentAndSpecializePhis(entry));

  current->setCallerResumePoint(callerResumePoint);

  // Connect the entrance block to the last block in the caller's graph.
  MBasicBlock* predecessor = callerBuilder->current;
  MOZ_ASSERT(predecessor == callerResumePoint->block());

  predecessor->end(MGoto::New(alloc(), current));
  if (!current->addPredecessorWithoutPhis(predecessor)) {
    return abort(AbortReason::Alloc);
  }

  // Initialize env chain slot to Undefined. It's set later by
  // |initEnvironmentChain|.
  MInstruction* env = MConstant::New(alloc(), UndefinedValue());
  current->add(env);
  current->initSlot(info().environmentChainSlot(), env);

  // Initialize |return value| slot.
  MInstruction* returnValue = MConstant::New(alloc(), UndefinedValue());
  current->add(returnValue);
  current->initSlot(info().returnValueSlot(), returnValue);

  // Initialize |arguments| slot.
  if (info().hasArguments()) {
    MInstruction* argsObj = MConstant::New(alloc(), UndefinedValue());
    current->add(argsObj);
    current->initSlot(info().argsObjSlot(), argsObj);
  }

  // Initialize |this| slot.
  current->initSlot(info().thisSlot(), callInfo.thisArg());

  JitSpew(JitSpew_Inlining, "Initializing %u arg slots", info().nargs());

  // NB: Ion does not inline functions which |needsArgsObj|. So using argSlot()
  // instead of argSlotUnchecked() below is OK
  MOZ_ASSERT(!info().needsArgsObj());

  // Initialize actually set arguments.
  uint32_t existing_args = Min<uint32_t>(callInfo.argc(), info().nargs());
  for (size_t i = 0; i < existing_args; ++i) {
    MDefinition* arg = callInfo.getArg(i);
    current->initSlot(info().argSlot(i), arg);
  }

  // Pass Undefined for missing arguments
  for (size_t i = callInfo.argc(); i < info().nargs(); ++i) {
    MConstant* arg = MConstant::New(alloc(), UndefinedValue());
    current->add(arg);
    current->initSlot(info().argSlot(i), arg);
  }

  JitSpew(JitSpew_Inlining, "Initializing %u locals", info().nlocals());

  initLocals();

  JitSpew(JitSpew_Inlining,
          "Inline entry block MResumePoint %p, %u stack slots",
          (void*)current->entryResumePoint(),
          current->entryResumePoint()->stackDepth());

  // +2 for the env chain and |this|, maybe another +1 for arguments object
  // slot.
  MOZ_ASSERT(current->entryResumePoint()->stackDepth() == info().totalSlots());

#ifdef DEBUG
  if (script_->argumentsHasVarBinding()) {
    hasLazyArguments_ = true;
  }
#endif

  insertRecompileCheck();

  // Insert an interrupt check when recording or replaying, which will bump
  // the record/replay system's progress counter.
  if (script()->trackRecordReplayProgress()) {
    MInterruptCheck* check = MInterruptCheck::New(alloc());
    check->setTrackRecordReplayProgress();
    current->add(check);
  }

  // Initialize the env chain now that all resume points operands are
  // initialized.
  MOZ_TRY(initEnvironmentChain(callInfo.fun()));

  auto clearLastPriorResumePoint = mozilla::MakeScopeExit([&] {
    // Discard unreferenced & pre-allocated resume points.
    replaceMaybeFallbackFunctionGetter(nullptr);
  });

  MOZ_TRY(traverseBytecode());

  MOZ_ASSERT(iterators_.empty(), "Iterators should be added to outer builder");

  if (!info().isAnalysis() && !abortedPreliminaryGroups().empty()) {
    return abort(AbortReason::PreliminaryObjects);
  }

  return Ok();
}

void IonBuilder::runTask() {
  // This is the entry point when ion compiles are run offthread.
  JSRuntime* rt = script()->runtimeFromAnyThread();

  TraceLoggerThread* logger = TraceLoggerForCurrentThread();
  TraceLoggerEvent event(TraceLogger_AnnotateScripts, script());
  AutoTraceLog logScript(logger, event);
  AutoTraceLog logCompile(logger, TraceLogger_IonCompilation);

  jit::JitContext jctx(jit::CompileRuntime::get(rt),
                       jit::CompileRealm::get(script()->realm()), &alloc());
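
  // Run the back end (MIR optimization and lowering) on this helper thread
  // and stash the resulting codegen object via setBackgroundCodegen().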
  setBackgroundCodegen(jit::CompileBackEnd(this));
}

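// Replaces the boxed parameter in |slotIdx| with a definition of its known
// MIR type when the observed type set pins it down to a single type.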
void IonBuilder::rewriteParameter(uint32_t slotIdx, MDefinition* param) {
  MOZ_ASSERT(param->isParameter() || param->isGetArgumentsObjectArg());

  TemporaryTypeSet* types = param->resultTypeSet();
  MDefinition* actual = ensureDefiniteType(param, types->getKnownMIRType());
  if (actual == param) {
    return;
  }

  // Careful! We leave the original MParameter in the entry resume point. The
  // arguments still need to be checked unless proven otherwise at the call
  // site, and these checks can bailout. We can end up:
  //   v0 = Parameter(0)
  //   v1 = Unbox(v0, INT32)
  //   --   ResumePoint(v0)
  //
  // As usual, it would be invalid for v1 to be captured in the initial
  // resume point, rather than v0.
  current->rewriteSlot(slotIdx, actual);
}

// Apply Type Inference information to parameters early on, unboxing them if
// they have a definitive type. The actual guards will be emitted by the code
// generator, explicitly, as part of the function prologue.
AbortReasonOr<Ok> IonBuilder::rewriteParameters() {
  MOZ_ASSERT(info().environmentChainSlot() == 0);

  // If this JSScript is not the code of a function, then skip the
  // initialization of function parameters.
  if (!info().funMaybeLazy()) {
    return Ok();
  }

  for (uint32_t i = info().startArgSlot(); i < info().endArgSlot(); i++) {
    if (!alloc().ensureBallast()) {
      return abort(AbortReason::Alloc);
    }
    MDefinition* param = current->getSlot(i);
    rewriteParameter(i, param);
  }

  return Ok();
}

AbortReasonOr<Ok> IonBuilder::initParameters() {
  // If this JSScript is not the code of a function, then skip the
  // initialization of function parameters.
  if (!info().funMaybeLazy()) {
    return Ok();
  }

  // If we are doing OSR on a frame which initially executed in the
  // interpreter and didn't accumulate type information, try to use that OSR
  // frame to determine possible initial types for 'this' and parameters.

  if (thisTypes->empty() && baselineFrame_) {
    TypeSet::Type type = baselineFrame_->thisType;
    if (type.isSingletonUnchecked()) {
      checkNurseryObject(type.singleton());
    }
    thisTypes->addType(type, alloc_->lifoAlloc());
  }

  MParameter* param =
      MParameter::New(alloc(), MParameter::THIS_SLOT, thisTypes);
  current->add(param);
  current->initSlot(info().thisSlot(), param);

  for (uint32_t i = 0; i < info().nargs(); i++) {
    TemporaryTypeSet* types = &argTypes[i];
    if (types->empty() && baselineFrame_ &&
        !script_->baselineScript()->modifiesArguments()) {
      TypeSet::Type type = baselineFrame_->argTypes[i];
      if (type.isSingletonUnchecked()) {
        checkNurseryObject(type.singleton());
      }
      types->addType(type, alloc_->lifoAlloc());
    }

    param = MParameter::New(alloc().fallible(), i, types);
    if (!param) {
      return abort(AbortReason::Alloc);
    }
    current->add(param);
    current->initSlot(info().argSlotUnchecked(i), param);
  }

  return Ok();
}

void IonBuilder::initLocals() {
  // Initialize all frame slots to undefined. Lexical bindings are temporal
  // dead zoned in bytecode.

  if (info().nlocals() == 0) {
    return;
  }

  MConstant* undef = MConstant::New(alloc(), UndefinedValue());
  current->add(undef);

  for (uint32_t i = 0; i < info().nlocals(); i++) {
    current->initSlot(info().localSlot(i), undef);
  }
}

bool IonBuilder::usesEnvironmentChain() {
  // We don't have a BaselineScript if we're running the arguments analysis,
  // but it's fine to assume we always use the environment chain in this case.
  if (info().analysisMode() == Analysis_ArgumentsUsage) {
    return true;
  }
  return script()->baselineScript()->usesEnvironmentChain();
}

AbortReasonOr<Ok> IonBuilder::initEnvironmentChain(MDefinition* callee) {
  MInstruction* env = nullptr;

  // If the script doesn't use the envchain, then it's already initialized
  // from earlier. However, always make a env chain when |needsArgsObj| is true
  // for the script, since arguments object construction requires the env chain
  // to be passed in.
  if (!info().needsArgsObj() && !usesEnvironmentChain()) {
    return Ok();
  }

  // The env chain is only tracked in scripts that have NAME opcodes which
  // will try to access the env. For other scripts, the env instructions
  // will be held live by resume points and code will still be generated for
  // them, so just use a constant undefined value.

  if (JSFunction* fun = info().funMaybeLazy()) {
    if (!callee) {
      MCallee* calleeIns = MCallee::New(alloc());
      current->add(calleeIns);
      callee = calleeIns;
    }
    env = MFunctionEnvironment::New(alloc(), callee);
    current->add(env);

    // This reproduces what is done in CallObject::createForFunction. Skip
    // this for the arguments analysis, as the script might not have a
    // baseline script with template objects yet.
    if (fun->needsSomeEnvironmentObject() &&
        info().analysisMode() != Analysis_ArgumentsUsage) {
      if (fun->needsNamedLambdaEnvironment()) {
        env = createNamedLambdaObject(callee, env);
      }

      // TODO: Parameter expression-induced extra var environment not
      // yet handled.
      if (fun->needsExtraBodyVarEnvironment()) {
        return abort(AbortReason::Disable, "Extra var environment unsupported");
      }

      if (fun->needsCallObject()) {
        MOZ_TRY_VAR(env, createCallObject(callee, env));
      }
    }
  } else if (ModuleObject* module = info().module()) {
    // Modules use a pre-created env object.
    env = constant(ObjectValue(module->initialEnvironment()));
  } else {
    // For global scripts without a non-syntactic global scope, the env
    // chain is the global lexical env.
    MOZ_ASSERT(!script()->isForEval());
    MOZ_ASSERT(!script()->hasNonSyntacticScope());
    env = constant(ObjectValue(script()->global().lexicalEnvironment()));
  }

  // Update the environment slot from UndefinedValue only after initial
  // environment is created so that bailout doesn't see a partial env.
  // See: |InitFromBailout|
  current->setEnvironmentChain(env);
  return Ok();
}

void IonBuilder::initArgumentsObject() {
  JitSpew(JitSpew_IonMIR,
          "%s:%u:%u - Emitting code to initialize arguments object! block=%p",
          script()->filename(), script()->lineno(), script()->column(),
          current);
  MOZ_ASSERT(info().needsArgsObj());

  bool mapped = script()->hasMappedArgsObj();
  ArgumentsObject* templateObj =
      script()->realm()->maybeArgumentsTemplateObject(mapped);

  MCreateArgumentsObject* argsObj = MCreateArgumentsObject::New(
      alloc(), current->environmentChain(), templateObj);
  current->add(argsObj);
  current->setArgumentsObject(argsObj);
}

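// Gives one OSR slot value the type the loop header expects, by inserting a
// type barrier and, depending on the type, an unbox or constant in the OSR
// block.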
AbortReasonOr<Ok> IonBuilder::addOsrValueTypeBarrier(
    uint32_t slot, MInstruction** def_, MIRType type,
    TemporaryTypeSet* typeSet) {
  MInstruction*& def = *def_;
  MBasicBlock* osrBlock = def->block();

  // Clear bogus type information added in newOsrPreheader().
  def->setResultType(MIRType::Value);
  def->setResultTypeSet(nullptr);

  if (typeSet && !typeSet->unknown()) {
    MInstruction* barrier = MTypeBarrier::New(alloc(), def, typeSet);
    osrBlock->insertBefore(osrBlock->lastIns(), barrier);
    osrBlock->rewriteSlot(slot, barrier);
    def = barrier;

    // If the TypeSet is more precise than |type|, adjust |type| for the
    // code below.
    if (type == MIRType::Value) {
      type = barrier->type();
    }
  } else if (type == MIRType::Null || type == MIRType::Undefined ||
             type == MIRType::MagicOptimizedArguments) {
    // No unbox instruction will be added below, so check the type by
    // adding a type barrier for a singleton type set.
    TypeSet::Type ntype = TypeSet::PrimitiveType(ValueTypeFromMIRType(type));
    LifoAlloc* lifoAlloc = alloc().lifoAlloc();
    typeSet = lifoAlloc->new_<TemporaryTypeSet>(lifoAlloc, ntype);
    if (!typeSet) {
      return abort(AbortReason::Alloc);
    }
    MInstruction* barrier = MTypeBarrier::New(alloc(), def, typeSet);
    osrBlock->insertBefore(osrBlock->lastIns(), barrier);
    osrBlock->rewriteSlot(slot, barrier);
    def = barrier;
  }

  // The following guards aren't directly linked into the usedef chain,
  // however in the OSR block we need to ensure they're not optimized out, so we
  // mark them as implicitly used.
  switch (type) {
    case MIRType::Null:
    case MIRType::Undefined:
    case MIRType::MagicOptimizedArguments:
      def->setImplicitlyUsed();
      break;
    default:
      break;
  }

  switch (type) {
    case MIRType::Boolean:
    case MIRType::Int32:
    case MIRType::Double:
    case MIRType::String:
    case MIRType::Symbol:
    case MIRType::BigInt:
    case MIRType::Object:
      if (type != def->type()) {
        MUnbox* unbox = MUnbox::New(alloc(), def, type, MUnbox::Fallible);
        osrBlock->insertBefore(osrBlock->lastIns(), unbox);
        osrBlock->rewriteSlot(slot, unbox);
        def = unbox;
      }
      break;

    case MIRType::Null: {
      MConstant* c = MConstant::New(alloc(), NullValue());
      osrBlock->insertBefore(osrBlock->lastIns(), c);
      osrBlock->rewriteSlot(slot, c);
      def = c;
      break;
    }

    case MIRType::Undefined: {
      MConstant* c = MConstant::New(alloc(), UndefinedValue());
      osrBlock->insertBefore(osrBlock->lastIns(), c);
      osrBlock->rewriteSlot(slot, c);
      def = c;
      break;
    }

    case MIRType::MagicOptimizedArguments: {
      MOZ_ASSERT(hasLazyArguments_);
      MConstant* lazyArg =
          MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
      osrBlock->insertBefore(osrBlock->lastIns(), lazyArg);
      osrBlock->rewriteSlot(slot, lazyArg);
      def = lazyArg;
      break;
    }

    default:
      break;
  }

  MOZ_ASSERT(def == osrBlock->getSlot(slot));
  return Ok();
}

AbortReasonOr<Ok> IonBuilder::maybeAddOsrTypeBarriers() {
  if (!info().osrPc()) {
    return Ok();
  }

  // The loop has successfully been processed, and the loop header phis
  // have their final type. Add unboxes and type barriers in the OSR
  // block to check that the values have the appropriate type, and update
  // the types in the preheader.

  MBasicBlock* osrBlock = graph().osrBlock();
  if (!osrBlock) {
    // Because IonBuilder does not compile catch blocks, it's possible to
    // end up without an OSR block if the OSR pc is only reachable via a
    // break-statement inside the catch block. For instance:
    //
    //   for (;;) {
    //     try {
    //       throw 3;
    //     } catch(e) {
    //       break;
    //     }
    //   }
    //   while (..) { }  // <= OSR here, only reachable via catch block.
    //
    // For now we just abort in this case.
    MOZ_ASSERT(graph().hasTryBlock());
    return abort(AbortReason::Disable,
                 "OSR block only reachable through catch block");
  }

  MBasicBlock* preheader = osrBlock->getSuccessor(0);
  MBasicBlock* header = preheader->getSuccessor(0);
  static const size_t OSR_PHI_POSITION = 1;
  MOZ_ASSERT(preheader->getPredecessor(OSR_PHI_POSITION) == osrBlock);

  MResumePoint* headerRp = header->entryResumePoint();
  size_t stackDepth = headerRp->stackDepth();
  MOZ_ASSERT(stackDepth == osrBlock->stackDepth());
  for (uint32_t slot = info().startArgSlot(); slot < stackDepth; slot++) {
    // Aliased slots are never accessed, since they need to go through
    // the callobject. The typebarriers are added there and can be
    // discarded here.
    if (info().isSlotAliased(slot)) {
      continue;
    }

    if (!alloc().ensureBallast()) {
      return abort(AbortReason::Alloc);
    }

    MInstruction* def = osrBlock->getSlot(slot)->toInstruction();
    MPhi* preheaderPhi = preheader->getSlot(slot)->toPhi();
    MPhi* headerPhi = headerRp->getOperand(slot)->toPhi();

    MIRType type = headerPhi->type();
    TemporaryTypeSet* typeSet = headerPhi->resultTypeSet();

    MOZ_TRY(addOsrValueTypeBarrier(slot, &def, type, typeSet));

    preheaderPhi->replaceOperand(OSR_PHI_POSITION, def);
    preheaderPhi->setResultType(type);
    preheaderPhi->setResultTypeSet(typeSet);
  }

  return Ok();
}

enum class CFGState : uint32_t { Alloc = 0, Abort = 1, Success = 2 };

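// Returns the script's ControlFlowGraph, generating it (and caching it on
// the BaselineScript when one exists) if it has not been built yet.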
static CFGState GetOrCreateControlFlowGraph(TempAllocator& tempAlloc,
                                            JSScript* script,
                                            const ControlFlowGraph** cfgOut) {
  if (script->hasBaselineScript() &&
      script->baselineScript()->controlFlowGraph()) {
    *cfgOut = script->baselineScript()->controlFlowGraph();
    return CFGState::Success;
  }

  ControlFlowGenerator cfgenerator(tempAlloc, script);
  if (!cfgenerator.traverseBytecode()) {
    if (cfgenerator.aborted()) {
      return CFGState::Abort;
    }
    return CFGState::Alloc;
  }

  // If possible cache the control flow graph on the baseline script.
  TempAllocator* graphAlloc = nullptr;
  if (script->hasBaselineScript()) {
    LifoAlloc& lifoAlloc = script->zone()->jitZone()->cfgSpace()->lifoAlloc();
    LifoAlloc::AutoFallibleScope fallibleAllocator(&lifoAlloc);
    graphAlloc = lifoAlloc.new_<TempAllocator>(&lifoAlloc);
    if (!graphAlloc) {
      return CFGState::Alloc;
    }
  } else {
    graphAlloc = &tempAlloc;
  }

  ControlFlowGraph* cfg = cfgenerator.getGraph(*graphAlloc);
  if (!cfg) {
    return CFGState::Alloc;
  }

  if (script->hasBaselineScript()) {
    MOZ_ASSERT(!script->baselineScript()->controlFlowGraph());
    script->baselineScript()->setControlFlowGraph(cfg);
  }

  if (JitSpewEnabled(JitSpew_CFG)) {
    JitSpew(JitSpew_CFG, "Generating graph for %s:%u:%u", script->filename(),
            script->lineno(), script->column());
    Fprinter& print = JitSpewPrinter();
    cfg->dump(print, script);
  }

  *cfgOut = cfg;
  return CFGState::Success;
}

// We traverse the bytecode using the control flow graph. This structure
// contains a graph of CFGBlocks in RPO order.
//
// Per CFGBlock we take the corresponding MBasicBlock and start iterating the
// bytecode of that CFGBlock. Each basic block has a mapping of local slots to
// instructions, as well as a stack depth. As we encounter instructions we
// mutate this mapping in the current block.
//
// Afterwards we visit the control flow instruction. There we add the ending ins
// of the MBasicBlock and create new MBasicBlocks for the successors. That means
// adding phi nodes for diamond join points, making sure to propagate types
// around loops ...
//
// We keep a link between a CFGBlock and the entry MBasicBlock (in
// blockWorklist). That vector only contains the MBasicBlocks that correspond
// with a CFGBlock. We can create new MBasicBlocks that don't correspond to a
// CFGBlock.
AbortReasonOr<Ok> IonBuilder::traverseBytecode() {
  CFGState state = GetOrCreateControlFlowGraph(alloc(), info().script(), &cfg);
  MOZ_ASSERT_IF(cfg && info().script()->hasBaselineScript(),
                info().script()->baselineScript()->controlFlowGraph() == cfg);
  if (state == CFGState::Alloc) {
    return abort(AbortReason::Alloc);
  }
  if (state == CFGState::Abort) {
    return abort(AbortReason::Disable, "Couldn't create the CFG of script");
  }

  if (!blockWorklist.growBy(cfg->numBlocks())) {
    return abort(AbortReason::Alloc);
  }
  blockWorklist[0] = current;

  size_t i = 0;
  while (i < cfg->numBlocks()) {
    if (!alloc().ensureBallast()) {
      return abort(AbortReason::Alloc);
    }

    bool restarted = false;
    const CFGBlock* cfgblock = cfg->block(i);
    MBasicBlock* mblock = blockWorklist[i];
    MOZ_ASSERT(mblock && !mblock->isDead());

    MOZ_TRY(visitBlock(cfgblock, mblock));
    MOZ_TRY(visitControlInstruction(cfgblock->stopIns(), &restarted));

    if (restarted) {
      // Move back to the start of the loop.
      while (!blockWorklist[i] || blockWorklist[i]->isDead()) {
        MOZ_ASSERT(i > 0);
        i--;
      }
      MOZ_ASSERT(cfgblock->stopIns()->isBackEdge());
      MOZ_ASSERT(loopHeaderStack_.back() == blockWorklist[i]);
    } else {
      i++;
    }
  }

#ifdef DEBUG
  MOZ_ASSERT(graph().numBlocks() >= blockWorklist.length());
  for (i = 0; i < cfg->numBlocks(); i++) {
    MOZ_ASSERT(blockWorklist[i]);
    MOZ_ASSERT(!blockWorklist[i]->isDead());
    MOZ_ASSERT_IF(i != 0, blockWorklist[i]->id() != 0);
  }
#endif

  cfg = nullptr;

  blockWorklist.clear();
  return Ok();
}

AbortReasonOr<Ok> IonBuilder::visitBlock(const CFGBlock* cfgblock,
                                         MBasicBlock* mblock) {
  mblock->setLoopDepth(loopDepth_);

  cfgCurrent = cfgblock;
  pc = cfgblock->startPc();

  if (mblock->pc() && script()->hasScriptCounts()) {
    mblock->setHitCount(script()->getHitCount(mblock->pc()));
  }

  // Optimization to move a predecessor that only has this block as successor
  // just before this block. Skip this optimization if the previous block is
  // not part of the same function, as we might have to backtrack on inlining
  // failures.
  if (mblock->numPredecessors() == 1 &&
      mblock->getPredecessor(0)->numSuccessors() == 1 &&
      !mblock->getPredecessor(0)->outerResumePoint()) {
    graph().removeBlockFromList(mblock->getPredecessor(0));
    graph().addBlock(mblock->getPredecessor(0));
  }

  MOZ_TRY(setCurrentAndSpecializePhis(mblock));
  graph().addBlock(mblock);

  while (pc < cfgblock->stopPc()) {
    if (!alloc().ensureBallast()) {
      return abort(AbortReason::Alloc);
    }

#ifdef DEBUG
    // In debug builds, after compiling this op, check that all values
    // popped by this opcode either:
    //
    //   (1) Have the ImplicitlyUsed flag set on them.
    //   (2) Have more uses than before compiling this op (the value is
    //       used as operand of a new MIR instruction).
    //
    // This is used to catch problems where IonBuilder pops a value without
    // adding any SSA uses and doesn't call setImplicitlyUsedUnchecked on it.
    Vector<MDefinition*, 4, JitAllocPolicy> popped(alloc());
    Vector<size_t, 4, JitAllocPolicy> poppedUses(alloc());
    unsigned nuses = GetUseCount(pc);

    for (unsigned i = 0; i < nuses; i++) {
      MDefinition* def = current->peek(-int32_t(i + 1));
      if (!popped.append(def) || !poppedUses.append(def->defUseCount())) {
        return abort(AbortReason::Alloc);
      }
    }
#endif

    // Nothing in inspectOpcode() is allowed to advance the pc.
    JSOp op = JSOp(*pc);
    MOZ_TRY(inspectOpcode(op));

#ifdef DEBUG
    for (size_t i = 0; i < popped.length(); i++) {
      switch (op) {
        case JSOP_POP:
        case JSOP_POPN:
        case JSOP_DUPAT:
        case JSOP_DUP:
        case JSOP_DUP2:
        case JSOP_PICK:
        case JSOP_UNPICK:
        case JSOP_SWAP:
        case JSOP_SETARG:
        case JSOP_SETLOCAL:
        case JSOP_INITLEXICAL:
        case JSOP_SETRVAL:
        case JSOP_VOID:
          // Don't require SSA uses for values popped by these ops.
          break;

        case JSOP_POS:
        case JSOP_TONUMERIC:
        case JSOP_TOID:
        case JSOP_TOSTRING:
          // These ops may leave their input on the stack without setting
          // the ImplicitlyUsed flag. If this value will be popped immediately,
          // we may replace it with |undefined|, but the difference is
          // not observable.
          MOZ_ASSERT(i == 0);
          if (current->peek(-1) == popped[0]) {
            break;
          }
          MOZ_FALLTHROUGH;

        default:
          MOZ_ASSERT(popped[i]->isImplicitlyUsed() ||

                     // MNewDerivedTypedObject instances are
                     // often dead unless they escape from the
                     // fn. See IonBuilder::loadTypedObjectData()
                     // for more details.
                     popped[i]->isNewDerivedTypedObject() ||

                     popped[i]->defUseCount() > poppedUses[i]);
          break;
      }
    }
#endif

    pc += CodeSpec[op].length;
    current->updateTrackedSite(bytecodeSite(pc));
  }