Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/Ion.h"
8 :
9 : #include "mozilla/IntegerPrintfMacros.h"
10 : #include "mozilla/MemoryReporting.h"
11 : #include "mozilla/SizePrintfMacros.h"
12 : #include "mozilla/ThreadLocal.h"
13 :
14 : #include "jscompartment.h"
15 : #include "jsgc.h"
16 : #include "jsprf.h"
17 :
18 : #include "gc/Marking.h"
19 : #include "jit/AliasAnalysis.h"
20 : #include "jit/AlignmentMaskAnalysis.h"
21 : #include "jit/BacktrackingAllocator.h"
22 : #include "jit/BaselineFrame.h"
23 : #include "jit/BaselineInspector.h"
24 : #include "jit/BaselineJIT.h"
25 : #include "jit/CacheIRSpewer.h"
26 : #include "jit/CodeGenerator.h"
27 : #include "jit/EagerSimdUnbox.h"
28 : #include "jit/EdgeCaseAnalysis.h"
29 : #include "jit/EffectiveAddressAnalysis.h"
30 : #include "jit/FlowAliasAnalysis.h"
31 : #include "jit/FoldLinearArithConstants.h"
32 : #include "jit/InstructionReordering.h"
33 : #include "jit/IonAnalysis.h"
34 : #include "jit/IonBuilder.h"
35 : #include "jit/IonIC.h"
36 : #include "jit/IonOptimizationLevels.h"
37 : #include "jit/JitcodeMap.h"
38 : #include "jit/JitCommon.h"
39 : #include "jit/JitCompartment.h"
40 : #include "jit/JitSpewer.h"
41 : #include "jit/LICM.h"
42 : #include "jit/LIR.h"
43 : #include "jit/LoopUnroller.h"
44 : #include "jit/Lowering.h"
45 : #include "jit/PerfSpewer.h"
46 : #include "jit/RangeAnalysis.h"
47 : #include "jit/ScalarReplacement.h"
48 : #include "jit/Sink.h"
49 : #include "jit/StupidAllocator.h"
50 : #include "jit/ValueNumbering.h"
51 : #include "jit/WasmBCE.h"
52 : #include "vm/Debugger.h"
53 : #include "vm/HelperThreads.h"
54 : #include "vm/TraceLogging.h"
55 : #include "vtune/VTuneWrapper.h"
56 :
57 : #include "jscompartmentinlines.h"
58 : #include "jsobjinlines.h"
59 : #include "jsscriptinlines.h"
60 :
61 : #include "jit/JitFrames-inl.h"
62 : #include "jit/shared/Lowering-shared-inl.h"
63 : #include "vm/Debugger-inl.h"
64 : #include "vm/EnvironmentObject-inl.h"
65 : #include "vm/Stack-inl.h"
66 :
67 : using namespace js;
68 : using namespace js::jit;
69 :
70 : // Assert that JitCode is gc::Cell aligned.
71 : JS_STATIC_ASSERT(sizeof(JitCode) % gc::CellAlignBytes == 0);
72 :
73 : static MOZ_THREAD_LOCAL(JitContext*) TlsJitContext;
74 :
75 : static JitContext*
76 358791 : CurrentJitContext()
77 : {
78 358791 : if (!TlsJitContext.init())
79 0 : return nullptr;
80 358792 : return TlsJitContext.get();
81 : }
82 :
// Install |ctx| as the current thread's JitContext (nullptr clears it).
void
jit::SetJitContext(JitContext* ctx)
{
    TlsJitContext.set(ctx);
}
88 :
89 : JitContext*
90 147733 : jit::GetJitContext()
91 : {
92 147733 : MOZ_ASSERT(CurrentJitContext());
93 147733 : return CurrentJitContext();
94 : }
95 :
// Like GetJitContext, but returns nullptr instead of asserting when no
// JitContext has been installed on this thread.
JitContext*
jit::MaybeGetJitContext()
{
    return CurrentJitContext();
}
101 :
// Main-thread compilation context: derives the CompileRuntime and
// CompileCompartment from |cx|, remembers the previously installed
// context, and installs itself (the destructor restores |prev_|).
JitContext::JitContext(JSContext* cx, TempAllocator* temp)
  : cx(cx),
    temp(temp),
    runtime(CompileRuntime::get(cx->runtime())),
    compartment(CompileCompartment::get(cx->compartment())),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
112 :
// Off-thread compilation context: no JSContext is available, so the
// runtime and compartment are provided explicitly.
JitContext::JitContext(CompileRuntime* rt, CompileCompartment* comp, TempAllocator* temp)
  : cx(nullptr),
    temp(temp),
    runtime(rt),
    compartment(comp),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
123 :
// Runtime-only context: no JSContext, allocator, or compartment.
JitContext::JitContext(CompileRuntime* rt)
  : cx(nullptr),
    temp(nullptr),
    runtime(rt),
    compartment(nullptr),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
134 :
// Allocator-only context: no JSContext, runtime, or compartment.
JitContext::JitContext(TempAllocator* temp)
  : cx(nullptr),
    temp(temp),
    runtime(nullptr),
    compartment(nullptr),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
145 :
// Runtime + allocator context, with no JSContext or compartment.
JitContext::JitContext(CompileRuntime* rt, TempAllocator* temp)
  : cx(nullptr),
    temp(temp),
    runtime(rt),
    compartment(nullptr),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
156 :
// Empty context: everything null; still participates in the save/restore
// chain via |prev_| so nesting works uniformly.
JitContext::JitContext()
  : cx(nullptr),
    temp(nullptr),
    runtime(nullptr),
    compartment(nullptr),
    prev_(CurrentJitContext()),
    assemblerCount_(0)
{
    SetJitContext(this);
}
167 :
// Restore whatever JitContext was current when this one was installed.
JitContext::~JitContext()
{
    SetJitContext(prev_);
}
172 :
// One-time, process-wide JIT initialization: set up the JitContext TLS
// slot, spew/logging, optional CacheIR spew (env-controlled), ARM CPU
// flag detection, and perf integration. Returns false only if the TLS
// slot cannot be initialized.
bool
jit::InitializeIon()
{
    if (!TlsJitContext.init())
        return false;

    CheckLogging();

#ifdef JS_CACHEIR_SPEW
    // CacheIR spew is opt-in via a non-empty CACHEIR_LOGS env var.
    const char* env = getenv("CACHEIR_LOGS");
    if (env && env[0])
        CacheIRSpewer::singleton().init();
#endif

#if defined(JS_CODEGEN_ARM)
    InitARMFlags();
#endif
    CheckPerf();
    return true;
}
193 :
// All trampolines/stubs start out null; they are generated later in
// JitRuntime::initialize().
JitRuntime::JitRuntime(JSRuntime* rt)
  : execAlloc_(rt),
    backedgeExecAlloc_(rt),
    exceptionTail_(nullptr),
    bailoutTail_(nullptr),
    profilerExitFrameTail_(nullptr),
    enterJIT_(nullptr),
    bailoutHandler_(nullptr),
    argumentsRectifier_(nullptr),
    argumentsRectifierReturnAddr_(nullptr),
    invalidator_(nullptr),
    debugTrapHandler_(nullptr),
    baselineDebugModeOSRHandler_(nullptr),
    functionWrappers_(nullptr),
    preventBackedgePatching_(false),
    jitcodeGlobalTable_(nullptr)
{
}
212 :
// Release the VM-wrapper map and the jitcode global table; js_delete
// tolerates nullptr for either.
JitRuntime::~JitRuntime()
{
    js_delete(functionWrappers_.ref());

    // By this point, the jitcode global table should be empty.
    MOZ_ASSERT_IF(jitcodeGlobalTable_, jitcodeGlobalTable_->empty());
    js_delete(jitcodeGlobalTable_.ref());
}
221 :
// Generate every runtime-wide trampoline and stub: profiler/exception/
// bailout tails, bailout tables and handler, invalidator, arguments
// rectifier, EnterJIT sequences, pre-barriers for each traced MIRType,
// malloc/free stubs, one wrapper per VM function, and the lazy-link
// stub; finally create the jitcode global table. Stubs are shared
// across compartments, so everything is emitted inside the atoms
// compartment. Returns false on any OOM or stub-generation failure
// (the JitRuntime is then left partially initialized).
bool
JitRuntime::initialize(JSContext* cx, AutoLockForExclusiveAccess& lock)
{
    AutoAtomsCompartment ac(cx, lock);

    JitContext jctx(cx, nullptr);

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return false;

    functionWrappers_ = cx->new_<VMWrapperMap>(cx);
    if (!functionWrappers_ || !functionWrappers_->init())
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
    profilerExitFrameTail_ = generateProfilerExitFrameTailStub(cx);
    if (!profilerExitFrameTail_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");

    void* handler = JS_FUNC_TO_DATA_PTR(void*, jit::HandleException);

    exceptionTail_ = generateExceptionTailStub(cx, handler);
    if (!exceptionTail_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting bailout tail stub");
    bailoutTail_ = generateBailoutTailStub(cx);
    if (!bailoutTail_)
        return false;

    if (cx->runtime()->jitSupportsFloatingPoint) {
        JitSpew(JitSpew_Codegen, "# Emitting bailout tables");

        // Initialize some Ion-only stubs that require floating-point support.
        BailoutTableVector& bailoutTables = bailoutTables_.writeRef();
        if (!bailoutTables.reserve(FrameSizeClass::ClassLimit().classId()))
            return false;

        // One bailout table per frame-size class; the loop terminates at
        // the ClassLimit sentinel.
        for (uint32_t id = 0;; id++) {
            FrameSizeClass class_ = FrameSizeClass::FromClass(id);
            if (class_ == FrameSizeClass::ClassLimit())
                break;
            bailoutTables.infallibleAppend((JitCode*)nullptr);
            JitSpew(JitSpew_Codegen, "# Bailout table");
            bailoutTables[id] = generateBailoutTable(cx, id);
            if (!bailoutTables[id])
                return false;
        }

        JitSpew(JitSpew_Codegen, "# Emitting bailout handler");
        bailoutHandler_ = generateBailoutHandler(cx);
        if (!bailoutHandler_)
            return false;

        JitSpew(JitSpew_Codegen, "# Emitting invalidator");
        invalidator_ = generateInvalidator(cx);
        if (!invalidator_)
            return false;
    }

    JitSpew(JitSpew_Codegen, "# Emitting sequential arguments rectifier");
    argumentsRectifier_ = generateArgumentsRectifier(cx, &argumentsRectifierReturnAddr_.writeRef());
    if (!argumentsRectifier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting EnterJIT sequence");
    enterJIT_ = generateEnterJIT(cx, EnterJitOptimized);
    if (!enterJIT_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting EnterBaselineJIT sequence");
    enterBaselineJIT_ = generateEnterJIT(cx, EnterJitBaseline);
    if (!enterBaselineJIT_)
        return false;

    // GC pre-barrier stubs, one per traced MIRType.
    JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Value");
    valuePreBarrier_ = generatePreBarrier(cx, MIRType::Value);
    if (!valuePreBarrier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for String");
    stringPreBarrier_ = generatePreBarrier(cx, MIRType::String);
    if (!stringPreBarrier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Object");
    objectPreBarrier_ = generatePreBarrier(cx, MIRType::Object);
    if (!objectPreBarrier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for Shape");
    shapePreBarrier_ = generatePreBarrier(cx, MIRType::Shape);
    if (!shapePreBarrier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting Pre Barrier for ObjectGroup");
    objectGroupPreBarrier_ = generatePreBarrier(cx, MIRType::ObjectGroup);
    if (!objectGroupPreBarrier_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting malloc stub");
    mallocStub_ = generateMallocStub(cx);
    if (!mallocStub_)
        return false;

    JitSpew(JitSpew_Codegen, "# Emitting free stub");
    freeStub_ = generateFreeStub(cx);
    if (!freeStub_)
        return false;

    // Walk the linked list of registered VM functions and generate a
    // trampoline wrapper for each (looked up later via getVMWrapper).
    JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
    for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
        JitSpew(JitSpew_Codegen, "# VM function wrapper");
        if (!generateVMWrapper(cx, *fun))
            return false;
    }

    JitSpew(JitSpew_Codegen, "# Emitting lazy link stub");
    lazyLinkStub_ = generateLazyLinkStub(cx);
    if (!lazyLinkStub_)
        return false;

    jitcodeGlobalTable_ = cx->new_<JitcodeGlobalTable>();
    if (!jitcodeGlobalTable_)
        return false;

    return true;
}
352 :
// Lazily generate the shared debug-trap handler stub on first request.
// Returns nullptr if generation fails; callers must null-check.
JitCode*
JitRuntime::debugTrapHandler(JSContext* cx)
{
    if (!debugTrapHandler_) {
        // JitRuntime code stubs are shared across compartments and have to
        // be allocated in the atoms compartment.
        AutoLockForExclusiveAccess lock(cx);
        AutoAtomsCompartment ac(cx, lock);
        debugTrapHandler_ = generateDebugTrapHandler(cx);
    }
    return debugTrapHandler_;
}
365 :
366 : uint8_t*
367 1 : JSContext::allocateOsrTempData(size_t size)
368 : {
369 1 : osrTempData_ = (uint8_t*)js_realloc(osrTempData_, size);
370 1 : return osrTempData_;
371 : }
372 :
373 : void
374 7673 : JSContext::freeOsrTempData()
375 : {
376 7673 : js_free(osrTempData_);
377 7673 : osrTempData_ = nullptr;
378 7673 : }
379 :
// Repoint every patchable Ion loop backedge in this zone group to
// |target|: either the normal loop header, or the interrupt check (so
// running Ion code takes an interrupt on its next backedge). The
// backedge code pages are made writable for the duration of patching.
void
JitZoneGroup::patchIonBackedges(JSContext* cx, BackedgeTarget target)
{
    if (target == BackedgeLoopHeader) {
        // We must be on the active thread. The caller must use
        // AutoPreventBackedgePatching to ensure we don't reenter.
        MOZ_ASSERT(cx->runtime()->jitRuntime()->preventBackedgePatching());
        MOZ_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
    } else {
        // We must be called from InterruptRunningJitCode, or a signal handler
        // triggered there. rt->handlingJitInterrupt() ensures we can't reenter
        // this code.
        MOZ_ASSERT(!cx->runtime()->jitRuntime()->preventBackedgePatching());
        MOZ_ASSERT(cx->handlingJitInterrupt());
    }

    // Do nothing if we know all backedges are already jumping to `target`.
    if (backedgeTarget_ == target)
        return;

    backedgeTarget_ = target;

    cx->runtime()->jitRuntime()->backedgeExecAlloc().makeAllWritable();

    // Patch all loop backedges in Ion code so that they either jump to the
    // normal loop header or to an interrupt handler each time they run.
    for (InlineListIterator<PatchableBackedge> iter(backedgeList().begin());
         iter != backedgeList().end();
         iter++)
    {
        PatchableBackedge* patchableBackedge = *iter;
        if (target == BackedgeLoopHeader)
            PatchBackedge(patchableBackedge->backedge, patchableBackedge->loopHeader, target);
        else
            PatchBackedge(patchableBackedge->backedge, patchableBackedge->interruptCheck, target);
    }

    // Restore W^X: code pages go back to executable-only.
    cx->runtime()->jitRuntime()->backedgeExecAlloc().makeAllExecutable();
}
419 :
// Backedges initially jump to their loop headers (no pending interrupt).
JitZoneGroup::JitZoneGroup(ZoneGroup* group)
  : backedgeTarget_(group, BackedgeLoopHeader),
    backedgeList_(group)
{}
424 :
// All per-compartment stubs start out null; the stub-code map is created
// in initialize() and the Ion stubs are generated lazily.
JitCompartment::JitCompartment()
  : stubCodes_(nullptr),
    stringConcatStub_(nullptr),
    regExpMatcherStub_(nullptr),
    regExpSearcherStub_(nullptr),
    regExpTesterStub_(nullptr)
{
}
433 :
// Release the stub-code map (js_delete tolerates nullptr).
JitCompartment::~JitCompartment()
{
    js_delete(stubCodes_);
}
438 :
439 : bool
440 285 : JitCompartment::initialize(JSContext* cx)
441 : {
442 285 : stubCodes_ = cx->new_<ICStubCodeMap>(cx->runtime());
443 285 : if (!stubCodes_)
444 0 : return false;
445 :
446 285 : if (!stubCodes_->init()) {
447 0 : ReportOutOfMemory(cx);
448 0 : return false;
449 : }
450 :
451 285 : return true;
452 : }
453 :
454 : bool
455 10 : JitCompartment::ensureIonStubsExist(JSContext* cx)
456 : {
457 10 : if (!stringConcatStub_) {
458 4 : stringConcatStub_ = generateStringConcatStub(cx);
459 4 : if (!stringConcatStub_)
460 0 : return false;
461 : }
462 :
463 10 : return true;
464 : }
465 :
466 : bool
467 13 : JitZone::init(JSContext* cx)
468 : {
469 13 : if (!baselineCacheIRStubCodes_.init()) {
470 0 : ReportOutOfMemory(cx);
471 0 : return false;
472 : }
473 :
474 13 : return true;
475 : }
476 :
// Destroy |builder| and everything it allocated during compilation.
void
jit::FreeIonBuilder(IonBuilder* builder)
{
    // The builder is allocated into its LifoAlloc, so destroying that will
    // destroy the builder and all other data accumulated during compilation,
    // except any final codegen (which includes an assembler and needs to be
    // explicitly destroyed).
    js_delete(builder->backgroundCodegen());
    js_delete(builder->alloc().lifoAlloc());
}
487 :
// Tear down an off-thread Ion compilation: detach |builder| from the
// script's pending-builder slot and the lazy-link list, clear the
// script's recompiling/compiling-off-thread state, then free the
// builder (off-thread when possible). Caller holds the helper-thread
// lock, as witnessed by |locked|.
void
jit::FinishOffThreadBuilder(JSRuntime* runtime, IonBuilder* builder,
                            const AutoLockHelperThreadState& locked)
{
    // Clean the references to the pending IonBuilder, if we just finished it.
    if (builder->script()->baselineScript()->hasPendingIonBuilder() &&
        builder->script()->baselineScript()->pendingIonBuilder() == builder)
    {
        builder->script()->baselineScript()->removePendingIonBuilder(builder->script());
    }

    // If the builder is still in one of the helper thread list, then remove it.
    if (builder->isInList())
        builder->script()->zone()->group()->ionLazyLinkListRemove(builder);

    // Clear the recompiling flag of the old ionScript, since we continue to
    // use the old ionScript if recompiling fails.
    if (builder->script()->hasIonScript())
        builder->script()->ionScript()->clearRecompiling();

    // Clean up if compilation did not succeed.
    if (builder->script()->isIonCompilingOffThread()) {
        // An AbortReason::Disable error marks the script as永 not-compilable
        IonScript* ion = nullptr;
        AbortReasonOr<Ok> status = builder->getOffThreadStatus();
        if (status.isErr() && status.unwrapErr() == AbortReason::Disable)
            ion = ION_DISABLED_SCRIPT;
        builder->script()->setIonScript(runtime, ion);
    }

    // Free Ion LifoAlloc off-thread. Free on the main thread if this OOMs.
    if (!StartOffThreadIonFree(builder, locked))
        FreeIonBuilder(builder);
}
521 :
522 : static bool
523 7 : LinkCodeGen(JSContext* cx, IonBuilder* builder, CodeGenerator *codegen)
524 : {
525 14 : RootedScript script(cx, builder->script());
526 7 : TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);
527 14 : TraceLoggerEvent event(TraceLogger_AnnotateScripts, script);
528 14 : AutoTraceLog logScript(logger, event);
529 14 : AutoTraceLog logLink(logger, TraceLogger_IonLinking);
530 :
531 7 : if (!codegen->link(cx, builder->constraints()))
532 0 : return false;
533 :
534 7 : return true;
535 : }
536 :
// Link code that was generated on a helper thread. Sets up a main-thread
// JitContext and roots the off-thread assembler before linking.
static bool
LinkBackgroundCodeGen(JSContext* cx, IonBuilder* builder)
{
    CodeGenerator* codegen = builder->backgroundCodegen();
    if (!codegen)
        return false;

    JitContext jctx(cx, &builder->alloc());

    // Root the assembler until the builder is finished below. As it was
    // constructed off thread, the assembler has not been rooted previously,
    // though any GC activity would discard the builder.
    MacroAssembler::AutoRooter masm(cx, &codegen->masm);

    return LinkCodeGen(cx, builder, codegen);
}
553 :
// Pop |calleeScript|'s pending IonBuilder off the helper-thread
// lazy-link list (under the helper-thread lock) and link its generated
// code on the main thread. A link failure is treated as OOM: the
// exception is swallowed and the compilation discarded, since the
// calling JIT code cannot handle an exception here.
void
jit::LinkIonScript(JSContext* cx, HandleScript calleeScript)
{
    IonBuilder* builder;

    {
        AutoLockHelperThreadState lock;

        // Get the pending builder from the Ion frame.
        MOZ_ASSERT(calleeScript->hasBaselineScript());
        builder = calleeScript->baselineScript()->pendingIonBuilder();
        calleeScript->baselineScript()->removePendingIonBuilder(calleeScript);

        // Remove from pending.
        cx->zone()->group()->ionLazyLinkListRemove(builder);
    }

    {
        AutoEnterAnalysis enterTypes(cx);
        if (!LinkBackgroundCodeGen(cx, builder)) {
            // Silently ignore OOM during code generation. The assembly code
            // doesn't have code to handle it after linking happened. So it's
            // not OK to throw a catchable exception from there.
            cx->clearPendingException();

            // Reset the TypeZone's compiler output for this script, if any.
            InvalidateCompilerOutputsForScript(cx, calleeScript);
        }
    }

    {
        AutoLockHelperThreadState lock;
        FinishOffThreadBuilder(cx->runtime(), builder, lock);
    }
}
589 :
// Called from the lazy-link stub: link the Ion code for the script whose
// LazyLink exit frame is at the top of the current activation, and
// return the entry point to jump to.
uint8_t*
jit::LazyLinkTopActivation()
{
    // First frame should be an exit frame.
    JSContext* cx = TlsContext.get();
    JitFrameIterator it(cx);
    LazyLinkExitFrameLayout* ll = it.exitFrame()->as<LazyLinkExitFrameLayout>();
    RootedScript calleeScript(cx, ScriptFromCalleeToken(ll->jsFrame()->calleeToken()));

    LinkIonScript(cx, calleeScript);

    MOZ_ASSERT(calleeScript->hasBaselineScript());
    MOZ_ASSERT(calleeScript->baselineOrIonRawPointer());

    return calleeScript->baselineOrIonRawPointer();
}
606 :
// Trace all JitCode cells living in the atoms zone (the shared stubs
// emitted by JitRuntime::initialize), keeping them alive across GC.
/* static */ void
JitRuntime::Trace(JSTracer* trc, AutoLockForExclusiveAccess& lock)
{
    MOZ_ASSERT(!JS::CurrentThreadIsHeapMinorCollecting());

    // Shared stubs are allocated in the atoms compartment, so do not
    // iterate over the atoms heap after it has been "finished."
    if (trc->runtime()->atomsAreFinished())
        return;

    Zone* zone = trc->runtime()->atomsCompartment(lock)->zone();
    for (auto i = zone->cellIter<JitCode>(); !i.done(); i.next()) {
        JitCode* code = i;
        TraceRoot(trc, &code, "wrapper");
    }
}
623 :
624 : /* static */ void
625 21 : JitRuntime::TraceJitcodeGlobalTableForMinorGC(JSTracer* trc)
626 : {
627 42 : if (trc->runtime()->geckoProfiler().enabled() &&
628 21 : trc->runtime()->hasJitRuntime() &&
629 0 : trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
630 : {
631 0 : trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->traceForMinorGC(trc);
632 : }
633 21 : }
634 :
635 : /* static */ bool
636 0 : JitRuntime::MarkJitcodeGlobalTableIteratively(GCMarker* marker)
637 : {
638 0 : if (marker->runtime()->hasJitRuntime() &&
639 0 : marker->runtime()->jitRuntime()->hasJitcodeGlobalTable())
640 : {
641 0 : return marker->runtime()->jitRuntime()->getJitcodeGlobalTable()->markIteratively(marker);
642 : }
643 0 : return false;
644 : }
645 :
646 : /* static */ void
647 0 : JitRuntime::SweepJitcodeGlobalTable(JSRuntime* rt)
648 : {
649 0 : if (rt->hasJitRuntime() && rt->jitRuntime()->hasJitcodeGlobalTable())
650 0 : rt->jitRuntime()->getJitcodeGlobalTable()->sweep(rt);
651 0 : }
652 :
653 : void
654 0 : JitCompartment::sweep(FreeOp* fop, JSCompartment* compartment)
655 : {
656 : // Any outstanding compilations should have been cancelled by the GC.
657 0 : MOZ_ASSERT(!HasOffThreadIonCompile(compartment));
658 :
659 0 : stubCodes_->sweep();
660 :
661 : // If the sweep removed a bailout Fallback stub, nullptr the corresponding return addr.
662 0 : for (auto& it : bailoutReturnStubInfo_) {
663 0 : if (!stubCodes_->lookup(it.key))
664 0 : it = BailoutReturnStubInfo();
665 : }
666 :
667 0 : if (stringConcatStub_ && IsAboutToBeFinalizedUnbarriered(&stringConcatStub_))
668 0 : stringConcatStub_ = nullptr;
669 :
670 0 : if (regExpMatcherStub_ && IsAboutToBeFinalizedUnbarriered(®ExpMatcherStub_))
671 0 : regExpMatcherStub_ = nullptr;
672 :
673 0 : if (regExpSearcherStub_ && IsAboutToBeFinalizedUnbarriered(®ExpSearcherStub_))
674 0 : regExpSearcherStub_ = nullptr;
675 :
676 0 : if (regExpTesterStub_ && IsAboutToBeFinalizedUnbarriered(®ExpTesterStub_))
677 0 : regExpTesterStub_ = nullptr;
678 :
679 0 : for (ReadBarrieredObject& obj : simdTemplateObjects_) {
680 0 : if (obj && IsAboutToBeFinalized(&obj))
681 0 : obj.set(nullptr);
682 : }
683 0 : }
684 :
// Sweep dead entries out of the per-zone baseline CacheIR stub-code map.
void
JitZone::sweep(FreeOp* fop)
{
    baselineCacheIRStubCodes_.sweep();
}
690 :
691 : size_t
692 0 : JitCompartment::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
693 : {
694 0 : size_t n = mallocSizeOf(this);
695 0 : if (stubCodes_)
696 0 : n += stubCodes_->sizeOfIncludingThis(mallocSizeOf);
697 0 : return n;
698 : }
699 :
// Memory-reporter hook: accumulate this zone's JIT memory into the three
// out-params (general jit-zone data, optimized baseline stubs, cached
// control-flow graphs).
void
JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                size_t* jitZone,
                                size_t* baselineStubsOptimized,
                                size_t* cachedCFG) const
{
    *jitZone += mallocSizeOf(this);
    *jitZone += baselineCacheIRStubCodes_.sizeOfExcludingThis(mallocSizeOf);
    *jitZone += ionCacheIRStubInfoSet_.sizeOfExcludingThis(mallocSizeOf);

    *baselineStubsOptimized += optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
    *cachedCFG += cfgSpace_.sizeOfExcludingThis(mallocSizeOf);
}
713 :
// Return the bailout table generated for |frameClass| (see initialize();
// tables exist only for concrete frame-size classes, not None).
JitCode*
JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
{
    MOZ_ASSERT(frameClass != FrameSizeClass::None());
    return bailoutTables_.ref()[frameClass.classId()];
}
720 :
// Look up the trampoline wrapper generated for VM function |f|. All
// wrappers were created in JitRuntime::initialize, so the lookup must
// succeed; the map is read-only here, making the lookup thread-safe.
JitCode*
JitRuntime::getVMWrapper(const VMFunction& f) const
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
    MOZ_ASSERT(p);

    return p->value();
}
731 :
// Allocate a JitCode GC-cell describing the executable buffer
// |code|/|bufferSize| owned by |pool|. If the GC allocation fails, the
// reserved executable memory is released before returning nullptr so it
// is not leaked.
template <AllowGC allowGC>
JitCode*
JitCode::New(JSContext* cx, uint8_t* code, uint32_t bufferSize, uint32_t headerSize,
             ExecutablePool* pool, CodeKind kind)
{
    JitCode* codeObj = Allocate<JitCode, allowGC>(cx);
    if (!codeObj) {
        pool->release(headerSize + bufferSize, kind);
        return nullptr;
    }

    new (codeObj) JitCode(code, bufferSize, headerSize, pool, kind);
    return codeObj;
}

// Explicit instantiations for the two GC-permission policies.
template
JitCode*
JitCode::New<CanGC>(JSContext* cx, uint8_t* code, uint32_t bufferSize, uint32_t headerSize,
                    ExecutablePool* pool, CodeKind kind);

template
JitCode*
JitCode::New<NoGC>(JSContext* cx, uint8_t* code, uint32_t bufferSize, uint32_t headerSize,
                   ExecutablePool* pool, CodeKind kind);
756 :
// Copy the finished assembly and its jump/data relocation tables out of
// |masm| into this code object's buffer, then resolve code labels.
void
JitCode::copyFrom(MacroAssembler& masm)
{
    // Store the JitCode pointer right before the code buffer, so we can
    // recover the gcthing from relocation tables.
    *(JitCode**)(code_ - sizeof(JitCode*)) = this;
    insnSize_ = masm.instructionsSize();
    masm.executableCopy(code_);

    jumpRelocTableBytes_ = masm.jumpRelocationTableBytes();
    masm.copyJumpRelocationTable(code_ + jumpRelocTableOffset());

    dataRelocTableBytes_ = masm.dataRelocationTableBytes();
    masm.copyDataRelocationTable(code_ + dataRelocTableOffset());

    masm.processCodeLabels(code_);
}
774 :
// Trace GC things referenced from this code object via its jump and
// data relocation tables.
void
JitCode::traceChildren(JSTracer* trc)
{
    // Note that we cannot mark invalidated scripts, since we've basically
    // corrupted the code stream by injecting bailouts.
    if (invalidated())
        return;

    if (jumpRelocTableBytes_) {
        uint8_t* start = code_ + jumpRelocTableOffset();
        CompactBufferReader reader(start, start + jumpRelocTableBytes_);
        MacroAssembler::TraceJumpRelocations(trc, this, reader);
    }
    if (dataRelocTableBytes_) {
        // If we're moving objects, we need writable JIT code.
        bool movingObjects = JS::CurrentThreadIsHeapMinorCollecting() || zone()->isGCCompacting();
        MaybeAutoWritableJitCode awjc(this, movingObjects ? Reprotect : DontReprotect);

        uint8_t* start = code_ + dataRelocTableOffset();
        CompactBufferReader reader(start, start + dataRelocTableBytes_);
        MacroAssembler::TraceDataRelocations(trc, this, reader);
    }
}
798 :
// Finalizer: release this code object's executable memory back to its
// ExecutablePool, recording the range for deferred poisoning.
void
JitCode::finalize(FreeOp* fop)
{
    // If this jitcode had a bytecode map, it must have already been removed.
#ifdef DEBUG
    JSRuntime* rt = fop->runtime();
    if (hasBytecodeMap_) {
        MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
        MOZ_ASSERT(!rt->jitRuntime()->getJitcodeGlobalTable()->lookup(raw()));
    }
#endif

#ifdef MOZ_VTUNE
    vtune::UnmarkCode(this);
#endif

    MOZ_ASSERT(pool_);

    // With W^X JIT code, reprotecting memory for each JitCode instance is
    // slow, so we record the ranges and poison them later all at once. It's
    // safe to ignore OOM here, it just means we won't poison the code.
    if (fop->appendJitPoisonRange(JitPoisonRange(pool_, code_ - headerSize_,
                                                 headerSize_ + bufferSize_)))
    {
        // The pool must outlive the deferred poisoning pass.
        pool_->addRef();
    }
    code_ = nullptr;

    // Code buffers are stored inside ExecutablePools. Pools are refcounted.
    // Releasing the pool may free it. Horrible hack: if we are using perf
    // integration, we don't want to reuse code addresses, so we just leak the
    // memory instead.
    if (!PerfEnabled())
        pool_->release(headerSize_ + bufferSize_, CodeKind(kind_));

    pool_ = nullptr;
}
836 :
// Default-construct with every field zeroed/null; the real offsets and
// sizes are filled in by IonScript::New after placement-new.
IonScript::IonScript()
  : method_(nullptr),
    deoptTable_(nullptr),
    osrPc_(nullptr),
    osrEntryOffset_(0),
    skipArgCheckEntryOffset_(0),
    invalidateEpilogueOffset_(0),
    invalidateEpilogueDataOffset_(0),
    numBailouts_(0),
    hasProfilingInstrumentation_(false),
    recompiling_(false),
    runtimeData_(0),
    runtimeSize_(0),
    icIndex_(0),
    icEntries_(0),
    safepointIndexOffset_(0),
    safepointIndexEntries_(0),
    safepointsStart_(0),
    safepointsSize_(0),
    frameSlots_(0),
    frameSize_(0),
    bailoutTable_(0),
    bailoutEntries_(0),
    osiIndexOffset_(0),
    osiIndexEntries_(0),
    snapshots_(0),
    snapshotsListSize_(0),
    snapshotsRVATableSize_(0),
    constantTable_(0),
    constantEntries_(0),
    backedgeList_(0),
    backedgeEntries_(0),
    invalidationCount_(0),
    recompileInfo_(),
    osrPcMismatchCounter_(0),
    fallbackStubSpace_()
{
}
875 :
// Allocate an IonScript together with all of its variable-length tables
// in one contiguous allocation. Each table is laid out after the
// IonScript header at a pointer-aligned offset; that offset and the
// entry count are recorded on the script. Returns nullptr on OOM (with
// the error reported to |cx|).
IonScript*
IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
               uint32_t frameSlots, uint32_t argumentSlots, uint32_t frameSize,
               size_t snapshotsListSize, size_t snapshotsRVATableSize,
               size_t recoversSize, size_t bailoutEntries,
               size_t constants, size_t safepointIndices,
               size_t osiIndices, size_t icEntries,
               size_t runtimeSize, size_t safepointsSize,
               size_t backedgeEntries, size_t sharedStubEntries,
               OptimizationLevel optimizationLevel)
{
    constexpr size_t DataAlignment = sizeof(void*);

    // Guard the two largest inputs against overflowing the offset math.
    if (snapshotsListSize >= MAX_BUFFER_SIZE ||
        (bailoutEntries >= MAX_BUFFER_SIZE / sizeof(uint32_t)))
    {
        ReportOutOfMemory(cx);
        return nullptr;
    }

    // This should not overflow on x86, because the memory is already allocated
    // *somewhere* and if their total overflowed there would be no memory left
    // at all.
    size_t paddedSnapshotsSize = AlignBytes(snapshotsListSize + snapshotsRVATableSize, DataAlignment);
    size_t paddedRecoversSize = AlignBytes(recoversSize, DataAlignment);
    size_t paddedBailoutSize = AlignBytes(bailoutEntries * sizeof(uint32_t), DataAlignment);
    size_t paddedConstantsSize = AlignBytes(constants * sizeof(Value), DataAlignment);
    size_t paddedSafepointIndicesSize = AlignBytes(safepointIndices * sizeof(SafepointIndex), DataAlignment);
    size_t paddedOsiIndicesSize = AlignBytes(osiIndices * sizeof(OsiIndex), DataAlignment);
    size_t paddedICEntriesSize = AlignBytes(icEntries * sizeof(uint32_t), DataAlignment);
    size_t paddedRuntimeSize = AlignBytes(runtimeSize, DataAlignment);
    size_t paddedSafepointSize = AlignBytes(safepointsSize, DataAlignment);
    size_t paddedBackedgeSize = AlignBytes(backedgeEntries * sizeof(PatchableBackedge), DataAlignment);
    size_t paddedSharedStubSize = AlignBytes(sharedStubEntries * sizeof(IonICEntry), DataAlignment);

    size_t bytes = paddedSnapshotsSize +
                   paddedRecoversSize +
                   paddedBailoutSize +
                   paddedConstantsSize +
                   paddedSafepointIndicesSize +
                   paddedOsiIndicesSize +
                   paddedICEntriesSize +
                   paddedRuntimeSize +
                   paddedSafepointSize +
                   paddedBackedgeSize +
                   paddedSharedStubSize;
    IonScript* script = cx->zone()->pod_malloc_with_extra<IonScript, uint8_t>(bytes);
    if (!script)
        return nullptr;
    new (script) IonScript();

    // Lay out each table after the header, bumping the cursor by the
    // padded size so every table stays pointer-aligned.
    uint32_t offsetCursor = sizeof(IonScript);

    script->runtimeData_ = offsetCursor;
    script->runtimeSize_ = runtimeSize;
    offsetCursor += paddedRuntimeSize;

    script->icIndex_ = offsetCursor;
    script->icEntries_ = icEntries;
    offsetCursor += paddedICEntriesSize;

    script->safepointIndexOffset_ = offsetCursor;
    script->safepointIndexEntries_ = safepointIndices;
    offsetCursor += paddedSafepointIndicesSize;

    script->safepointsStart_ = offsetCursor;
    script->safepointsSize_ = safepointsSize;
    offsetCursor += paddedSafepointSize;

    script->bailoutTable_ = offsetCursor;
    script->bailoutEntries_ = bailoutEntries;
    offsetCursor += paddedBailoutSize;

    script->osiIndexOffset_ = offsetCursor;
    script->osiIndexEntries_ = osiIndices;
    offsetCursor += paddedOsiIndicesSize;

    script->snapshots_ = offsetCursor;
    script->snapshotsListSize_ = snapshotsListSize;
    script->snapshotsRVATableSize_ = snapshotsRVATableSize;
    offsetCursor += paddedSnapshotsSize;

    script->recovers_ = offsetCursor;
    script->recoversSize_ = recoversSize;
    offsetCursor += paddedRecoversSize;

    script->constantTable_ = offsetCursor;
    script->constantEntries_ = constants;
    offsetCursor += paddedConstantsSize;

    script->backedgeList_ = offsetCursor;
    script->backedgeEntries_ = backedgeEntries;
    offsetCursor += paddedBackedgeSize;

    script->sharedStubList_ = offsetCursor;
    script->sharedStubEntries_ = sharedStubEntries;
    offsetCursor += paddedSharedStubSize;

    script->frameSlots_ = frameSlots;
    script->argumentSlots_ = argumentSlots;

    script->frameSize_ = frameSize;

    script->recompileInfo_ = recompileInfo;
    script->optimizationLevel_ = optimizationLevel;

    return script;
}
984 :
985 : void
986 5 : IonScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
987 :
988 : {
989 5 : fallbackStubSpace()->adoptFrom(stubSpace);
990 5 : }
991 :
992 : void
993 0 : IonScript::trace(JSTracer* trc)
994 : {
995 0 : if (method_)
996 0 : TraceEdge(trc, &method_, "method");
997 :
998 0 : if (deoptTable_)
999 0 : TraceEdge(trc, &deoptTable_, "deoptimizationTable");
1000 :
1001 0 : for (size_t i = 0; i < numConstants(); i++)
1002 0 : TraceEdge(trc, &getConstant(i), "constant");
1003 :
1004 : // Mark all IC stub codes hanging off the IC stub entries.
1005 0 : for (size_t i = 0; i < numSharedStubs(); i++) {
1006 0 : IonICEntry& ent = sharedStubList()[i];
1007 0 : ent.trace(trc);
1008 : }
1009 :
1010 : // Trace caches so that the JSScript pointer can be updated if moved.
1011 0 : for (size_t i = 0; i < numICs(); i++)
1012 0 : getICFromIndex(i).trace(trc);
1013 0 : }
1014 :
1015 : /* static */ void
1016 5 : IonScript::writeBarrierPre(Zone* zone, IonScript* ionScript)
1017 : {
1018 5 : if (zone->needsIncrementalBarrier())
1019 0 : ionScript->trace(zone->barrierTracer());
1020 5 : }
1021 :
1022 : void
1023 5 : IonScript::copySnapshots(const SnapshotWriter* writer)
1024 : {
1025 5 : MOZ_ASSERT(writer->listSize() == snapshotsListSize_);
1026 10 : memcpy((uint8_t*)this + snapshots_,
1027 15 : writer->listBuffer(), snapshotsListSize_);
1028 :
1029 5 : MOZ_ASSERT(snapshotsRVATableSize_);
1030 5 : MOZ_ASSERT(writer->RVATableSize() == snapshotsRVATableSize_);
1031 10 : memcpy((uint8_t*)this + snapshots_ + snapshotsListSize_,
1032 15 : writer->RVATableBuffer(), snapshotsRVATableSize_);
1033 5 : }
1034 :
1035 : void
1036 5 : IonScript::copyRecovers(const RecoverWriter* writer)
1037 : {
1038 5 : MOZ_ASSERT(writer->size() == recoversSize_);
1039 5 : memcpy((uint8_t*)this + recovers_, writer->buffer(), recoversSize_);
1040 5 : }
1041 :
1042 : void
1043 4 : IonScript::copySafepoints(const SafepointWriter* writer)
1044 : {
1045 4 : MOZ_ASSERT(writer->size() == safepointsSize_);
1046 4 : memcpy((uint8_t*)this + safepointsStart_, writer->buffer(), safepointsSize_);
1047 4 : }
1048 :
1049 : void
1050 0 : IonScript::copyBailoutTable(const SnapshotOffset* table)
1051 : {
1052 0 : memcpy(bailoutTable(), table, bailoutEntries_ * sizeof(uint32_t));
1053 0 : }
1054 :
1055 : void
1056 3 : IonScript::copyConstants(const Value* vp)
1057 : {
1058 18 : for (size_t i = 0; i < constantEntries_; i++)
1059 15 : constants()[i].init(vp[i]);
1060 3 : }
1061 :
void
IonScript::copyPatchableBackedges(JSContext* cx, JitCode* code,
                                  PatchableBackedgeInfo* backedges,
                                  MacroAssembler& masm)
{
    // Materialize the PatchableBackedge entries in this script's trailing
    // data and register each one with the zone group's backedge list.
    JitZoneGroup* jzg = cx->zone()->group()->jitZoneGroup;
    // RAII: block concurrent backedge patching while new entries are linked
    // in and patched below.
    JitRuntime::AutoPreventBackedgePatching apbp(cx->runtime());

    for (size_t i = 0; i < backedgeEntries_; i++) {
        PatchableBackedgeInfo& info = backedges[i];
        PatchableBackedge* patchableBackedge = &backedgeList()[i];

        // Resolve the assembler-relative jump offset against the final code.
        info.backedge.fixup(&masm);
        CodeLocationJump backedge(code, info.backedge);
        CodeLocationLabel loopHeader(code, CodeOffset(info.loopHeader->offset()));
        CodeLocationLabel interruptCheck(code, CodeOffset(info.interruptCheck->offset()));
        // Placement-new into the trailing allocation reserved at
        // IonScript::New time.
        new(patchableBackedge) PatchableBackedge(backedge, loopHeader, interruptCheck);

        // Point the backedge to either of its possible targets, matching the
        // other backedges in the runtime.
        if (jzg->backedgeTarget() == JitZoneGroup::BackedgeInterruptCheck)
            PatchBackedge(backedge, interruptCheck, JitZoneGroup::BackedgeInterruptCheck);
        else
            PatchBackedge(backedge, loopHeader, JitZoneGroup::BackedgeLoopHeader);

        jzg->addPatchableBackedge(cx->runtime()->jitRuntime(), patchableBackedge);
    }
}
1090 :
void
IonScript::copySafepointIndices(const SafepointIndex* si, MacroAssembler& masm)
{
    // Copy the safepoint index table into this script's trailing data.
    // NOTE(review): the previous comment here described rebasing jump
    // offsets, but no such fixup happens below; |masm| is unused and appears
    // to be kept only for signature symmetry with copyICEntries — confirm.
    SafepointIndex* table = safepointIndices();
    memcpy(table, si, safepointIndexEntries_ * sizeof(SafepointIndex));
}
1100 :
void
IonScript::copyOsiIndices(const OsiIndex* oi, MacroAssembler& masm)
{
    // Copy the OSI index table into this script's trailing data.
    // NOTE(review): |masm| is unused here; presumably kept for signature
    // symmetry with the other copy methods — confirm.
    memcpy(osiIndices(), oi, osiIndexEntries_ * sizeof(OsiIndex));
}
1106 :
void
IonScript::copyRuntimeData(const uint8_t* data)
{
    // Copy runtimeSize() bytes of runtime data verbatim into the trailing
    // allocation reserved at IonScript::New time.
    memcpy(runtimeData(), data, runtimeSize());
}
1112 :
1113 : void
1114 4 : IonScript::copyICEntries(const uint32_t* icEntries, MacroAssembler& masm)
1115 : {
1116 4 : memcpy(icIndex(), icEntries, numICs() * sizeof(uint32_t));
1117 :
1118 : // Jumps in the caches reflect the offset of those jumps in the compiled
1119 : // code, not the absolute positions of the jumps. Update according to the
1120 : // final code address now.
1121 18 : for (size_t i = 0; i < numICs(); i++)
1122 14 : getICFromIndex(i).updateBaseAddress(method_, masm);
1123 4 : }
1124 :
1125 : const SafepointIndex*
1126 840 : IonScript::getSafepointIndex(uint32_t disp) const
1127 : {
1128 840 : MOZ_ASSERT(safepointIndexEntries_ > 0);
1129 :
1130 840 : const SafepointIndex* table = safepointIndices();
1131 840 : if (safepointIndexEntries_ == 1) {
1132 0 : MOZ_ASSERT(disp == table[0].displacement());
1133 0 : return &table[0];
1134 : }
1135 :
1136 840 : size_t minEntry = 0;
1137 840 : size_t maxEntry = safepointIndexEntries_ - 1;
1138 840 : uint32_t min = table[minEntry].displacement();
1139 840 : uint32_t max = table[maxEntry].displacement();
1140 :
1141 : // Raise if the element is not in the list.
1142 840 : MOZ_ASSERT(min <= disp && disp <= max);
1143 :
1144 : // Approximate the location of the FrameInfo.
1145 840 : size_t guess = (disp - min) * (maxEntry - minEntry) / (max - min) + minEntry;
1146 840 : uint32_t guessDisp = table[guess].displacement();
1147 :
1148 840 : if (table[guess].displacement() == disp)
1149 0 : return &table[guess];
1150 :
1151 : // Doing a linear scan from the guess should be more efficient in case of
1152 : // small group which are equally distributed on the code.
1153 : //
1154 : // such as: <... ... ... ... . ... ...>
1155 840 : if (guessDisp > disp) {
1156 5880 : while (--guess >= minEntry) {
1157 3360 : guessDisp = table[guess].displacement();
1158 3360 : MOZ_ASSERT(guessDisp >= disp);
1159 3360 : if (guessDisp == disp)
1160 840 : return &table[guess];
1161 : }
1162 : } else {
1163 0 : while (++guess <= maxEntry) {
1164 0 : guessDisp = table[guess].displacement();
1165 0 : MOZ_ASSERT(guessDisp <= disp);
1166 0 : if (guessDisp == disp)
1167 0 : return &table[guess];
1168 : }
1169 : }
1170 :
1171 0 : MOZ_CRASH("displacement not found.");
1172 : }
1173 :
1174 : const OsiIndex*
1175 840 : IonScript::getOsiIndex(uint32_t disp) const
1176 : {
1177 840 : const OsiIndex* end = osiIndices() + osiIndexEntries_;
1178 8400 : for (const OsiIndex* it = osiIndices(); it != end; ++it) {
1179 8400 : if (it->returnPointDisplacement() == disp)
1180 1680 : return it;
1181 : }
1182 :
1183 0 : MOZ_CRASH("Failed to find OSI point return address");
1184 : }
1185 :
const OsiIndex*
IonScript::getOsiIndex(uint8_t* retAddr) const
{
    // Map a return address inside this script's jitcode to its OSI index by
    // converting the address to a displacement from the code start.
    JitSpew(JitSpew_IonInvalidate, "IonScript %p has method %p raw %p", (void*) this, (void*)
            method(), method()->raw());

    MOZ_ASSERT(containsCodeAddress(retAddr));
    uint32_t disp = retAddr - method()->raw();
    return getOsiIndex(disp);
}
1196 :
1197 : void
1198 0 : IonScript::Trace(JSTracer* trc, IonScript* script)
1199 : {
1200 0 : if (script != ION_DISABLED_SCRIPT)
1201 0 : script->trace(trc);
1202 0 : }
1203 :
void
IonScript::Destroy(FreeOp* fop, IonScript* script)
{
    // Unregister patchable backedges first, so freeing the script cannot
    // leave the runtime pointing at reclaimed backedge list entries.
    script->unlinkFromRuntime(fop);

    /*
     * When the script contains pointers to nursery things, the store buffer can
     * contain entries that point into the fallback stub space. Since we can
     * destroy scripts outside the context of a GC, this situation could result
     * in us trying to mark invalid store buffer entries.
     *
     * Defer freeing any allocated blocks until after the next minor GC.
     */
    script->fallbackStubSpace_.freeAllAfterMinorGC(script->method()->zone());

    fop->delete_(script);
}
1221 :
void
JS::DeletePolicy<js::jit::IonScript>::operator()(const js::jit::IonScript* script)
{
    // UniquePtr-style deleter: route destruction through IonScript::Destroy
    // so backedges are unlinked and the fallback stub space is torn down.
    IonScript::Destroy(rt_->defaultFreeOp(), const_cast<IonScript*>(script));
}
1227 :
void
IonScript::purgeOptimizedStubs(Zone* zone)
{
    // Unlink every IC stub allocated outside the fallback stub space, leaving
    // only fallback-space stubs in each chain.
    for (size_t i = 0; i < numSharedStubs(); i++) {
        IonICEntry& entry = sharedStubList()[i];
        if (!entry.hasStub())
            continue;

        // Walk to the last stub in the chain; its kind decides how the chain
        // is purged below.
        ICStub* lastStub = entry.firstStub();
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            ICStub* stub = entry.firstStub();
            ICStub* prev = nullptr;

            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    // |prev| deliberately stays put: the unlinked stub is no
                    // longer between |prev| and the next candidate.
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }

                prev = stub;
                stub = stub->next();
            }

            lastStub->toFallbackStub()->setInvalid();

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback* lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
                lastMonStub->setInvalid();
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
            lastStub->toTypeMonitor_Fallback()->setInvalid();
        } else {
            // The only other chain shape expected here ends in a TableSwitch.
            MOZ_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numSharedStubs(); i++) {
        IonICEntry& entry = sharedStubList()[i];
        if (!entry.hasStub())
            continue;

        ICStub* stub = entry.firstStub();
        while (stub->next()) {
            MOZ_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}
1289 :
1290 : void
1291 0 : IonScript::purgeICs(Zone* zone)
1292 : {
1293 0 : for (size_t i = 0; i < numICs(); i++)
1294 0 : getICFromIndex(i).reset(zone);
1295 0 : }
1296 :
void
IonScript::unlinkFromRuntime(FreeOp* fop)
{
    // The writes to the executable buffer below may clobber backedge jumps, so
    // make sure that those backedges are unlinked from the runtime and not
    // reclobbered with garbage if an interrupt is requested.
    JitZoneGroup* jzg = method()->zone()->group()->jitZoneGroup;
    // RAII: hold off concurrent backedge patching while entries are removed.
    JitRuntime::AutoPreventBackedgePatching apbp(fop->runtime());
    for (size_t i = 0; i < backedgeEntries_; i++)
        jzg->removePatchableBackedge(fop->runtime()->jitRuntime(), &backedgeList()[i]);

    // Clear the list of backedges, so that this method is idempotent. It is
    // called during destruction, and may be additionally called when the
    // script is invalidated.
    backedgeEntries_ = 0;
}
1313 :
1314 : namespace js {
1315 : namespace jit {
1316 :
// Replace sin(x)/cos(x) pairs that share an argument with a single sincos(x)
// computation feeding both. Only pairs whose uses are dominated by the block
// containing the first occurrence are rewritten.
static void
OptimizeSinCos(MIRGenerator *mir, MIRGraph &graph)
{
    // Now, we are looking for:
    // var y = sin(x);
    // var z = cos(x);
    // Graph before:
    // - 1 op
    // - 6 mathfunction op1 Sin
    // - 7 mathfunction op1 Cos
    // Graph will look like:
    // - 1 op
    // - 5 sincos op1
    // - 6 mathfunction sincos5 Sin
    // - 7 mathfunction sincos5 Cos
    for (MBasicBlockIterator block(graph.begin()); block != graph.end(); block++) {
        for (MInstructionIterator iter(block->begin()), end(block->end()); iter != end; ) {
            MInstruction *ins = *iter++;
            if (!ins->isMathFunction() || ins->isRecoveredOnBailout())
                continue;

            MMathFunction *insFunc = ins->toMathFunction();
            if (insFunc->function() != MMathFunction::Sin && insFunc->function() != MMathFunction::Cos)
                continue;

            // Check if sin/cos is already optimized.
            if (insFunc->getOperand(0)->type() == MIRType::SinCosDouble)
                continue;

            // insFunc is either a |sin(x)| or |cos(x)| instruction. The
            // following loop iterates over the uses of |x| to check if both
            // |sin(x)| and |cos(x)| instructions exist.
            bool hasSin = false;
            bool hasCos = false;
            for (MUseDefIterator uses(insFunc->input()); uses; uses++)
            {
                if (!uses.def()->isInstruction())
                    continue;

                // Only consider a use of the argument when it is dominated by
                // |block|, since only those uses may be replaced below.
                if (!block->dominates(uses.def()->block()))
                    continue;

                MInstruction *insUse = uses.def()->toInstruction();
                if (!insUse->isMathFunction() || insUse->isRecoveredOnBailout())
                    continue;

                MMathFunction *mathIns = insUse->toMathFunction();
                if (!hasSin && mathIns->function() == MMathFunction::Sin) {
                    hasSin = true;
                    JitSpew(JitSpew_Sincos, "Found sin in block %d.", mathIns->block()->id());
                }
                else if (!hasCos && mathIns->function() == MMathFunction::Cos) {
                    hasCos = true;
                    JitSpew(JitSpew_Sincos, "Found cos in block %d.", mathIns->block()->id());
                }

                if (hasCos && hasSin)
                    break;
            }

            if (!hasCos || !hasSin) {
                JitSpew(JitSpew_Sincos, "No sin/cos pair found.");
                continue;
            }

            JitSpew(JitSpew_Sincos, "Found, at least, a pair sin/cos. Adding sincos in block %d",
                    block->id());
            // Adding the MSinCos and replacing the parameters of the
            // sin(x)/cos(x) to sin(sincos(x))/cos(sincos(x)).
            MSinCos *insSinCos = MSinCos::New(graph.alloc(),
                                              insFunc->input(),
                                              insFunc->toMathFunction()->cache());
            insSinCos->setImplicitlyUsedUnchecked();
            block->insertBefore(insFunc, insSinCos);
            // Second pass over the same uses: retarget each dominated sin/cos
            // to read from the new sincos instruction. The iterator is
            // advanced before mutation because replaceOperand edits use lists.
            for (MUseDefIterator uses(insFunc->input()); uses; )
            {
                MDefinition* def = uses.def();
                uses++;
                if (!def->isInstruction())
                    continue;

                // Only replace the argument of a sin/cos use when it is
                // dominated by |block|.
                if (!block->dominates(def->block()))
                    continue;

                MInstruction *insUse = def->toInstruction();
                if (!insUse->isMathFunction() || insUse->isRecoveredOnBailout())
                    continue;

                MMathFunction *mathIns = insUse->toMathFunction();
                if (mathIns->function() != MMathFunction::Sin && mathIns->function() != MMathFunction::Cos)
                    continue;

                mathIns->replaceOperand(0, insSinCos);
                JitSpew(JitSpew_Sincos, "Replacing %s by sincos in block %d",
                        mathIns->function() == MMathFunction::Sin ? "sin" : "cos",
                        mathIns->block()->id());
            }
        }
    }
}
1421 :
// Run the MIR optimization pipeline over |mir|'s graph. Every pass follows the
// same protocol: run the pass, spew the resulting graph for the IonGraph
// tooling, assert graph coherency, then honor any pending cancellation
// request. Pass ordering is deliberate and load-bearing (see inline notes).
// Returns false on failure or when the compilation has been cancelled.
bool
OptimizeMIR(MIRGenerator* mir)
{
    MIRGraph& graph = mir->graph();
    GraphSpewer& gs = mir->graphSpewer();
    TraceLoggerThread* logger = TraceLoggerForCurrentThread();

    if (mir->shouldCancel("Start"))
        return false;

    if (!mir->compilingWasm()) {
        if (!MakeMRegExpHoistable(mir, graph))
            return false;

        if (mir->shouldCancel("Make MRegExp Hoistable"))
            return false;
    }

    gs.spewPass("BuildSSA");
    AssertBasicGraphCoherency(graph);

    if (!JitOptions.disablePgo && !mir->compilingWasm()) {
        AutoTraceLog log(logger, TraceLogger_PruneUnusedBranches);
        if (!PruneUnusedBranches(mir, graph))
            return false;
        gs.spewPass("Prune Unused Branches");
        AssertBasicGraphCoherency(graph);

        if (mir->shouldCancel("Prune Unused Branches"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_FoldEmptyBlocks);
        if (!FoldEmptyBlocks(graph))
            return false;
        gs.spewPass("Fold Empty Blocks");
        AssertBasicGraphCoherency(graph);

        if (mir->shouldCancel("Fold Empty Blocks"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_FoldTests);
        if (!FoldTests(graph))
            return false;
        gs.spewPass("Fold Tests");
        AssertBasicGraphCoherency(graph);

        if (mir->shouldCancel("Fold Tests"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_SplitCriticalEdges);
        if (!SplitCriticalEdges(graph))
            return false;
        gs.spewPass("Split Critical Edges");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Split Critical Edges"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_RenumberBlocks);
        RenumberBlocks(graph);
        gs.spewPass("Renumber Blocks");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Renumber Blocks"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_DominatorTree);
        if (!BuildDominatorTree(graph))
            return false;
        // No spew: graph not changed.

        if (mir->shouldCancel("Dominator Tree"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_PhiAnalysis);
        // Aggressive phi elimination must occur before any code elimination. If the
        // script contains a try-statement, we only compiled the try block and not
        // the catch or finally blocks, so in this case it's also invalid to use
        // aggressive phi elimination.
        Observability observability = graph.hasTryBlock()
                                      ? ConservativeObservability
                                      : AggressiveObservability;
        if (!EliminatePhis(mir, graph, observability))
            return false;
        gs.spewPass("Eliminate phis");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Eliminate phis"))
            return false;

        if (!BuildPhiReverseMapping(graph))
            return false;
        AssertExtendedGraphCoherency(graph);
        // No spew: graph not changed.

        if (mir->shouldCancel("Phi reverse mapping"))
            return false;
    }

    if (!JitOptions.disableRecoverIns && mir->optimizationInfo().scalarReplacementEnabled()) {
        AutoTraceLog log(logger, TraceLogger_ScalarReplacement);
        if (!ScalarReplacement(mir, graph))
            return false;
        gs.spewPass("Scalar Replacement");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Scalar Replacement"))
            return false;
    }

    if (!mir->compilingWasm()) {
        AutoTraceLog log(logger, TraceLogger_ApplyTypes);
        if (!ApplyTypeInformation(mir, graph))
            return false;
        gs.spewPass("Apply types");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Apply types"))
            return false;
    }

    if (!JitOptions.disableRecoverIns && mir->optimizationInfo().eagerSimdUnboxEnabled()) {
        AutoTraceLog log(logger, TraceLogger_EagerSimdUnbox);
        if (!EagerSimdUnbox(mir, graph))
            return false;
        gs.spewPass("Eager Simd Unbox");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Eager Simd Unbox"))
            return false;
    }

    if (mir->optimizationInfo().amaEnabled()) {
        AutoTraceLog log(logger, TraceLogger_AlignmentMaskAnalysis);
        AlignmentMaskAnalysis ama(graph);
        if (!ama.analyze())
            return false;
        gs.spewPass("Alignment Mask Analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Alignment Mask Analysis"))
            return false;
    }

    ValueNumberer gvn(mir, graph);
    if (!gvn.init())
        return false;

    // Alias analysis is required for LICM and GVN so that we don't move
    // loads across stores.
    if (mir->optimizationInfo().licmEnabled() ||
        mir->optimizationInfo().gvnEnabled())
    {
        {
            AutoTraceLog log(logger, TraceLogger_AliasAnalysis);
            if (JitOptions.disableFlowAA) {
                AliasAnalysis analysis(mir, graph);
                if (!analysis.analyze())
                    return false;
            } else {
                FlowAliasAnalysis analysis(mir, graph);
                if (!analysis.analyze())
                    return false;
            }

            gs.spewPass("Alias analysis");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("Alias analysis"))
                return false;
        }

        if (!mir->compilingWasm()) {
            // Eliminating dead resume point operands requires basic block
            // instructions to be numbered. Reuse the numbering computed during
            // alias analysis.
            if (!EliminateDeadResumePointOperands(mir, graph))
                return false;

            if (mir->shouldCancel("Eliminate dead resume point operands"))
                return false;
        }
    }

    if (mir->optimizationInfo().gvnEnabled()) {
        AutoTraceLog log(logger, TraceLogger_GVN);
        if (!gvn.run(ValueNumberer::UpdateAliasAnalysis))
            return false;
        gs.spewPass("GVN");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("GVN"))
            return false;
    }

    if (mir->optimizationInfo().licmEnabled()) {
        AutoTraceLog log(logger, TraceLogger_LICM);
        // LICM can hoist instructions from conditional branches and trigger
        // repeated bailouts. Disable it if this script is known to bailout
        // frequently.
        JSScript* script = mir->info().script();
        if (!script || !script->hadFrequentBailouts()) {
            if (!LICM(mir, graph))
                return false;
            gs.spewPass("LICM");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("LICM"))
                return false;
        }
    }

    // |r| outlives this block: removeUnnecessaryBitops below reuses it.
    RangeAnalysis r(mir, graph);
    if (mir->optimizationInfo().rangeAnalysisEnabled()) {
        AutoTraceLog log(logger, TraceLogger_RangeAnalysis);
        if (!r.addBetaNodes())
            return false;
        gs.spewPass("Beta");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("RA Beta"))
            return false;

        if (!r.analyze() || !r.addRangeAssertions())
            return false;
        gs.spewPass("Range Analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Range Analysis"))
            return false;

        if (!r.removeBetaNodes())
            return false;
        gs.spewPass("De-Beta");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("RA De-Beta"))
            return false;

        if (mir->optimizationInfo().gvnEnabled()) {
            bool shouldRunUCE = false;
            if (!r.prepareForUCE(&shouldRunUCE))
                return false;
            gs.spewPass("RA check UCE");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("RA check UCE"))
                return false;

            if (shouldRunUCE) {
                if (!gvn.run(ValueNumberer::DontUpdateAliasAnalysis))
                    return false;
                gs.spewPass("UCE After RA");
                AssertExtendedGraphCoherency(graph);

                if (mir->shouldCancel("UCE After RA"))
                    return false;
            }
        }

        if (mir->optimizationInfo().autoTruncateEnabled()) {
            if (!r.truncate())
                return false;
            gs.spewPass("Truncate Doubles");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("Truncate Doubles"))
                return false;
        }

        if (mir->optimizationInfo().loopUnrollingEnabled()) {
            AutoTraceLog log(logger, TraceLogger_LoopUnrolling);

            if (!UnrollLoops(graph, r.loopIterationBounds))
                return false;

            gs.spewPass("Unroll Loops");
            AssertExtendedGraphCoherency(graph);
        }
    }

    if (!JitOptions.disableRecoverIns) {
        AutoTraceLog log(logger, TraceLogger_Sink);
        if (!Sink(mir, graph))
            return false;
        gs.spewPass("Sink");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Sink"))
            return false;
    }

    if (!JitOptions.disableRecoverIns && mir->optimizationInfo().rangeAnalysisEnabled()) {
        AutoTraceLog log(logger, TraceLogger_RemoveUnnecessaryBitops);
        if (!r.removeUnnecessaryBitops())
            return false;
        gs.spewPass("Remove Unnecessary Bitops");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Remove Unnecessary Bitops"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_FoldLinearArithConstants);
        if (!FoldLinearArithConstants(mir, graph))
            return false;
        gs.spewPass("Fold Linear Arithmetic Constants");
        AssertBasicGraphCoherency(graph);

        if (mir->shouldCancel("Fold Linear Arithmetic Constants"))
            return false;
    }

    if (mir->optimizationInfo().eaaEnabled()) {
        AutoTraceLog log(logger, TraceLogger_EffectiveAddressAnalysis);
        EffectiveAddressAnalysis eaa(mir, graph);
        if (!eaa.analyze())
            return false;
        gs.spewPass("Effective Address Analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Effective Address Analysis"))
            return false;
    }

    if (mir->optimizationInfo().sincosEnabled()) {
        AutoTraceLog log(logger, TraceLogger_Sincos);
        OptimizeSinCos(mir, graph);
        gs.spewPass("Sincos optimization");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Sincos optimization"))
            return false;
    }

    // BCE marks bounds checks as dead, so do BCE before DCE.
    if (mir->compilingWasm() && !JitOptions.wasmAlwaysCheckBounds) {
        if (!EliminateBoundsChecks(mir, graph))
            return false;
        gs.spewPass("Redundant Bounds Check Elimination");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("BCE"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger_EliminateDeadCode);
        if (!EliminateDeadCode(mir, graph))
            return false;
        gs.spewPass("DCE");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("DCE"))
            return false;
    }

    if (mir->optimizationInfo().instructionReorderingEnabled()) {
        AutoTraceLog log(logger, TraceLogger_ReorderInstructions);
        if (!ReorderInstructions(mir, graph))
            return false;
        gs.spewPass("Reordering");

        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Reordering"))
            return false;
    }

    // Make loops contiguous. We do this after GVN/UCE and range analysis,
    // which can remove CFG edges, exposing more blocks that can be moved.
    {
        AutoTraceLog log(logger, TraceLogger_MakeLoopsContiguous);
        if (!MakeLoopsContiguous(graph))
            return false;
        gs.spewPass("Make loops contiguous");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Make loops contiguous"))
            return false;
    }
    AssertExtendedGraphCoherency(graph, /* underValueNumberer = */ false, /* force = */ true);

    // Passes after this point must not move instructions; these analyses
    // depend on knowing the final order in which instructions will execute.

    if (mir->optimizationInfo().edgeCaseAnalysisEnabled()) {
        AutoTraceLog log(logger, TraceLogger_EdgeCaseAnalysis);
        EdgeCaseAnalysis edgeCaseAnalysis(mir, graph);
        if (!edgeCaseAnalysis.analyzeLate())
            return false;
        gs.spewPass("Edge Case Analysis (Late)");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Edge Case Analysis (Late)"))
            return false;
    }

    if (mir->optimizationInfo().eliminateRedundantChecksEnabled()) {
        AutoTraceLog log(logger, TraceLogger_EliminateRedundantChecks);
        // Note: check elimination has to run after all other passes that move
        // instructions. Since check uses are replaced with the actual index,
        // code motion after this pass could incorrectly move a load or store
        // before its bounds check.
        if (!EliminateRedundantChecks(graph))
            return false;
        gs.spewPass("Bounds Check Elimination");
        AssertGraphCoherency(graph);
    }

    if (!mir->compilingWasm()) {
        AutoTraceLog log(logger, TraceLogger_AddKeepAliveInstructions);
        if (!AddKeepAliveInstructions(graph))
            return false;
        gs.spewPass("Add KeepAlive Instructions");
        AssertGraphCoherency(graph);
    }

    AssertGraphCoherency(graph, /* force = */ true);

    DumpMIRExpressions(graph);

    return true;
}
1859 :
// Lower the optimized MIR graph to LIR and run register allocation with the
// allocator selected by the optimization info. Returns nullptr on failure or
// cancellation; the LIRGraph is allocated from |mir|'s LifoAlloc.
LIRGraph*
GenerateLIR(MIRGenerator* mir)
{
    MIRGraph& graph = mir->graph();
    GraphSpewer& gs = mir->graphSpewer();

    TraceLoggerThread* logger = TraceLoggerForCurrentThread();

    LIRGraph* lir = mir->alloc().lifoAlloc()->new_<LIRGraph>(&graph);
    if (!lir || !lir->init())
        return nullptr;

    LIRGenerator lirgen(mir, graph, *lir);
    {
        AutoTraceLog log(logger, TraceLogger_GenerateLIR);
        if (!lirgen.generate())
            return nullptr;
        gs.spewPass("Generate LIR");

        if (mir->shouldCancel("Generate LIR"))
            return nullptr;
    }

    AllocationIntegrityState integrity(*lir);

    {
        AutoTraceLog log(logger, TraceLogger_RegisterAllocation);

        IonRegisterAllocator allocator = mir->optimizationInfo().registerAllocator();

        switch (allocator) {
          case RegisterAllocator_Backtracking:
          case RegisterAllocator_Testbed: {
#ifdef DEBUG
            // Integrity checking is only a debugging aid for the backtracking
            // allocator, so it is gated on fullDebugChecks.
            if (JitOptions.fullDebugChecks) {
                if (!integrity.record())
                    return nullptr;
            }
#endif

            BacktrackingAllocator regalloc(mir, &lirgen, *lir,
                                           allocator == RegisterAllocator_Testbed);
            if (!regalloc.go())
                return nullptr;

#ifdef DEBUG
            if (JitOptions.fullDebugChecks) {
                if (!integrity.check(false))
                    return nullptr;
            }
#endif

            gs.spewPass("Allocate Registers [Backtracking]");
            break;
          }

          case RegisterAllocator_Stupid: {
            // Use the integrity checker to populate safepoint information, so
            // run it in all builds.
            if (!integrity.record())
                return nullptr;

            StupidAllocator regalloc(mir, &lirgen, *lir);
            if (!regalloc.go())
                return nullptr;
            if (!integrity.check(true))
                return nullptr;
            gs.spewPass("Allocate Registers [Stupid]");
            break;
          }

          default:
            MOZ_CRASH("Bad regalloc");
        }

        if (mir->shouldCancel("Allocate Registers"))
            return nullptr;
    }

    return lir;
}
1941 :
1942 : CodeGenerator*
1943 8 : GenerateCode(MIRGenerator* mir, LIRGraph* lir)
1944 : {
1945 8 : TraceLoggerThread* logger = TraceLoggerForCurrentThread();
1946 16 : AutoTraceLog log(logger, TraceLogger_GenerateCode);
1947 :
1948 8 : CodeGenerator* codegen = js_new<CodeGenerator>(mir, lir);
1949 8 : if (!codegen)
1950 0 : return nullptr;
1951 :
1952 8 : if (!codegen->generate()) {
1953 0 : js_delete(codegen);
1954 0 : return nullptr;
1955 : }
1956 :
1957 8 : return codegen;
1958 : }
1959 :
1960 : CodeGenerator*
1961 8 : CompileBackEnd(MIRGenerator* mir)
1962 : {
1963 : // Everything in CompileBackEnd can potentially run on a helper thread.
1964 16 : AutoEnterIonCompilation enter(mir->safeForMinorGC());
1965 16 : AutoSpewEndFunction spewEndFunction(mir);
1966 :
1967 8 : if (!OptimizeMIR(mir))
1968 0 : return nullptr;
1969 :
1970 8 : LIRGraph* lir = GenerateLIR(mir);
1971 8 : if (!lir)
1972 0 : return nullptr;
1973 :
1974 8 : return GenerateCode(mir, lir);
1975 : }
1976 :
1977 : // Find a builder which the current thread can finish.
1978 : static IonBuilder*
1979 16 : GetFinishedBuilder(ZoneGroup* group, GlobalHelperThreadState::IonBuilderVector& finished)
1980 : {
1981 16 : for (size_t i = 0; i < finished.length(); i++) {
1982 8 : IonBuilder* testBuilder = finished[i];
1983 16 : if (testBuilder->script()->runtimeFromAnyThread() == group->runtime &&
1984 8 : testBuilder->script()->zone()->group() == group) {
1985 8 : HelperThreadState().remove(finished, &i);
1986 8 : group->numFinishedBuilders--;
1987 8 : return testBuilder;
1988 : }
1989 : }
1990 :
1991 8 : return nullptr;
1992 : }
1993 :
1994 : void
1995 2637 : AttachFinishedCompilations(ZoneGroup* group, JSContext* maybecx)
1996 : {
1997 2637 : MOZ_ASSERT_IF(maybecx, maybecx->zone()->group() == group);
1998 :
1999 2637 : if (!group->numFinishedBuilders)
2000 2629 : return;
2001 :
2002 16 : AutoLockHelperThreadState lock;
2003 8 : GlobalHelperThreadState::IonBuilderVector& finished = HelperThreadState().ionFinishedList(lock);
2004 :
2005 : // Incorporate any off thread compilations for the runtime which have
2006 : // finished, failed or have been cancelled.
2007 : while (true) {
2008 : // Find a finished builder for the zone group.
2009 16 : IonBuilder* builder = GetFinishedBuilder(group, finished);
2010 16 : if (!builder)
2011 8 : break;
2012 :
2013 8 : JSScript* script = builder->script();
2014 8 : MOZ_ASSERT(script->hasBaselineScript());
2015 8 : script->baselineScript()->setPendingIonBuilder(group->runtime, script, builder);
2016 8 : group->ionLazyLinkListAdd(builder);
2017 :
2018 : // Don't keep more than 100 lazy link builders in a zone group.
2019 : // Link the oldest ones immediately. Only do this if we have a valid
2020 : // context to use (otherwise this method might have been called in the
2021 : // middle of a compartment change on the current thread's context).
2022 8 : if (maybecx) {
2023 7 : while (group->ionLazyLinkListSize() > 100) {
2024 0 : jit::IonBuilder* builder = group->ionLazyLinkList().getLast();
2025 0 : RootedScript script(maybecx, builder->script());
2026 :
2027 0 : AutoUnlockHelperThreadState unlock(lock);
2028 0 : AutoCompartment ac(maybecx, script);
2029 0 : jit::LinkIonScript(maybecx, script);
2030 : }
2031 : }
2032 8 : }
2033 :
2034 8 : MOZ_ASSERT(!group->numFinishedBuilders);
2035 : }
2036 :
2037 : static void
2038 0 : TrackAllProperties(JSContext* cx, JSObject* obj)
2039 : {
2040 0 : MOZ_ASSERT(obj->isSingleton());
2041 :
2042 0 : for (Shape::Range<NoGC> range(obj->as<NativeObject>().lastProperty()); !range.empty(); range.popFront())
2043 0 : EnsureTrackPropertyTypes(cx, obj, range.front().propid());
2044 0 : }
2045 :
2046 : static void
2047 10 : TrackPropertiesForSingletonScopes(JSContext* cx, JSScript* script, BaselineFrame* baselineFrame)
2048 : {
2049 : // Ensure that all properties of singleton call objects which the script
2050 : // could access are tracked. These are generally accessed through
2051 : // ALIASEDVAR operations in baseline and will not be tracked even if they
2052 : // have been accessed in baseline code.
2053 10 : JSObject* environment = script->functionNonDelazifying()
2054 10 : ? script->functionNonDelazifying()->environment()
2055 10 : : nullptr;
2056 :
2057 16 : while (environment && !environment->is<GlobalObject>()) {
2058 3 : if (environment->is<CallObject>() && environment->isSingleton())
2059 0 : TrackAllProperties(cx, environment);
2060 3 : environment = environment->enclosingEnvironment();
2061 : }
2062 :
2063 10 : if (baselineFrame) {
2064 10 : JSObject* scope = baselineFrame->environmentChain();
2065 10 : if (scope->is<CallObject>() && scope->isSingleton())
2066 0 : TrackAllProperties(cx, scope);
2067 : }
2068 10 : }
2069 :
2070 : static void
2071 20 : TrackIonAbort(JSContext* cx, JSScript* script, jsbytecode* pc, const char* message)
2072 : {
2073 20 : if (!cx->runtime()->jitRuntime()->isOptimizationTrackingEnabled(cx->zone()->group()))
2074 20 : return;
2075 :
2076 : // Only bother tracking aborts of functions we're attempting to
2077 : // Ion-compile after successfully running in Baseline.
2078 0 : if (!script->hasBaselineScript())
2079 0 : return;
2080 :
2081 0 : JitcodeGlobalTable* table = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
2082 0 : void* ptr = script->baselineScript()->method()->raw();
2083 0 : JitcodeGlobalEntry& entry = table->lookupInfallible(ptr);
2084 0 : entry.baselineEntry().trackIonAbort(pc, message);
2085 : }
2086 :
// Emit a JitSpew_IonAbort message and record the abort in the
// optimization-tracking tables, attributed to the script's first bytecode.
static void
TrackAndSpewIonAbort(JSContext* cx, JSScript* script, const char* message)
{
    JitSpew(JitSpew_IonAbort, "%s", message);
    TrackIonAbort(cx, script, script->code(), message);
}
2093 :
// Build the MIR graph for |script| on the current thread, then either hand
// the builder to a helper thread (preferred) or run the back end and link
// synchronously. |baselineFrame|/|osrPc| describe an OSR entry when non-null;
// |recompile| must match script->hasIonScript(). Returns NoAbort on success
// or when an off-thread compile was successfully started; other AbortReasons
// describe the failure. All LifoAlloc-backed structures below are freed via
// |autoDelete| unless ownership is transferred to the helper thread.
static AbortReason
IonCompile(JSContext* cx, JSScript* script,
           BaselineFrame* baselineFrame, jsbytecode* osrPc,
           bool recompile, OptimizationLevel optimizationLevel)
{
    TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);
    TraceLoggerEvent event(TraceLogger_AnnotateScripts, script);
    AutoTraceLog logScript(logger, event);
    AutoTraceLog logCompile(logger, TraceLogger_IonCompilation);

    // Make sure the script's canonical function isn't lazy. We can't de-lazify
    // it in a helper thread.
    script->ensureNonLazyCanonicalFunction();

    TrackPropertiesForSingletonScopes(cx, script, baselineFrame);

    LifoAlloc* alloc = cx->new_<LifoAlloc>(TempAllocator::PreferredLifoChunkSize);
    if (!alloc)
        return AbortReason::Alloc;

    // Owns |alloc| (and everything allocated in it) until forget() below.
    ScopedJSDeletePtr<LifoAlloc> autoDelete(alloc);

    TempAllocator* temp = alloc->new_<TempAllocator>(alloc);
    if (!temp)
        return AbortReason::Alloc;

    JitContext jctx(cx, temp);

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return AbortReason::Alloc;

    if (!cx->compartment()->jitCompartment()->ensureIonStubsExist(cx))
        return AbortReason::Alloc;

    MIRGraph* graph = alloc->new_<MIRGraph>(temp);
    if (!graph)
        return AbortReason::Alloc;

    InlineScriptTree* inlineScriptTree = InlineScriptTree::New(temp, nullptr, nullptr, script);
    if (!inlineScriptTree)
        return AbortReason::Alloc;

    CompileInfo* info = alloc->new_<CompileInfo>(script, script->functionNonDelazifying(), osrPc,
                                                 Analysis_None,
                                                 script->needsArgsObj(), inlineScriptTree);
    if (!info)
        return AbortReason::Alloc;

    BaselineInspector* inspector = alloc->new_<BaselineInspector>(script);
    if (!inspector)
        return AbortReason::Alloc;

    BaselineFrameInspector* baselineFrameInspector = nullptr;
    if (baselineFrame) {
        baselineFrameInspector = NewBaselineFrameInspector(temp, baselineFrame, info);
        if (!baselineFrameInspector)
            return AbortReason::Alloc;
    }

    CompilerConstraintList* constraints = NewCompilerConstraintList(*temp);
    if (!constraints)
        return AbortReason::Alloc;

    const OptimizationInfo* optimizationInfo = IonOptimizations.get(optimizationLevel);
    const JitCompileOptions options(cx);

    // The builder gets a null JSContext: it may finish on a helper thread
    // and must not use this thread's context.
    IonBuilder* builder = alloc->new_<IonBuilder>((JSContext*) nullptr,
                                                  CompileCompartment::get(cx->compartment()),
                                                  options, temp, graph, constraints,
                                                  inspector, info, optimizationInfo,
                                                  baselineFrameInspector);
    if (!builder)
        return AbortReason::Alloc;

    if (cx->zone()->group()->storeBuffer().cancelIonCompilations())
        builder->setNotSafeForMinorGC();

    MOZ_ASSERT(recompile == builder->script()->hasIonScript());
    MOZ_ASSERT(builder->script()->canIonCompile());

    RootedScript builderScript(cx, builder->script());

    if (recompile)
        builderScript->ionScript()->setRecompiling();

    SpewBeginFunction(builder, builderScript);

    // MIR construction happens on this thread; the back end may not.
    AbortReasonOr<Ok> buildResult = Ok();
    {
        AutoEnterAnalysis enter(cx);
        buildResult = builder->build();
        builder->clearForBackEnd();
    }

    if (buildResult.isErr()) {
        AbortReason reason = buildResult.unwrapErr();
        builder->graphSpewer().endFunction();
        if (reason == AbortReason::PreliminaryObjects) {
            // Some group was accessed which has associated preliminary objects
            // to analyze. Do this now and we will try to build again shortly.
            const MIRGenerator::ObjectGroupVector& groups = builder->abortedPreliminaryGroups();
            for (size_t i = 0; i < groups.length(); i++) {
                ObjectGroup* group = groups[i];
                if (group->newScript()) {
                    if (!group->newScript()->maybeAnalyze(cx, group, nullptr, /* force = */ true))
                        return AbortReason::Alloc;
                } else if (group->maybePreliminaryObjects()) {
                    group->maybePreliminaryObjects()->maybeAnalyze(cx, group, /* force = */ true);
                } else {
                    MOZ_CRASH("Unexpected aborted preliminary group");
                }
            }
        }

        if (builder->hadActionableAbort()) {
            JSScript* abortScript;
            jsbytecode* abortPc;
            const char* abortMessage;
            builder->actionableAbortLocationAndMessage(&abortScript, &abortPc, &abortMessage);
            TrackIonAbort(cx, abortScript, abortPc, abortMessage);
        }

        if (cx->isThrowingOverRecursed()) {
            // Non-analysis compilations should never fail with stack overflow.
            MOZ_CRASH("Stack overflow during compilation");
        }

        return reason;
    }

    AssertBasicGraphCoherency(builder->graph());

    // If possible, compile the script off thread.
    if (options.offThreadCompilationAvailable()) {
        JitSpew(JitSpew_IonSyncLogs, "Can't log script %s:%" PRIuSIZE
                ". (Compiled on background thread.)",
                builderScript->filename(), builderScript->lineno());

        if (!CreateMIRRootList(*builder))
            return AbortReason::Alloc;

        if (!StartOffThreadIonCompile(cx, builder)) {
            JitSpew(JitSpew_IonAbort, "Unable to start off-thread ion compilation.");
            builder->graphSpewer().endFunction();
            return AbortReason::Alloc;
        }

        if (!recompile)
            builderScript->setIonScript(cx->runtime(), ION_COMPILING_SCRIPT);

        // The allocator and associated data will be destroyed after being
        // processed in the finishedOffThreadCompilations list.
        autoDelete.forget();

        return AbortReason::NoAbort;
    }

    // Synchronous fallback: run the back end and link on this thread.
    bool succeeded = false;
    {
        ScopedJSDeletePtr<CodeGenerator> codegen;
        AutoEnterAnalysis enter(cx);
        codegen = CompileBackEnd(builder);
        if (!codegen) {
            JitSpew(JitSpew_IonAbort, "Failed during back-end compilation.");
            if (cx->isExceptionPending())
                return AbortReason::Error;
            return AbortReason::Disable;
        }

        succeeded = LinkCodeGen(cx, builder, codegen);
    }

    if (succeeded)
        return AbortReason::NoAbort;
    if (cx->isExceptionPending())
        return AbortReason::Error;
    return AbortReason::Disable;
}
2272 :
2273 : static bool
2274 12 : CheckFrame(JSContext* cx, BaselineFrame* frame)
2275 : {
2276 12 : MOZ_ASSERT(!frame->script()->isStarGenerator());
2277 12 : MOZ_ASSERT(!frame->script()->isLegacyGenerator());
2278 12 : MOZ_ASSERT(!frame->script()->isAsync());
2279 12 : MOZ_ASSERT(!frame->isDebuggerEvalFrame());
2280 12 : MOZ_ASSERT(!frame->isEvalFrame());
2281 :
2282 : // This check is to not overrun the stack.
2283 12 : if (frame->isFunctionFrame()) {
2284 12 : if (TooManyActualArguments(frame->numActualArgs())) {
2285 0 : TrackAndSpewIonAbort(cx, frame->script(), "too many actual arguments");
2286 0 : return false;
2287 : }
2288 :
2289 12 : if (TooManyFormalArguments(frame->numFormalArgs())) {
2290 0 : TrackAndSpewIonAbort(cx, frame->script(), "too many arguments");
2291 0 : return false;
2292 : }
2293 : }
2294 :
2295 12 : return true;
2296 : }
2297 :
2298 : static bool
2299 8267 : CheckScript(JSContext* cx, JSScript* script, bool osr)
2300 : {
2301 8267 : if (script->isForEval()) {
2302 : // Eval frames are not yet supported. Supporting this will require new
2303 : // logic in pushBailoutFrame to deal with linking prev.
2304 : // Additionally, JSOP_DEFVAR support will require baking in isEvalFrame().
2305 0 : TrackAndSpewIonAbort(cx, script, "eval script");
2306 0 : return false;
2307 : }
2308 :
2309 8267 : if (script->isStarGenerator() || script->isLegacyGenerator()) {
2310 10 : TrackAndSpewIonAbort(cx, script, "generator script");
2311 10 : return false;
2312 : }
2313 8257 : if (script->isAsync()) {
2314 7 : TrackAndSpewIonAbort(cx, script, "async script");
2315 7 : return false;
2316 : }
2317 :
2318 8250 : if (script->hasNonSyntacticScope() && !script->functionNonDelazifying()) {
2319 : // Support functions with a non-syntactic global scope but not other
2320 : // scripts. For global scripts, IonBuilder currently uses the global
2321 : // object as scope chain, this is not valid when the script has a
2322 : // non-syntactic global scope.
2323 0 : TrackAndSpewIonAbort(cx, script, "has non-syntactic global scope");
2324 0 : return false;
2325 : }
2326 :
2327 8325 : if (script->functionHasExtraBodyVarScope() &&
2328 75 : script->functionExtraBodyVarScope()->hasEnvironment())
2329 : {
2330 : // This restriction will be lifted when intra-function scope chains
2331 : // are compilable by Ion. See bug 1273858.
2332 3 : TrackAndSpewIonAbort(cx, script, "has extra var environment");
2333 3 : return false;
2334 : }
2335 :
2336 8247 : if (script->nTypeSets() >= UINT16_MAX) {
2337 : // In this case multiple bytecode ops can share a single observed
2338 : // TypeSet (see bug 1303710).
2339 0 : TrackAndSpewIonAbort(cx, script, "too many typesets");
2340 0 : return false;
2341 : }
2342 :
2343 8247 : return true;
2344 : }
2345 :
2346 : static MethodStatus
2347 8247 : CheckScriptSize(JSContext* cx, JSScript* script)
2348 : {
2349 8247 : if (!JitOptions.limitScriptSize)
2350 0 : return Method_Compiled;
2351 :
2352 8247 : uint32_t numLocalsAndArgs = NumLocalsAndArgs(script);
2353 :
2354 8247 : if (script->length() > MAX_ACTIVE_THREAD_SCRIPT_SIZE ||
2355 : numLocalsAndArgs > MAX_ACTIVE_THREAD_LOCALS_AND_ARGS)
2356 : {
2357 29 : if (!OffThreadCompilationAvailable(cx)) {
2358 0 : JitSpew(JitSpew_IonAbort, "Script too large (%" PRIuSIZE " bytes) (%u locals/args)",
2359 0 : script->length(), numLocalsAndArgs);
2360 0 : TrackIonAbort(cx, script, script->code(), "too large");
2361 0 : return Method_CantCompile;
2362 : }
2363 : }
2364 :
2365 8247 : return Method_Compiled;
2366 : }
2367 :
2368 : bool
2369 1256 : CanIonCompileScript(JSContext* cx, JSScript* script, bool osr)
2370 : {
2371 1256 : if (!script->canIonCompile() || !CheckScript(cx, script, osr))
2372 16 : return false;
2373 :
2374 1240 : return CheckScriptSize(cx, script) == Method_Compiled;
2375 : }
2376 :
// Choose the Ion optimization level for |script| (with |pc| as the OSR entry
// point, if any). Callers treat OptimizationLevel::DontCompile as "skip".
static OptimizationLevel
GetOptimizationLevel(HandleScript script, jsbytecode* pc)
{
    // Fully delegated to the global optimization-level table.
    return IonOptimizations.levelForScript(script, pc);
}
2382 :
// Central (re)compilation driver. Decides whether |script| should be Ion
// compiled now (optionally for OSR at |osrPc|, optionally forcing a
// recompile), performs the eligibility checks, and invokes IonCompile.
// Returns Method_Compiled when usable Ion code exists or is already being
// produced at a sufficient optimization level, Method_Skipped when
// compilation is deferred, Method_CantCompile / Method_Error on failure.
static MethodStatus
Compile(JSContext* cx, HandleScript script, BaselineFrame* osrFrame, jsbytecode* osrPc,
        bool forceRecompile = false)
{
    MOZ_ASSERT(jit::IsIonEnabled(cx));
    MOZ_ASSERT(jit::IsBaselineEnabled(cx));
    MOZ_ASSERT_IF(osrPc != nullptr, LoopEntryCanIonOsr(osrPc));

    // Ion builds on top of baseline: no baseline script, no Ion compile.
    if (!script->hasBaselineScript())
        return Method_Skipped;

    if (script->isDebuggee() || (osrFrame && osrFrame->isDebuggee())) {
        TrackAndSpewIonAbort(cx, script, "debugging");
        return Method_Skipped;
    }

    if (!CheckScript(cx, script, bool(osrPc))) {
        JitSpew(JitSpew_IonAbort, "Aborted compilation of %s:%" PRIuSIZE, script->filename(), script->lineno());
        return Method_CantCompile;
    }

    MethodStatus status = CheckScriptSize(cx, script);
    if (status != Method_Compiled) {
        JitSpew(JitSpew_IonAbort, "Aborted compilation of %s:%" PRIuSIZE, script->filename(), script->lineno());
        return status;
    }

    bool recompile = false;
    OptimizationLevel optimizationLevel = GetOptimizationLevel(script, osrPc);
    if (optimizationLevel == OptimizationLevel::DontCompile)
        return Method_Skipped;

    // Don't compile when executable memory is nearly exhausted; reset the
    // warm-up counter so we don't immediately retry.
    if (!CanLikelyAllocateMoreExecutableMemory()) {
        script->resetWarmUpCounter();
        return Method_Skipped;
    }

    if (script->hasIonScript()) {
        IonScript* scriptIon = script->ionScript();
        if (!scriptIon->method())
            return Method_CantCompile;

        // Don't recompile/overwrite higher optimized code,
        // with a lower optimization level.
        if (optimizationLevel <= scriptIon->optimizationLevel() && !forceRecompile)
            return Method_Compiled;

        // Don't start compiling if already compiling
        if (scriptIon->isRecompiling())
            return Method_Compiled;

        if (osrPc)
            scriptIon->resetOsrPcMismatchCounter();

        recompile = true;
    }

    // An off-thread build may already be pending for this script; only
    // supersede it for a higher optimization level or a forced recompile.
    if (script->baselineScript()->hasPendingIonBuilder()) {
        IonBuilder* buildIon = script->baselineScript()->pendingIonBuilder();
        if (optimizationLevel <= buildIon->optimizationInfo().level() && !forceRecompile)
            return Method_Compiled;

        recompile = true;
    }

    AbortReason reason = IonCompile(cx, script, osrFrame, osrPc, recompile, optimizationLevel);
    if (reason == AbortReason::Error)
        return Method_Error;

    if (reason == AbortReason::Disable)
        return Method_CantCompile;

    if (reason == AbortReason::Alloc) {
        ReportOutOfMemory(cx);
        return Method_Error;
    }

    // Compilation succeeded or we invalidated right away or an inlining/alloc abort
    if (script->hasIonScript())
        return Method_Compiled;
    return Method_Skipped;
}
2465 :
2466 : } // namespace jit
2467 : } // namespace js
2468 :
2469 : bool
2470 175 : jit::OffThreadCompilationAvailable(JSContext* cx)
2471 : {
2472 : // Even if off thread compilation is enabled, compilation must still occur
2473 : // on the active thread in some cases.
2474 : //
2475 : // Require cpuCount > 1 so that Ion compilation jobs and active-thread
2476 : // execution are not competing for the same resources.
2477 175 : return cx->runtime()->canUseOffthreadIonCompilation()
2478 175 : && HelperThreadState().cpuCount > 1
2479 350 : && CanUseExtraThreads();
2480 : }
2481 :
// Entry-point check used by the interpreter: decide whether |state|'s script
// can be entered via Ion, compiling it first if necessary. Returns
// Method_Compiled when Ion code is ready to run, Method_Skipped when
// execution should continue elsewhere, Method_CantCompile/Method_Error on
// failure. Note: this can GC (|this| creation, compilation).
MethodStatus
jit::CanEnter(JSContext* cx, RunState& state)
{
    MOZ_ASSERT(jit::IsIonEnabled(cx));

    JSScript* script = state.script();

    // Skip if the script has been disabled.
    if (!script->canIonCompile())
        return Method_Skipped;

    // Skip if the script is being compiled off thread.
    if (script->isIonCompilingOffThread())
        return Method_Skipped;

    // Skip if the code is expected to result in a bailout.
    if (script->hasIonScript() && script->ionScript()->bailoutExpected())
        return Method_Skipped;

    // Root the script across the possibly-GCing operations below.
    RootedScript rscript(cx, script);

    // If constructing, allocate a new |this| object before building Ion.
    // Creating |this| is done before building Ion because it may change the
    // type information and invalidate compilation results.
    if (state.isInvoke()) {
        InvokeState& invoke = *state.asInvoke();

        if (TooManyActualArguments(invoke.args().length())) {
            TrackAndSpewIonAbort(cx, script, "too many actual args");
            ForbidCompilation(cx, script);
            return Method_CantCompile;
        }

        if (TooManyFormalArguments(invoke.args().callee().as<JSFunction>().nargs())) {
            TrackAndSpewIonAbort(cx, script, "too many args");
            ForbidCompilation(cx, script);
            return Method_CantCompile;
        }

        if (!state.maybeCreateThisForConstructor(cx)) {
            // OOM while creating |this| is recoverable: back off to the
            // interpreter instead of reporting an error.
            if (cx->isThrowingOutOfMemory()) {
                cx->recoverFromOutOfMemory();
                return Method_Skipped;
            }
            return Method_Error;
        }
    }

    // If --ion-eager is used, compile with Baseline first, so that we
    // can directly enter IonMonkey.
    if (JitOptions.eagerCompilation && !rscript->hasBaselineScript()) {
        MethodStatus status = CanEnterBaselineMethod(cx, state);
        if (status != Method_Compiled)
            return status;
    }

    // Skip if the script is being compiled off thread or can't be
    // Ion-compiled (again). MaybeCreateThisForConstructor could have
    // started an Ion compilation or marked the script as uncompilable.
    if (rscript->isIonCompilingOffThread() || !rscript->canIonCompile())
        return Method_Skipped;

    // Attempt compilation. Returns Method_Compiled if already compiled.
    MethodStatus status = Compile(cx, rscript, nullptr, nullptr);
    if (status != Method_Compiled) {
        if (status == Method_CantCompile)
            ForbidCompilation(cx, rscript);
        return status;
    }

    // A finished off-thread build may still need linking before entry.
    if (state.script()->baselineScript()->hasPendingIonBuilder()) {
        LinkIonScript(cx, state.script());
        if (!state.script()->hasIonScript())
            return jit::Method_Skipped;
    }

    return Method_Compiled;
}
2560 :
2561 : static MethodStatus
2562 7 : BaselineCanEnterAtEntry(JSContext* cx, HandleScript script, BaselineFrame* frame)
2563 : {
2564 7 : MOZ_ASSERT(jit::IsIonEnabled(cx));
2565 7 : MOZ_ASSERT(frame->callee()->nonLazyScript()->canIonCompile());
2566 7 : MOZ_ASSERT(!frame->callee()->nonLazyScript()->isIonCompilingOffThread());
2567 7 : MOZ_ASSERT(!frame->callee()->nonLazyScript()->hasIonScript());
2568 7 : MOZ_ASSERT(frame->isFunctionFrame());
2569 :
2570 : // Mark as forbidden if frame can't be handled.
2571 7 : if (!CheckFrame(cx, frame)) {
2572 0 : ForbidCompilation(cx, script);
2573 0 : return Method_CantCompile;
2574 : }
2575 :
2576 : // Attempt compilation. Returns Method_Compiled if already compiled.
2577 7 : MethodStatus status = Compile(cx, script, frame, nullptr);
2578 7 : if (status != Method_Compiled) {
2579 6 : if (status == Method_CantCompile)
2580 0 : ForbidCompilation(cx, script);
2581 6 : return status;
2582 : }
2583 :
2584 1 : return Method_Compiled;
2585 : }
2586 :
// Decide if a transition from baseline execution to Ion code should occur.
// May compile or recompile the target JSScript.
static MethodStatus
BaselineCanEnterAtBranch(JSContext* cx, HandleScript script, BaselineFrame* osrFrame, jsbytecode* pc)
{
    MOZ_ASSERT(jit::IsIonEnabled(cx));
    MOZ_ASSERT((JSOp)*pc == JSOP_LOOPENTRY);
    MOZ_ASSERT(LoopEntryCanIonOsr(pc));

    // Skip if the script has been disabled.
    if (!script->canIonCompile())
        return Method_Skipped;

    // Skip if the script is being compiled off thread.
    if (script->isIonCompilingOffThread())
        return Method_Skipped;

    // Skip if the code is expected to result in a bailout.
    if (script->hasIonScript() && script->ionScript()->bailoutExpected())
        return Method_Skipped;

    // Optionally ignore on user request.
    if (!JitOptions.osr)
        return Method_Skipped;

    // Mark as forbidden if frame can't be handled.
    if (!CheckFrame(cx, osrFrame)) {
        ForbidCompilation(cx, script);
        return Method_CantCompile;
    }

    // Check if the jitcode still needs to get linked and do this
    // to have a valid IonScript.
    if (script->baselineScript()->hasPendingIonBuilder())
        LinkIonScript(cx, script);

    // By default a recompilation doesn't happen on osr mismatch.
    // Decide if we want to force a recompilation if this happens too much.
    bool force = false;
    if (script->hasIonScript() && pc != script->ionScript()->osrPc()) {
        uint32_t count = script->ionScript()->incrOsrPcMismatchCounter();
        if (count <= JitOptions.osrPcMismatchesBeforeRecompile)
            return Method_Skipped;
        // Too many mismatches: recompile with this pc as the OSR entry.
        force = true;
    }

    // Attempt compilation.
    // - Returns Method_Compiled if the right ionscript is present
    //   (Meaning it was present or a sequantial compile finished)
    // - Returns Method_Skipped if pc doesn't match
    //   (This means a background thread compilation with that pc could have started or not.)
    RootedScript rscript(cx, script);
    MethodStatus status = Compile(cx, rscript, osrFrame, pc, force);
    if (status != Method_Compiled) {
        if (status == Method_CantCompile)
            ForbidCompilation(cx, script);
        return status;
    }

    // Return the compilation was skipped when the osr pc wasn't adjusted.
    // This can happen when there was still an IonScript available and a
    // background compilation started, but hasn't finished yet.
    // Or when we didn't force a recompile.
    if (script->hasIonScript() && pc != script->ionScript()->osrPc())
        return Method_Skipped;

    return Method_Compiled;
}
2655 :
// Warm-up-counter hook called from baseline code: try to get Ion code for
// |frame|'s script, either for OSR at a loop entry (|pc|) or for a later
// function entry. Returns false only on Method_Error; all other outcomes
// (including "can't compile") return true so baseline execution continues.
bool
jit::IonCompileScriptForBaseline(JSContext* cx, BaselineFrame* frame, jsbytecode* pc)
{
    // A TI OOM will disable TI and Ion.
    if (!jit::IsIonEnabled(cx))
        return true;

    RootedScript script(cx, frame->script());
    bool isLoopEntry = JSOp(*pc) == JSOP_LOOPENTRY;

    MOZ_ASSERT(!isLoopEntry || LoopEntryCanIonOsr(pc));

    if (!script->canIonCompile()) {
        // TODO: ASSERT that ion-compilation-disabled checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add a ion-compilation-disabled checker IC stub
        script->resetWarmUpCounter();
        return true;
    }

    MOZ_ASSERT(!script->isIonCompilingOffThread());

    // If Ion script exists, but PC is not at a loop entry, then Ion will be entered for
    // this script at an appropriate LOOPENTRY or the next time this function is called.
    if (script->hasIonScript() && !isLoopEntry) {
        JitSpew(JitSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
        // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add a ion-script-already-exists checker stub.
        return true;
    }

    // Ensure that Ion-compiled code is available.
    JitSpew(JitSpew_BaselineOSR,
            "WarmUpCounter for %s:%" PRIuSIZE " reached %d at pc %p, trying to switch to Ion!",
            script->filename(), script->lineno(), (int) script->getWarmUpCount(), (void*) pc);

    // Dispatch to the OSR or function-entry compile path.
    MethodStatus stat;
    if (isLoopEntry) {
        MOZ_ASSERT(LoopEntryCanIonOsr(pc));
        JitSpew(JitSpew_BaselineOSR, "  Compile at loop entry!");
        stat = BaselineCanEnterAtBranch(cx, script, frame, pc);
    } else if (frame->isFunctionFrame()) {
        JitSpew(JitSpew_BaselineOSR, "  Compile function from top for later entry!");
        stat = BaselineCanEnterAtEntry(cx, script, frame);
    } else {
        // Non-function, non-loop-entry frames are left to the interpreter.
        return true;
    }

    if (stat == Method_Error) {
        JitSpew(JitSpew_BaselineOSR, "  Compile with Ion errored!");
        return false;
    }

    if (stat == Method_CantCompile)
        JitSpew(JitSpew_BaselineOSR, "  Can't compile with Ion!");
    else if (stat == Method_Skipped)
        JitSpew(JitSpew_BaselineOSR, "  Skipped compile with Ion!");
    else if (stat == Method_Compiled)
        JitSpew(JitSpew_BaselineOSR, "  Compiled with Ion!");
    else
        MOZ_CRASH("Invalid MethodStatus!");

    // Failed to compile. Reset warm-up counter and return.
    if (stat != Method_Compiled) {
        // TODO: If stat == Method_CantCompile, insert stub that just skips the
        // warm-up counter entirely, instead of resetting it.
        bool bailoutExpected = script->hasIonScript() && script->ionScript()->bailoutExpected();
        if (stat == Method_CantCompile || bailoutExpected) {
            JitSpew(JitSpew_BaselineOSR, "  Reset WarmUpCounter cantCompile=%s bailoutExpected=%s!",
                    stat == Method_CantCompile ? "yes" : "no",
                    bailoutExpected ? "yes" : "no");
            script->resetWarmUpCounter();
        }
        return true;
    }

    return true;
}
2735 :
2736 :
2737 : MethodStatus
2738 0 : jit::Recompile(JSContext* cx, HandleScript script, BaselineFrame* osrFrame, jsbytecode* osrPc,
2739 : bool force)
2740 : {
2741 0 : MOZ_ASSERT(script->hasIonScript());
2742 0 : if (script->ionScript()->isRecompiling())
2743 0 : return Method_Compiled;
2744 :
2745 0 : MethodStatus status = Compile(cx, script, osrFrame, osrPc, force);
2746 0 : if (status != Method_Compiled) {
2747 0 : if (status == Method_CantCompile)
2748 0 : ForbidCompilation(cx, script);
2749 0 : return status;
2750 : }
2751 :
2752 0 : return Method_Compiled;
2753 : }
2754 :
2755 : MethodStatus
2756 0 : jit::CanEnterUsingFastInvoke(JSContext* cx, HandleScript script, uint32_t numActualArgs)
2757 : {
2758 0 : MOZ_ASSERT(jit::IsIonEnabled(cx));
2759 :
2760 : // Skip if the code is expected to result in a bailout.
2761 0 : if (!script->hasIonScript() || script->ionScript()->bailoutExpected())
2762 0 : return Method_Skipped;
2763 :
2764 : // Don't handle arguments underflow, to make this work we would have to pad
2765 : // missing arguments with |undefined|.
2766 0 : if (numActualArgs < script->functionNonDelazifying()->nargs())
2767 0 : return Method_Skipped;
2768 :
2769 0 : if (!cx->compartment()->ensureJitCompartmentExists(cx))
2770 0 : return Method_Error;
2771 :
2772 : // This can GC, so afterward, script->ion is not guaranteed to be valid.
2773 0 : if (!cx->runtime()->jitRuntime()->enterIon())
2774 0 : return Method_Error;
2775 :
2776 0 : if (!script->hasIonScript())
2777 0 : return Method_Skipped;
2778 :
2779 0 : return Method_Compiled;
2780 : }
2781 :
// Enter a script's Ion-compiled code from the C++ interpreter (non-OSR
// entry; data.osrFrame must be null). Returns JitExec_Ok on success,
// JitExec_Aborted on recursion-limit failure, JitExec_Error if the jitcode
// signalled an error via the JS_ION_ERROR magic return value.
static JitExecStatus
EnterIon(JSContext* cx, EnterJitData& data)
{
    if (!CheckRecursionLimit(cx))
        return JitExec_Aborted;

    MOZ_ASSERT(jit::IsIonEnabled(cx));
    MOZ_ASSERT(!data.osrFrame);

#ifdef DEBUG
    // See comment in EnterBaseline.
    mozilla::Maybe<JS::AutoAssertNoGC> nogc;
    nogc.emplace(cx);
#endif

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterIon();

    // Caller must construct |this| before invoking the Ion function.
    MOZ_ASSERT_IF(data.constructing,
                  data.maxArgv[0].isObject() || data.maxArgv[0].isMagic(JS_UNINITIALIZED_LEXICAL));

    // Seed the result with the actual-arg count; the trampoline reads it.
    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        ActivationEntryMonitor entryMonitor(cx, data.calleeToken);
        JitActivation activation(cx);

#ifdef DEBUG
        // GC is legal again once we are inside the activation.
        nogc.reset();
#endif
        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, /* osrFrame = */nullptr, data.calleeToken,
                            /* envChain = */ nullptr, 0, data.result.address());
    }

    MOZ_ASSERT(!cx->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return, except for derived class constructors.
    if (!data.result.isMagic() && data.constructing &&
        data.result.isPrimitive())
    {
        MOZ_ASSERT(data.maxArgv[0].isObject());
        data.result = data.maxArgv[0];
    }

    // Release temporary buffer used for OSR into Ion.
    cx->freeOsrTempData();

    // A magic result always means JS_ION_ERROR here.
    MOZ_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? JitExec_Error : JitExec_Ok;
}
2832 :
// Populate |data| (argc/argv, callee token, env chain) for entering jitcode
// from |state|. When the actual arguments underflow the callee's formals,
// a padded copy of the argument vector is built in |vals| and |data.maxArgv|
// points into it; |vals| must therefore outlive the jitcode call.
// Returns false only on OOM while reserving |vals|.
bool
jit::SetEnterJitData(JSContext* cx, EnterJitData& data, RunState& state,
                     MutableHandle<GCVector<Value>> vals)
{
    data.osrFrame = nullptr;

    if (state.isInvoke()) {
        const CallArgs& args = state.asInvoke()->args();
        unsigned numFormals = state.script()->functionNonDelazifying()->nargs();
        data.constructing = state.asInvoke()->constructing();
        data.numActualArgs = args.length();
        // +1 slot for |this|.
        data.maxArgc = Max(args.length(), numFormals) + 1;
        data.envChain = nullptr;
        data.calleeToken = CalleeToToken(&args.callee().as<JSFunction>(), data.constructing);

        if (data.numActualArgs >= numFormals) {
            // Enough actuals: point directly past the callee slot, at |this|.
            data.maxArgv = args.base() + 1;
        } else {
            MOZ_ASSERT(vals.empty());
            unsigned numPushedArgs = Max(args.length(), numFormals);
            if (!vals.reserve(numPushedArgs + 1 + data.constructing))
                return false;

            // Append |this| and any provided arguments.
            // (args.base()[0] is the callee slot, so copying starts at index 1;
            // args.length() + 1 values are copied: |this| plus the actuals.)
            for (size_t i = 1; i < args.length() + 2; ++i)
                vals.infallibleAppend(args.base()[i]);

            // Pad missing arguments with |undefined|.
            while (vals.length() < numFormals + 1)
                vals.infallibleAppend(UndefinedValue());

            // Constructing calls additionally pass new.target after the args.
            if (data.constructing)
                vals.infallibleAppend(args.newTarget());

            MOZ_ASSERT(vals.length() >= numFormals + 1 + data.constructing);
            data.maxArgv = vals.begin();
        }
    } else {
        // Execute (global/eval) frame: no callee arguments.
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 0;
        data.maxArgv = nullptr;
        data.envChain = state.asExecute()->environmentChain();

        data.calleeToken = CalleeToToken(state.script());

        if (state.script()->isForEval() && state.script()->isDirectEvalInFunction()) {
            // Push newTarget onto the stack.
            if (!vals.reserve(1))
                return false;

            data.maxArgc = 1;
            data.maxArgv = vals.begin();
            if (state.asExecute()->newTarget().isNull()) {
                // No explicit new.target: inherit it from the calling frame.
                ScriptFrameIter iter(cx);
                vals.infallibleAppend(iter.newTarget());
            } else {
                vals.infallibleAppend(state.asExecute()->newTarget());
            }
        }
    }

    return true;
}
2897 :
2898 : JitExecStatus
2899 35 : jit::IonCannon(JSContext* cx, RunState& state)
2900 : {
2901 35 : IonScript* ion = state.script()->ionScript();
2902 :
2903 70 : EnterJitData data(cx);
2904 35 : data.jitcode = ion->method()->raw();
2905 :
2906 70 : Rooted<GCVector<Value>> vals(cx, GCVector<Value>(cx));
2907 35 : if (!SetEnterJitData(cx, data, state, &vals))
2908 0 : return JitExec_Error;
2909 :
2910 35 : JitExecStatus status = EnterIon(cx, data);
2911 :
2912 35 : if (status == JitExec_Ok)
2913 35 : state.setReturnValue(data.result);
2914 :
2915 35 : return status;
2916 : }
2917 :
// Invoke |fun|'s Ion code directly from C++ with the given CallArgs,
// bypassing the interpreter entry path. Callers must first check
// CanEnterUsingFastInvoke; in particular args.length() >= fun->nargs()
// is required (no underflow padding here) and this path never constructs.
JitExecStatus
jit::FastInvoke(JSContext* cx, HandleFunction fun, CallArgs& args)
{
    if (!CheckRecursionLimit(cx))
        return JitExec_Error;

    RootedScript script(cx, fun->nonLazyScript());

    if (!Debugger::checkNoExecute(cx, script))
        return JitExec_Error;

#ifdef DEBUG
    // See comment in EnterBaseline.
    mozilla::Maybe<JS::AutoAssertNoGC> nogc;
    nogc.emplace(cx);
#endif

    IonScript* ion = script->ionScript();
    JitCode* code = ion->method();
    void* jitcode = code->raw();

    MOZ_ASSERT(jit::IsIonEnabled(cx));
    MOZ_ASSERT(!ion->bailoutExpected());

    ActivationEntryMonitor entryMonitor(cx, CalleeToToken(script));
    JitActivation activation(cx);

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterIon();
    void* calleeToken = CalleeToToken(fun, /* constructing = */ false);

    // Seed the result with argc, matching the EnterIon calling convention.
    RootedValue result(cx, Int32Value(args.length()));
    MOZ_ASSERT(args.length() >= fun->nargs());

#ifdef DEBUG
    nogc.reset();
#endif
    // args.array() - 1 points at the |this| slot; argc + 1 covers |this|.
    CALL_GENERATED_CODE(enter, jitcode, args.length() + 1, args.array() - 1, /* osrFrame = */nullptr,
                        calleeToken, /* envChain = */ nullptr, 0, result.address());

    MOZ_ASSERT(!cx->hasIonReturnOverride());

    args.rval().set(result);

    // A magic result always means JS_ION_ERROR here.
    MOZ_ASSERT_IF(result.isMagic(), result.isMagic(JS_ION_ERROR));
    return result.isMagic() ? JitExec_Error : JitExec_Ok;
}
2964 :
// Walk every frame of one JIT activation and invalidate the Ion frames in
// it: bump each affected IonScript's invalidation refcount, unlink it from
// the runtime, and patch the frame's OSI point so that returning into the
// frame lands in the invalidation epilogue. If |invalidateAll| is false,
// only frames whose IonScript is already marked invalidated are patched.
static void
InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool invalidateAll)
{
    JitSpew(JitSpew_IonInvalidate, "BEGIN invalidating activation");

#ifdef CHECK_OSIPOINT_REGISTERS
    // Patched frames will not restore the register state the checker
    // expects, so disable register checking for this activation.
    if (JitOptions.checkOsiPointRegisters)
        activations->asJit()->setCheckRegs(false);
#endif

    size_t frameno = 1;

    for (JitFrameIterator it(activations); !it.done(); ++it, ++frameno) {
        MOZ_ASSERT_IF(frameno == 1, it.isExitFrame() || it.type() == JitFrame_Bailout);

#ifdef JS_JITSPEW
        switch (it.type()) {
          case JitFrame_Exit:
            JitSpew(JitSpew_IonInvalidate, "#%" PRIuSIZE " exit frame @ %p", frameno, it.fp());
            break;
          case JitFrame_BaselineJS:
          case JitFrame_IonJS:
          case JitFrame_Bailout:
          {
            MOZ_ASSERT(it.isScripted());
            const char* type = "Unknown";
            if (it.isIonJS())
                type = "Optimized";
            else if (it.isBaselineJS())
                type = "Baseline";
            else if (it.isBailoutJS())
                type = "Bailing";
            JitSpew(JitSpew_IonInvalidate,
                    "#%" PRIuSIZE " %s JS frame @ %p, %s:%" PRIuSIZE " (fun: %p, script: %p, pc %p)",
                    frameno, type, it.fp(), it.script()->maybeForwardedFilename(),
                    it.script()->lineno(), it.maybeCallee(), (JSScript*)it.script(),
                    it.returnAddressToFp());
            break;
          }
          case JitFrame_BaselineStub:
            JitSpew(JitSpew_IonInvalidate, "#%" PRIuSIZE " baseline stub frame @ %p", frameno, it.fp());
            break;
          case JitFrame_Rectifier:
            JitSpew(JitSpew_IonInvalidate, "#%" PRIuSIZE " rectifier frame @ %p", frameno, it.fp());
            break;
          case JitFrame_IonICCall:
            JitSpew(JitSpew_IonInvalidate, "#%" PRIuSIZE " ion IC call frame @ %p", frameno, it.fp());
            break;
          case JitFrame_Entry:
            JitSpew(JitSpew_IonInvalidate, "#%" PRIuSIZE " entry frame @ %p", frameno, it.fp());
            break;
        }
#endif // JS_JITSPEW

        // Only Ion (or bailing-Ion) frames need invalidation work.
        if (!it.isIonScripted())
            continue;

        // Detect frames whose "return address" is inside the lazy link stub:
        // such frames were never truly entered and have no OSI point.
        bool calledFromLinkStub = false;
        JitCode* lazyLinkStub = fop->runtime()->jitRuntime()->lazyLinkStub();
        if (it.returnAddressToFp() >= lazyLinkStub->raw() &&
            it.returnAddressToFp() < lazyLinkStub->rawEnd())
        {
            calledFromLinkStub = true;
        }

        // See if the frame has already been invalidated.
        if (!calledFromLinkStub && it.checkInvalidation())
            continue;

        JSScript* script = it.script();
        if (!script->hasIonScript())
            continue;

        if (!invalidateAll && !script->ionScript()->invalidated())
            continue;

        IonScript* ionScript = script->ionScript();

        // Purge ICs before we mark this script as invalidated. This will
        // prevent lastJump_ from appearing to be a bogus pointer, just
        // in case anyone tries to read it.
        ionScript->purgeICs(script->zone());
        ionScript->purgeOptimizedStubs(script->zone());

        // Clean up any pointers from elsewhere in the runtime to this IonScript
        // which is about to become disconnected from its JSScript.
        ionScript->unlinkFromRuntime(fop);

        // This frame needs to be invalidated. We do the following:
        //
        // 1. Increment the reference counter to keep the ionScript alive
        //    for the invalidation bailout or for the exception handler.
        // 2. Determine safepoint that corresponds to the current call.
        // 3. From safepoint, get distance to the OSI-patchable offset.
        // 4. From the IonScript, determine the distance between the
        //    call-patchable offset and the invalidation epilogue.
        // 5. Patch the OSI point with a call-relative to the
        //    invalidation epilogue.
        //
        // The code generator ensures that there's enough space for us
        // to patch in a call-relative operation at each invalidation
        // point.
        //
        // Note: you can't simplify this mechanism to "just patch the
        // instruction immediately after the call" because things may
        // need to move into a well-defined register state (using move
        // instructions after the call) in to capture an appropriate
        // snapshot after the call occurs.

        ionScript->incrementInvalidationCount();

        JitCode* ionCode = ionScript->method();

        JS::Zone* zone = script->zone();
        if (zone->needsIncrementalBarrier()) {
            // We're about to remove edges from the JSScript to gcthings
            // embedded in the JitCode. Perform one final trace of the
            // JitCode for the incremental GC, as it must know about
            // those edges.
            ionCode->traceChildren(zone->barrierTracer());
        }
        ionCode->setInvalidated();

        // Don't adjust OSI points in the linkStub (which don't exist), or in a
        // bailout path.
        if (calledFromLinkStub || it.isBailoutJS())
            continue;

        // Write the delta (from the return address offset to the
        // IonScript pointer embedded into the invalidation epilogue)
        // where the safepointed call instruction used to be. We rely on
        // the call sequence causing the safepoint being >= the size of
        // a uint32, which is checked during safepoint index
        // construction.
        AutoWritableJitCode awjc(ionCode);
        const SafepointIndex* si = ionScript->getSafepointIndex(it.returnAddressToFp());
        CodeLocationLabel dataLabelToMunge(it.returnAddressToFp());
        ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -
                          (it.returnAddressToFp() - ionCode->raw());
        Assembler::PatchWrite_Imm32(dataLabelToMunge, Imm32(delta));

        CodeLocationLabel osiPatchPoint = SafepointReader::InvalidationPatchPoint(ionScript, si);
        CodeLocationLabel invalidateEpilogue(ionCode, CodeOffset(ionScript->invalidateEpilogueOffset()));

        JitSpew(JitSpew_IonInvalidate, "   ! Invalidate ionScript %p (inv count %" PRIuSIZE ") -> patching osipoint %p",
                ionScript, ionScript->invalidationCount(), (void*) osiPatchPoint.raw());
        Assembler::PatchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
    }

    JitSpew(JitSpew_IonInvalidate, "END invalidating activation");
}
3116 :
3117 : void
3118 4 : jit::InvalidateAll(FreeOp* fop, Zone* zone)
3119 : {
3120 : // The caller should previously have cancelled off thread compilation.
3121 : #ifdef DEBUG
3122 214 : for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
3123 210 : MOZ_ASSERT(!HasOffThreadIonCompile(comp));
3124 : #endif
3125 4 : if (zone->isAtomsZone())
3126 1 : return;
3127 3 : JSContext* cx = TlsContext.get();
3128 3 : for (JitActivationIterator iter(cx, zone->group()->ownerContext()); !iter.done(); ++iter) {
3129 0 : if (iter->compartment()->zone() == zone) {
3130 0 : JitSpew(JitSpew_IonInvalidate, "Invalidating all frames for GC");
3131 0 : InvalidateActivation(fop, iter, true);
3132 : }
3133 : }
3134 : }
3135 :
3136 :
// Core invalidation driver: mark every IonScript named in |invalid| as
// invalidated, patch any of its frames currently on the stack, then drop
// the temporary invalidation references so unused IonScripts are destroyed
// immediately. |resetUses| additionally resets warm-up counters so scripts
// must warm up again before recompiling; |cancelOffThread| aborts pending
// off-thread compiles of the same scripts.
void
jit::Invalidate(TypeZone& types, FreeOp* fop,
                const RecompileInfoVector& invalid, bool resetUses,
                bool cancelOffThread)
{
    JitSpew(JitSpew_IonInvalidate, "Start invalidation.");

    // Add an invalidation reference to all invalidated IonScripts to indicate
    // to the traversal which frames have been invalidated.
    size_t numInvalidations = 0;
    for (size_t i = 0; i < invalid.length(); i++) {
        const CompilerOutput* co = invalid[i].compilerOutput(types);
        if (!co)
            continue;
        MOZ_ASSERT(co->isValid());

        if (cancelOffThread)
            CancelOffThreadIonCompile(co->script());

        if (!co->ion())
            continue;

        JitSpew(JitSpew_IonInvalidate, " Invalidate %s:%" PRIuSIZE ", IonScript %p",
                co->script()->filename(), co->script()->lineno(), co->ion());

        // Keep the ion script alive during the invalidation and flag this
        // ionScript as being invalidated.  This increment is removed by the
        // loop after the calls to InvalidateActivation.
        co->ion()->incrementInvalidationCount();
        numInvalidations++;
    }

    if (!numInvalidations) {
        // Nothing in |invalid| still had live Ion code.
        JitSpew(JitSpew_IonInvalidate, " No IonScript invalidation.");
        return;
    }

    // This method can be called both during GC and during the course of normal
    // script execution. In the former case this class will already be on the
    // stack, and in the latter case the invalidations will all be on the
    // current thread's stack, but the assertion under ActivationIterator can't
    // tell that this is a thread local use of the iterator.
    JSRuntime::AutoProhibitActiveContextChange apacc(fop->runtime());

    JSContext* cx = TlsContext.get();
    for (JitActivationIterator iter(cx, types.zone()->group()->ownerContext()); !iter.done(); ++iter)
        InvalidateActivation(fop, iter, false);

    // Drop the references added above. If a script was never active, its
    // IonScript will be immediately destroyed. Otherwise, it will be held live
    // until its last invalidated frame is destroyed.
    for (size_t i = 0; i < invalid.length(); i++) {
        CompilerOutput* co = invalid[i].compilerOutput(types);
        if (!co)
            continue;
        MOZ_ASSERT(co->isValid());

        JSScript* script = co->script();
        IonScript* ionScript = co->ion();
        if (!ionScript)
            continue;

        // Detach the IonScript from its JSScript before releasing the
        // invalidation reference taken above.
        script->setIonScript(nullptr, nullptr);
        ionScript->decrementInvalidationCount(fop);
        co->invalidate();
        numInvalidations--;

        // Wait for the scripts to get warm again before doing another
        // compile, unless we are recompiling *because* a script got hot
        // (resetUses is false).
        if (resetUses)
            script->resetWarmUpCounter();
    }

    // Make sure we didn't leak references by invalidating the same IonScript
    // multiple times in the above loop.
    MOZ_ASSERT(!numInvalidations);
}
3215 :
3216 : void
3217 0 : jit::Invalidate(JSContext* cx, const RecompileInfoVector& invalid, bool resetUses,
3218 : bool cancelOffThread)
3219 : {
3220 0 : jit::Invalidate(cx->zone()->types, cx->runtime()->defaultFreeOp(), invalid, resetUses,
3221 0 : cancelOffThread);
3222 0 : }
3223 :
3224 : void
3225 0 : jit::IonScript::invalidate(JSContext* cx, bool resetUses, const char* reason)
3226 : {
3227 0 : JitSpew(JitSpew_IonInvalidate, " Invalidate IonScript %p: %s", this, reason);
3228 :
3229 : // RecompileInfoVector has inline space for at least one element.
3230 0 : RecompileInfoVector list;
3231 0 : MOZ_RELEASE_ASSERT(list.reserve(1));
3232 0 : list.infallibleAppend(recompileInfo());
3233 :
3234 0 : Invalidate(cx, list, resetUses, true);
3235 0 : }
3236 :
3237 : void
3238 0 : jit::Invalidate(JSContext* cx, JSScript* script, bool resetUses, bool cancelOffThread)
3239 : {
3240 0 : MOZ_ASSERT(script->hasIonScript());
3241 :
3242 0 : if (cx->runtime()->geckoProfiler().enabled()) {
3243 : // Register invalidation with profiler.
3244 : // Format of event payload string:
3245 : // "<filename>:<lineno>"
3246 :
3247 : // Get the script filename, if any, and its length.
3248 0 : const char* filename = script->filename();
3249 0 : if (filename == nullptr)
3250 0 : filename = "<unknown>";
3251 :
3252 : // Construct the descriptive string.
3253 0 : UniqueChars buf = JS_smprintf("Invalidate %s:%" PRIuSIZE, filename, script->lineno());
3254 :
3255 : // Ignore the event on allocation failure.
3256 0 : if (buf) {
3257 0 : cx->runtime()->geckoProfiler().markEvent(buf.get());
3258 : }
3259 : }
3260 :
3261 : // RecompileInfoVector has inline space for at least one element.
3262 0 : RecompileInfoVector scripts;
3263 0 : MOZ_ASSERT(script->hasIonScript());
3264 0 : MOZ_RELEASE_ASSERT(scripts.reserve(1));
3265 0 : scripts.infallibleAppend(script->ionScript()->recompileInfo());
3266 :
3267 0 : Invalidate(cx, scripts, resetUses, cancelOffThread);
3268 0 : }
3269 :
3270 : static void
3271 5 : FinishInvalidationOf(FreeOp* fop, JSScript* script, IonScript* ionScript)
3272 : {
3273 5 : TypeZone& types = script->zone()->types;
3274 :
3275 : // Note: If the script is about to be swept, the compiler output may have
3276 : // already been destroyed.
3277 5 : if (CompilerOutput* output = ionScript->recompileInfo().compilerOutput(types))
3278 5 : output->invalidate();
3279 :
3280 : // If this script has Ion code on the stack, invalidated() will return
3281 : // true. In this case we have to wait until destroying it.
3282 5 : if (!ionScript->invalidated())
3283 5 : jit::IonScript::Destroy(fop, ionScript);
3284 5 : }
3285 :
3286 : void
3287 20666 : jit::FinishInvalidation(FreeOp* fop, JSScript* script)
3288 : {
3289 : // In all cases, nullptr out script->ion to avoid re-entry.
3290 20666 : if (script->hasIonScript()) {
3291 5 : IonScript* ion = script->ionScript();
3292 5 : script->setIonScript(nullptr, nullptr);
3293 5 : FinishInvalidationOf(fop, script, ion);
3294 : }
3295 20666 : }
3296 :
3297 : void
3298 4 : jit::ForbidCompilation(JSContext* cx, JSScript* script)
3299 : {
3300 4 : JitSpew(JitSpew_IonAbort, "Disabling Ion compilation of script %s:%" PRIuSIZE,
3301 4 : script->filename(), script->lineno());
3302 :
3303 4 : CancelOffThreadIonCompile(script);
3304 :
3305 4 : if (script->hasIonScript())
3306 0 : Invalidate(cx, script, false);
3307 :
3308 4 : script->setIonScript(cx->runtime(), ION_DISABLED_SCRIPT);
3309 4 : }
3310 :
// Return the innermost AutoFlushICache context for this thread, or null.
AutoFlushICache*
JSContext::autoFlushICache() const
{
    return autoFlushICache_;
}
3316 :
// Install |afc| as the innermost AutoFlushICache context (may be null);
// used by AutoFlushICache's constructor/destructor to maintain the stack.
void
JSContext::setAutoFlushICache(AutoFlushICache* afc)
{
    autoFlushICache_ = afc;
}
3322 :
3323 : // Set the range for the merging of flushes. The flushing is deferred until the end of
// the AutoFlushICache context. Subsequent flushing within this range is also
// deferred. This is only expected to be defined once for each AutoFlushICache
// context. Any range that will be flushed is required to be within an
// AutoFlushICache context.
// Record [start, start+len) as the range whose cache flush is deferred to
// the end of the current AutoFlushICache context. Only meaningful on
// architectures that need explicit icache flushing; a no-op elsewhere.
// May be called at most once per context (asserted via !afc->start_).
void
AutoFlushICache::setRange(uintptr_t start, size_t len)
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    AutoFlushICache* afc = TlsContext.get()->autoFlushICache();
    MOZ_ASSERT(afc);
    MOZ_ASSERT(!afc->start_);
    JitSpewCont(JitSpew_CacheFlush, "(%" PRIxPTR " %" PRIxSIZE "):", start, len);

    uintptr_t stop = start + len;
    afc->start_ = start;
    afc->stop_ = stop;
#endif
}
3342 :
3343 : // Flush the instruction cache.
3344 : //
3345 : // If called within a dynamic AutoFlushICache context and if the range is already pending
3346 : // flushing for this AutoFlushICache context then the request is ignored with the
3347 : // understanding that it will be flushed on exit from the AutoFlushICache context.
3348 : // Otherwise the range is flushed immediately.
3349 : //
3350 : // Updates outside the current code object are typically the exception so they are flushed
3351 : // immediately rather than attempting to merge them.
3352 : //
3353 : // For efficiency it is expected that all large ranges will be flushed within an
3354 : // AutoFlushICache, so check. If this assertion is hit then it does not necessarily
3355 : // indicate a program fault but it might indicate a lost opportunity to merge cache
// flushing. It can be corrected by wrapping the call in an AutoFlushICache context.
3357 : //
3358 : // Note this can be called without TLS JSContext defined so this case needs
3359 : // to be guarded against. E.g. when patching instructions from the exception
3360 : // handler on MacOS running the ARM simulator.
// Flush the instruction cache for [start, start+len), unless the range is
// wholly inside the current AutoFlushICache context's pending range, in
// which case the flush is deferred to the context's destructor. Safe to
// call with no TLS JSContext (flushes immediately then). No-op on
// architectures without explicit icache management.
void
AutoFlushICache::flush(uintptr_t start, size_t len)
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    JSContext* cx = TlsContext.get();
    AutoFlushICache* afc = cx ? cx->autoFlushICache() : nullptr;
    if (!afc) {
        // No context: flush immediately. Large flushes are expected to be
        // merged inside a context, hence the size assertion.
        JitSpewCont(JitSpew_CacheFlush, "#");
        ExecutableAllocator::cacheFlush((void*)start, len);
        MOZ_ASSERT(len <= 32);
        return;
    }

    uintptr_t stop = start + len;
    if (start >= afc->start_ && stop <= afc->stop_) {
        // Update is within the pending flush range, so defer to the end of the context.
        JitSpewCont(JitSpew_CacheFlush, afc->inhibit_ ? "-" : "=");
        return;
    }

    // Outside the pending range: flush right away.
    JitSpewCont(JitSpew_CacheFlush, afc->inhibit_ ? "x" : "*");
    ExecutableAllocator::cacheFlush((void*)start, len);
#endif
}
3385 :
3386 : // Flag the current dynamic AutoFlushICache as inhibiting flushing. Useful in error paths
3387 : // where the changes are being abandoned.
// Mark the current AutoFlushICache context so that its deferred flush is
// skipped on exit — used on error paths where the patched code is being
// abandoned. Requires an active context with a range already set.
void
AutoFlushICache::setInhibit()
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    AutoFlushICache* afc = TlsContext.get()->autoFlushICache();
    MOZ_ASSERT(afc);
    MOZ_ASSERT(afc->start_);
    JitSpewCont(JitSpew_CacheFlush, "I");
    afc->inhibit_ = true;
#endif
}
3399 :
3400 : // The common use case is merging cache flushes when preparing a code object. In this
3401 : // case the entire range of the code object is being flushed and as the code is patched
3402 : // smaller redundant flushes could occur. The design allows an AutoFlushICache dynamic
3403 : // thread local context to be declared in which the range of the code object can be set
3404 : // which defers flushing until the end of this dynamic context. The redundant flushing
3405 : // within this code range is also deferred avoiding redundant flushing. Flushing outside
3406 : // this code range is not affected and proceeds immediately.
3407 : //
// In some cases flushing is not necessary, such as when compiling a wasm module which
3409 : // is flushed again when dynamically linked, and also in error paths that abandon the
3410 : // code. Flushing within the set code range can be inhibited within the AutoFlushICache
3411 : // dynamic context by setting an inhibit flag.
3412 : //
3413 : // The JS compiler can be re-entered while within an AutoFlushICache dynamic context and
3414 : // it is assumed that code being assembled or patched is not executed before the exit of
3415 : // the respective AutoFlushICache dynamic context.
3416 : //
// Open a new flush-merging context named |nonce| and push it onto the
// thread-local context stack; |inhibit| starts the context in the
// "abandon flushes" state. On architectures without explicit icache
// management the constructor does nothing.
AutoFlushICache::AutoFlushICache(const char* nonce, bool inhibit)
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
  : start_(0),
    stop_(0),
    name_(nonce),
    inhibit_(inhibit)
#endif
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    JSContext* cx = TlsContext.get();
    AutoFlushICache* afc = cx->autoFlushICache();
    if (afc)
        JitSpew(JitSpew_CacheFlush, "<%s,%s%s ", nonce, afc->name_, inhibit ? " I" : "");
    else
        JitSpewCont(JitSpew_CacheFlush, "<%s%s ", nonce, inhibit ? " I" : "");

    // Link this context over the previous one; the destructor restores it.
    prev_ = afc;
    cx->setAutoFlushICache(this);
#endif
}
3437 :
// Close the context: perform the deferred flush of the recorded range
// (unless inhibited or no range was ever set) and pop this context off the
// thread-local stack, restoring the enclosing one.
AutoFlushICache::~AutoFlushICache()
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    JSContext* cx = TlsContext.get();
    MOZ_ASSERT(cx->autoFlushICache() == this);

    if (!inhibit_ && start_)
        ExecutableAllocator::cacheFlush((void*)start_, size_t(stop_ - start_));

    // " U" marks a context that was opened but never given a range.
    JitSpewCont(JitSpew_CacheFlush, "%s%s>", name_, start_ ? "" : " U");
    JitSpewFin(JitSpew_CacheFlush);
    cx->setAutoFlushICache(prev_);
#endif
}
3452 :
3453 : size_t
3454 0 : jit::SizeOfIonData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf)
3455 : {
3456 0 : size_t result = 0;
3457 :
3458 0 : if (script->hasIonScript())
3459 0 : result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
3460 :
3461 0 : return result;
3462 : }
3463 :
3464 : void
3465 0 : jit::DestroyJitScripts(FreeOp* fop, JSScript* script)
3466 : {
3467 0 : if (script->hasIonScript())
3468 0 : jit::IonScript::Destroy(fop, script->ionScript());
3469 :
3470 0 : if (script->hasBaselineScript())
3471 0 : jit::BaselineScript::Destroy(fop, script->baselineScript());
3472 0 : }
3473 :
3474 : void
3475 299 : jit::TraceJitScripts(JSTracer* trc, JSScript* script)
3476 : {
3477 299 : if (script->hasIonScript())
3478 0 : jit::IonScript::Trace(trc, script->ionScript());
3479 :
3480 299 : if (script->hasBaselineScript())
3481 1 : jit::BaselineScript::Trace(trc, script->baselineScript());
3482 299 : }
3483 :
// Whether the target CPU supports the floating-point operations the JIT
// emits; forwarded from the MacroAssembler capability query.
bool
jit::JitSupportsFloatingPoint()
{
    return js::jit::MacroAssembler::SupportsFloatingPoint();
}
3489 :
// Whether the target CPU tolerates the unaligned memory accesses the JIT
// may emit; forwarded from the MacroAssembler capability query.
bool
jit::JitSupportsUnalignedAccesses()
{
    return js::jit::MacroAssembler::SupportsUnalignedAccesses();
}
3495 :
// Whether the target CPU supports the SIMD operations the JIT emits;
// forwarded from the MacroAssembler capability query.
bool
jit::JitSupportsSimd()
{
    return js::jit::MacroAssembler::SupportsSimd();
}
3501 :
// Whether Ion may inline atomic operations on this CPU. Always true except
// on ARM, where the required load/store-exclusive instruction variants must
// be present (see the bug references below).
bool
jit::JitSupportsAtomics()
{
#if defined(JS_CODEGEN_ARM)
    // Bug 1146902, bug 1077318: Enable Ion inlining of Atomics
    // operations on ARM only when the CPU has byte, halfword, and
    // doubleword load-exclusive and store-exclusive instructions,
    // until we can add support for systems that don't have those.
    return js::jit::HasLDSTREXBHD();
#else
    return true;
#endif
}
3515 :
// If you change these, please also change the comment in TempAllocator.
/* static */ const size_t TempAllocator::BallastSize            = 16 * 1024;  // 16 KiB
/* static */ const size_t TempAllocator::PreferredLifoChunkSize = 32 * 1024;  // 32 KiB
|