Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/BaselineJIT.h"
8 :
9 : #include "mozilla/BinarySearch.h"
10 : #include "mozilla/DebugOnly.h"
11 : #include "mozilla/MemoryReporting.h"
12 :
13 : #include "jit/BaselineCompiler.h"
14 : #include "jit/BaselineIC.h"
15 : #include "jit/CompileInfo.h"
16 : #include "jit/IonControlFlow.h"
17 : #include "jit/JitCommon.h"
18 : #include "jit/JitSpewer.h"
19 : #include "vm/Debugger.h"
20 : #include "vm/Interpreter.h"
21 : #include "vm/TraceLogging.h"
22 : #include "wasm/WasmInstance.h"
23 :
24 : #include "jsobjinlines.h"
25 : #include "jsopcodeinlines.h"
26 : #include "jsscriptinlines.h"
27 :
28 : #include "jit/JitFrames-inl.h"
29 : #include "jit/MacroAssembler-inl.h"
30 : #include "vm/Stack-inl.h"
31 :
32 : using mozilla::BinarySearchIf;
33 : using mozilla::DebugOnly;
34 :
35 : using namespace js;
36 : using namespace js::jit;
37 :
38 : /* static */ PCMappingSlotInfo::SlotLocation
39 51873 : PCMappingSlotInfo::ToSlotLocation(const StackValue* stackVal)
40 : {
41 51873 : if (stackVal->kind() == StackValue::Register) {
42 30878 : if (stackVal->reg() == R0)
43 26617 : return SlotInR0;
44 4261 : MOZ_ASSERT(stackVal->reg() == R1);
45 4261 : return SlotInR1;
46 : }
47 20995 : MOZ_ASSERT(stackVal->kind() != StackValue::Stack);
48 20995 : return SlotIgnore;
49 : }
50 :
// Release all stub memory held by this space, deferring the actual free
// until after the next minor GC (the nursery's store buffer may still
// reference blocks in this LifoAlloc). The atoms zone never allocates
// stubs here, so its allocator must already be empty.
void
ICStubSpace::freeAllAfterMinorGC(Zone* zone)
{
    if (zone->isAtomsZone())
        MOZ_ASSERT(allocator_.isEmpty());
    else
        zone->runtimeFromActiveCooperatingThread()->gc.freeAllLifoBlocksAfterMinorGC(&allocator_);
}
59 :
// Construct an empty BaselineScript header. The jitcode (method_), IC
// entries and the other trailing tables are attached later (see
// BaselineScript::New and the copy* methods); the offset parameters record
// positions of interest within the generated code.
BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                               uint32_t profilerEnterToggleOffset,
                               uint32_t profilerExitToggleOffset,
                               uint32_t postDebugPrologueOffset)
  : method_(nullptr),
    templateEnv_(nullptr),
    fallbackStubSpace_(),
    dependentWasmImports_(nullptr),
    prologueOffset_(prologueOffset),
    epilogueOffset_(epilogueOffset),
    profilerEnterToggleOffset_(profilerEnterToggleOffset),
    profilerExitToggleOffset_(profilerExitToggleOffset),
#ifdef JS_TRACE_LOGGING
# ifdef DEBUG
    traceLoggerScriptsEnabled_(false),
    traceLoggerEngineEnabled_(false),
# endif
    traceLoggerScriptEvent_(),
#endif
    postDebugPrologueOffset_(postDebugPrologueOffset),
    flags_(0),
    inlinedBytecodeLength_(0),
    // maxInliningDepth_ starts at the maximum; Ion lowers it as needed.
    maxInliningDepth_(UINT8_MAX),
    pendingBuilder_(nullptr),
    controlFlowGraph_(nullptr)
{ }
86 :
// Maximum actual-argument count Baseline will accept; calls with more
// arguments stay in the interpreter to avoid running out of stack space.
static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;
88 :
89 : static bool
90 2293 : CheckFrame(InterpreterFrame* fp)
91 : {
92 2293 : if (fp->isDebuggerEvalFrame()) {
93 : // Debugger eval-in-frame. These are likely short-running scripts so
94 : // don't bother compiling them for now.
95 0 : JitSpew(JitSpew_BaselineAbort, "debugger frame");
96 0 : return false;
97 : }
98 :
99 2293 : if (fp->isFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) {
100 : // Fall back to the interpreter to avoid running out of stack space.
101 0 : JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs());
102 0 : return false;
103 : }
104 :
105 2293 : return true;
106 : }
107 :
// Transfer control from the C++/interpreter world into a script's Baseline
// jitcode. |data| carries the jitcode entry point, callee token, arguments
// and (for OSR) the interpreter frame to resume; on return, data.result
// holds the call's result (or a magic value on error).
static JitExecStatus
EnterBaseline(JSContext* cx, EnterJitData& data)
{
    if (data.osrFrame) {
        // Check for potential stack overflow before OSR-ing.
        uint8_t spDummy;
        uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value));
        uint8_t* checkSp = (&spDummy) - extra;
        if (!CheckRecursionLimitWithStackPointer(cx, checkSp))
            return JitExec_Aborted;
    } else {
        if (!CheckRecursionLimit(cx))
            return JitExec_Aborted;
    }

#ifdef DEBUG
    // Assert we don't GC before entering JIT code. A GC could discard JIT code
    // or move the function stored in the CalleeToken (it won't be traced at
    // this point). We use Maybe<> here so we can call reset() to call the
    // AutoAssertNoGC destructor before we enter JIT code.
    mozilla::Maybe<JS::AutoAssertNoGC> nogc;
    nogc.emplace(cx);
#endif

    MOZ_ASSERT(jit::IsBaselineEnabled(cx));
    MOZ_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame));

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline();

    bool constructingLegacyGen =
        data.constructing && CalleeTokenToFunction(data.calleeToken)->isLegacyGenerator();

    // Caller must construct |this| before invoking the Ion function. Legacy
    // generators can be called with 'new' but when we resume them, the
    // this-slot and arguments are |undefined| (they are stored in the
    // CallObject).
    MOZ_ASSERT_IF(data.constructing && !constructingLegacyGen,
                  data.maxArgv[0].isObject() || data.maxArgv[0].isMagic(JS_UNINITIALIZED_LEXICAL));

    // The result slot is seeded with the actual argument count before the
    // call — presumably consumed by the enter trampoline; it is overwritten
    // with the real return value by the generated code. TODO(review): confirm.
    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        ActivationEntryMonitor entryMonitor(cx, data.calleeToken);
        JitActivation activation(cx);

        if (data.osrFrame)
            data.osrFrame->setRunningInJit();

#ifdef DEBUG
        nogc.reset();
#endif
        // Single transition point from Interpreter to Baseline.
        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame,
                            data.calleeToken, data.envChain.get(), data.osrNumStackValues,
                            data.result.address());

        if (data.osrFrame)
            data.osrFrame->clearRunningInJit();
    }

    MOZ_ASSERT(!cx->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return, except for derived
    // class constructors, which are forced to do it themselves.
    if (!data.result.isMagic() &&
        data.constructing &&
        data.result.isPrimitive() &&
        !constructingLegacyGen)
    {
        MOZ_ASSERT(data.maxArgv[0].isObject());
        data.result = data.maxArgv[0];
    }

    // Release temporary buffer used for OSR into Ion.
    cx->freeOsrTempData();

    // A magic result signals an error; the only expected magic is JS_ION_ERROR.
    MOZ_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? JitExec_Error : JitExec_Ok;
}
187 :
// Enter a script's Baseline code at its normal (non-OSR) entry point,
// propagating the result into |state| on success.
JitExecStatus
jit::EnterBaselineMethod(JSContext* cx, RunState& state)
{
    BaselineScript* baseline = state.script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->method()->raw();

    // |vals| roots any argument values SetEnterJitData materializes.
    Rooted<GCVector<Value>> vals(cx, GCVector<Value>(cx));
    if (!SetEnterJitData(cx, data, state, &vals))
        return JitExec_Error;

    JitExecStatus status = EnterBaseline(cx, data);
    if (status != JitExec_Ok)
        return status;

    state.setReturnValue(data.result);
    return JitExec_Ok;
}
207 :
// On-stack replacement: enter a script's Baseline code at a loop entry,
// resuming the given interpreter frame |fp| at bytecode |pc|.
JitExecStatus
jit::EnterBaselineAtBranch(JSContext* cx, InterpreterFrame* fp, jsbytecode* pc)
{
    MOZ_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);

    BaselineScript* baseline = fp->script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->nativeCodeForPC(fp->script(), pc);

    // Skip debug breakpoint/trap handler, the interpreter already handled it
    // for the current op.
    if (fp->isDebuggee()) {
        MOZ_RELEASE_ASSERT(baseline->hasDebugInstrumentation());
        data.jitcode += MacroAssembler::ToggledCallSize(data.jitcode);
    }

    data.osrFrame = fp;
    // The OSR frame must hold the fixed slots plus the current expression
    // stack depth.
    data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth();

    AutoValueVector vals(cx);
    RootedValue thisv(cx);

    if (fp->isFunctionFrame()) {
        data.constructing = fp->isConstructing();
        data.numActualArgs = fp->numActualArgs();
        data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this|
        data.maxArgv = fp->argv() - 1; // -1 = include |this|
        data.envChain = nullptr;
        data.calleeToken = CalleeToToken(&fp->callee(), data.constructing);
    } else {
        // Global or eval frame: synthesize an undefined |this|.
        thisv.setUndefined();
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 1;
        data.maxArgv = thisv.address();
        data.envChain = fp->environmentChain();

        data.calleeToken = CalleeToToken(fp->script());

        if (fp->isEvalFrame()) {
            // Eval frames additionally pass new.target (or null) after |this|.
            if (!vals.reserve(2))
                return JitExec_Aborted;

            vals.infallibleAppend(thisv);

            if (fp->script()->isDirectEvalInFunction())
                vals.infallibleAppend(fp->newTarget());
            else
                vals.infallibleAppend(NullValue());

            data.maxArgc = 2;
            data.maxArgv = vals.begin();
        }
    }

    // Switch trace-logger bookkeeping from interpreter to baseline.
    TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);
    TraceLogStopEvent(logger, TraceLogger_Interpreter);
    TraceLogStartEvent(logger, TraceLogger_Baseline);

    JitExecStatus status = EnterBaseline(cx, data);
    if (status != JitExec_Ok)
        return status;

    fp->setReturnValue(data.result);
    return JitExec_Ok;
}
275 :
// Compile |script| with the Baseline compiler. On Method_CantCompile the
// script is permanently flagged so we never retry; allocation failures
// report OOM and return Method_Error.
MethodStatus
jit::BaselineCompile(JSContext* cx, JSScript* script, bool forceDebugInstrumentation)
{
    MOZ_ASSERT(!script->hasBaselineScript());
    MOZ_ASSERT(script->canBaselineCompile());
    MOZ_ASSERT(IsBaselineEnabled(cx));

    script->ensureNonLazyCanonicalFunction();

    // All compiler allocations come from this LifoAlloc, including the
    // TempAllocator itself; everything is freed when |alloc| dies.
    LifoAlloc alloc(TempAllocator::PreferredLifoChunkSize);
    TempAllocator* temp = alloc.new_<TempAllocator>(&alloc);
    if (!temp) {
        ReportOutOfMemory(cx);
        return Method_Error;
    }

    JitContext jctx(cx, temp);

    BaselineCompiler compiler(cx, *temp, script);
    if (!compiler.init()) {
        ReportOutOfMemory(cx);
        return Method_Error;
    }

    if (forceDebugInstrumentation)
        compiler.setCompileDebugInstrumentation();

    MethodStatus status = compiler.compile();

    MOZ_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript());
    MOZ_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript());

    // Disable future Baseline compilation attempts for this script.
    if (status == Method_CantCompile)
        script->setBaselineScript(cx->runtime(), BASELINE_DISABLED_SCRIPT);

    return status;
}
313 :
// Shared gatekeeper for entering Baseline: checks per-script limits and the
// warm-up threshold, then compiles if warranted. Returns Method_Compiled if
// the script already has (or now has) Baseline code.
static MethodStatus
CanEnterBaselineJIT(JSContext* cx, HandleScript script, InterpreterFrame* osrFrame)
{
    MOZ_ASSERT(jit::IsBaselineEnabled(cx));

    // Skip if the script has been disabled.
    if (!script->canBaselineCompile())
        return Method_Skipped;

    if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH)
        return Method_CantCompile;

    if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS)
        return Method_CantCompile;

    if (script->hasBaselineScript())
        return Method_Compiled;

    // Check this before calling ensureJitCompartmentExists, so we're less
    // likely to report OOM in JSRuntime::createJitRuntime.
    if (!CanLikelyAllocateMoreExecutableMemory())
        return Method_Skipped;

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return Method_Error;

    // Check script warm-up counter.
    if (script->incWarmUpCounter() <= JitOptions.baselineWarmUpThreshold)
        return Method_Skipped;

    // Frames can be marked as debuggee frames independently of its underlying
    // script being a debuggee script, e.g., when performing
    // Debugger.Frame.prototype.eval.
    return BaselineCompile(cx, script, osrFrame && osrFrame->isDebuggee());
}
349 :
// Check whether an interpreter frame at a loop head may OSR into Baseline,
// ensuring debug observability is consistent first.
MethodStatus
jit::CanEnterBaselineAtBranch(JSContext* cx, InterpreterFrame* fp, bool newType)
{
    if (!CheckFrame(fp))
        return Method_CantCompile;

    // This check is needed in the following corner case. Consider a function h,
    //
    //   function h(x) {
    //      h(false);
    //      if (!x)
    //        return;
    //      for (var i = 0; i < N; i++)
    //         /* do stuff */
    //   }
    //
    // Suppose h is not yet compiled in baseline and is executing in the
    // interpreter. Let this interpreter frame be f_older. The debugger marks
    // f_older as isDebuggee. At the point of the recursive call h(false), h is
    // compiled in baseline without debug instrumentation, pushing a baseline
    // frame f_newer. The debugger never flags f_newer as isDebuggee, and never
    // recompiles h. When the recursive call returns and execution proceeds to
    // the loop, the interpreter attempts to OSR into baseline. Since h is
    // already compiled in baseline, execution jumps directly into baseline
    // code. This is incorrect as h's baseline script does not have debug
    // instrumentation.
    if (fp->isDebuggee() && !Debugger::ensureExecutionObservabilityOfOsrFrame(cx, fp))
        return Method_Error;

    RootedScript script(cx, fp->script());
    return CanEnterBaselineJIT(cx, script, fp);
}
382 :
383 : MethodStatus
384 19598 : jit::CanEnterBaselineMethod(JSContext* cx, RunState& state)
385 : {
386 19598 : if (state.isInvoke()) {
387 19100 : InvokeState& invoke = *state.asInvoke();
388 :
389 19100 : if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) {
390 0 : JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length());
391 0 : return Method_CantCompile;
392 : }
393 :
394 19100 : if (!state.maybeCreateThisForConstructor(cx)) {
395 0 : if (cx->isThrowingOutOfMemory()) {
396 0 : cx->recoverFromOutOfMemory();
397 0 : return Method_Skipped;
398 : }
399 0 : return Method_Error;
400 : }
401 : } else {
402 498 : if (state.asExecute()->isDebuggerEval()) {
403 0 : JitSpew(JitSpew_BaselineAbort, "debugger frame");
404 0 : return Method_CantCompile;
405 : }
406 : }
407 :
408 39196 : RootedScript script(cx, state.script());
409 19598 : return CanEnterBaselineJIT(cx, script, /* osrFrame = */ nullptr);
410 : };
411 :
// Allocate a BaselineScript plus its variable-length trailing data in one
// malloc. The trailing region holds, in order: IC entries, PC-mapping index
// entries, the PC-mapping byte stream, the bytecode type map, yield/await
// resume entries, and trace-logger toggle offsets — each padded to pointer
// alignment. Offsets into that region are recorded on the new object.
BaselineScript*
BaselineScript::New(JSScript* jsscript,
                    uint32_t prologueOffset, uint32_t epilogueOffset,
                    uint32_t profilerEnterToggleOffset,
                    uint32_t profilerExitToggleOffset,
                    uint32_t postDebugPrologueOffset,
                    size_t icEntries,
                    size_t pcMappingIndexEntries, size_t pcMappingSize,
                    size_t bytecodeTypeMapEntries,
                    size_t yieldEntries,
                    size_t traceLoggerToggleOffsetEntries)
{
    static const unsigned DataAlignment = sizeof(uintptr_t);

    size_t icEntriesSize = icEntries * sizeof(BaselineICEntry);
    size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
    size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);
    size_t yieldEntriesSize = yieldEntries * sizeof(uintptr_t);
    size_t tlEntriesSize = traceLoggerToggleOffsetEntries * sizeof(uint32_t);

    size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
    size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
    size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
    size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);
    size_t paddedYieldEntriesSize = AlignBytes(yieldEntriesSize, DataAlignment);
    size_t paddedTLEntriesSize = AlignBytes(tlEntriesSize, DataAlignment);

    size_t allocBytes = paddedICEntriesSize +
                        paddedPCMappingIndexEntriesSize +
                        paddedPCMappingSize +
                        paddedBytecodeTypesMapSize +
                        paddedYieldEntriesSize +
                        paddedTLEntriesSize;

    BaselineScript* script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
    if (!script)
        return nullptr;
    // Placement-new the header into the raw allocation.
    new (script) BaselineScript(prologueOffset, epilogueOffset,
                                profilerEnterToggleOffset, profilerExitToggleOffset,
                                postDebugPrologueOffset);

    size_t offsetCursor = sizeof(BaselineScript);
    MOZ_ASSERT(offsetCursor == AlignBytes(sizeof(BaselineScript), DataAlignment));

    script->icEntriesOffset_ = offsetCursor;
    script->icEntries_ = icEntries;
    offsetCursor += paddedICEntriesSize;

    script->pcMappingIndexOffset_ = offsetCursor;
    script->pcMappingIndexEntries_ = pcMappingIndexEntries;
    offsetCursor += paddedPCMappingIndexEntriesSize;

    script->pcMappingOffset_ = offsetCursor;
    script->pcMappingSize_ = pcMappingSize;
    offsetCursor += paddedPCMappingSize;

    // Zero offset means "table absent" for the optional tables below.
    script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0;
    offsetCursor += paddedBytecodeTypesMapSize;

    script->yieldEntriesOffset_ = yieldEntries ? offsetCursor : 0;
    offsetCursor += paddedYieldEntriesSize;

    script->traceLoggerToggleOffsetsOffset_ = tlEntriesSize ? offsetCursor : 0;
    script->numTraceLoggerToggleOffsets_ = traceLoggerToggleOffsetEntries;
    offsetCursor += paddedTLEntriesSize;

    MOZ_ASSERT(offsetCursor == sizeof(BaselineScript) + allocBytes);
    return script;
}
481 :
// Trace the GC edges held by this BaselineScript: the jitcode, the template
// environment, and everything reachable from each IC entry.
void
BaselineScript::trace(JSTracer* trc)
{
    TraceEdge(trc, &method_, "baseline-method");
    TraceNullableEdge(trc, &templateEnv_, "baseline-template-environment");

    // Mark all IC stub codes hanging off the IC stub entries.
    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& ent = icEntry(i);
        ent.trace(trc);
    }
}
494 :
495 : /* static */
496 : void
497 546 : BaselineScript::writeBarrierPre(Zone* zone, BaselineScript* script)
498 : {
499 546 : if (zone->needsIncrementalBarrier())
500 0 : script->trace(zone->barrierTracer());
501 546 : }
502 :
// Static trampoline so GC code can trace a BaselineScript without a member
// function pointer.
void
BaselineScript::Trace(JSTracer* trc, BaselineScript* script)
{
    script->trace(trc);
}
508 :
// Tear down a BaselineScript: unlink any wasm import exits that call into
// it, schedule its fallback stub memory for deferred freeing, and delete it.
void
BaselineScript::Destroy(FreeOp* fop, BaselineScript* script)
{

    MOZ_ASSERT(!script->hasPendingIonBuilder());

    script->unlinkDependentWasmImports(fop);

    /*
     * When the script contains pointers to nursery things, the store buffer can
     * contain entries that point into the fallback stub space. Since we can
     * destroy scripts outside the context of a GC, this situation could result
     * in us trying to mark invalid store buffer entries.
     *
     * Defer freeing any allocated blocks until after the next minor GC.
     */
    script->fallbackStubSpace_.freeAllAfterMinorGC(script->method()->zone());

    fop->delete_(script);
}
529 :
// UniquePtr delete policy: route destruction through BaselineScript::Destroy
// using the runtime's default FreeOp.
void
JS::DeletePolicy<js::jit::BaselineScript>::operator()(const js::jit::BaselineScript* script)
{
    BaselineScript::Destroy(rt_->defaultFreeOp(), const_cast<BaselineScript*>(script));
}
535 :
536 : void
537 562 : BaselineScript::clearDependentWasmImports()
538 : {
539 : // Remove any links from wasm::Instances that contain optimized import calls into
540 : // this BaselineScript.
541 562 : if (dependentWasmImports_) {
542 0 : for (DependentWasmImport& dep : *dependentWasmImports_)
543 0 : dep.instance->deoptimizeImportExit(dep.importIndex);
544 0 : dependentWasmImports_->clear();
545 : }
546 562 : }
547 :
548 : void
549 546 : BaselineScript::unlinkDependentWasmImports(FreeOp* fop)
550 : {
551 : // Remove any links from wasm::Instances that contain optimized FFI calls into
552 : // this BaselineScript.
553 546 : clearDependentWasmImports();
554 546 : if (dependentWasmImports_) {
555 0 : fop->delete_(dependentWasmImports_);
556 0 : dependentWasmImports_ = nullptr;
557 : }
558 546 : }
559 :
560 : bool
561 0 : BaselineScript::addDependentWasmImport(JSContext* cx, wasm::Instance& instance, uint32_t idx)
562 : {
563 0 : if (!dependentWasmImports_) {
564 0 : dependentWasmImports_ = cx->new_<Vector<DependentWasmImport>>(cx);
565 0 : if (!dependentWasmImports_)
566 0 : return false;
567 : }
568 0 : return dependentWasmImports_->emplaceBack(instance, idx);
569 : }
570 :
571 : void
572 0 : BaselineScript::removeDependentWasmImport(wasm::Instance& instance, uint32_t idx)
573 : {
574 0 : if (!dependentWasmImports_)
575 0 : return;
576 :
577 0 : for (DependentWasmImport& dep : *dependentWasmImports_) {
578 0 : if (dep.instance == &instance && dep.importIndex == idx) {
579 0 : dependentWasmImports_->erase(&dep);
580 0 : break;
581 : }
582 : }
583 : }
584 :
// Return the IC entry at |index| (bounds-checked in debug builds).
BaselineICEntry&
BaselineScript::icEntry(size_t index)
{
    MOZ_ASSERT(index < numICEntries());
    return icEntryList()[index];
}
591 :
// Return the PC-mapping index entry at |index| (bounds-checked in debug
// builds).
PCMappingIndexEntry&
BaselineScript::pcMappingIndexEntry(size_t index)
{
    MOZ_ASSERT(index < numPCMappingIndexEntries());
    return pcMappingIndexEntryList()[index];
}
598 :
599 : CompactBufferReader
600 2135 : BaselineScript::pcMappingReader(size_t indexEntry)
601 : {
602 2135 : PCMappingIndexEntry& entry = pcMappingIndexEntry(indexEntry);
603 :
604 2135 : uint8_t* dataStart = pcMappingData() + entry.bufferOffset;
605 2135 : uint8_t* dataEnd = (indexEntry == numPCMappingIndexEntries() - 1)
606 2262 : ? pcMappingData() + pcMappingSize_
607 2262 : : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset;
608 :
609 2135 : return CompactBufferReader(dataStart, dataEnd);
610 : }
611 :
// Adapter exposing a BaselineScript's IC entries via operator[], so the
// entry list can be searched with mozilla::BinarySearchIf.
struct ICEntries
{
    BaselineScript* const baseline_;

    explicit ICEntries(BaselineScript* baseline) : baseline_(baseline) {}

    BaselineICEntry& operator[](size_t index) const {
        return baseline_->icEntry(index);
    }
};
622 :
// Find the IC entry whose call's return address is at |returnOffset| within
// the jitcode. Return offsets are unique and sorted, so a binary search
// must find an exact match.
BaselineICEntry&
BaselineScript::icEntryFromReturnOffset(CodeOffset returnOffset)
{
    size_t loc;
    // |found| only exists in debug builds; the initializer expression below
    // is the BinarySearchIf call itself in all builds.
#ifdef DEBUG
    bool found =
#endif
        BinarySearchIf(ICEntries(this), 0, numICEntries(),
                       [&returnOffset](BaselineICEntry& entry) {
                           size_t roffset = returnOffset.offset();
                           size_t entryRoffset = entry.returnOffset().offset();
                           if (roffset < entryRoffset)
                               return -1;
                           if (entryRoffset < roffset)
                               return 1;
                           return 0;
                       },
                       &loc);

    MOZ_ASSERT(found);
    MOZ_ASSERT(loc < numICEntries());
    MOZ_ASSERT(icEntry(loc).returnOffset().offset() == returnOffset.offset());
    return icEntry(loc);
}
647 :
// Binary-search the IC entry list for an entry with the given pc offset and
// return its index. Multiple entries may share a pc offset; this returns
// the index BinarySearchIf lands on, which callers then scan around.
static inline size_t
ComputeBinarySearchMid(BaselineScript* baseline, uint32_t pcOffset)
{
    size_t loc;
    BinarySearchIf(ICEntries(baseline), 0, baseline->numICEntries(),
                   [pcOffset](BaselineICEntry& entry) {
                       uint32_t entryOffset = entry.pcOffset();
                       if (pcOffset < entryOffset)
                           return -1;
                       if (entryOffset < pcOffset)
                           return 1;
                       return 0;
                   },
                   &loc);
    return loc;
}
664 :
// Translate an IC entry's return offset into an absolute code address.
uint8_t*
BaselineScript::returnAddressForIC(const BaselineICEntry& ent)
{
    return method()->raw() + ent.returnOffset().offset();
}
670 :
BaselineICEntry&
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
{
    // Multiple IC entries can have the same PC offset, but this method only looks for
    // those which have isForOp() set.
    size_t mid = ComputeBinarySearchMid(this, pcOffset);

    // Found an IC entry with a matching PC offset. Search backward, and then
    // forward from this IC entry, looking for one with the same PC offset which
    // has isForOp() set.
    // NOTE: the backward loop decrements an unsigned index; when i passes 0
    // it wraps to SIZE_MAX and the "i < numICEntries()" condition terminates
    // the loop.
    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    MOZ_CRASH("Invalid PC offset for IC entry.");
}
691 :
// Variant of icEntryFromPCOffset that exploits locality: if the previously
// looked-up entry is close (within 10 bytecode bytes), scan forward from it
// instead of doing a fresh binary search.
BaselineICEntry&
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, BaselineICEntry* prevLookedUpEntry)
{
    // Do a linear forward search from the last queried PC offset, or fallback to a
    // binary search if the last offset is too far away.
    if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
        (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
    {
        BaselineICEntry* firstEntry = &icEntry(0);
        BaselineICEntry* lastEntry = &icEntry(numICEntries() - 1);
        BaselineICEntry* curEntry = prevLookedUpEntry;
        while (curEntry >= firstEntry && curEntry <= lastEntry) {
            if (curEntry->pcOffset() == pcOffset && curEntry->isForOp())
                break;
            curEntry++;
        }
        // The scan must have stopped on a matching entry, not run off the end.
        MOZ_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp());
        return *curEntry;
    }

    return icEntryFromPCOffset(pcOffset);
}
714 :
BaselineICEntry&
BaselineScript::callVMEntryFromPCOffset(uint32_t pcOffset)
{
    // Like icEntryFromPCOffset, but only looks for the fake ICEntries
    // inserted by VM calls.
    size_t mid = ComputeBinarySearchMid(this, pcOffset);

    // Backward then forward scan among entries sharing this pc offset; the
    // backward loop terminates via unsigned wraparound when i passes 0.
    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).kind() == ICEntry::Kind_CallVM)
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).kind() == ICEntry::Kind_CallVM)
            return icEntry(i);
    }
    MOZ_CRASH("Invalid PC offset for callVM entry.");
}
732 :
// Find the (early or regular) stack-check IC entry. Crashes if absent.
BaselineICEntry&
BaselineScript::stackCheckICEntry(bool earlyCheck)
{
    // The stack check will always be at offset 0, so just do a linear search
    // from the beginning. This is only needed for debug mode OSR, when
    // patching a frame that has invoked a Debugger hook via the interrupt
    // handler via the stack check, which is part of the prologue.
    ICEntry::Kind kind = earlyCheck ? ICEntry::Kind_EarlyStackCheck : ICEntry::Kind_StackCheck;
    for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
        if (icEntry(i).kind() == kind)
            return icEntry(i);
    }
    MOZ_CRASH("No stack check ICEntry found.");
}
747 :
748 : BaselineICEntry&
749 0 : BaselineScript::warmupCountICEntry()
750 : {
751 : // The stack check will be at a very low offset, so just do a linear search
752 : // from the beginning.
753 0 : for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
754 0 : if (icEntry(i).kind() == ICEntry::Kind_WarmupCounter)
755 0 : return icEntry(i);
756 : }
757 0 : MOZ_CRASH("No warmup count ICEntry found.");
758 : }
759 :
760 : BaselineICEntry&
761 7623 : BaselineScript::icEntryFromReturnAddress(uint8_t* returnAddr)
762 : {
763 7623 : MOZ_ASSERT(returnAddr > method_->raw());
764 7623 : MOZ_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
765 7623 : CodeOffset offset(returnAddr - method_->raw());
766 7623 : return icEntryFromReturnOffset(offset);
767 : }
768 :
769 : void
770 627 : BaselineScript::copyYieldAndAwaitEntries(JSScript* script, Vector<uint32_t>& yieldAndAwaitOffsets)
771 : {
772 627 : uint8_t** entries = yieldEntryList();
773 :
774 644 : for (size_t i = 0; i < yieldAndAwaitOffsets.length(); i++) {
775 17 : uint32_t offset = yieldAndAwaitOffsets[i];
776 17 : entries[i] = nativeCodeForPC(script, script->offsetToPC(offset));
777 : }
778 627 : }
779 :
// Copy the compiler's IC entries into this script's own storage, then patch
// up any stubs that need back-pointers to their (now final) entry address.
void
BaselineScript::copyICEntries(JSScript* script, const BaselineICEntry* entries, MacroAssembler& masm)
{
    // Fix up the return offset in the IC entries and copy them in.
    // Also write out the IC entry ptrs in any fallback stubs that were added.
    for (uint32_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& realEntry = icEntry(i);
        realEntry = entries[i];

        if (!realEntry.hasStub()) {
            // VM call without any stubs.
            continue;
        }

        // If the attached stub is a fallback stub, then fix it up with
        // a pointer to the (now available) realEntry.
        if (realEntry.firstStub()->isFallback())
            realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry);

        if (realEntry.firstStub()->isTypeMonitor_Fallback()) {
            ICTypeMonitor_Fallback* stub = realEntry.firstStub()->toTypeMonitor_Fallback();
            stub->fixupICEntry(&realEntry);
        }

        // Table-switch stubs hold a jump table that must be relocated against
        // the final code.
        if (realEntry.firstStub()->isTableSwitch()) {
            ICTableSwitch* stub = realEntry.firstStub()->toTableSwitch();
            stub->fixupJumpTable(script, this);
        }
    }
}
810 :
// Take ownership of the fallback stubs allocated during compilation.
void
BaselineScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
{
    fallbackStubSpace_.adoptFrom(stubSpace);
}
816 :
// Copy the serialized PC-mapping byte stream into this script's trailing
// storage; the size must match what BaselineScript::New reserved.
void
BaselineScript::copyPCMappingEntries(const CompactBufferWriter& entries)
{
    MOZ_ASSERT(entries.length() > 0);
    MOZ_ASSERT(entries.length() == pcMappingSize_);

    memcpy(pcMappingData(), entries.buffer(), entries.length());
}
825 :
826 : void
827 627 : BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry* entries)
828 : {
829 1759 : for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++)
830 1132 : pcMappingIndexEntry(i) = entries[i];
831 627 : }
832 :
// Translate a bytecode pc into the address of its native code. Locates the
// covering PC-mapping index entry, then decodes the compact delta stream
// forward until |pc| is reached. If |slotInfo| is non-null it receives the
// slot-location byte recorded for that pc. Crashes if pc has no mapping.
uint8_t*
BaselineScript::nativeCodeForPC(JSScript* script, jsbytecode* pc, PCMappingSlotInfo* slotInfo)
{
    MOZ_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this);

    uint32_t pcOffset = script->pcToOffset(pc);

    // Look for the first PCMappingIndexEntry with pc > the pc we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).pcOffset > pcOffset)
            break;
    }

    // The previous entry contains the current pc.
    MOZ_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry& entry = pcMappingIndexEntry(i);
    MOZ_ASSERT(pcOffset >= entry.pcOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
    uint32_t nativeOffset = entry.nativeOffset;

    MOZ_ASSERT(script->containsPC(curPC));
    MOZ_ASSERT(curPC <= pc);

    while (reader.more()) {
        // If the high bit is set, the native offset relative to the
        // previous pc != 0 and comes next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            nativeOffset += reader.readUnsigned();

        if (curPC == pc) {
            // The low 7 bits of |b| encode the slot-location info for this pc.
            if (slotInfo)
                *slotInfo = PCMappingSlotInfo(b & ~0x80);
            return method_->raw() + nativeOffset;
        }

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_CRASH("No native code for this pc");
}
880 :
// Inverse of nativeCodeForPC: given a native code address inside this
// script's method, find the (approximate) bytecode pc it corresponds to.
// Addresses before the first mapped op are attributed to bytecode offset 0;
// ops that emit no native code share the next op's native offset, in which
// case the last matching pc is returned.
jsbytecode*
BaselineScript::approximatePcForNativeAddress(JSScript* script, uint8_t* nativeAddress)
{
    MOZ_ASSERT(script->baselineScript() == this);
    MOZ_ASSERT(nativeAddress >= method_->raw());
    MOZ_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize());

    uint32_t nativeOffset = nativeAddress - method_->raw();
    MOZ_ASSERT(nativeOffset < method_->instructionsSize());

    // Look for the first PCMappingIndexEntry with native offset > the native offset we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).nativeOffset > nativeOffset)
            break;
    }

    // Go back an entry to search forward from.
    MOZ_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry& entry = pcMappingIndexEntry(i);

    // Walk the region's compact delta stream, tracking the current pc and
    // its accumulated native offset.
    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
    uint32_t curNativeOffset = entry.nativeOffset;

    MOZ_ASSERT(script->containsPC(curPC));

    // The native code address can occur before the start of ops.
    // Associate those with bytecode offset 0.
    if (curNativeOffset > nativeOffset)
        return script->code();

    jsbytecode* lastPC = curPC;
    while (true) {
        // If the high bit is set, the native offset relative to the
        // previous pc != 0 and comes next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            curNativeOffset += reader.readUnsigned();

        // Return the last PC that matched nativeOffset. Some bytecode
        // generate no native code (e.g., constant-pushing bytecode like
        // JSOP_INT8), and so their entries share the same nativeOffset as the
        // next op that does generate code.
        if (curNativeOffset > nativeOffset)
            return lastPC;

        // The native address may lie in-between the last delta-entry in
        // a pcMappingIndexEntry, and the next pcMappingIndexEntry.
        if (!reader.more())
            return curPC;

        lastPC = curPC;
        curPC += GetBytecodeLength(curPC);
    }
}
940 :
// Enable or disable the debug-trap call instructions baked into this
// script's baseline code, based on the script's current step-mode and
// breakpoint state. If |pc| is null, every op's trap site is re-evaluated;
// otherwise only the site for |pc| is touched.
void
BaselineScript::toggleDebugTraps(JSScript* script, jsbytecode* pc)
{
    MOZ_ASSERT(script->baselineScript() == this);

    // Only scripts compiled for debug mode have toggled calls.
    if (!hasDebugInstrumentation())
        return;

    // Scans source notes to detect which ops start a new source line
    // (step-mode traps only fire at line headers).
    SrcNoteLineScanner scanner(script->notes(), script->lineno());

    // The method's code is write-protected; make it writable while patching.
    AutoWritableJitCode awjc(method());

    // Walk every pc-mapping region, decoding the compact delta stream to
    // recover each op's native offset (same encoding as nativeCodeForPC).
    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
        PCMappingIndexEntry& entry = pcMappingIndexEntry(i);

        CompactBufferReader reader(pcMappingReader(i));
        jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
        uint32_t nativeOffset = entry.nativeOffset;

        MOZ_ASSERT(script->containsPC(curPC));

        while (reader.more()) {
            // High bit set => a native-offset delta follows.
            uint8_t b = reader.readByte();
            if (b & 0x80)
                nativeOffset += reader.readUnsigned();

            // Advance the line scanner even for ops we don't patch, so
            // isLineHeader() stays in sync with curPC.
            scanner.advanceTo(script->pcToOffset(curPC));

            if (!pc || pc == curPC) {
                // Trap is enabled if stepping (at a line header) or if a
                // breakpoint is set at this op.
                bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) ||
                               script->hasBreakpointsAt(curPC);

                // Patch the trap.
                CodeLocationLabel label(method(), CodeOffset(nativeOffset));
                Assembler::ToggleCall(label, enabled);
            }

            curPC += GetBytecodeLength(curPC);
        }
    }
}
983 :
984 : #ifdef JS_TRACE_LOGGING
985 : void
986 627 : BaselineScript::initTraceLogger(JSRuntime* runtime, JSScript* script,
987 : const Vector<CodeOffset>& offsets)
988 : {
989 : #ifdef DEBUG
990 627 : traceLoggerScriptsEnabled_ = TraceLogTextIdEnabled(TraceLogger_Scripts);
991 627 : traceLoggerEngineEnabled_ = TraceLogTextIdEnabled(TraceLogger_Engine);
992 : #endif
993 :
994 627 : MOZ_ASSERT(offsets.length() == numTraceLoggerToggleOffsets_);
995 1885 : for (size_t i = 0; i < offsets.length(); i++)
996 1258 : traceLoggerToggleOffsets()[i] = offsets[i].offset();
997 :
998 627 : if (TraceLogTextIdEnabled(TraceLogger_Engine) || TraceLogTextIdEnabled(TraceLogger_Scripts)) {
999 0 : traceLoggerScriptEvent_ = TraceLoggerEvent(TraceLogger_Scripts, script);
1000 0 : for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
1001 0 : CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
1002 0 : Assembler::ToggleToCmp(label);
1003 : }
1004 : }
1005 627 : }
1006 :
1007 : void
1008 0 : BaselineScript::toggleTraceLoggerScripts(JSRuntime* runtime, JSScript* script, bool enable)
1009 : {
1010 0 : DebugOnly<bool> engineEnabled = TraceLogTextIdEnabled(TraceLogger_Engine);
1011 0 : MOZ_ASSERT(enable == !traceLoggerScriptsEnabled_);
1012 0 : MOZ_ASSERT(engineEnabled == traceLoggerEngineEnabled_);
1013 :
1014 : // Patch the logging script textId to be correct.
1015 : // When logging log the specific textId else the global Scripts textId.
1016 0 : if (enable && !traceLoggerScriptEvent_.hasTextId())
1017 0 : traceLoggerScriptEvent_ = TraceLoggerEvent(TraceLogger_Scripts, script);
1018 :
1019 0 : AutoWritableJitCode awjc(method());
1020 :
1021 : // Enable/Disable the traceLogger.
1022 0 : for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
1023 0 : CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
1024 0 : if (enable)
1025 0 : Assembler::ToggleToCmp(label);
1026 : else
1027 0 : Assembler::ToggleToJmp(label);
1028 : }
1029 :
1030 : #if DEBUG
1031 0 : traceLoggerScriptsEnabled_ = enable;
1032 : #endif
1033 0 : }
1034 :
1035 : void
1036 0 : BaselineScript::toggleTraceLoggerEngine(bool enable)
1037 : {
1038 0 : DebugOnly<bool> scriptsEnabled = TraceLogTextIdEnabled(TraceLogger_Scripts);
1039 0 : MOZ_ASSERT(enable == !traceLoggerEngineEnabled_);
1040 0 : MOZ_ASSERT(scriptsEnabled == traceLoggerScriptsEnabled_);
1041 :
1042 0 : AutoWritableJitCode awjc(method());
1043 :
1044 : // Enable/Disable the traceLogger prologue and epilogue.
1045 0 : for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
1046 0 : CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
1047 0 : if (enable)
1048 0 : Assembler::ToggleToCmp(label);
1049 : else
1050 0 : Assembler::ToggleToJmp(label);
1051 : }
1052 :
1053 : #if DEBUG
1054 0 : traceLoggerEngineEnabled_ = enable;
1055 : #endif
1056 0 : }
1057 : #endif
1058 :
1059 : void
1060 0 : BaselineScript::toggleProfilerInstrumentation(bool enable)
1061 : {
1062 0 : if (enable == isProfilerInstrumentationOn())
1063 0 : return;
1064 :
1065 0 : JitSpew(JitSpew_BaselineIC, " toggling profiling %s for BaselineScript %p",
1066 0 : enable ? "on" : "off", this);
1067 :
1068 : // Toggle the jump
1069 0 : CodeLocationLabel enterToggleLocation(method_, CodeOffset(profilerEnterToggleOffset_));
1070 0 : CodeLocationLabel exitToggleLocation(method_, CodeOffset(profilerExitToggleOffset_));
1071 0 : if (enable) {
1072 0 : Assembler::ToggleToCmp(enterToggleLocation);
1073 0 : Assembler::ToggleToCmp(exitToggleLocation);
1074 0 : flags_ |= uint32_t(PROFILER_INSTRUMENTATION_ON);
1075 : } else {
1076 0 : Assembler::ToggleToJmp(enterToggleLocation);
1077 0 : Assembler::ToggleToJmp(exitToggleLocation);
1078 0 : flags_ &= ~uint32_t(PROFILER_INSTRUMENTATION_ON);
1079 : }
1080 : }
1081 :
// Unlink every IC stub that was allocated in the optimized (per-zone) stub
// space, leaving only stubs allocated in this script's fallback stub space.
// Called when the optimized stub space is about to be discarded.
void
BaselineScript::purgeOptimizedStubs(Zone* zone)
{
    JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");

    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        // Find the last stub in the chain; the chain's kind determines how
        // it is purged.
        ICStub* lastStub = entry.firstStub();
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            ICStub* stub = entry.firstStub();
            ICStub* prev = nullptr;

            // Note: the last stub (the fallback) is never visited by this
            // loop's body, so it is always retained.
            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    // |prev| deliberately stays put: |stub| is removed from
                    // the chain, so |prev| remains the predecessor of the
                    // next stub we look at.
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }

                prev = stub;
                stub = stub->next();
            }

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback* lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
        } else {
            MOZ_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub* stub = entry.firstStub();
        while (stub->next()) {
            MOZ_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}
1141 :
1142 : void
1143 20666 : jit::FinishDiscardBaselineScript(FreeOp* fop, JSScript* script)
1144 : {
1145 20666 : if (!script->hasBaselineScript())
1146 20120 : return;
1147 :
1148 546 : if (script->baselineScript()->active()) {
1149 : // Script is live on the stack. Keep the BaselineScript, but destroy
1150 : // stubs allocated in the optimized stub space.
1151 0 : script->baselineScript()->purgeOptimizedStubs(script->zone());
1152 :
1153 : // Reset |active| flag so that we don't need a separate script
1154 : // iteration to unmark them.
1155 0 : script->baselineScript()->resetActive();
1156 :
1157 : // The baseline caches have been wiped out, so the script will need to
1158 : // warm back up before it can be inlined during Ion compilation.
1159 0 : script->baselineScript()->clearIonCompiledOrInlined();
1160 0 : return;
1161 : }
1162 :
1163 546 : BaselineScript* baseline = script->baselineScript();
1164 546 : script->setBaselineScript(nullptr, nullptr);
1165 546 : BaselineScript::Destroy(fop, baseline);
1166 : }
1167 :
1168 : void
1169 0 : jit::AddSizeOfBaselineData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf, size_t* data,
1170 : size_t* fallbackStubs)
1171 : {
1172 0 : if (script->hasBaselineScript())
1173 0 : script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
1174 0 : }
1175 :
1176 : void
1177 0 : jit::ToggleBaselineProfiling(JSRuntime* runtime, bool enable)
1178 : {
1179 0 : JitRuntime* jrt = runtime->jitRuntime();
1180 0 : if (!jrt)
1181 0 : return;
1182 :
1183 0 : for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
1184 0 : for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
1185 0 : if (!script->hasBaselineScript())
1186 0 : continue;
1187 0 : AutoWritableJitCode awjc(script->baselineScript()->method());
1188 0 : script->baselineScript()->toggleProfilerInstrumentation(enable);
1189 : }
1190 : }
1191 : }
1192 :
1193 : #ifdef JS_TRACE_LOGGING
1194 : void
1195 0 : jit::ToggleBaselineTraceLoggerScripts(JSRuntime* runtime, bool enable)
1196 : {
1197 0 : for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
1198 0 : for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
1199 0 : if (!script->hasBaselineScript())
1200 0 : continue;
1201 0 : script->baselineScript()->toggleTraceLoggerScripts(runtime, script, enable);
1202 : }
1203 : }
1204 0 : }
1205 :
1206 : void
1207 0 : jit::ToggleBaselineTraceLoggerEngine(JSRuntime* runtime, bool enable)
1208 : {
1209 0 : for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
1210 0 : for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
1211 0 : if (!script->hasBaselineScript())
1212 0 : continue;
1213 0 : script->baselineScript()->toggleTraceLoggerEngine(enable);
1214 : }
1215 : }
1216 0 : }
1217 : #endif
1218 :
// Walk one JIT activation's frames and mark as active the BaselineScript of
// every script that could still need its baseline code: scripts running
// baseline frames, scripts awaiting lazy linking, and Ion frames (including
// inlined callees) that might bail out into baseline code.
static void
MarkActiveBaselineScripts(JSContext* cx, const JitActivationIterator& activation)
{
    for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) {
        switch (iter.type()) {
          case JitFrame_BaselineJS:
            iter.script()->baselineScript()->setActive();
            break;
          case JitFrame_Exit:
            // A lazy-link exit frame holds a callee whose baseline script
            // must survive until linking completes.
            if (iter.exitFrame()->is<LazyLinkExitFrameLayout>()) {
                LazyLinkExitFrameLayout* ll = iter.exitFrame()->as<LazyLinkExitFrameLayout>();
                ScriptFromCalleeToken(ll->jsFrame()->calleeToken())->baselineScript()->setActive();
            }
            break;
          case JitFrame_Bailout:
          case JitFrame_IonJS: {
            // Keep the baseline script around, since bailouts from the ion
            // jitcode might need to re-enter into the baseline jitcode.
            iter.script()->baselineScript()->setActive();
            for (InlineFrameIterator inlineIter(cx, &iter); inlineIter.more(); ++inlineIter)
                inlineIter.script()->baselineScript()->setActive();
            break;
          }
          default:;
        }
    }
}
1246 :
1247 : void
1248 4 : jit::MarkActiveBaselineScripts(Zone* zone)
1249 : {
1250 4 : if (zone->isAtomsZone())
1251 1 : return;
1252 3 : JSContext* cx = TlsContext.get();
1253 3 : for (JitActivationIterator iter(cx, zone->group()->ownerContext()); !iter.done(); ++iter) {
1254 0 : if (iter->compartment()->zone() == zone)
1255 0 : MarkActiveBaselineScripts(cx, iter);
1256 : }
1257 : }
|