Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/SharedIC.h"
8 :
9 : #include "mozilla/Casting.h"
10 : #include "mozilla/DebugOnly.h"
11 : #include "mozilla/IntegerPrintfMacros.h"
12 : #include "mozilla/SizePrintfMacros.h"
13 : #include "mozilla/Sprintf.h"
14 :
15 : #include "jslibmath.h"
16 : #include "jstypes.h"
17 :
18 : #include "gc/Policy.h"
19 : #include "jit/BaselineCacheIRCompiler.h"
20 : #include "jit/BaselineDebugModeOSR.h"
21 : #include "jit/BaselineIC.h"
22 : #include "jit/JitSpewer.h"
23 : #include "jit/Linker.h"
24 : #include "jit/SharedICHelpers.h"
25 : #ifdef JS_ION_PERF
26 : # include "jit/PerfSpewer.h"
27 : #endif
28 : #include "jit/VMFunctions.h"
29 : #include "vm/Interpreter.h"
30 :
31 : #include "jit/MacroAssembler-inl.h"
32 : #include "vm/Interpreter-inl.h"
33 :
34 : using mozilla::BitwiseCast;
35 : using mozilla::DebugOnly;
36 :
37 : namespace js {
38 : namespace jit {
39 :
40 : #ifdef JS_JITSPEW
41 : void
42 20861 : FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...)
43 : {
44 20861 : if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
45 0 : RootedScript script(cx, GetTopJitJSScript(cx));
46 0 : jsbytecode* pc = stub->icEntry()->pc(script);
47 :
48 : char fmtbuf[100];
49 : va_list args;
50 0 : va_start(args, fmt);
51 0 : (void) VsprintfLiteral(fmtbuf, fmt, args);
52 0 : va_end(args);
53 :
54 0 : JitSpew(JitSpew_BaselineICFallback,
55 : "Fallback hit for (%s:%" PRIuSIZE ") (pc=%" PRIuSIZE ",line=%d,uses=%d,stubs=%" PRIuSIZE "): %s",
56 0 : script->filename(),
57 0 : script->lineno(),
58 0 : script->pcToOffset(pc),
59 0 : PCToLineNumber(script, pc),
60 0 : script->getWarmUpCount(),
61 : stub->numOptimizedStubs(),
62 0 : fmtbuf);
63 : }
64 20861 : }
65 :
66 : void
67 2849 : TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub, const char* fmt, ...)
68 : {
69 2849 : if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
70 0 : RootedScript script(cx, GetTopJitJSScript(cx));
71 0 : jsbytecode* pc = stub->icEntry()->pc(script);
72 :
73 : char fmtbuf[100];
74 : va_list args;
75 0 : va_start(args, fmt);
76 0 : (void) VsprintfLiteral(fmtbuf, fmt, args);
77 0 : va_end(args);
78 :
79 0 : JitSpew(JitSpew_BaselineICFallback,
80 : "Type monitor fallback hit for (%s:%" PRIuSIZE ") (pc=%" PRIuSIZE ",line=%d,uses=%d,stubs=%d): %s",
81 0 : script->filename(),
82 0 : script->lineno(),
83 0 : script->pcToOffset(pc),
84 0 : PCToLineNumber(script, pc),
85 0 : script->getWarmUpCount(),
86 0 : (int) stub->numOptimizedMonitorStubs(),
87 0 : fmtbuf);
88 : }
89 2849 : }
90 : #endif // JS_JITSPEW
91 :
92 : ICFallbackStub*
93 118185 : ICEntry::fallbackStub() const
94 : {
95 118185 : return firstStub()->getChainFallback();
96 : }
97 :
void
IonICEntry::trace(JSTracer* trc)
{
    // Trace the script this Ion IC entry belongs to (manually barriered —
    // presumably script_ has no pre-barrier wrapper; confirm against the
    // member declaration), then the stub chain via the shared traceEntry.
    TraceManuallyBarrieredEdge(trc, &script_, "IonICEntry::script_");
    traceEntry(trc);
}
104 :
void
BaselineICEntry::trace(JSTracer* trc)
{
    // Baseline entries hold no extra GC pointers of their own; just trace
    // the stub chain.
    traceEntry(trc);
}
110 :
111 : void
112 17 : ICEntry::traceEntry(JSTracer* trc)
113 : {
114 17 : if (!hasStub())
115 4 : return;
116 39 : for (ICStub* stub = firstStub(); stub; stub = stub->next())
117 26 : stub->trace(trc);
118 : }
119 :
120 : ICStubConstIterator&
121 36705 : ICStubConstIterator::operator++()
122 : {
123 36705 : MOZ_ASSERT(currentStub_ != nullptr);
124 36705 : currentStub_ = currentStub_->next();
125 36705 : return *this;
126 : }
127 :
// Iterate the stubs hanging off |fallbackStub|'s IC entry. When |end| is
// true the iterator starts at the fallback stub itself (the chain
// terminator); otherwise it starts at the chain's first stub.
ICStubIterator::ICStubIterator(ICFallbackStub* fallbackStub, bool end)
  : icEntry_(fallbackStub->icEntry()),
    fallbackStub_(fallbackStub),
    previousStub_(nullptr),
    currentStub_(end ? fallbackStub : icEntry_->firstStub()),
    unlinked_(false)
{ }
136 :
137 : ICStubIterator&
138 5756 : ICStubIterator::operator++()
139 : {
140 5756 : MOZ_ASSERT(currentStub_->next() != nullptr);
141 5756 : if (!unlinked_)
142 4700 : previousStub_ = currentStub_;
143 5756 : currentStub_ = currentStub_->next();
144 5756 : unlinked_ = false;
145 5756 : return *this;
146 : }
147 :
void
ICStubIterator::unlink(JSContext* cx)
{
    // Remove the current stub from the chain. Only optimized stubs may be
    // unlinked — never the fallback stub that terminates the chain — and
    // at most once per iterator position.
    MOZ_ASSERT(currentStub_->next() != nullptr);
    MOZ_ASSERT(currentStub_ != fallbackStub_);
    MOZ_ASSERT(!unlinked_);

    fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);

    // Mark the current iterator position as unlinked, so operator++ works properly.
    unlinked_ = true;
}
160 :
161 : /* static */ bool
162 15235 : ICStub::NonCacheIRStubMakesGCCalls(Kind kind)
163 : {
164 15235 : MOZ_ASSERT(IsValidKind(kind));
165 15235 : MOZ_ASSERT(!IsCacheIRKind(kind));
166 :
167 15235 : switch (kind) {
168 : case Call_Fallback:
169 : case Call_Scripted:
170 : case Call_AnyScripted:
171 : case Call_Native:
172 : case Call_ClassHook:
173 : case Call_ScriptedApplyArray:
174 : case Call_ScriptedApplyArguments:
175 : case Call_ScriptedFunCall:
176 : case Call_ConstStringSplit:
177 : case WarmUpCounter_Fallback:
178 : case RetSub_Fallback:
179 : // These two fallback stubs don't actually make non-tail calls,
180 : // but the fallback code for the bailout path needs to pop the stub frame
181 : // pushed during the bailout.
182 : case GetProp_Fallback:
183 : case SetProp_Fallback:
184 2340 : return true;
185 : default:
186 12895 : return false;
187 : }
188 : }
189 :
190 : bool
191 2134 : ICStub::makesGCCalls() const
192 : {
193 2134 : switch (kind()) {
194 : case CacheIR_Regular:
195 204 : return toCacheIR_Regular()->stubInfo()->makesGCCalls();
196 : case CacheIR_Monitored:
197 1292 : return toCacheIR_Monitored()->stubInfo()->makesGCCalls();
198 : case CacheIR_Updated:
199 508 : return toCacheIR_Updated()->stubInfo()->makesGCCalls();
200 : default:
201 130 : return NonCacheIRStubMakesGCCalls(kind());
202 : }
203 : }
204 :
205 : void
206 134 : ICStub::traceCode(JSTracer* trc, const char* name)
207 : {
208 134 : JitCode* stubJitCode = jitCode();
209 134 : TraceManuallyBarrieredEdge(trc, &stubJitCode, name);
210 134 : }
211 :
void
ICStub::updateCode(JitCode* code)
{
    // Write barrier on the old code. The pre-barrier must run before
    // stubCode_ is overwritten, or incremental GC could miss the old edge.
    JitCode::writeBarrierPre(jitCode());
    stubCode_ = code->raw();
}
219 :
// Trace all GC things reachable from this stub: its jitcode, any attached
// monitor/update chains, and the kind-specific GC pointers stored in the
// stub itself.
/* static */ void
ICStub::trace(JSTracer* trc)
{
    traceCode(trc, "shared-stub-jitcode");

    // If the stub is a monitored fallback stub, then trace the monitor ICs hanging
    // off of that stub. We don't need to worry about the regular monitored stubs,
    // because the regular monitored stubs will always have a monitored fallback stub
    // that references the same stub chain.
    if (isMonitoredFallback()) {
        ICTypeMonitor_Fallback* lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
        for (ICStubConstIterator iter(lastMonStub->firstMonitorStub()); !iter.atEnd(); iter++) {
            MOZ_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
            iter->trace(trc);
        }
    }

    // Likewise, an updated stub owns a chain of type-update stubs.
    if (isUpdated()) {
        for (ICStubConstIterator iter(toUpdatedStub()->firstUpdateStub()); !iter.atEnd(); iter++) {
            MOZ_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
            iter->trace(trc);
        }
    }

    // Kind-specific GC pointers. Kinds not listed here store no extra
    // GC things beyond the jitcode traced above.
    switch (kind()) {
      case ICStub::Call_Scripted: {
        ICCall_Scripted* callStub = toCall_Scripted();
        TraceEdge(trc, &callStub->callee(), "baseline-callscripted-callee");
        TraceNullableEdge(trc, &callStub->templateObject(), "baseline-callscripted-template");
        break;
      }
      case ICStub::Call_Native: {
        ICCall_Native* callStub = toCall_Native();
        TraceEdge(trc, &callStub->callee(), "baseline-callnative-callee");
        TraceNullableEdge(trc, &callStub->templateObject(), "baseline-callnative-template");
        break;
      }
      case ICStub::Call_ClassHook: {
        ICCall_ClassHook* callStub = toCall_ClassHook();
        TraceNullableEdge(trc, &callStub->templateObject(), "baseline-callclasshook-template");
        break;
      }
      case ICStub::Call_ConstStringSplit: {
        ICCall_ConstStringSplit* callStub = toCall_ConstStringSplit();
        TraceEdge(trc, &callStub->templateObject(), "baseline-callstringsplit-template");
        TraceEdge(trc, &callStub->expectedSep(), "baseline-callstringsplit-sep");
        TraceEdge(trc, &callStub->expectedStr(), "baseline-callstringsplit-str");
        break;
      }
      case ICStub::TypeMonitor_SingleObject: {
        ICTypeMonitor_SingleObject* monitorStub = toTypeMonitor_SingleObject();
        TraceEdge(trc, &monitorStub->object(), "baseline-monitor-singleton");
        break;
      }
      case ICStub::TypeMonitor_ObjectGroup: {
        ICTypeMonitor_ObjectGroup* monitorStub = toTypeMonitor_ObjectGroup();
        TraceEdge(trc, &monitorStub->group(), "baseline-monitor-group");
        break;
      }
      case ICStub::TypeUpdate_SingleObject: {
        ICTypeUpdate_SingleObject* updateStub = toTypeUpdate_SingleObject();
        TraceEdge(trc, &updateStub->object(), "baseline-update-singleton");
        break;
      }
      case ICStub::TypeUpdate_ObjectGroup: {
        ICTypeUpdate_ObjectGroup* updateStub = toTypeUpdate_ObjectGroup();
        TraceEdge(trc, &updateStub->group(), "baseline-update-group");
        break;
      }
      case ICStub::GetIntrinsic_Constant: {
        ICGetIntrinsic_Constant* constantStub = toGetIntrinsic_Constant();
        TraceEdge(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
        break;
      }
      case ICStub::InstanceOf_Function: {
        ICInstanceOf_Function* instanceofStub = toInstanceOf_Function();
        TraceEdge(trc, &instanceofStub->shape(), "baseline-instanceof-fun-shape");
        TraceEdge(trc, &instanceofStub->prototypeObject(), "baseline-instanceof-fun-prototype");
        break;
      }
      case ICStub::NewArray_Fallback: {
        ICNewArray_Fallback* stub = toNewArray_Fallback();
        TraceNullableEdge(trc, &stub->templateObject(), "baseline-newarray-template");
        TraceEdge(trc, &stub->templateGroup(), "baseline-newarray-template-group");
        break;
      }
      case ICStub::NewObject_Fallback: {
        ICNewObject_Fallback* stub = toNewObject_Fallback();
        TraceNullableEdge(trc, &stub->templateObject(), "baseline-newobject-template");
        break;
      }
      case ICStub::Rest_Fallback: {
        ICRest_Fallback* stub = toRest_Fallback();
        TraceEdge(trc, &stub->templateObject(), "baseline-rest-template");
        break;
      }
      case ICStub::CacheIR_Regular:
        TraceCacheIRStub(trc, this, toCacheIR_Regular()->stubInfo());
        break;
      case ICStub::CacheIR_Monitored:
        TraceCacheIRStub(trc, this, toCacheIR_Monitored()->stubInfo());
        break;
      case ICStub::CacheIR_Updated: {
        ICCacheIR_Updated* stub = toCacheIR_Updated();
        TraceNullableEdge(trc, &stub->updateStubGroup(), "baseline-update-stub-group");
        TraceEdge(trc, &stub->updateStubId(), "baseline-update-stub-id");
        TraceCacheIRStub(trc, this, stub->stubInfo());
        break;
      }
      default:
        break;
    }
}
333 :
// Splice |stub| out of this fallback stub's chain. |prev| is the stub's
// predecessor in the chain, or null if |stub| is the chain's first stub.
void
ICFallbackStub::unlinkStub(Zone* zone, ICStub* prev, ICStub* stub)
{
    MOZ_ASSERT(stub->next());

    // If stub is the last optimized stub, update lastStubPtrAddr.
    if (stub->next() == this) {
        MOZ_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
        if (prev)
            lastStubPtrAddr_ = prev->addressOfNext();
        else
            lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
        *lastStubPtrAddr_ = this;
    } else {
        if (prev) {
            MOZ_ASSERT(prev->next() == stub);
            prev->setNext(stub->next());
        } else {
            MOZ_ASSERT(icEntry()->firstStub() == stub);
            icEntry()->setFirstStub(stub->next());
        }
    }

    state_.trackUnlinkedStub();

    if (zone->needsIncrementalBarrier()) {
        // We are removing edges from ICStub to gcthings. Perform one final trace
        // of the stub for incremental GC, as it must know about those edges.
        stub->trace(zone->barrierTracer());
    }

    if (stub->makesGCCalls() && stub->isMonitored()) {
        // This stub can make calls so we can return to it if it's on the stack.
        // We just have to reset its firstMonitorStub_ field to avoid a stale
        // pointer when purgeOptimizedStubs destroys all optimized monitor
        // stubs (unlinked stubs won't be updated).
        ICTypeMonitor_Fallback* monitorFallback = toMonitoredFallbackStub()->fallbackMonitorStub();
        stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
    }

#ifdef DEBUG
    // Poison stub code to ensure we don't call this stub again. However, if
    // this stub can make calls, a pointer to it may be stored in a stub frame
    // on the stack, so we can't touch the stubCode_ or GC will crash when
    // tracing this pointer.
    if (!stub->makesGCCalls())
        stub->stubCode_ = (uint8_t*)0xbad;
#endif
}
383 :
384 : void
385 4023 : ICFallbackStub::unlinkStubsWithKind(JSContext* cx, ICStub::Kind kind)
386 : {
387 4820 : for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
388 797 : if (iter->kind() == kind)
389 50 : iter.unlink(cx);
390 : }
391 4023 : }
392 :
393 : void
394 203 : ICFallbackStub::discardStubs(JSContext* cx)
395 : {
396 1109 : for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++)
397 906 : iter.unlink(cx);
398 203 : }
399 :
// Discard all optimized monitor stubs, leaving this fallback stub as the
// whole monitor chain again.
void
ICTypeMonitor_Fallback::resetMonitorStubChain(Zone* zone)
{
    if (zone->needsIncrementalBarrier()) {
        // We are removing edges from monitored stubs to gcthings (JitCode).
        // Perform one final trace of all monitor stubs for incremental GC,
        // as it must know about those edges.
        for (ICStub* s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
            s->trace(zone->barrierTracer());
    }

    firstMonitorStub_ = this;
    numOptimizedMonitorStubs_ = 0;

    if (hasFallbackStub_) {
        lastMonitorStubPtrAddr_ = nullptr;

        // Reset firstMonitorStub_ field of all monitored stubs.
        for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
             !iter.atEnd(); iter++)
        {
            if (!iter->isMonitored())
                continue;
            iter->toMonitoredStub()->resetFirstMonitorStub(this);
        }
    } else {
        // No main fallback stub: this monitor fallback is itself the IC
        // entry's chain; point the entry back at it.
        icEntry_->setFirstStub(this);
        lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
    }
}
430 :
// Drop all optimized type-update stubs, leaving only the terminating
// ICTypeUpdate_Fallback stub in the chain.
void
ICUpdatedStub::resetUpdateStubChain(Zone* zone)
{
    while (!firstUpdateStub_->isTypeUpdate_Fallback()) {
        if (zone->needsIncrementalBarrier()) {
            // We are removing edges from update stubs to gcthings (JitCode).
            // Perform one final trace of all update stubs for incremental GC,
            // as it must know about those edges.
            firstUpdateStub_->trace(zone->barrierTracer());
        }
        firstUpdateStub_ = firstUpdateStub_->next();
    }

    numOptimizedStubs_ = 0;
}
446 :
// A monitored stub always points at the first stub of its type-monitor
// chain.
ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode* stubCode, ICStub* firstMonitorStub)
  : ICStub(kind, ICStub::Monitored, stubCode),
    firstMonitorStub_(firstMonitorStub)
{
    // In order to silence Coverity - null pointer dereference checker
    MOZ_ASSERT(firstMonitorStub_);
    // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
    // double check that _its_ firstMonitorStub is the same as this one.
    MOZ_ASSERT_IF(firstMonitorStub_->isTypeMonitor_Fallback(),
                  firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
                  firstMonitorStub_);
}
459 :
460 : bool
461 11286 : ICMonitoredFallbackStub::initMonitoringChain(JSContext* cx, ICStubSpace* space)
462 : {
463 11286 : MOZ_ASSERT(fallbackMonitorStub_ == nullptr);
464 :
465 22572 : ICTypeMonitor_Fallback::Compiler compiler(cx, this);
466 11286 : ICTypeMonitor_Fallback* stub = compiler.getStub(space);
467 11286 : if (!stub)
468 0 : return false;
469 11286 : fallbackMonitorStub_ = stub;
470 11286 : return true;
471 : }
472 :
bool
ICMonitoredFallbackStub::addMonitorStubForValue(JSContext* cx, BaselineFrame* frame,
                                                StackTypeSet* types, HandleValue val)
{
    // Pure forwarder: the type-monitor fallback stub owns the monitor chain.
    return fallbackMonitorStub_->addMonitorStubForValue(cx, frame, types, val);
}
479 :
480 : bool
481 1069 : ICUpdatedStub::initUpdatingChain(JSContext* cx, ICStubSpace* space)
482 : {
483 1069 : MOZ_ASSERT(firstUpdateStub_ == nullptr);
484 :
485 2138 : ICTypeUpdate_Fallback::Compiler compiler(cx);
486 1069 : ICTypeUpdate_Fallback* stub = compiler.getStub(space);
487 1069 : if (!stub)
488 0 : return false;
489 :
490 1069 : firstUpdateStub_ = stub;
491 1069 : return true;
492 : }
493 :
// Return the JitCode for this compiler's stub, compiling it on first use
// and caching it in the compartment keyed by getKey().
JitCode*
ICStubCompiler::getStubCode()
{
    JitCompartment* comp = cx->compartment()->jitCompartment();

    // Check for existing cached stubcode.
    uint32_t stubKey = getKey();
    JitCode* stubCode = comp->getStubCode(stubKey);
    if (stubCode)
        return stubCode;

    // Compile new stubcode.
    JitContext jctx(cx, nullptr);
    MacroAssembler masm;
#ifndef JS_USE_LINK_REGISTER
    // The first value contains the return address,
    // which we pull into ICTailCallReg for tail calls.
    masm.adjustFrame(sizeof(intptr_t));
#endif
#ifdef JS_CODEGEN_ARM
    masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

    if (!generateStubCode(masm))
        return nullptr;
    Linker linker(masm);
    AutoFlushICache afc("getStubCode");
    Rooted<JitCode*> newStubCode(cx, linker.newCode<CanGC>(cx, BASELINE_CODE));
    if (!newStubCode)
        return nullptr;

    // Cache newly compiled stubcode.
    if (!comp->putStubCode(cx, stubKey, newStubCode))
        return nullptr;

    // After generating code, run postGenerateStubCode(). We must not fail
    // after this point.
    postGenerateStubCode(masm, newStubCode);

    // Sanity: the generated code entered a stub frame iff its kind is
    // classified as making GC calls, and every entered frame was left.
    MOZ_ASSERT(entersStubFrame_ == ICStub::NonCacheIRStubMakesGCCalls(kind));
    MOZ_ASSERT(!inStubFrame_);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

    return newStubCode;
}
542 :
543 : bool
544 848 : ICStubCompiler::tailCallVM(const VMFunction& fun, MacroAssembler& masm)
545 : {
546 848 : JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
547 848 : if (!code)
548 0 : return false;
549 :
550 848 : MOZ_ASSERT(fun.expectTailCall == TailCall);
551 848 : uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
552 848 : if (engine_ == Engine::Baseline) {
553 848 : EmitBaselineTailCallVM(code, masm, argSize);
554 : } else {
555 0 : uint32_t stackSize = argSize + fun.extraValuesToPop * sizeof(Value);
556 0 : EmitIonTailCallVM(code, masm, stackSize);
557 : }
558 848 : return true;
559 : }
560 :
561 : bool
562 161 : ICStubCompiler::callVM(const VMFunction& fun, MacroAssembler& masm)
563 : {
564 161 : MOZ_ASSERT(inStubFrame_);
565 :
566 161 : JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
567 161 : if (!code)
568 0 : return false;
569 :
570 161 : MOZ_ASSERT(fun.expectTailCall == NonTailCall);
571 161 : MOZ_ASSERT(engine_ == Engine::Baseline);
572 :
573 161 : EmitBaselineCallVM(code, masm);
574 161 : return true;
575 : }
576 :
// Emit code to push a stub frame, recording (in debug builds) the frame
// depth so leaveStubFrame can verify it.
void
ICStubCompiler::enterStubFrame(MacroAssembler& masm, Register scratch)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    EmitBaselineEnterStubFrame(masm, scratch);
#ifdef DEBUG
    framePushedAtEnterStubFrame_ = masm.framePushed();
#endif

    MOZ_ASSERT(!inStubFrame_);
    inStubFrame_ = true;

#ifdef DEBUG
    entersStubFrame_ = true;
#endif
}
593 :
// Mark the compiler as being inside a stub frame that was pushed by code
// outside this compiler (no frame-push is emitted here).
void
ICStubCompiler::assumeStubFrame(MacroAssembler& masm)
{
    MOZ_ASSERT(!inStubFrame_);
    inStubFrame_ = true;

#ifdef DEBUG
    entersStubFrame_ = true;

    // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
    // be STUB_FRAME_SIZE so that assertions don't fail in leaveStubFrame.
    framePushedAtEnterStubFrame_ = STUB_FRAME_SIZE;
#endif
}
608 :
// Emit code to pop the stub frame pushed by enterStubFrame/assumeStubFrame.
// |calledIntoIon| must match whether an Ion call was made inside the frame.
void
ICStubCompiler::leaveStubFrame(MacroAssembler& masm, bool calledIntoIon)
{
    MOZ_ASSERT(entersStubFrame_ && inStubFrame_);
    inStubFrame_ = false;

    MOZ_ASSERT(engine_ == Engine::Baseline);
#ifdef DEBUG
    masm.setFramePushed(framePushedAtEnterStubFrame_);
    if (calledIntoIon)
        masm.adjustFrame(sizeof(intptr_t)); // Calls into ion have this extra.
#endif
    EmitBaselineLeaveStubFrame(masm, calledIntoIon);
}
623 :
624 : void
625 796 : ICStubCompiler::pushStubPayload(MacroAssembler& masm, Register scratch)
626 : {
627 796 : if (engine_ == Engine::IonSharedIC) {
628 0 : masm.push(Imm32(0));
629 0 : return;
630 : }
631 :
632 796 : if (inStubFrame_) {
633 157 : masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
634 157 : masm.pushBaselineFramePtr(scratch, scratch);
635 : } else {
636 639 : masm.pushBaselineFramePtr(BaselineFrameReg, scratch);
637 : }
638 : }
639 :
// Like pushStubPayload, but also accounts for the pushed word in the
// assembler's tracked frame size.
void
ICStubCompiler::PushStubPayload(MacroAssembler& masm, Register scratch)
{
    pushStubPayload(masm, scratch);
    masm.adjustFrame(sizeof(intptr_t));
}
646 :
// A non-null |payload| means we were entered from baseline: the payload is
// the BaselineFrame and the inner and outer scripts coincide. Otherwise
// |icEntry| is an IonICEntry; only the inner script is known up front and
// outerScript_ is computed lazily by outerScript().
SharedStubInfo::SharedStubInfo(JSContext* cx, void* payload, ICEntry* icEntry)
  : maybeFrame_(nullptr),
    outerScript_(cx),
    innerScript_(cx),
    icEntry_(icEntry)
{
    if (payload) {
        maybeFrame_ = (BaselineFrame*) payload;
        outerScript_ = maybeFrame_->script();
        innerScript_ = maybeFrame_->script();
    } else {
        IonICEntry* entry = (IonICEntry*) icEntry;
        innerScript_ = entry->script();
        // outerScript_ is initialized lazily.
    }
}
663 :
HandleScript
SharedStubInfo::outerScript(JSContext* cx)
{
    // Lazily resolve the outer script for the Ion case: step from the
    // current exit frame to the Ion frame that owns this IC and take its
    // script.
    if (!outerScript_) {
        js::jit::JitActivationIterator iter(cx);
        JitFrameIterator it(iter);
        MOZ_ASSERT(it.isExitFrame());
        ++it;
        MOZ_ASSERT(it.isIonJS());
        outerScript_ = it.script();
        MOZ_ASSERT(!it.ionScript()->invalidated());
    }
    return outerScript_;
}
678 :
679 : //
680 : // BinaryArith_Fallback
681 : //
682 :
// Fallback path for all binary arithmetic / bitwise ops (JSOP_ADD..JSOP_URSH,
// JSOP_POW). Performs the operation via the interpreter helpers, then tries
// to attach an optimized stub specialized to the observed operand types.
// |payload| is the BaselineFrame (or null for Ion shared ICs).
static bool
DoBinaryArithFallback(JSContext* cx, void* payload, ICBinaryArith_Fallback* stub_,
                      HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
{
    SharedStubInfo info(cx, payload, stub_->icEntry());
    ICStubCompiler::Engine engine = info.engine();

    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICBinaryArith_Fallback*> stub(engine, info.maybeFrame(), stub_);

    jsbytecode* pc = info.pc();
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", CodeName[op],
                   int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
                   int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the compare operation.
    switch(op) {
      case JSOP_ADD:
        // Do an add.
        if (!AddValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_SUB:
        if (!SubValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MUL:
        if (!MulValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_DIV:
        if (!DivValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MOD:
        if (!ModValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_POW:
        if (!math_pow_handle(cx, lhsCopy, rhsCopy, ret))
            return false;
        break;
      case JSOP_BITOR: {
        int32_t result;
        if (!BitOr(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITXOR: {
        int32_t result;
        if (!BitXor(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITAND: {
        int32_t result;
        if (!BitAnd(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_LSH: {
        int32_t result;
        if (!BitLsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_RSH: {
        int32_t result;
        if (!BitRsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_URSH: {
        if (!UrshOperation(cx, lhs, rhs, ret))
            return false;
        break;
      }
      default:
        MOZ_CRASH("Unhandled baseline arith op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (ret.isDouble())
        stub->setSawDoubleResult();

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    // Handle string concat.
    if (op == JSOP_ADD) {
        if (lhs.isString() && rhs.isString()) {
            JitSpew(JitSpew_BaselineIC, "  Generating %s(String, String) stub", CodeName[op]);
            MOZ_ASSERT(ret.isString());
            ICBinaryArith_StringConcat::Compiler compiler(cx, engine);
            ICStub* strcatStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }

        if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
            JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
                    lhs.isString() ? "String" : "Object",
                    lhs.isString() ? "Object" : "String");
            MOZ_ASSERT(ret.isString());
            ICBinaryArith_StringObjectConcat::Compiler compiler(cx, engine, lhs.isString());
            ICStub* strcatStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }
    }

    // Boolean mixed with boolean/int32, for the ops that support it.
    if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
         (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
        (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
         op == JSOP_BITXOR))
    {
        JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
                lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
        ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, engine,
                                                          lhs.isBoolean(), rhs.isBoolean());
        ICStub* arithStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!arithStub)
            return false;
        stub->addNewStub(arithStub);
        return true;
    }

    // Handle only int32 or double.
    if (!lhs.isNumber() || !rhs.isNumber()) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    MOZ_ASSERT(ret.isNumber());

    if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
        if (!cx->runtime()->jitSupportsFloatingPoint)
            return true;

        switch (op) {
          case JSOP_ADD:
          case JSOP_SUB:
          case JSOP_MUL:
          case JSOP_DIV:
          case JSOP_MOD: {
            // Unlink int32 stubs, it's faster to always use the double stub.
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
            JitSpew(JitSpew_BaselineIC, "  Generating %s(Double, Double) stub", CodeName[op]);

            ICBinaryArith_Double::Compiler compiler(cx, op, engine);
            ICStub* doubleStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!doubleStub)
                return false;
            stub->addNewStub(doubleStub);
            return true;
          }
          default:
            break;
        }
    }

    if (lhs.isInt32() && rhs.isInt32() && op != JSOP_POW) {
        bool allowDouble = ret.isDouble();
        if (allowDouble)
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
        JitSpew(JitSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub", CodeName[op],
                allowDouble ? " => Double" : "");
        ICBinaryArith_Int32::Compiler compilerInt32(cx, op, engine, allowDouble);
        ICStub* int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(info.outerScript(cx)));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
    if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
        ret.isInt32())
    {
        switch(op) {
          case JSOP_BITOR:
          case JSOP_BITXOR:
          case JSOP_BITAND: {
            JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
                    lhs.isDouble() ? "Double" : "Int32",
                    lhs.isDouble() ? "Int32" : "Double");
            ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, engine, lhs.isDouble());
            ICStub* optStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!optStub)
                return false;
            stub->addNewStub(optStub);
            return true;
          }
          default:
            break;
        }
    }

    stub->noteUnoptimizableOperands();
    return true;
}
905 :
// Glue for calling DoBinaryArithFallback from jitcode: a tail call that
// also pops the two Values synced for the expression decompiler.
typedef bool (*DoBinaryArithFallbackFn)(JSContext*, void*, ICBinaryArith_Fallback*,
                                        HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoBinaryArithFallbackInfo =
    FunctionInfo<DoBinaryArithFallbackFn>(DoBinaryArithFallback, "DoBinaryArithFallback",
                                          TailCall, PopValues(2));
911 :
// Emit the binary-arith fallback path: sync both operands for the
// expression decompiler, then push the arguments (rhs, lhs, stub, payload)
// and tail-call DoBinaryArithFallback.
bool
ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoBinaryArithFallbackInfo, masm);
}
932 :
933 : static bool
934 3773 : DoConcatStrings(JSContext* cx, HandleString lhs, HandleString rhs, MutableHandleValue res)
935 : {
936 3773 : JSString* result = ConcatStrings<CanGC>(cx, lhs, rhs);
937 3773 : if (!result)
938 0 : return false;
939 :
940 3773 : res.setString(result);
941 3773 : return true;
942 : }
943 :
// VM wrapper descriptor for DoConcatStrings; tail-called from the
// string-concat stub with the two unboxed strings as arguments.
typedef bool (*DoConcatStringsFn)(JSContext*, HandleString, HandleString, MutableHandleValue);
static const VMFunction DoConcatStringsInfo =
    FunctionInfo<DoConcatStringsFn>(DoConcatStrings, "DoConcatStrings", TailCall);
947 :
bool
ICBinaryArith_StringConcat::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard both operands are strings; otherwise fall through to the next
    // stub in the chain.
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Unbox both strings and tail-call the DoConcatStrings VM helper
    // (rhs pushed first so lhs is on top, matching argument order).
    masm.unboxString(R0, R0.scratchReg());
    masm.unboxString(R1, R1.scratchReg());

    masm.push(R1.scratchReg());
    masm.push(R0.scratchReg());
    if (!tailCallVM(DoConcatStringsInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
971 :
972 : static JSString*
973 0 : ConvertObjectToStringForConcat(JSContext* cx, HandleValue obj)
974 : {
975 0 : MOZ_ASSERT(obj.isObject());
976 0 : RootedValue rootedObj(cx, obj);
977 0 : if (!ToPrimitive(cx, &rootedObj))
978 0 : return nullptr;
979 0 : return ToString<CanGC>(cx, rootedObj);
980 : }
981 :
982 : static bool
983 0 : DoConcatStringObject(JSContext* cx, bool lhsIsString, HandleValue lhs, HandleValue rhs,
984 : MutableHandleValue res)
985 : {
986 0 : JSString* lstr = nullptr;
987 0 : JSString* rstr = nullptr;
988 0 : if (lhsIsString) {
989 : // Convert rhs first.
990 0 : MOZ_ASSERT(lhs.isString() && rhs.isObject());
991 0 : rstr = ConvertObjectToStringForConcat(cx, rhs);
992 0 : if (!rstr)
993 0 : return false;
994 :
995 : // lhs is already string.
996 0 : lstr = lhs.toString();
997 : } else {
998 0 : MOZ_ASSERT(rhs.isString() && lhs.isObject());
999 : // Convert lhs first.
1000 0 : lstr = ConvertObjectToStringForConcat(cx, lhs);
1001 0 : if (!lstr)
1002 0 : return false;
1003 :
1004 : // rhs is already string.
1005 0 : rstr = rhs.toString();
1006 : }
1007 :
1008 0 : JSString* str = ConcatStrings<NoGC>(cx, lstr, rstr);
1009 0 : if (!str) {
1010 0 : RootedString nlstr(cx, lstr), nrstr(cx, rstr);
1011 0 : str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
1012 0 : if (!str)
1013 0 : return false;
1014 : }
1015 :
1016 : // Technically, we need to call TypeScript::MonitorString for this PC, however
1017 : // it was called when this stub was attached so it's OK.
1018 :
1019 0 : res.setString(str);
1020 0 : return true;
1021 : }
1022 :
// VM wrapper descriptor for DoConcatStringObject. PopValues(2) discards the
// two Values the stub pushes to sync the stack for the decompiler.
typedef bool (*DoConcatStringObjectFn)(JSContext*, bool lhsIsString, HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoConcatStringObjectInfo =
    FunctionInfo<DoConcatStringObjectFn>(DoConcatStringObject, "DoConcatStringObject", TailCall,
                                         PopValues(2));
1028 :
bool
ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard the operand types match the specialization chosen when this stub
    // was attached: (String, Object) if lhsIsString_, else (Object, String).
    Label failure;
    if (lhsIsString_) {
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    } else {
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        masm.branchTestString(Assembler::NotEqual, R1, &failure);
    }

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(Imm32(lhsIsString_));
    if (!tailCallVM(DoConcatStringObjectInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1060 :
bool
ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Load both operands as doubles into FloatReg0/FloatReg1, jumping to
    // failure if either is not a number.
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);
    masm.ensureDouble(R1, FloatReg1, &failure);

    switch (op) {
      case JSOP_ADD:
        masm.addDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_SUB:
        masm.subDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MUL:
        masm.mulDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_DIV:
        masm.divDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MOD:
        // No inline instruction for double modulus: call out to NumberMod
        // through the ABI; the result lands in ReturnDoubleReg (== FloatReg0).
        masm.setupUnalignedABICall(R0.scratchReg());
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NumberMod), MoveOp::DOUBLE);
        MOZ_ASSERT(ReturnDoubleReg == FloatReg0);
        break;
      default:
        MOZ_CRASH("Unexpected op");
    }

    // Box the double result into R0 and return from the IC.
    masm.boxDouble(FloatReg0, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1100 :
bool
ICBinaryArith_BooleanWithInt32::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard each operand is the type (Boolean or Int32) this stub was
    // specialized for.
    Label failure;
    if (lhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

    if (rhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    // Extract the payloads; a boolean payload is usable directly as an
    // int32 (0 or 1) for these ops.
    Register lhsReg = lhsIsBool_ ? masm.extractBoolean(R0, ExtractTemp0)
                                 : masm.extractInt32(R0, ExtractTemp0);
    Register rhsReg = rhsIsBool_ ? masm.extractBoolean(R1, ExtractTemp1)
                                 : masm.extractInt32(R1, ExtractTemp1);

    MOZ_ASSERT(op_ == JSOP_ADD || op_ == JSOP_SUB ||
               op_ == JSOP_BITOR || op_ == JSOP_BITXOR || op_ == JSOP_BITAND);

    switch(op_) {
      case JSOP_ADD: {
        Label fixOverflow;

        masm.branchAdd32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // On overflow, undo the add so lhsReg holds its original payload
        // again before falling through to the failure path.
        masm.bind(&fixOverflow);
        masm.sub32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_SUB: {
        Label fixOverflow;

        masm.branchSub32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // On overflow, undo the sub so lhsReg holds its original payload
        // again before falling through to the failure path.
        masm.bind(&fixOverflow);
        masm.add32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_BITOR: {
        masm.orPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITXOR: {
        masm.xorPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITAND: {
        masm.andPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      default:
        MOZ_CRASH("Unhandled op for BinaryArith_BooleanWithInt32.");
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1175 :
bool
ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Bitwise op where one operand is a double and the other an int32:
    // truncate the double to an int32, then combine the two int32 payloads.
    MOZ_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR);

    Label failure;
    Register intReg;
    Register scratchReg;
    // Guard operand types per the specialization, unbox the double into
    // FloatReg0 and the int32 into intReg. The scratch register comes from
    // whichever operand register held the double.
    if (lhsIsDouble_) {
        masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R1, ExtractTemp0);
        masm.unboxDouble(R0, FloatReg0);
        scratchReg = R0.scratchReg();
    } else {
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
        masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R0, ExtractTemp0);
        masm.unboxDouble(R1, FloatReg0);
        scratchReg = R1.scratchReg();
    }

    // Truncate the double to an int32.
    {
        Label doneTruncate;
        Label truncateABICall;
        // Fast path: inline truncation; falls back to an ABI call to
        // JS::ToInt32 for values it cannot handle inline.
        masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        masm.bind(&truncateABICall);
        // Preserve intReg across the ABI call with a push/pop.
        masm.push(intReg);
        masm.setupUnalignedABICall(scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(mozilla::BitwiseCast<void*, int32_t(*)(double)>(JS::ToInt32));
        masm.storeCallWordResult(scratchReg);
        masm.pop(intReg);

        masm.bind(&doneTruncate);
    }

    Register intReg2 = scratchReg;
    // All handled ops commute, so no need to worry about ordering.
    switch(op) {
      case JSOP_BITOR:
        masm.orPtr(intReg, intReg2);
        break;
      case JSOP_BITXOR:
        masm.xorPtr(intReg, intReg2);
        break;
      case JSOP_BITAND:
        masm.andPtr(intReg, intReg2);
        break;
      default:
        MOZ_CRASH("Unhandled op for BinaryArith_DoubleWithInt32.");
    }
    masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1239 :
1240 : //
1241 : // UnaryArith_Fallback
1242 : //
1243 :
// Fallback for unary arith ops (JSOP_BITNOT / JSOP_NEG): perform the
// operation in the VM, then try to attach a stub specialized for the
// observed input/output types (Int32=>Int32, or Number=>Number).
static bool
DoUnaryArithFallback(JSContext* cx, void* payload, ICUnaryArith_Fallback* stub_,
                     HandleValue val, MutableHandleValue res)
{
    SharedStubInfo info(cx, payload, stub_->icEntry());
    ICStubCompiler::Engine engine = info.engine();
    HandleScript script = info.innerScript();

    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICUnaryArith_Fallback*> stub(engine, info.maybeFrame(), stub_);

    jsbytecode* pc = info.pc();
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName[op]);

    switch (op) {
      case JSOP_BITNOT: {
        int32_t result;
        if (!BitNot(cx, val, &result))
            return false;
        res.setInt32(result);
        break;
      }
      case JSOP_NEG:
        if (!NegOperation(cx, script, pc, val, res))
            return false;
        break;
      default:
        MOZ_CRASH("Unexpected op");
    }

    // Check if debug mode toggling made the stub invalid. This must happen
    // before touching the stub below: the VM calls above can run arbitrary
    // code that discards it.
    if (stub.invalid())
        return true;

    if (res.isDouble())
        stub->setSawDoubleResult();

    if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard/replace stubs.
        return true;
    }

    if (val.isInt32() && res.isInt32()) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(Int32 => Int32) stub", CodeName[op]);
        ICUnaryArith_Int32::Compiler compiler(cx, op, engine);
        ICStub* int32Stub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(Number => Number) stub", CodeName[op]);

        // Unlink int32 stubs, the double stub handles both cases and TI specializes for both.
        stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);

        ICUnaryArith_Double::Compiler compiler(cx, op, engine);
        ICStub* doubleStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!doubleStub)
            return false;
        stub->addNewStub(doubleStub);
        return true;
    }

    // No specialization applies; leave only the fallback stub attached.
    return true;
}
1313 :
// VM wrapper descriptor for DoUnaryArithFallback. PopValues(1) discards the
// single Value the stub pushes to sync the stack for the decompiler.
typedef bool (*DoUnaryArithFallbackFn)(JSContext*, void*, ICUnaryArith_Fallback*,
                                       HandleValue, MutableHandleValue);
static const VMFunction DoUnaryArithFallbackInfo =
    FunctionInfo<DoUnaryArithFallbackFn>(DoUnaryArithFallback, "DoUnaryArithFallback", TailCall,
                                         PopValues(1));
1319 :
bool
ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Emit the fallback path for unary arith ops: tail-call into
    // DoUnaryArithFallback with the operand Value in R0.
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    // PopValues(1) in DoUnaryArithFallbackInfo removes the sync Value.
    return tailCallVM(DoUnaryArithFallbackInfo, masm);
}
1338 :
bool
ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Load the operand as a double, jumping to failure if not a number.
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);

    MOZ_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT);

    if (op == JSOP_NEG) {
        // Negation stays in the double domain: flip the sign and rebox.
        masm.negateDouble(FloatReg0);
        masm.boxDouble(FloatReg0, R0);
    } else {
        // BITNOT: truncate the double to an int32 first (inline fast path,
        // ABI call to JS::ToInt32 as fallback), then invert the bits and
        // box the result as an Int32 Value.
        Register scratchReg = R1.scratchReg();

        Label doneTruncate;
        Label truncateABICall;
        masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        masm.bind(&truncateABICall);
        masm.setupUnalignedABICall(scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(BitwiseCast<void*, int32_t(*)(double)>(JS::ToInt32));
        masm.storeCallWordResult(scratchReg);

        masm.bind(&doneTruncate);
        masm.not32(scratchReg);
        masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    }

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1377 :
1378 : //
1379 : // Compare_Fallback
1380 : //
1381 :
// Fallback for all compare ops (<, <=, >, >=, ==, !=, ===, !==, CASE):
// compute the result in the VM, then try to attach an optimized stub
// specialized for the observed operand types. In Baseline, a CacheIR stub
// is tried first; legacy shared stubs follow.
static bool
DoCompareFallback(JSContext* cx, void* payload, ICCompare_Fallback* stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue ret)
{
    SharedStubInfo info(cx, payload, stub_->icEntry());
    ICStubCompiler::Engine engine = info.engine();

    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCompare_Fallback*> stub(engine, info.maybeFrame(), stub_);

    jsbytecode* pc = info.pc();
    JSOp op = JSOp(*pc);

    FallbackICSpew(cx, stub, "Compare(%s)", CodeName[op]);

    // Case operations in a CONDSWITCH are performing strict equality.
    if (op == JSOP_CASE)
        op = JSOP_STRICTEQ;

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the compare operation.
    bool out;
    switch (op) {
      case JSOP_LT:
        if (!LessThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_LE:
        if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GT:
        if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GE:
        if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_EQ:
        if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_NE:
        if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTEQ:
        if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTNE:
        if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      default:
        MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
        return false;
    }

    ret.setBoolean(out);

    // Check if debug mode toggling made the stub invalid. Must be checked
    // before touching the stub: the comparisons above can run arbitrary JS.
    if (stub.invalid())
        return true;

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // In Baseline, prefer a CacheIR-generated stub when the generator can
    // handle this operand combination.
    if (engine == ICStubEngine::Baseline) {
        RootedScript script(cx, info.outerScript(cx));
        CompareIRGenerator gen(cx, script, pc, stub->state().mode(), op, lhs, rhs);
        bool attached = false;
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub)
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
            return true;
        }
    }

    // Try to generate new stubs.
    if (lhs.isInt32() && rhs.isInt32()) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(Int32, Int32) stub", CodeName[op]);
        ICCompare_Int32::Compiler compiler(cx, op, engine);
        ICStub* int32Stub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!int32Stub)
            return false;

        stub->addNewStub(int32Stub);
        return true;
    }

    // Without FP support, no numeric stub below can be used.
    if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
        return true;

    if (lhs.isNumber() && rhs.isNumber()) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(Number, Number) stub", CodeName[op]);

        // Unlink int32 stubs, it's faster to always use the double stub.
        stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);

        ICCompare_Double::Compiler compiler(cx, op, engine);
        ICStub* doubleStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if ((lhs.isNumber() && rhs.isUndefined()) ||
        (lhs.isUndefined() && rhs.isNumber()))
    {
        JitSpew(JitSpew_BaselineIC, " Generating %s(%s, %s) stub", CodeName[op],
                rhs.isUndefined() ? "Number" : "Undefined",
                rhs.isUndefined() ? "Undefined" : "Number");
        ICCompare_NumberWithUndefined::Compiler compiler(cx, op, engine, lhs.isUndefined());
        ICStub* doubleStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if (lhs.isBoolean() && rhs.isBoolean()) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(Boolean, Boolean) stub", CodeName[op]);
        ICCompare_Boolean::Compiler compiler(cx, op, engine);
        ICStub* booleanStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!booleanStub)
            return false;

        stub->addNewStub(booleanStub);
        return true;
    }

    if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
        JitSpew(JitSpew_BaselineIC, " Generating %s(%s, %s) stub", CodeName[op],
                rhs.isInt32() ? "Boolean" : "Int32",
                rhs.isInt32() ? "Int32" : "Boolean");
        ICCompare_Int32WithBoolean::Compiler compiler(cx, op, engine, lhs.isInt32());
        ICStub* optStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!optStub)
            return false;

        stub->addNewStub(optStub);
        return true;
    }

    // The remaining specializations only apply to equality comparisons.
    if (IsEqualityOp(op)) {
        if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
            JitSpew(JitSpew_BaselineIC, " Generating %s(String, String) stub", CodeName[op]);
            ICCompare_String::Compiler compiler(cx, op, engine);
            ICStub* stringStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!stringStub)
                return false;

            stub->addNewStub(stringStub);
            return true;
        }

        if (lhs.isSymbol() && rhs.isSymbol() && !stub->hasStub(ICStub::Compare_Symbol)) {
            JitSpew(JitSpew_BaselineIC, " Generating %s(Symbol, Symbol) stub", CodeName[op]);
            ICCompare_Symbol::Compiler compiler(cx, op, engine);
            ICStub* symbolStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!symbolStub)
                return false;

            stub->addNewStub(symbolStub);
            return true;
        }

        if (lhs.isObject() && rhs.isObject()) {
            MOZ_ASSERT(!stub->hasStub(ICStub::Compare_Object));
            JitSpew(JitSpew_BaselineIC, " Generating %s(Object, Object) stub", CodeName[op]);
            ICCompare_Object::Compiler compiler(cx, op, engine);
            ICStub* objectStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }

        if (lhs.isNullOrUndefined() || rhs.isNullOrUndefined()) {
            JitSpew(JitSpew_BaselineIC, " Generating %s(Null/Undef or X, Null/Undef or X) stub",
                    CodeName[op]);
            bool lhsIsUndefined = lhs.isNullOrUndefined();
            bool compareWithNull = lhs.isNull() || rhs.isNull();
            ICCompare_ObjectWithUndefined::Compiler compiler(cx, op, engine,
                                                             lhsIsUndefined, compareWithNull);
            ICStub* objectStub = compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }
    }

    // Nothing matched; record the failure so repeated misses can eventually
    // stop trying to specialize.
    stub->noteUnoptimizableAccess();

    return true;
}
1596 :
// VM wrapper descriptor for DoCompareFallback. PopValues(2) discards the two
// Values the stub pushes to sync the stack for the decompiler.
typedef bool (*DoCompareFallbackFn)(JSContext*, void*, ICCompare_Fallback*,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoCompareFallbackInfo =
    FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, "DoCompareFallback", TailCall,
                                      PopValues(2));
1602 :
bool
ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Emit the fallback path for compare ops: tail-call into
    // DoCompareFallback with lhs in R0 and rhs in R1.
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments (rhs first so lhs ends up on top).
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());
    return tailCallVM(DoCompareFallbackInfo, masm);
}
1622 :
1623 : //
1624 : // Compare_String
1625 : //
1626 :
bool
ICCompare_String::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard both operands are strings.
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Only attached for ==/!=/===/!== (see DoCompareFallback).
    MOZ_ASSERT(IsEqualityOp(op));

    Register left = masm.extractString(R0, ExtractTemp0);
    Register right = masm.extractString(R1, ExtractTemp1);

    AllocatableGeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // compareStrings computes the boolean result into scratchReg, jumping to
    // failure for inputs it cannot compare inline.
    masm.compareStrings(op, left, right, scratchReg, &failure);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1650 :
1651 : //
1652 : // Compare_Symbol
1653 : //
1654 :
bool
ICCompare_Symbol::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard both operands are symbols.
    Label failure;
    masm.branchTestSymbol(Assembler::NotEqual, R0, &failure);
    masm.branchTestSymbol(Assembler::NotEqual, R1, &failure);

    // Only attached for equality ops; symbols compare by identity, so a
    // pointer comparison of the two unboxed symbols suffices.
    MOZ_ASSERT(IsEqualityOp(op));

    Register left = masm.extractSymbol(R0, ExtractTemp0);
    Register right = masm.extractSymbol(R1, ExtractTemp1);

    Label ifTrue;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1682 :
1683 : //
1684 : // Compare_Boolean
1685 : //
1686 :
bool
ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard both operands are booleans.
    Label failure;
    masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);

    // Boolean payloads (0/1) are extracted as int32 and compared directly.
    Register left = masm.extractInt32(R0, ExtractTemp0);
    Register right = masm.extractInt32(R1, ExtractTemp1);

    // Compare payload regs of R0 and R1.
    Assembler::Condition cond = JSOpToCondition(op, /* signed = */true);
    masm.cmp32Set(cond, left, right, left);

    // Box the result and return
    masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1710 :
1711 : //
1712 : // Compare_NumberWithUndefined
1713 : //
1714 :
bool
ICCompare_NumberWithUndefined::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Map R0/R1 onto (number, undefined) according to the specialization
    // chosen when the stub was attached.
    ValueOperand numberOperand, undefinedOperand;
    if (lhsIsUndefined) {
        numberOperand = R1;
        undefinedOperand = R0;
    } else {
        numberOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
    masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    // Comparing a number with undefined will always be true for NE/STRICTNE,
    // and always be false for other compare ops.
    masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1742 :
1743 : //
1744 : // Compare_Object
1745 : //
1746 :
bool
ICCompare_Object::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Guard both operands are objects.
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Only attached for equality ops; object equality is identity, so a
    // pointer comparison of the two unboxed objects suffices.
    MOZ_ASSERT(IsEqualityOp(op));

    Register left = masm.extractObject(R0, ExtractTemp0);
    Register right = masm.extractObject(R1, ExtractTemp1);

    Label ifTrue;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1774 :
1775 : //
1776 : // Compare_ObjectWithUndefined
1777 : //
1778 :
bool
ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Equality comparison where one side is statically null or undefined
    // (selected by compareWithNull / lhsIsUndefined) and the other side is
    // an object, or itself null/undefined.
    MOZ_ASSERT(IsEqualityOp(op));

    ValueOperand objectOperand, undefinedOperand;
    if (lhsIsUndefined) {
        objectOperand = R1;
        undefinedOperand = R0;
    } else {
        objectOperand = R0;
        undefinedOperand = R1;
    }

    // Guard the null/undefined side still holds the expected constant.
    Label failure;
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    Label notObject;
    masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);

    if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
        // obj !== undefined for all objects.
        masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        // obj != undefined only where !obj->getClass()->emulatesUndefined()
        Register obj = masm.extractObject(objectOperand, ExtractTemp0);

        // We need a scratch register. Save obj around the class check so it
        // can be reused as the scratch; each exit path restores it with pop.
        masm.push(obj);
        Label slow, emulatesUndefined;
        masm.branchIfObjectEmulatesUndefined(obj, obj, &slow, &emulatesUndefined);

        masm.pop(obj);
        masm.moveValue(BooleanValue(op == JSOP_NE), R0);
        EmitReturnFromIC(masm);

        masm.bind(&emulatesUndefined);
        masm.pop(obj);
        masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
        EmitReturnFromIC(masm);

        // Class check needed the VM; let the fallback handle it.
        masm.bind(&slow);
        masm.pop(obj);
        masm.jump(&failure);
    }

    masm.bind(&notObject);

    // Also support null == null or undefined == undefined comparisons.
    Label differentTypes;
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, objectOperand, &differentTypes);
    else
        masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &differentTypes);

    // Same type on both sides: equal (loosely and strictly).
    masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
    EmitReturnFromIC(masm);

    masm.bind(&differentTypes);
    // Also support null == undefined or undefined == null.
    Label neverEqual;
    if (compareWithNull)
        masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &neverEqual);
    else
        masm.branchTestNull(Assembler::NotEqual, objectOperand, &neverEqual);

    // null and undefined are loosely equal but never strictly equal.
    masm.moveValue(BooleanValue(op == JSOP_EQ || op == JSOP_STRICTNE), R0);
    EmitReturnFromIC(masm);

    // null/undefined can only be equal to null/undefined or emulatesUndefined.
    masm.bind(&neverEqual);
    masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1862 :
1863 : //
1864 : // Compare_Int32WithBoolean
1865 : //
1866 :
bool
ICCompare_Int32WithBoolean::Compiler::generateStubCode(MacroAssembler& masm)
{
    // Map R0/R1 onto (int32, boolean) according to the specialization
    // chosen when the stub was attached.
    Label failure;
    ValueOperand int32Val;
    ValueOperand boolVal;
    if (lhsIsInt32_) {
        int32Val = R0;
        boolVal = R1;
    } else {
        boolVal = R0;
        int32Val = R1;
    }
    masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
    masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);

    if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
        // Ints and booleans are never strictly equal, always strictly not equal.
        masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        // Loose comparison: a boolean payload (0/1) compares as an int32.
        Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
        Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);

        // Compare payload regs of R0 and R1, preserving operand order.
        Assembler::Condition cond = JSOpToCondition(op_, /* signed = */true);
        masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
                      (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());

        // Box the result and return
        masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1906 :
1907 : //
1908 : // GetProp_Fallback
1909 : //
1910 :
1911 : // Return whether obj is in some PreliminaryObjectArray and has a structure
1912 : // that might change in the future.
1913 : bool
1914 2802 : IsPreliminaryObject(JSObject* obj)
1915 : {
1916 2802 : if (obj->isSingleton())
1917 825 : return false;
1918 :
1919 1977 : TypeNewScript* newScript = obj->group()->newScript();
1920 1977 : if (newScript && !newScript->analyzed())
1921 250 : return true;
1922 :
1923 1727 : if (obj->group()->maybePreliminaryObjects())
1924 650 : return true;
1925 :
1926 1077 : return false;
1927 : }
1928 :
1929 : void
1930 2154 : StripPreliminaryObjectStubs(JSContext* cx, ICFallbackStub* stub)
1931 : {
1932 : // Before the new script properties analysis has been performed on a type,
1933 : // all instances of that type have the maximum number of fixed slots.
1934 : // Afterwards, the objects (even the preliminary ones) might be changed
1935 : // to reduce the number of fixed slots they have. If we generate stubs for
1936 : // both the old and new number of fixed slots, the stub will look
1937 : // polymorphic to IonBuilder when it is actually monomorphic. To avoid
1938 : // this, strip out any stubs for preliminary objects before attaching a new
1939 : // stub which isn't on a preliminary object.
1940 :
1941 6203 : for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
1942 4049 : if (iter->isCacheIR_Regular() && iter->toCacheIR_Regular()->hasPreliminaryObject())
1943 0 : iter.unlink(cx);
1944 4049 : else if (iter->isCacheIR_Monitored() && iter->toCacheIR_Monitored()->hasPreliminaryObject())
1945 34 : iter.unlink(cx);
1946 4015 : else if (iter->isCacheIR_Updated() && iter->toCacheIR_Updated()->hasPreliminaryObject())
1947 62 : iter.unlink(cx);
1948 : }
1949 2154 : }
1950 :
1951 : bool
1952 624 : CheckHasNoSuchOwnProperty(JSContext* cx, JSObject* obj, jsid id)
1953 : {
1954 624 : if (obj->isNative()) {
1955 : // Don't handle proto chains with resolve hooks.
1956 602 : if (ClassMayResolveId(cx->names(), obj->getClass(), id, obj))
1957 0 : return false;
1958 602 : if (obj->as<NativeObject>().contains(cx, id))
1959 0 : return false;
1960 602 : if (obj->getClass()->getGetProperty())
1961 0 : return false;
1962 22 : } else if (obj->is<UnboxedPlainObject>()) {
1963 0 : if (obj->as<UnboxedPlainObject>().containsUnboxedOrExpandoProperty(cx, id))
1964 0 : return false;
1965 22 : } else if (obj->is<UnboxedArrayObject>()) {
1966 0 : if (JSID_IS_ATOM(id, cx->names().length))
1967 0 : return false;
1968 22 : } else if (obj->is<TypedObject>()) {
1969 0 : if (obj->as<TypedObject>().typeDescr().hasProperty(cx->names(), id))
1970 0 : return false;
1971 : } else {
1972 22 : return false;
1973 : }
1974 :
1975 602 : return true;
1976 : }
1977 :
1978 : bool
1979 275 : CheckHasNoSuchProperty(JSContext* cx, JSObject* obj, jsid id,
1980 : JSObject** lastProto, size_t* protoChainDepthOut)
1981 : {
1982 275 : size_t depth = 0;
1983 275 : JSObject* curObj = obj;
1984 973 : while (curObj) {
1985 624 : if (!CheckHasNoSuchOwnProperty(cx, curObj, id))
1986 22 : return false;
1987 :
1988 602 : if (!curObj->isNative()) {
1989 : // Non-native objects are only handled as the original receiver.
1990 0 : if (curObj != obj)
1991 0 : return false;
1992 : }
1993 :
1994 602 : JSObject* proto = curObj->staticPrototype();
1995 602 : if (!proto)
1996 253 : break;
1997 :
1998 349 : curObj = proto;
1999 349 : depth++;
2000 : }
2001 :
2002 253 : if (lastProto)
2003 0 : *lastProto = curObj;
2004 253 : if (protoChainDepthOut)
2005 0 : *protoChainDepthOut = depth;
2006 253 : return true;
2007 : }
2008 :
2009 : static bool
2010 5345 : ComputeGetPropResult(JSContext* cx, BaselineFrame* frame, JSOp op, HandlePropertyName name,
2011 : MutableHandleValue val, MutableHandleValue res)
2012 : {
2013 : // Handle arguments.length and arguments.callee on optimized arguments, as
2014 : // it is not an object.
2015 5345 : if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && IsOptimizedArguments(frame, val)) {
2016 38 : if (op == JSOP_LENGTH) {
2017 38 : res.setInt32(frame->numActualArgs());
2018 : } else {
2019 0 : MOZ_ASSERT(name == cx->names().callee);
2020 0 : MOZ_ASSERT(frame->script()->hasMappedArgsObj());
2021 0 : res.setObject(*frame->callee());
2022 : }
2023 : } else {
2024 5307 : if (op == JSOP_GETBOUNDNAME) {
2025 0 : RootedObject env(cx, &val.toObject());
2026 0 : RootedId id(cx, NameToId(name));
2027 0 : if (!GetNameBoundInEnvironment(cx, env, id, res))
2028 0 : return false;
2029 : } else {
2030 5307 : MOZ_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH);
2031 5307 : if (!GetProperty(cx, val, name, res))
2032 0 : return false;
2033 : }
2034 : }
2035 :
2036 5345 : return true;
2037 : }
2038 :
// Fallback path for JSOP_GETPROP/JSOP_CALLPROP/JSOP_LENGTH/JSOP_GETBOUNDNAME:
// try to attach an optimized CacheIR stub for this access, then perform the
// property get itself and feed the result to type monitoring.
static bool
DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_,
                  MutableHandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    // DebugModeOSRVolatileStub lets us detect below (stub.invalid()) whether
    // the stub pointer was invalidated while we ran arbitrary JS.
    DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub_->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetProp(%s)", CodeName[op]);

    MOZ_ASSERT(op == JSOP_GETPROP ||
               op == JSOP_CALLPROP ||
               op == JSOP_LENGTH ||
               op == JSOP_GETBOUNDNAME);

    RootedPropertyName name(cx, script->getName(pc));

    // There are some reasons we can fail to attach a stub that are temporary.
    // We want to avoid calling noteUnoptimizableAccess() if the reason we
    // failed to attach a stub is one of those temporary reasons, since we might
    // end up attaching a stub for the exact same access later.
    bool isTemporarilyUnoptimizable = false;

    // Possibly transition the IC state (e.g. toward megamorphic), discarding
    // existing stubs if so.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    bool attached = false;
    if (stub->state().canAttachStub()) {
        RootedValue idVal(cx, StringValue(name));
        GetPropIRGenerator gen(cx, script, pc, CacheKind::GetProp, stub->state().mode(),
                               &isTemporarilyUnoptimizable, val, idVal, val, CanAttachGetter::Yes);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, script,
                                                        stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
                // Preliminary-object bookkeeping: either remember that this
                // stub was attached on a preliminary object, or strip stale
                // preliminary-object stubs (see StripPreliminaryObjectStubs).
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Monitored()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);
            }
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    // Perform the actual property get.
    if (!ComputeGetPropResult(cx, frame, op, name, val, res))
        return false;

    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
    TypeScript::Monitor(cx, script, pc, types, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    if (attached)
        return true;

    MOZ_ASSERT(!attached);
    if (!isTemporarilyUnoptimizable)
        stub->noteUnoptimizableAccess();

    return true;
}
2111 :
// Fallback path for JSOP_GETPROP_SUPER: like DoGetPropFallback, but the
// receiver (|this|) differs from the object the property is looked up on
// ([[HomeObject]].[[Prototype]], passed as |val|).
static bool
DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_,
                       HandleValue receiver, MutableHandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    // DebugModeOSRVolatileStub lets us detect below (stub.invalid()) whether
    // the stub pointer was invalidated while we ran arbitrary JS.
    DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub_->icEntry()->pc(script);
    FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName[JSOp(*pc)]);

    MOZ_ASSERT(JSOp(*pc) == JSOP_GETPROP_SUPER);

    RootedPropertyName name(cx, script->getName(pc));

    // There are some reasons we can fail to attach a stub that are temporary.
    // We want to avoid calling noteUnoptimizableAccess() if the reason we
    // failed to attach a stub is one of those temporary reasons, since we might
    // end up attaching a stub for the exact same access later.
    bool isTemporarilyUnoptimizable = false;

    // Possibly transition the IC state, discarding existing stubs if so.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    bool attached = false;
    if (stub->state().canAttachStub()) {
        RootedValue idVal(cx, StringValue(name));
        GetPropIRGenerator gen(cx, script, pc, CacheKind::GetPropSuper, stub->state().mode(),
                               &isTemporarilyUnoptimizable, val, idVal, receiver,
                               CanAttachGetter::Yes);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, script,
                                                        stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
                // Preliminary-object bookkeeping, as in DoGetPropFallback.
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Monitored()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);
            }
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    // |val| is [[HomeObject]].[[Prototype]] which must be Object
    RootedObject valObj(cx, &val.toObject());
    if (!GetProperty(cx, valObj, receiver, name, res))
        return false;

    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
    TypeScript::Monitor(cx, script, pc, types, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    if (attached)
        return true;

    MOZ_ASSERT(!attached);
    if (!isTemporarilyUnoptimizable)
        stub->noteUnoptimizableAccess();

    return true;
}
2183 :
// VMFunction wrappers so the fallback stubs can tail-call into the C++
// handlers above. PopValues(1) discards the extra copy of R0 that
// generateStubCode pushes to keep the stack synced for the expression
// decompiler.
typedef bool (*DoGetPropFallbackFn)(JSContext*, BaselineFrame*, ICGetProp_Fallback*,
                                    MutableHandleValue, MutableHandleValue);
static const VMFunction DoGetPropFallbackInfo =
    FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, "DoGetPropFallback", TailCall,
                                      PopValues(1));

typedef bool (*DoGetPropSuperFallbackFn)(JSContext*, BaselineFrame*, ICGetProp_Fallback*,
                                         HandleValue, MutableHandleValue, MutableHandleValue);
static const VMFunction DoGetPropSuperFallbackInfo =
    FunctionInfo<DoGetPropSuperFallbackFn>(DoGetPropSuperFallback, "DoGetPropSuperFallback",
                                           TailCall);
2195 :
// Emit the GetProp fallback stub: push the operand(s) and tail-call into the
// appropriate C++ fallback handler, then record the bailout resume point and
// enter the type monitor chain with the result.
bool
ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Super property getters use a |this| that differs from base object
    if (hasReceiver_) {
        // Push arguments.
        masm.pushValue(R0);
        masm.pushValue(R1);
        masm.push(ICStubReg);
        masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

        if (!tailCallVM(DoGetPropSuperFallbackInfo, masm))
            return false;
    } else {
        // Ensure stack is fully synced for the expression decompiler.
        // (This extra value is popped again via PopValues(1) on the
        // DoGetPropFallbackInfo VMFunction.)
        masm.pushValue(R0);

        // Push arguments.
        masm.pushValue(R0);
        masm.push(ICStubReg);
        masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

        if (!tailCallVM(DoGetPropFallbackInfo, masm))
            return false;
    }

    // This is the resume point used when bailout rewrites call stack to undo
    // Ion inlined frames. The return address pushed onto reconstructed stack
    // will point here.
    assumeStubFrame(masm);
    bailoutReturnOffset_.bind(masm.currentOffset());

    leaveStubFrame(masm, true);

    // When we get here, ICStubReg contains the ICGetProp_Fallback stub,
    // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
    // instead of a MonitoredStub. So, we cheat.
    masm.loadPtr(Address(ICStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 ICStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
2243 :
2244 : void
2245 70 : ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
2246 : {
2247 70 : if (engine_ == Engine::Baseline) {
2248 70 : BailoutReturnStub kind = hasReceiver_ ? BailoutReturnStub::GetPropSuper
2249 70 : : BailoutReturnStub::GetProp;
2250 70 : void* address = code->raw() + bailoutReturnOffset_.offset();
2251 70 : cx->compartment()->jitCompartment()->initBailoutReturnAddr(address, getKey(), kind);
2252 : }
2253 70 : }
2254 :
2255 : void
2256 0 : CheckForTypedObjectWithDetachedStorage(JSContext* cx, MacroAssembler& masm, Label* failure)
2257 : {
2258 : // All stubs manipulating typed objects must check the compartment-wide
2259 : // flag indicating whether their underlying storage might be detached, to
2260 : // bail out if needed.
2261 0 : int32_t* address = &cx->compartment()->detachedTypedObjects;
2262 0 : masm.branch32(Assembler::NotEqual, AbsoluteAddress(address), Imm32(0), failure);
2263 0 : }
2264 :
2265 : void
2266 0 : LoadTypedThingData(MacroAssembler& masm, TypedThingLayout layout, Register obj, Register result)
2267 : {
2268 0 : switch (layout) {
2269 : case Layout_TypedArray:
2270 0 : masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), result);
2271 0 : break;
2272 : case Layout_OutlineTypedObject:
2273 0 : masm.loadPtr(Address(obj, OutlineTypedObject::offsetOfData()), result);
2274 0 : break;
2275 : case Layout_InlineTypedObject:
2276 0 : masm.computeEffectiveAddress(Address(obj, InlineTypedObject::offsetOfDataStart()), result);
2277 0 : break;
2278 : default:
2279 0 : MOZ_CRASH();
2280 : }
2281 0 : }
2282 :
2283 : void
2284 715 : BaselineScript::noteAccessedGetter(uint32_t pcOffset)
2285 : {
2286 715 : ICEntry& entry = icEntryFromPCOffset(pcOffset);
2287 715 : ICFallbackStub* stub = entry.fallbackStub();
2288 :
2289 715 : if (stub->isGetProp_Fallback())
2290 715 : stub->toGetProp_Fallback()->noteAccessedGetter();
2291 715 : }
2292 :
2293 : // TypeMonitor_Fallback
2294 : //
2295 :
// Extend this fallback's monitor stub chain so that |val| passes the type
// monitor quickly next time: attach (or update) an AnyValue, PrimitiveSet,
// SingleObject or ObjectGroup stub as appropriate. Returns false only on OOM.
bool
ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext* cx, BaselineFrame* frame,
                                               StackTypeSet* types, HandleValue val)
{
    MOZ_ASSERT(types);

    // Don't attach too many SingleObject/ObjectGroup stubs. If the value is a
    // primitive or if we will attach an any-object stub, we can handle this
    // with a single PrimitiveSet or AnyValue stub so we always optimize.
    if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS &&
        val.isObject() &&
        !types->unknownObject())
    {
        return true;
    }

    bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
    MOZ_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

    if (types->unknown()) {
        // The TypeSet got marked as unknown so attach a stub that always
        // succeeds.

        // Check for existing TypeMonitor_AnyValue stubs.
        for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_AnyValue())
                return true;
        }

        // Discard existing stubs.
        resetMonitorStubChain(cx->zone());
        wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);

        ICTypeMonitor_AnyValue::Compiler compiler(cx);
        ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!stub) {
            ReportOutOfMemory(cx);
            return false;
        }

        JitSpew(JitSpew_BaselineIC, " Added TypeMonitor stub %p for any value", stub);
        addOptimizedMonitorStub(stub);

    } else if (val.isPrimitive() || types->unknownObject()) {
        // Uninitialized lexicals are not monitored; nothing to attach.
        if (val.isMagic(JS_UNINITIALIZED_LEXICAL))
            return true;
        MOZ_ASSERT(!val.isMagic());
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeMonitor stub.
        ICTypeMonitor_PrimitiveSet* existingStub = nullptr;
        for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_PrimitiveSet()) {
                existingStub = iter->toTypeMonitor_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        if (val.isObject()) {
            // Check for existing SingleObject/ObjectGroup stubs and discard
            // stubs if we find one. Ideally we would discard just these stubs,
            // but unlinking individual type monitor stubs is somewhat
            // complicated.
            MOZ_ASSERT(types->unknownObject());
            bool hasObjectStubs = false;
            for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
                if (iter->isTypeMonitor_SingleObject() || iter->isTypeMonitor_ObjectGroup()) {
                    hasObjectStubs = true;
                    break;
                }
            }
            if (hasObjectStubs) {
                resetMonitorStubChain(cx->zone());
                wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);
                existingStub = nullptr;
            }
        }

        // Either extend the existing PrimitiveSet stub with the new type, or
        // create a fresh one.
        ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub* stub = existingStub
                       ? compiler.updateStub()
                       : compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!stub) {
            ReportOutOfMemory(cx);
            return false;
        }

        JitSpew(JitSpew_BaselineIC, " %s TypeMonitor stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

        if (!existingStub) {
            MOZ_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
            addOptimizedMonitorStub(stub);
        }

    } else if (val.toObject().isSingleton()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_SingleObject() &&
                iter->toTypeMonitor_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
        ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!stub) {
            ReportOutOfMemory(cx);
            return false;
        }

        JitSpew(JitSpew_BaselineIC, " Added TypeMonitor stub %p for singleton %p",
                stub, obj.get());

        addOptimizedMonitorStub(stub);

    } else {
        RootedObjectGroup group(cx, val.toObject().group());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_ObjectGroup() &&
                iter->toTypeMonitor_ObjectGroup()->group() == group)
            {
                return true;
            }
        }

        ICTypeMonitor_ObjectGroup::Compiler compiler(cx, group);
        ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!stub) {
            ReportOutOfMemory(cx);
            return false;
        }

        JitSpew(JitSpew_BaselineIC, " Added TypeMonitor stub %p for ObjectGroup %p",
                stub, group.get());

        addOptimizedMonitorStub(stub);
    }

    bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

    if (firstMonitorStubAdded) {
        // Was an empty monitor chain before, but a new stub was added. This is the
        // only time that any main stubs' firstMonitorStub fields need to be updated to
        // refer to the newly added monitor stub.
        ICStub* firstStub = mainFallbackStub_->icEntry()->firstStub();
        for (ICStubConstIterator iter(firstStub); !iter.atEnd(); iter++) {
            // Non-monitored stubs are used if the result has always the same type,
            // e.g. a StringLength stub will always return int32.
            if (!iter->isMonitored())
                continue;

            // Since we just added the first optimized monitoring stub, any
            // existing main stub's |firstMonitorStub| MUST be pointing to the fallback
            // monitor stub (i.e. this stub).
            MOZ_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
            iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
        }
    }

    return true;
}
2464 :
// Fallback handler for the type monitor IC: record |value|'s type in the
// appropriate TypeScript set (argument, |this|, or bytecode result), copy the
// value through to |res|, and attach an optimized monitor stub for it.
static bool
DoTypeMonitorFallback(JSContext* cx, BaselineFrame* frame, ICTypeMonitor_Fallback* stub,
                      HandleValue value, MutableHandleValue res)
{
    JSScript* script = frame->script();
    jsbytecode* pc = stub->icEntry()->pc(script);
    TypeFallbackICSpew(cx, stub, "TypeMonitor");

    // Copy input value to res.
    res.set(value);

    if (MOZ_UNLIKELY(value.isMagic())) {
        // It's possible that we arrived here from bailing out of Ion, and that
        // Ion proved that the value is dead and optimized out. In such cases,
        // do nothing. However, it's also possible that we have an uninitialized
        // this, in which case we should not look for other magic values.

        if (value.whyMagic() == JS_OPTIMIZED_OUT) {
            MOZ_ASSERT(!stub->monitorsThis());
            return true;
        }

        // In derived class constructors (including nested arrows/eval), the
        // |this| argument or GETALIASEDVAR can return the magic TDZ value.
        MOZ_ASSERT(value.isMagic(JS_UNINITIALIZED_LEXICAL));
        MOZ_ASSERT(frame->isFunctionFrame() || frame->isEvalFrame());
        MOZ_ASSERT(stub->monitorsThis() ||
                   *GetNextPc(pc) == JSOP_CHECKTHIS ||
                   *GetNextPc(pc) == JSOP_CHECKTHISREINIT ||
                   *GetNextPc(pc) == JSOP_CHECKRETURN);
        // Record unknown type so the TDZ value never has to be monitored again.
        if (stub->monitorsThis())
            TypeScript::SetThis(cx, script, TypeSet::UnknownType());
        else
            TypeScript::Monitor(cx, script, pc, TypeSet::UnknownType());
        return true;
    }

    // Note: ideally we would merge this if-else statement with the one below,
    // but that triggers an MSVC 2015 compiler bug. See bug 1363054.
    StackTypeSet* types;
    uint32_t argument;
    if (stub->monitorsArgument(&argument))
        types = TypeScript::ArgTypes(script, argument);
    else if (stub->monitorsThis())
        types = TypeScript::ThisTypes(script);
    else
        types = TypeScript::BytecodeTypes(script, pc);

    if (stub->monitorsArgument(&argument)) {
        MOZ_ASSERT(pc == script->code());
        TypeScript::SetArgument(cx, script, argument, value);
    } else if (stub->monitorsThis()) {
        MOZ_ASSERT(pc == script->code());
        TypeScript::SetThis(cx, script, value);
    } else {
        TypeScript::Monitor(cx, script, pc, types, value);
    }

    // The type-update calls above can run arbitrary code; bail if the stub
    // was invalidated meanwhile.
    if (MOZ_UNLIKELY(stub->invalid()))
        return true;

    return stub->addMonitorStubForValue(cx, frame, types, value);
}
2528 :
// VMFunction wrapper so the type monitor fallback stub can tail-call into
// DoTypeMonitorFallback.
typedef bool (*DoTypeMonitorFallbackFn)(JSContext*, BaselineFrame*, ICTypeMonitor_Fallback*,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoTypeMonitorFallbackInfo =
    FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback, "DoTypeMonitorFallback",
                                          TailCall);
2534 :
// Emit the type monitor fallback stub: push the monitored value, the stub and
// the frame pointer, then tail-call into DoTypeMonitorFallback.
bool
ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Push arguments in reverse order of the C++ signature.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeMonitorFallbackInfo, masm);
}
2549 :
// Emit a PrimitiveSet monitor stub: succeed if the value in R0 has any of the
// primitive types recorded in flags_, otherwise fall through to the next stub.
bool
ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler& masm)
{
    Label success;
    // When the set also contains double, the single branchTestNumber below
    // covers int32 too, so a separate int32 test is only emitted without it.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_SYMBOL))
        masm.branchTestSymbol(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    // No type in the set matched: fall through to the next monitor stub.
    EmitStubGuardFailure(masm);

    masm.bind(&success);
    EmitReturnFromIC(masm);
    return true;
}
2584 :
static void
MaybeWorkAroundAmdBug(MacroAssembler& masm)
{
    // Attempt to work around an AMD bug (see bug 1034706 and bug 1281759), by
    // inserting 32-bytes of NOPs.
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
    if (CPUInfo::NeedAmdBugWorkaround()) {
        // 9 + 9 + 9 + 5 = 32 bytes of NOP padding.
        masm.nop(9);
        masm.nop(9);
        masm.nop(9);
        masm.nop(5);
    }
#endif
}
2599 :
// Emit a monitor stub that succeeds only when R0 holds one specific object
// (identity comparison against the object stored in the stub).
bool
ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler& masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    MaybeWorkAroundAmdBug(masm);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    Address expectedObject(ICStubReg, ICTypeMonitor_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);
    MaybeWorkAroundAmdBug(masm);

    EmitReturnFromIC(masm);
    MaybeWorkAroundAmdBug(masm);

    // Not the expected object: fall through to the next monitor stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2620 :
// Emit a monitor stub that succeeds only when R0 holds an object whose group
// matches the group stored in the stub.
bool
ICTypeMonitor_ObjectGroup::Compiler::generateStubCode(MacroAssembler& masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    MaybeWorkAroundAmdBug(masm);

    // Guard on the object's ObjectGroup.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), R1.scratchReg());

    Address expectedGroup(ICStubReg, ICTypeMonitor_ObjectGroup::offsetOfGroup());
    masm.branchPtr(Assembler::NotEqual, expectedGroup, R1.scratchReg(), &failure);
    MaybeWorkAroundAmdBug(masm);

    EmitReturnFromIC(masm);
    MaybeWorkAroundAmdBug(masm);

    // Group mismatch: fall through to the next monitor stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2643 :
// Emit a monitor stub that accepts any value: it unconditionally returns to
// the mainline code without any guards.
bool
ICTypeMonitor_AnyValue::Compiler::generateStubCode(MacroAssembler& masm)
{
    EmitReturnFromIC(masm);
    return true;
}
2650 :
        : // Attach (or extend) a type-update stub recording that |val| has been
        : // written to property |id| of |obj| (whose group is |group|). One of
        : // four stub kinds is chosen, from most to least general: AnyValue,
        : // PrimitiveSet, SingleObject, ObjectGroup. Returns false only on
        : // stub-allocation failure (OOM); declining to attach still returns true.
2651 : bool
2652 618 : ICUpdatedStub::addUpdateStubForValue(JSContext* cx, HandleScript outerScript, HandleObject obj,
2653 : HandleObjectGroup group, HandleId id, HandleValue val)
2654 : {
2655 618 : EnsureTrackPropertyTypes(cx, obj, id);
2656 :
2657 : // Make sure that undefined values are explicitly included in the property
2658 : // types for an object if generating a stub to write an undefined value.
2659 618 : if (val.isUndefined() && CanHaveEmptyPropertyTypesForOwnProperty(obj)) {
2660 0 : MOZ_ASSERT(obj->group() == group);
2661 0 : AddTypePropertyId(cx, obj, id, val);
2662 : }
2663 :
        : // Determine how much the recorded type set already covers: |unknown|
        : // means any value is possible; |unknownObject| means any object is
        : // possible (primitives may still be tracked precisely).
2664 618 : bool unknown = false, unknownObject = false;
2665 618 : if (group->unknownProperties()) {
2666 12 : unknown = unknownObject = true;
2667 : } else {
2668 606 : if (HeapTypeSet* types = group->maybeGetProperty(id)) {
2669 606 : unknown = types->unknown();
2670 606 : unknownObject = types->unknownObject();
2671 : } else {
2672 : // We don't record null/undefined types for certain TypedObject
2673 : // properties. In these cases |types| is allowed to be nullptr
2674 : // without implying unknown types. See DoTypeUpdateFallback.
2675 0 : MOZ_ASSERT(obj->is<TypedObject>());
2676 0 : MOZ_ASSERT(val.isNullOrUndefined());
2677 : }
2678 : }
        : // Unknown types imply unknown objects; the converse need not hold.
2679 618 : MOZ_ASSERT_IF(unknown, unknownObject);
2680 :
2681 : // Don't attach too many SingleObject/ObjectGroup stubs unless we can
2682 : // replace them with a single PrimitiveSet or AnyValue stub.
2683 1236 : if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS &&
2684 618 : val.isObject() &&
2685 0 : !unknownObject)
2686 : {
2687 0 : return true;
2688 : }
2689 :
2690 618 : if (unknown) {
2691 : // Attach a stub that always succeeds. We should not have a
2692 : // TypeUpdate_AnyValue stub yet.
2693 27 : MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_AnyValue));
2694 :
2695 : // Discard existing stubs.
2696 27 : resetUpdateStubChain(cx->zone());
2697 :
2698 54 : ICTypeUpdate_AnyValue::Compiler compiler(cx);
2699 27 : ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
2700 27 : if (!stub)
2701 0 : return false;
2702 :
2703 27 : JitSpew(JitSpew_BaselineIC, " Added TypeUpdate stub %p for any value", stub);
2704 27 : addOptimizedUpdateStub(stub);
2705 :
2706 591 : } else if (val.isPrimitive() || unknownObject) {
        : // Fold the value's type into a (possibly pre-existing) PrimitiveSet
        : // stub. Objects reach this path only when the type set already
        : // allows any object, so they collapse to a single "object" tag.
2707 448 : JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();
2708 :
2709 : // Check for existing TypeUpdate stub.
2710 448 : ICTypeUpdate_PrimitiveSet* existingStub = nullptr;
2711 946 : for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
2712 498 : if (iter->isTypeUpdate_PrimitiveSet()) {
2713 30 : existingStub = iter->toTypeUpdate_PrimitiveSet();
2714 30 : MOZ_ASSERT(!existingStub->containsType(type));
2715 : }
2716 : }
2717 :
2718 448 : if (val.isObject()) {
2719 : // Discard existing ObjectGroup/SingleObject stubs.
2720 66 : resetUpdateStubChain(cx->zone());
2721 66 : if (existingStub)
2722 8 : addOptimizedUpdateStub(existingStub);
2723 : }
2724 :
        : // Either patch the existing PrimitiveSet stub in place (updateStub)
        : // or allocate a fresh one; only a fresh stub is added to the chain.
2725 896 : ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
2726 : ICStub* stub = existingStub ? compiler.updateStub()
2727 448 : : compiler.getStub(compiler.getStubSpace(outerScript));
2728 448 : if (!stub)
2729 0 : return false;
2730 448 : if (!existingStub) {
2731 418 : MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
2732 418 : addOptimizedUpdateStub(stub);
2733 : }
2734 :
2735 448 : JitSpew(JitSpew_BaselineIC, " %s TypeUpdate stub %p for primitive type %d",
2736 448 : existingStub ? "Modified existing" : "Created new", stub, type);
2737 :
2738 143 : } else if (val.toObject().isSingleton()) {
        : // Singleton objects get a stub keyed on the exact object identity.
        : // Note: this |obj| intentionally shadows the parameter |obj|.
2739 14 : RootedObject obj(cx, &val.toObject());
2740 :
2741 : #ifdef DEBUG
2742 : // We should not have a stub for this object.
2743 20 : for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
2744 13 : MOZ_ASSERT_IF(iter->isTypeUpdate_SingleObject(),
2745 : iter->toTypeUpdate_SingleObject()->object() != obj);
2746 : }
2747 : #endif
2748 :
2749 14 : ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
2750 7 : ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
2751 7 : if (!stub)
2752 0 : return false;
2753 :
2754 7 : JitSpew(JitSpew_BaselineIC, " Added TypeUpdate stub %p for singleton %p", stub, obj.get());
2755 :
2756 7 : addOptimizedUpdateStub(stub);
2757 :
2758 : } else {
        : // Non-singleton object: key the stub on the object's group.
        : // Note: this |group| intentionally shadows the parameter |group|.
2759 272 : RootedObjectGroup group(cx, val.toObject().group());
2760 :
2761 : #ifdef DEBUG
2762 : // We should not have a stub for this group.
2763 319 : for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
2764 183 : MOZ_ASSERT_IF(iter->isTypeUpdate_ObjectGroup(),
2765 : iter->toTypeUpdate_ObjectGroup()->group() != group);
2766 : }
2767 : #endif
2768 :
2769 272 : ICTypeUpdate_ObjectGroup::Compiler compiler(cx, group);
2770 136 : ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
2771 136 : if (!stub)
2772 0 : return false;
2773 :
2774 136 : JitSpew(JitSpew_BaselineIC, " Added TypeUpdate stub %p for ObjectGroup %p",
2775 272 : stub, group.get());
2776 :
2777 136 : addOptimizedUpdateStub(stub);
2778 : }
2779 :
2780 618 : return true;
2781 : }
2782 :
2783 : //
2784 : // NewArray_Fallback
2785 : //
2786 :
        : // Fallback VM entry for the NewArray IC: allocate the array for this
        : // bytecode site, preferring the stub's cached template object. On the
        : // slow path (no template yet), also try to cache a tenured template
        : // object on the stub for future fast-path allocations.
        : // Returns false on allocation failure with an exception pending.
2787 : static bool
2788 1192 : DoNewArray(JSContext* cx, void* payload, ICNewArray_Fallback* stub, uint32_t length,
2789 : MutableHandleValue res)
2790 : {
2791 2384 : SharedStubInfo info(cx, payload, stub->icEntry());
2792 :
2793 1192 : FallbackICSpew(cx, stub, "NewArray");
2794 :
2795 2384 : RootedObject obj(cx);
2796 1192 : if (stub->templateObject()) {
        : // Fast-ish path: clone from the cached template object.
2797 2292 : RootedObject templateObject(cx, stub->templateObject());
2798 1146 : obj = NewArrayOperationWithTemplate(cx, templateObject);
2799 1146 : if (!obj)
2800 0 : return false;
2801 : } else {
2802 46 : HandleScript script = info.script();
2803 46 : jsbytecode* pc = info.pc();
2804 46 : obj = NewArrayOperation(cx, script, pc, length);
2805 46 : if (!obj)
2806 0 : return false;
2807 :
        : // NOTE(review): the |obj &&| below is redundant — obj was
        : // null-checked just above. Skip template caching for singletons
        : // and while the group still tracks preliminary objects.
2808 46 : if (obj && !obj->isSingleton() && !obj->group()->maybePreliminaryObjects()) {
        : // The template is allocated tenured so it outlives nursery GCs.
2809 46 : JSObject* templateObject = NewArrayOperation(cx, script, pc, length, TenuredObject);
2810 46 : if (!templateObject)
2811 0 : return false;
2812 46 : stub->setTemplateObject(templateObject);
2813 : }
2814 : }
2815 :
2816 1192 : res.setObject(*obj);
2817 1192 : return true;
2818 : }
2819 :
        : // VMFunction descriptor binding DoNewArray for tail-calls from the
        : // fallback stub (see ICNewArray_Fallback::Compiler::generateStubCode).
2820 : typedef bool(*DoNewArrayFn)(JSContext*, void*, ICNewArray_Fallback*, uint32_t,
2821 : MutableHandleValue);
2822 3 : static const VMFunction DoNewArrayInfo =
2823 6 : FunctionInfo<DoNewArrayFn>(DoNewArray, "DoNewArray", TailCall);
2824 :
        : // Emit the fallback stub: push DoNewArray's trailing arguments in
        : // reverse order (length, stub, payload) and tail-call into the VM.
        : // The push order must mirror the DoNewArrayFn signature — do not
        : // reorder these independently of DoNewArrayInfo.
2825 : bool
2826 36 : ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
2827 : {
2828 36 : EmitRestoreTailCallReg(masm);
2829 :
2830 36 : masm.push(R0.scratchReg()); // length
2831 36 : masm.push(ICStubReg); // stub.
2832 36 : pushStubPayload(masm, R0.scratchReg());
2833 :
2834 36 : return tailCallVM(DoNewArrayInfo, masm);
2835 : }
2836 :
2837 : //
2838 : // NewObject_Fallback
2839 : //
2840 :
2841 : // Unlike typical baseline IC stubs, the code for NewObject_WithTemplate is
2842 : // specialized for the template object being allocated.
        : // Compile a one-off IC stub that inline-allocates a copy of
        : // |templateObject|. The stub bails to the next stub (guard failure)
        : // when the group is flagged pre-tenure, when an object-metadata
        : // builder is installed for the compartment, or when nursery
        : // allocation fails — each case needs the C++ fallback path.
        : // Returns nullptr on OOM.
2843 : static JitCode*
2844 78 : GenerateNewObjectWithTemplateCode(JSContext* cx, JSObject* templateObject)
2845 : {
2846 156 : JitContext jctx(cx, nullptr);
2847 156 : MacroAssembler masm;
2848 : #ifdef JS_CODEGEN_ARM
2849 : masm.setSecondScratchReg(BaselineSecondScratchReg);
2850 : #endif
2851 :
2852 156 : Label failure;
2853 78 : Register objReg = R0.scratchReg();
2854 78 : Register tempReg = R1.scratchReg();
        : // Guard: group must not have become pre-tenured since compilation.
2855 78 : masm.movePtr(ImmGCPtr(templateObject->group()), tempReg);
2856 156 : masm.branchTest32(Assembler::NonZero, Address(tempReg, ObjectGroup::offsetOfFlags()),
2857 78 : Imm32(OBJECT_FLAG_PRE_TENURE), &failure);
        : // Guard: no metadata builder may be observing allocations.
2858 156 : masm.branchPtr(Assembler::NotEqual, AbsoluteAddress(cx->compartment()->addressOfMetadataBuilder()),
2859 78 : ImmWord(0), &failure);
2860 78 : masm.createGCObject(objReg, tempReg, templateObject, gc::DefaultHeap, &failure);
        : // Box the raw object pointer into the R0 value register for return.
2861 78 : masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
2862 :
2863 78 : EmitReturnFromIC(masm);
2864 78 : masm.bind(&failure);
2865 78 : EmitStubGuardFailure(masm);
2866 :
2867 156 : Linker linker(masm);
2868 156 : AutoFlushICache afc("GenerateNewObjectWithTemplateCode");
2869 156 : return linker.newCode<CanGC>(cx, BASELINE_CODE);
2870 : }
2871 :
        : // Fallback VM entry for the NewObject IC: allocate the object for this
        : // bytecode site. Once a suitable tenured template object exists, a
        : // specialized inline-allocation stub (see
        : // GenerateNewObjectWithTemplateCode) is attached so later hits avoid
        : // this VM call entirely. Returns false on allocation failure.
2872 : static bool
2873 920 : DoNewObject(JSContext* cx, void* payload, ICNewObject_Fallback* stub, MutableHandleValue res)
2874 : {
2875 1840 : SharedStubInfo info(cx, payload, stub->icEntry());
2876 :
2877 920 : FallbackICSpew(cx, stub, "NewObject");
2878 :
2879 1840 : RootedObject obj(cx);
2880 :
2881 1840 : RootedObject templateObject(cx, stub->templateObject());
2882 920 : if (templateObject) {
2883 32 : MOZ_ASSERT(!templateObject->group()->maybePreliminaryObjects());
2884 32 : obj = NewObjectOperationWithTemplate(cx, templateObject);
2885 : } else {
2886 888 : HandleScript script = info.script();
2887 888 : jsbytecode* pc = info.pc();
2888 888 : obj = NewObjectOperation(cx, script, pc);
2889 :
        : // Only cache a template for non-singletons once preliminary-object
        : // analysis for the group has finished.
2890 888 : if (obj && !obj->isSingleton() && !obj->group()->maybePreliminaryObjects()) {
        : // Tenured so the template survives nursery collections.
2891 78 : JSObject* templateObject = NewObjectOperation(cx, script, pc, TenuredObject);
2892 78 : if (!templateObject)
2893 0 : return false;
2894 :
        : // The inline-allocation stub cannot handle dynamic slots, so
        : // only attach it for unboxed plain objects or plain objects
        : // with fixed slots; also skip if the stub chain was invalidated.
2895 234 : if (!stub->invalid() &&
2896 136 : (templateObject->is<UnboxedPlainObject>() ||
2897 58 : !templateObject->as<PlainObject>().hasDynamicSlots()))
2898 : {
2899 78 : JitCode* code = GenerateNewObjectWithTemplateCode(cx, templateObject);
2900 78 : if (!code)
2901 0 : return false;
2902 :
2903 : ICStubSpace* space =
2904 78 : ICStubCompiler::StubSpaceForStub(/* makesGCCalls = */ false, script,
2905 156 : ICStubCompiler::Engine::Baseline);
2906 78 : ICStub* templateStub = ICStub::New<ICNewObject_WithTemplate>(cx, space, code);
2907 78 : if (!templateStub)
2908 0 : return false;
2909 :
2910 78 : stub->addNewStub(templateStub);
2911 : }
2912 :
2913 78 : stub->setTemplateObject(templateObject);
2914 : }
2915 : }
2916 :
        : // Covers both branches above: NewObjectOperation(WithTemplate) may
        : // have returned nullptr on failure.
2917 920 : if (!obj)
2918 0 : return false;
2919 :
2920 920 : res.setObject(*obj);
2921 920 : return true;
2922 : }
2923 :
        : // VMFunction descriptor binding DoNewObject for tail-calls from the
        : // fallback stub (see ICNewObject_Fallback::Compiler::generateStubCode).
2924 : typedef bool(*DoNewObjectFn)(JSContext*, void*, ICNewObject_Fallback*, MutableHandleValue);
2925 3 : static const VMFunction DoNewObjectInfo =
2926 6 : FunctionInfo<DoNewObjectFn>(DoNewObject, "DoNewObject", TailCall);
2927 :
        : // Emit the fallback stub: push DoNewObject's trailing arguments in
        : // reverse order (stub, payload) and tail-call into the VM. The push
        : // order must mirror the DoNewObjectFn signature.
2928 : bool
2929 46 : ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
2930 : {
2931 46 : EmitRestoreTailCallReg(masm);
2932 :
2933 46 : masm.push(ICStubReg); // stub.
2934 46 : pushStubPayload(masm, R0.scratchReg());
2935 :
2936 46 : return tailCallVM(DoNewObjectInfo, masm);
2937 : }
2938 :
2939 : } // namespace jit
2940 : } // namespace js
|