Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/BaselineIC.h"
8 :
9 : #include "mozilla/DebugOnly.h"
10 : #include "mozilla/SizePrintfMacros.h"
11 : #include "mozilla/TemplateLib.h"
12 :
13 : #include "jsfriendapi.h"
14 : #include "jsfun.h"
15 : #include "jslibmath.h"
16 : #include "jstypes.h"
17 :
18 : #include "builtin/Eval.h"
19 : #include "builtin/SIMD.h"
20 : #include "gc/Policy.h"
21 : #include "jit/BaselineCacheIRCompiler.h"
22 : #include "jit/BaselineDebugModeOSR.h"
23 : #include "jit/BaselineJIT.h"
24 : #include "jit/InlinableNatives.h"
25 : #include "jit/JitSpewer.h"
26 : #include "jit/Linker.h"
27 : #include "jit/Lowering.h"
28 : #ifdef JS_ION_PERF
29 : # include "jit/PerfSpewer.h"
30 : #endif
31 : #include "jit/SharedICHelpers.h"
32 : #include "jit/VMFunctions.h"
33 : #include "js/Conversions.h"
34 : #include "js/GCVector.h"
35 : #include "vm/Opcodes.h"
36 : #include "vm/SelfHosting.h"
37 : #include "vm/TypedArrayObject.h"
38 :
39 : #include "jsboolinlines.h"
40 : #include "jsscriptinlines.h"
41 :
42 : #include "jit/JitFrames-inl.h"
43 : #include "jit/MacroAssembler-inl.h"
44 : #include "jit/shared/Lowering-shared-inl.h"
45 : #include "vm/EnvironmentObject-inl.h"
46 : #include "vm/Interpreter-inl.h"
47 : #include "vm/StringObject-inl.h"
48 : #include "vm/UnboxedObject-inl.h"
49 :
50 : using mozilla::DebugOnly;
51 :
52 : namespace js {
53 : namespace jit {
54 :
55 : //
56 : // WarmUpCounter_Fallback
57 : //
58 :
59 :
60 : //
61 : // The following data is kept in a temporary heap-allocated buffer, stored in
62 : // JitRuntime (high memory addresses at top, low at bottom):
63 : //
64 : // +----->+=================================+ -- <---- High Address
65 : // | | | |
66 : // | | ...BaselineFrame... | |-- Copy of BaselineFrame + stack values
67 : // | | | |
68 : // | +---------------------------------+ |
69 : // | | | |
70 : // | | ...Locals/Stack... | |
71 : // | | | |
72 : // | +=================================+ --
73 : // | | Padding(Maybe Empty) |
74 : // | +=================================+ --
75 : // +------|-- baselineFrame | |-- IonOsrTempData
76 : // | jitcode | |
77 : // +=================================+ -- <---- Low Address
78 : //
79 : // A pointer to the IonOsrTempData is returned.
80 :
// Descriptor returned to the OSR trampoline (see the diagram above).
// |jitcode| is the Ion OSR entry point to jump to; |baselineFrame| points
// at the *end* of the copied frame data, mirroring the baseline frame
// pointer register.
struct IonOsrTempData
{
    void* jitcode;
    uint8_t* baselineFrame;
};
86 :
// Copy the current BaselineFrame plus its locals/stack values into a
// temporary heap buffer owned by the context, and pair that copy with the
// Ion OSR entry point. Returns nullptr on OOM. NOTE(review): |stub| and
// |script| are unused here — presumably kept for signature symmetry with
// the caller; confirm before removing.
static IonOsrTempData*
PrepareOsrTempData(JSContext* cx, ICWarmUpCounter_Fallback* stub, BaselineFrame* frame,
                   HandleScript script, jsbytecode* pc, void* jitcode)
{
    size_t numLocalsAndStackVals = frame->numValueSlots();

    // Calculate the amount of space to allocate:
    //      BaselineFrame space:
    //          (sizeof(Value) * (numLocals + numStackVals))
    //        + sizeof(BaselineFrame)
    //
    //      IonOsrTempData space:
    //          sizeof(IonOsrTempData)

    size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numLocalsAndStackVals;
    size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

    // Both sub-buffers are kept Value-aligned.
    size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                        AlignBytes(ionOsrTempDataSpace, sizeof(Value));

    IonOsrTempData* info = (IonOsrTempData*)cx->allocateOsrTempData(totalSpace);
    if (!info)
        return nullptr;

    memset(info, 0, totalSpace);

    info->jitcode = jitcode;

    // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
    // |this| are not copied but left on the stack: the Baseline and Ion frame
    // share the same frame prefix and Ion won't clobber these values. Note
    // that info->baselineFrame will point to the *end* of the frame data, like
    // the frame pointer register in baseline frames.
    uint8_t* frameStart = (uint8_t*)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
    info->baselineFrame = frameStart + frameSpace;

    // The locals/stack values sit below |frame| in memory, hence the
    // negative offset from the frame pointer.
    memcpy(frameStart, (uint8_t*)frame - numLocalsAndStackVals * sizeof(Value), frameSpace);

    JitSpew(JitSpew_BaselineOSR, "Allocated IonOsrTempData at %p", (void*) info);
    JitSpew(JitSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

    // All done.
    return info;
}
131 :
// Fallback for the warm-up counter IC at loop heads: trigger/verify Ion
// compilation of the script and, if an Ion script with a matching OSR entry
// exists, prepare the IonOsrTempData needed to jump into it. *infoPtr stays
// null when OSR is not (yet) possible; that is success, not an error.
static bool
DoWarmUpCounterFallbackOSR(JSContext* cx, BaselineFrame* frame, ICWarmUpCounter_Fallback* stub,
                           IonOsrTempData** infoPtr)
{
    MOZ_ASSERT(infoPtr);
    *infoPtr = nullptr;

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    // This IC only sits on loop entries.
    MOZ_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);

    FallbackICSpew(cx, stub, "WarmUpCounter(%d)", int(script->pcToOffset(pc)));

    if (!IonCompileScriptForBaseline(cx, frame, pc))
        return false;

    // Bail out (successfully, leaving *infoPtr null) when Ion code for this
    // pc is unavailable, a bailout is expected, or the frame is a debuggee.
    if (!script->hasIonScript() || script->ionScript()->osrPc() != pc ||
        script->ionScript()->bailoutExpected() ||
        frame->isDebuggee())
    {
        return true;
    }

    IonScript* ion = script->ionScript();
    MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled() == ion->hasProfilingInstrumentation());
    MOZ_ASSERT(ion->osrPc() == pc);

    JitSpew(JitSpew_BaselineOSR, "  OSR possible!");
    void* jitcode = ion->method()->raw() + ion->osrEntryOffset();

    // Prepare the temporary heap copy of the fake InterpreterFrame and actual args list.
    JitSpew(JitSpew_BaselineOSR, "Got jitcode.  Preparing for OSR into ion.");
    IonOsrTempData* info = PrepareOsrTempData(cx, stub, frame, script, pc, jitcode);
    if (!info)
        return false;
    *infoPtr = info;

    return true;
}
171 :
// VMFunction wrapper so the stub code below can call
// DoWarmUpCounterFallbackOSR through the VM.
typedef bool (*DoWarmUpCounterFallbackOSRFn)(JSContext*, BaselineFrame*,
                                             ICWarmUpCounter_Fallback*, IonOsrTempData** infoPtr);
static const VMFunction DoWarmUpCounterFallbackOSRInfo =
    FunctionInfo<DoWarmUpCounterFallbackOSRFn>(DoWarmUpCounterFallbackOSR,
                                               "DoWarmUpCounterFallbackOSR");
177 :
// Emit the warm-up counter fallback stub: call into the VM to (maybe)
// compile for Ion, then either jump straight into the Ion OSR entry with
// OsrFrameReg pointing at the copied frame, or return from the IC when no
// jitcode is available.
bool
ICWarmUpCounter_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    Label noCompiledCode;
    // Call DoWarmUpCounterFallbackOSR to compile/check-for Ion-compiled function
    {
        // Push IonOsrTempData pointer storage
        masm.subFromStackPtr(Imm32(sizeof(void*)));
        masm.push(masm.getStackPointer());

        // Push stub pointer.
        masm.push(ICStubReg);

        pushStubPayload(masm, R0.scratchReg());

        if (!callVM(DoWarmUpCounterFallbackOSRInfo, masm))
            return false;

        // Pop IonOsrTempData pointer.
        masm.pop(R0.scratchReg());

        leaveStubFrame(masm);

        // If no JitCode was found, just exit the IC.
        masm.branchPtr(Assembler::Equal, R0.scratchReg(), ImmPtr(nullptr), &noCompiledCode);
    }

    // Get a scratch register.
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
    Register osrDataReg = R0.scratchReg();
    regs.take(osrDataReg);
    regs.takeUnchecked(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // At this point, stack looks like:
    //      +-> [...Calling-Frame...]
    //      |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //      |   [Descriptor]
    //      |   [Return-Addr]
    //      +---[Saved-FramePtr]            <-- BaselineFrameReg points here.
    //          [...Baseline-Frame...]

    // Restore the stack pointer to point to the saved frame pointer.
    masm.moveToStackPtr(BaselineFrameReg);

    // Discard saved frame pointer, so that the return address is on top of
    // the stack.
    masm.pop(scratchReg);

#ifdef DEBUG
    // If profiler instrumentation is on, ensure that lastProfilingFrame is
    // the frame currently being OSR-ed.
    {
        Label checkOk;
        AbsoluteAddress addressOfEnabled(cx->runtime()->geckoProfiler().addressOfEnabled());
        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
        masm.loadPtr(AbsoluteAddress((void*)&cx->jitActivation), scratchReg);
        masm.loadPtr(Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()), scratchReg);

        // It may be the case that we entered the baseline frame with
        // profiling turned off, then in a call within a loop (i.e. a
        // callee frame), turned on profiling, then returned to this frame,
        // and then OSR with profiling turned on. In this case, allow for
        // lastProfilingFrame to be null.
        masm.branchPtr(Assembler::Equal, scratchReg, ImmWord(0), &checkOk);

        masm.branchStackPtr(Assembler::Equal, scratchReg, &checkOk);
        masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
        masm.bind(&checkOk);
    }
#endif

    // Jump into Ion.
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg);
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg);
    masm.jump(scratchReg);

    // No jitcode available, do nothing.
    masm.bind(&noCompiledCode);
    EmitReturnFromIC(masm);
    return true;
}
266 :
267 : //
268 : // TypeUpdate_Fallback
269 : //
// Fallback for type-update ICs: record |value|'s type for the property
// identified by the stub (on |objval|'s group) and try to attach an
// optimized update stub. May not GC (callers may have already reallocated
// slots), and is effectively infallible from the JIT's point of view:
// stub-attachment OOM is swallowed.
static bool
DoTypeUpdateFallback(JSContext* cx, BaselineFrame* frame, ICUpdatedStub* stub, HandleValue objval,
                     HandleValue value)
{
    // This can get called from optimized stubs. Therefore it is not allowed to gc.
    JS::AutoCheckCannotGC nogc;

    FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
                   ICStub::KindString(stub->kind()));

    MOZ_ASSERT(stub->isCacheIR_Updated());

    RootedScript script(cx, frame->script());
    RootedObject obj(cx, &objval.toObject());

    RootedId id(cx, stub->toCacheIR_Updated()->updateStubId());
    MOZ_ASSERT(id != JSID_EMPTY);

    // The group should match the object's group, except when the object is
    // an unboxed expando object: in that case, the group is the group of
    // the unboxed object.
    RootedObjectGroup group(cx, stub->toCacheIR_Updated()->updateStubGroup());
#ifdef DEBUG
    if (obj->is<UnboxedExpandoObject>())
        MOZ_ASSERT(group->clasp() == &UnboxedPlainObject::class_);
    else
        MOZ_ASSERT(obj->group() == group);
#endif

    // If we're storing null/undefined to a typed object property, check if
    // we want to include it in this property's type information.
    bool addType = true;
    if (MOZ_UNLIKELY(obj->is<TypedObject>()) && value.isNullOrUndefined()) {
        StructTypeDescr* structDescr = &obj->as<TypedObject>().typeDescr().as<StructTypeDescr>();
        size_t fieldIndex;
        MOZ_ALWAYS_TRUE(structDescr->fieldIndex(id, &fieldIndex));

        TypeDescr* fieldDescr = &structDescr->fieldDescr(fieldIndex);
        ReferenceTypeDescr::Type type = fieldDescr->as<ReferenceTypeDescr>().type();
        if (type == ReferenceTypeDescr::TYPE_ANY) {
            // Ignore undefined values, which are included implicitly in type
            // information for this property.
            if (value.isUndefined())
                addType = false;
        } else {
            MOZ_ASSERT(type == ReferenceTypeDescr::TYPE_OBJECT);

            // Ignore null values being written here. Null is included
            // implicitly in type information for this property. Note that
            // non-object, non-null values are not possible here, these
            // should have been filtered out by the IR emitter.
            if (value.isNull())
                addType = false;
        }
    }

    if (MOZ_LIKELY(addType)) {
        JSObject* maybeSingleton = obj->isSingleton() ? obj.get() : nullptr;
        AddTypePropertyId(cx, group, maybeSingleton, id, value);
    }

    if (MOZ_UNLIKELY(!stub->addUpdateStubForValue(cx, script, obj, group, id, value))) {
        // The calling JIT code assumes this function is infallible (for
        // instance we may reallocate dynamic slots before calling this),
        // so ignore OOMs if we failed to attach a stub.
        cx->recoverFromOutOfMemory();
    }

    return true;
}
340 :
typedef bool (*DoTypeUpdateFallbackFn)(JSContext*, BaselineFrame*, ICUpdatedStub*, HandleValue,
                                       HandleValue);
// NOTE(review): unlike the other *Info constants in this file this one is
// not |static| — presumably referenced from another translation unit;
// confirm before giving it internal linkage.
const VMFunction DoTypeUpdateFallbackInfo =
    FunctionInfo<DoTypeUpdateFallbackFn>(DoTypeUpdateFallback, "DoTypeUpdateFallback", NonTailCall);
345 :
// Fallback type-update stub: always reports "no match" by returning false
// in R1.scratchReg(). NOTE(review): presumably the baseline caller reacts
// to the false result by calling DoTypeUpdateFallback — confirm against the
// stub-chain caller.
bool
ICTypeUpdate_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // Just store false into R1.scratchReg() and return.
    masm.move32(Imm32(0), R1.scratchReg());
    EmitReturnFromIC(masm);
    return true;
}
356 :
// Type-update stub matching any value whose type flag is recorded in this
// stub's |flags_|. On a match, true is returned in R1.scratchReg();
// otherwise the guard fails to the next stub.
bool
ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label success;
    // When the double flag is set, the branchTestNumber below already
    // accepts int32 values, so the separate int32 test is skipped.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_SYMBOL))
        masm.branchTestSymbol(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    // No type in the set matched: fail over to the next stub.
    EmitStubGuardFailure(masm);

    // Type matches, load true into R1.scratchReg() and return.
    masm.bind(&success);
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    return true;
}
396 :
// Type-update stub that matches exactly one object identity, stored in the
// stub itself.
bool
ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    Address expectedObject(ICStubReg, ICTypeUpdate_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    // Identity matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
418 :
// Type-update stub that matches any object belonging to one specific
// ObjectGroup, stored in the stub itself.
bool
ICTypeUpdate_ObjectGroup::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's ObjectGroup.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), R1.scratchReg());

    Address expectedGroup(ICStubReg, ICTypeUpdate_ObjectGroup::offsetOfGroup());
    masm.branchPtr(Assembler::NotEqual, expectedGroup, R1.scratchReg(), &failure);

    // Group matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
442 :
443 : bool
444 9 : ICTypeUpdate_AnyValue::Compiler::generateStubCode(MacroAssembler& masm)
445 : {
446 : // AnyValue always matches so return true.
447 9 : masm.mov(ImmWord(1), R1.scratchReg());
448 9 : EmitReturnFromIC(masm);
449 9 : return true;
450 : }
451 :
452 : //
453 : // ToBool_Fallback
454 : //
455 :
// Fallback for ToBool ICs: compute ToBoolean(arg), then attach a stub
// specialized for the operand type actually observed so subsequent hits
// stay in jitcode.
static bool
DoToBoolFallback(JSContext* cx, BaselineFrame* frame, ICToBool_Fallback* stub, HandleValue arg,
                 MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "ToBool");

    bool cond = ToBoolean(arg);
    ret.setBoolean(cond);

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICToBool_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // Boolean operands never reach the fallback — presumably handled
    // before the IC call; confirm against the stub's callers.
    MOZ_ASSERT(!arg.isBoolean());

    JSScript* script = frame->script();

    // Try to generate new stubs.
    if (arg.isInt32()) {
        JitSpew(JitSpew_BaselineIC, "  Generating ToBool(Int32) stub.");
        ICToBool_Int32::Compiler compiler(cx);
        ICStub* int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;

        stub->addNewStub(int32Stub);
        return true;
    }

    if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
        JitSpew(JitSpew_BaselineIC, "  Generating ToBool(Double) stub.");
        ICToBool_Double::Compiler compiler(cx);
        ICStub* doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if (arg.isString()) {
        JitSpew(JitSpew_BaselineIC, "  Generating ToBool(String) stub");
        ICToBool_String::Compiler compiler(cx);
        ICStub* stringStub = compiler.getStub(compiler.getStubSpace(script));
        if (!stringStub)
            return false;

        stub->addNewStub(stringStub);
        return true;
    }

    if (arg.isNull() || arg.isUndefined()) {
        ICToBool_NullUndefined::Compiler compiler(cx);
        ICStub* nilStub = compiler.getStub(compiler.getStubSpace(script));
        if (!nilStub)
            return false;

        stub->addNewStub(nilStub);
        return true;
    }

    if (arg.isObject()) {
        JitSpew(JitSpew_BaselineIC, "  Generating ToBool(Object) stub.");
        ICToBool_Object::Compiler compiler(cx);
        ICStub* objStub = compiler.getStub(compiler.getStubSpace(script));
        if (!objStub)
            return false;

        stub->addNewStub(objStub);
        return true;
    }

    // Any remaining type (e.g. symbol) gets no optimized stub.
    return true;
}
533 :
// VMFunction wrapper for DoToBoolFallback. NOTE(review): the |pf|/|fun|
// names break this file's *Fn/*Info naming convention; renaming them also
// requires updating the use in ICToBool_Fallback::Compiler below.
typedef bool (*pf)(JSContext*, BaselineFrame*, ICToBool_Fallback*, HandleValue,
                   MutableHandleValue);
static const VMFunction fun = FunctionInfo<pf>(DoToBoolFallback, "DoToBoolFallback", TailCall);
537 :
// Emit the ToBool fallback stub: tail-call DoToBoolFallback with the value
// operand (in R0), the stub pointer, and the frame payload.
bool
ICToBool_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    // The operand and the IC's return value both live in R0.
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    // |fun| is the DoToBoolFallback VMFunction defined above.
    return tailCallVM(fun, masm);
}
554 :
555 : //
556 : // ToBool_Int32
557 : //
558 :
559 : bool
560 11 : ICToBool_Int32::Compiler::generateStubCode(MacroAssembler& masm)
561 : {
562 11 : MOZ_ASSERT(engine_ == Engine::Baseline);
563 :
564 22 : Label failure;
565 11 : masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
566 :
567 22 : Label ifFalse;
568 11 : masm.branchTestInt32Truthy(false, R0, &ifFalse);
569 :
570 11 : masm.moveValue(BooleanValue(true), R0);
571 11 : EmitReturnFromIC(masm);
572 :
573 11 : masm.bind(&ifFalse);
574 11 : masm.moveValue(BooleanValue(false), R0);
575 11 : EmitReturnFromIC(masm);
576 :
577 : // Failure case - jump to next stub
578 11 : masm.bind(&failure);
579 11 : EmitStubGuardFailure(masm);
580 22 : return true;
581 : }
582 :
583 : //
584 : // ToBool_String
585 : //
586 :
587 : bool
588 18 : ICToBool_String::Compiler::generateStubCode(MacroAssembler& masm)
589 : {
590 18 : MOZ_ASSERT(engine_ == Engine::Baseline);
591 :
592 36 : Label failure;
593 18 : masm.branchTestString(Assembler::NotEqual, R0, &failure);
594 :
595 36 : Label ifFalse;
596 18 : masm.branchTestStringTruthy(false, R0, &ifFalse);
597 :
598 18 : masm.moveValue(BooleanValue(true), R0);
599 18 : EmitReturnFromIC(masm);
600 :
601 18 : masm.bind(&ifFalse);
602 18 : masm.moveValue(BooleanValue(false), R0);
603 18 : EmitReturnFromIC(masm);
604 :
605 : // Failure case - jump to next stub
606 18 : masm.bind(&failure);
607 18 : EmitStubGuardFailure(masm);
608 36 : return true;
609 : }
610 :
611 : //
612 : // ToBool_NullUndefined
613 : //
614 :
615 : bool
616 23 : ICToBool_NullUndefined::Compiler::generateStubCode(MacroAssembler& masm)
617 : {
618 23 : MOZ_ASSERT(engine_ == Engine::Baseline);
619 :
620 46 : Label failure, ifFalse;
621 23 : masm.branchTestNull(Assembler::Equal, R0, &ifFalse);
622 23 : masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
623 :
624 23 : masm.bind(&ifFalse);
625 23 : masm.moveValue(BooleanValue(false), R0);
626 23 : EmitReturnFromIC(masm);
627 :
628 : // Failure case - jump to next stub
629 23 : masm.bind(&failure);
630 23 : EmitStubGuardFailure(masm);
631 46 : return true;
632 : }
633 :
634 : //
635 : // ToBool_Double
636 : //
637 :
638 : bool
639 1 : ICToBool_Double::Compiler::generateStubCode(MacroAssembler& masm)
640 : {
641 1 : MOZ_ASSERT(engine_ == Engine::Baseline);
642 :
643 2 : Label failure, ifTrue;
644 1 : masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
645 1 : masm.unboxDouble(R0, FloatReg0);
646 1 : masm.branchTestDoubleTruthy(true, FloatReg0, &ifTrue);
647 :
648 1 : masm.moveValue(BooleanValue(false), R0);
649 1 : EmitReturnFromIC(masm);
650 :
651 1 : masm.bind(&ifTrue);
652 1 : masm.moveValue(BooleanValue(true), R0);
653 1 : EmitReturnFromIC(masm);
654 :
655 : // Failure case - jump to next stub
656 1 : masm.bind(&failure);
657 1 : EmitStubGuardFailure(masm);
658 2 : return true;
659 : }
660 :
661 : //
662 : // ToBool_Object
663 : //
664 :
// Optimized ToBool stub for object operands: true unless the object
// emulates undefined. An inline check handles the common cases; otherwise
// an ABI call to js::EmulatesUndefined decides.
bool
ICToBool_Object::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure, emulatesUndefined, slowPath;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register objReg = masm.extractObject(R0, ExtractTemp0);
    Register scratch = R1.scratchReg();
    masm.branchIfObjectEmulatesUndefined(objReg, scratch, &slowPath, &emulatesUndefined);

    // If object doesn't emulate undefined, it evaluates to true.
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&emulatesUndefined);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Slow path: ask EmulatesUndefined, then invert its boolean result
    // (the xor) to get the ToBoolean answer.
    masm.bind(&slowPath);
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(objReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, js::EmulatesUndefined));
    masm.convertBoolToInt32(ReturnReg, ReturnReg);
    masm.xor32(Imm32(1), ReturnReg);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
699 :
700 : //
701 : // ToNumber_Fallback
702 : //
703 :
// Fallback for ToNumber ICs: coerce |arg| with ToNumber, storing the result
// in |ret|. Fallible (ToNumber can throw, e.g. for symbols). No optimized
// stubs are attached here.
static bool
DoToNumberFallback(JSContext* cx, ICToNumber_Fallback* stub, HandleValue arg, MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "ToNumber");
    ret.set(arg);
    return ToNumber(cx, ret);
}
711 :
// VMFunction wrapper. PopValues(1) discards the extra value the stub code
// below pushes for the expression decompiler when the call returns.
typedef bool (*DoToNumberFallbackFn)(JSContext*, ICToNumber_Fallback*, HandleValue, MutableHandleValue);
static const VMFunction DoToNumberFallbackInfo =
    FunctionInfo<DoToNumberFallbackFn>(DoToNumberFallback, "DoToNumberFallback", TailCall,
                                       PopValues(1));
716 :
// Emit the ToNumber fallback stub: tail-call DoToNumberFallback with the
// operand (in R0) and the stub pointer.
bool
ICToNumber_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(ICStubReg);

    // The matching VMFunction uses PopValues(1) to drop the synced value.
    return tailCallVM(DoToNumberFallbackInfo, masm);
}
735 :
736 : //
737 : // GetElem_Fallback
738 : //
739 :
740 : bool
741 163 : IsPrimitiveArrayTypedObject(JSObject* obj)
742 : {
743 163 : if (!obj->is<TypedObject>())
744 163 : return false;
745 0 : TypeDescr& descr = obj->as<TypedObject>().typeDescr();
746 0 : return descr.is<ArrayTypeDescr>() &&
747 0 : descr.as<ArrayTypeDescr>().elementType().is<ScalarTypeDescr>();
748 : }
749 :
750 : static Scalar::Type
751 0 : PrimitiveArrayTypedObjectType(JSObject* obj)
752 : {
753 0 : MOZ_ASSERT(IsPrimitiveArrayTypedObject(obj));
754 0 : TypeDescr& descr = obj->as<TypedObject>().typeDescr();
755 0 : return descr.as<ArrayTypeDescr>().elementType().as<ScalarTypeDescr>().type();
756 : }
757 :
758 : Scalar::Type
759 0 : TypedThingElementType(JSObject* obj)
760 : {
761 0 : return obj->is<TypedArrayObject>()
762 0 : ? obj->as<TypedArrayObject>().type()
763 0 : : PrimitiveArrayTypedObjectType(obj);
764 : }
765 :
766 : bool
767 0 : TypedThingRequiresFloatingPoint(JSObject* obj)
768 : {
769 0 : Scalar::Type type = TypedThingElementType(obj);
770 0 : return type == Scalar::Uint32 ||
771 0 : type == Scalar::Float32 ||
772 0 : type == Scalar::Float64;
773 : }
774 :
// Fallback for JSOP_GETELEM/JSOP_CALLELEM: perform the generic element get,
// type-monitor the result, and try to attach a CacheIR stub specialized for
// the access pattern just observed.
static bool
DoGetElemFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetElem_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(frame->script());
    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);

    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetElem(%s)", CodeName[op]);

    MOZ_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM);

    // Don't pass lhs directly, we need it when generating stubs.
    RootedValue lhsCopy(cx, lhs);

    bool isOptimizedArgs = false;
    if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle optimized arguments[i] access.
        if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res, &isOptimizedArgs))
            return false;
        if (isOptimizedArgs)
            TypeScript::Monitor(cx, script, pc, types, res);
    }

    bool attached = false;
    bool isTemporarilyUnoptimizable = false;

    // Possibly discard existing stubs when the IC transitions state
    // (e.g. toward megamorphic).
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        ICStubEngine engine = ICStubEngine::Baseline;
        GetPropIRGenerator gen(cx, script, pc, CacheKind::GetElem, stub->state().mode(),
                               &isTemporarilyUnoptimizable, lhs, rhs, lhs, CanAttachGetter::Yes);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Monitored()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);
            }
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    if (!isOptimizedArgs) {
        if (!GetElementOperation(cx, op, lhsCopy, rhs, res))
            return false;
        TypeScript::Monitor(cx, script, pc, types, res);
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    if (attached)
        return true;

    // GetElem operations which could access negative indexes generally can't
    // be optimized without the potential for bailouts, as we can't statically
    // determine that an object has no properties on such indexes.
    if (rhs.isNumber() && rhs.toNumber() < 0)
        stub->noteNegativeIndex();

    if (!attached && !isTemporarilyUnoptimizable)
        stub->noteUnoptimizableAccess();

    return true;
}
856 :
// Fallback for JSOP_GETELEM_SUPER: like DoGetElemFallback, but the receiver
// (|this|) differs from the object holding the property (|lhs|, which is
// [[HomeObject]].[[Prototype]]).
static bool
DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_,
                       HandleValue receiver, HandleValue lhs, HandleValue rhs,
                       MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetElem_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(frame->script());
    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);

    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetElemSuper(%s)", CodeName[op]);

    MOZ_ASSERT(op == JSOP_GETELEM_SUPER);

    bool attached = false;
    bool isTemporarilyUnoptimizable = false;

    // Possibly discard existing stubs when the IC transitions state.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        ICStubEngine engine = ICStubEngine::Baseline;
        GetPropIRGenerator gen(cx, script, pc, CacheKind::GetElemSuper, stub->state().mode(),
                               &isTemporarilyUnoptimizable, lhs, rhs, receiver,
                               CanAttachGetter::Yes);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Monitored()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);
            }
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    // |lhs| is [[HomeObject]].[[Prototype]] which must be Object
    RootedObject lhsObj(cx, &lhs.toObject());
    if (!GetObjectElementOperation(cx, op, lhsObj, receiver, rhs, res))
        return false;
    TypeScript::Monitor(cx, script, pc, types, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    if (attached)
        return true;

    // GetElem operations which could access negative indexes generally can't
    // be optimized without the potential for bailouts, as we can't statically
    // determine that an object has no properties on such indexes.
    if (rhs.isNumber() && rhs.toNumber() < 0)
        stub->noteNegativeIndex();

    if (!attached && !isTemporarilyUnoptimizable)
        stub->noteUnoptimizableAccess();

    return true;
}
928 :
typedef bool (*DoGetElemFallbackFn)(JSContext*, BaselineFrame*, ICGetElem_Fallback*,
                                    HandleValue, HandleValue, MutableHandleValue);
// VM trampoline descriptor for the plain GetElem fallback. PopValues(2)
// discards the two values the stub synced on the stack for the expression
// decompiler before tail-calling into the VM.
static const VMFunction DoGetElemFallbackInfo =
    FunctionInfo<DoGetElemFallbackFn>(DoGetElemFallback, "DoGetElemFallback", TailCall,
                                      PopValues(2));

typedef bool (*DoGetElemSuperFallbackFn)(JSContext*, BaselineFrame*, ICGetElem_Fallback*,
                                         HandleValue, HandleValue, HandleValue,
                                         MutableHandleValue);
// Descriptor for the GetElemSuper flavor; note only PopValues(1) here since
// the super path syncs a single extra value.
static const VMFunction DoGetElemSuperFallbackInfo =
    FunctionInfo<DoGetElemSuperFallbackFn>(DoGetElemSuperFallback, "DoGetElemSuperFallback",
                                           TailCall, PopValues(1));

// Emit the shared fallback trampoline for GETELEM / GETELEM_SUPER: sync the
// operands for the expression decompiler, push the VM call arguments, and
// tail-call into the appropriate Do*Fallback function above.
bool
ICGetElem_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Super property getters use a |this| that differs from base object
    if (hasReceiver_) {
        // Ensure stack is fully synced for the expression decompiler.
        masm.pushValue(R1);

        masm.pushValue(R1); // Index
        masm.pushValue(R0); // Object
        // NOTE(review): the receiver is loaded from 3 Values below the stack
        // pointer — i.e. beneath the sync value and the two arguments pushed
        // above. Confirm against JSOP_GETELEM_SUPER's operand stack layout.
        masm.loadValue(Address(masm.getStackPointer(), 3 * sizeof(Value)), R0);
        masm.pushValue(R0); // Receiver
        masm.push(ICStubReg);
        pushStubPayload(masm, R0.scratchReg());

        return tailCallVM(DoGetElemSuperFallbackInfo, masm);
    }

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoGetElemFallbackInfo, masm);
}
978 :
979 : void
980 0 : LoadTypedThingLength(MacroAssembler& masm, TypedThingLayout layout, Register obj, Register result)
981 : {
982 0 : switch (layout) {
983 : case Layout_TypedArray:
984 0 : masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), result);
985 0 : break;
986 : case Layout_OutlineTypedObject:
987 : case Layout_InlineTypedObject:
988 0 : masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), result);
989 0 : masm.loadPtr(Address(result, ObjectGroup::offsetOfAddendum()), result);
990 0 : masm.unboxInt32(Address(result, ArrayTypeDescr::offsetOfLength()), result);
991 0 : break;
992 : default:
993 0 : MOZ_CRASH();
994 : }
995 0 : }
996 :
997 : static void
998 1069 : SetUpdateStubData(ICCacheIR_Updated* stub, const PropertyTypeCheckInfo* info)
999 : {
1000 1069 : if (info->isSet()) {
1001 1034 : stub->updateStubGroup() = info->group();
1002 1034 : stub->updateStubId() = info->id();
1003 : }
1004 1069 : }
1005 :
// Fallback VM path for SETELEM-family bytecodes. Tries to attach a CacheIR
// stub (before and, for add-slot cases, after performing the operation),
// then performs the actual element store/initialization.
static bool
DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_, Value* stack,
                  HandleValue objv, HandleValue index, HandleValue rhs)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetElem_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    RootedScript outerScript(cx, script);
    jsbytecode* pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetElem(%s)", CodeName[JSOp(*pc)]);

    MOZ_ASSERT(op == JSOP_SETELEM ||
               op == JSOP_STRICTSETELEM ||
               op == JSOP_INITELEM ||
               op == JSOP_INITHIDDENELEM ||
               op == JSOP_INITELEM_ARRAY ||
               op == JSOP_INITELEM_INC);

    RootedObject obj(cx, ToObjectFromStack(cx, objv));
    if (!obj)
        return false;

    // Capture the pre-operation shape/group: tryAttachAddSlotStub below needs
    // the state from before the store added any property.
    RootedShape oldShape(cx, obj->maybeShape());
    RootedObjectGroup oldGroup(cx, JSObject::getGroup(cx, obj));
    if (!oldGroup)
        return false;

    if (obj->is<UnboxedPlainObject>()) {
        MOZ_ASSERT(!oldShape);
        // Unboxed plain objects have no shape of their own; use the expando's
        // last property if one exists.
        if (UnboxedExpandoObject* expando = obj->as<UnboxedPlainObject>().maybeExpando())
            oldShape = expando->lastProperty();
    }

    bool isTemporarilyUnoptimizable = false;
    bool attached = false;

    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    // First attach attempt: stubs for existing-property stores.
    if (stub->state().canAttachStub()) {
        SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state().mode(),
                               &isTemporarilyUnoptimizable, objv, index, rhs);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, frame->script(),
                                                        stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");

                SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());

                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Updated()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);

                if (gen.attachedTypedArrayOOBStub())
                    stub->noteHasTypedArrayOOB();
            }
        }
    }

    // Perform the actual operation for this opcode.
    if (op == JSOP_INITELEM || op == JSOP_INITHIDDENELEM) {
        if (!InitElemOperation(cx, pc, obj, index, rhs))
            return false;
    } else if (op == JSOP_INITELEM_ARRAY) {
        MOZ_ASSERT(uint32_t(index.toInt32()) <= INT32_MAX,
                   "the bytecode emitter must fail to compile code that would "
                   "produce JSOP_INITELEM_ARRAY with an index exceeding "
                   "int32_t range");
        MOZ_ASSERT(uint32_t(index.toInt32()) == GET_UINT32(pc));
        if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
            return false;
    } else if (op == JSOP_INITELEM_INC) {
        if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
            return false;
    } else {
        if (!SetObjectElement(cx, obj, index, rhs, objv, JSOp(*pc) == JSOP_STRICTSETELEM, script, pc))
            return false;
    }

    // Don't try to attach stubs that wish to be hidden. We don't know how to
    // have different enumerability in the stubs for the moment.
    if (op == JSOP_INITHIDDENELEM)
        return true;

    // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
    MOZ_ASSERT(stack[2] == objv);
    stack[2] = rhs;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (attached)
        return true;

    // The SetObjectElement call might have entered this IC recursively, so try
    // to transition.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    // Second attach attempt: now that the store ran, an add-slot stub can be
    // built from the old shape/group captured above.
    if (stub->state().canAttachStub()) {
        SetPropIRGenerator gen(cx, script, pc, CacheKind::SetElem, stub->state().mode(),
                               &isTemporarilyUnoptimizable, objv, index, rhs);
        if (gen.tryAttachAddSlotStub(oldGroup, oldShape)) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, frame->script(),
                                                        stub, &attached);
            if (newStub) {
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Updated()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);

                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
                SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
                return true;
            }
        } else {
            gen.trackNotAttached();
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    return true;
}
1136 :
typedef bool (*DoSetElemFallbackFn)(JSContext*, BaselineFrame*, ICSetElem_Fallback*, Value*,
                                    HandleValue, HandleValue, HandleValue);
// VM trampoline descriptor; PopValues(2) discards the values synced for the
// expression decompiler.
static const VMFunction DoSetElemFallbackInfo =
    FunctionInfo<DoSetElemFallbackFn>(DoSetElemFallback, "DoSetElemFallback", TailCall,
                                      PopValues(2));

// Emit the SetElem fallback trampoline: rearrange the operand stack so the
// decompiler sees (object, index, rhs), push the VM call arguments plus a
// pointer into the stack (so DoSetElemFallback can overwrite the synced
// object with the rhs), then tail-call.
bool
ICSetElem_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // State: R0: object, R1: index, stack: rhs.
    // For the decompiler, the stack has to be: object, index, rhs,
    // so we push the index, then overwrite the rhs Value with R0
    // and push the rhs value.
    masm.pushValue(R1);
    masm.loadValue(Address(masm.getStackPointer(), sizeof(Value)), R1);
    masm.storeValue(R0, Address(masm.getStackPointer(), sizeof(Value)));
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1); // RHS

    // Push index. On x86 and ARM two push instructions are emitted so use a
    // separate register to store the old stack pointer.
    masm.moveStackPtrTo(R1.scratchReg());
    masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
    masm.pushValue(R0); // Object.

    // Push pointer to stack values, so that the stub can overwrite the object
    // (pushed for the decompiler) with the rhs.
    masm.computeEffectiveAddress(Address(masm.getStackPointer(), 3 * sizeof(Value)), R0.scratchReg());
    masm.push(R0.scratchReg());

    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoSetElemFallbackInfo, masm);
}
1179 :
1180 : void
1181 42 : BaselineScript::noteHasDenseAdd(uint32_t pcOffset)
1182 : {
1183 42 : ICEntry& entry = icEntryFromPCOffset(pcOffset);
1184 42 : ICFallbackStub* stub = entry.fallbackStub();
1185 :
1186 42 : if (stub->isSetElem_Fallback())
1187 42 : stub->toSetElem_Fallback()->noteHasDenseAdd();
1188 42 : }
1189 :
1190 : template <typename T>
1191 : void
1192 10 : EmitICUnboxedPreBarrier(MacroAssembler& masm, const T& address, JSValueType type)
1193 : {
1194 10 : if (type == JSVAL_TYPE_OBJECT)
1195 4 : EmitPreBarrier(masm, address, MIRType::Object);
1196 6 : else if (type == JSVAL_TYPE_STRING)
1197 3 : EmitPreBarrier(masm, address, MIRType::String);
1198 : else
1199 3 : MOZ_ASSERT(!UnboxedTypeNeedsPreBarrier(type));
1200 10 : }
1201 :
1202 : template void
1203 : EmitICUnboxedPreBarrier(MacroAssembler& masm, const Address& address, JSValueType type);
1204 :
1205 : template void
1206 : EmitICUnboxedPreBarrier(MacroAssembler& masm, const BaseIndex& address, JSValueType type);
1207 :
// Emit code storing |value| into a typed-array element |dest| of the given
// scalar |type|, converting as needed. Jumps to |failure| when the value
// cannot be converted (e.g. not int32/double, or no FP support).
template <typename T>
void
StoreToTypedArray(JSContext* cx, MacroAssembler& masm, Scalar::Type type,
                  const ValueOperand& value, const T& dest, Register scratch,
                  Label* failure)
{
    Label done;

    if (type == Scalar::Float32 || type == Scalar::Float64) {
        // Float stores: coerce to double, and narrow to float32 if needed.
        masm.ensureDouble(value, FloatReg0, failure);
        if (type == Scalar::Float32) {
            masm.convertDoubleToFloat32(FloatReg0, ScratchFloat32Reg);
            masm.storeToTypedFloatArray(type, ScratchFloat32Reg, dest);
        } else {
            masm.storeToTypedFloatArray(type, FloatReg0, dest);
        }
    } else if (type == Scalar::Uint8Clamped) {
        // Uint8Clamped: clamp int32 inline; doubles take the slow tail below.
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, scratch);
        masm.clampIntToUint8(scratch);

        Label clamped;
        masm.bind(&clamped);
        masm.storeToTypedIntArray(type, scratch, dest);
        masm.jump(&done);

        // If the value is a double, clamp to uint8 and jump back.
        // Else, jump to failure.
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, failure);
            masm.unboxDouble(value, FloatReg0);
            masm.clampDoubleToUint8(FloatReg0, scratch);
            masm.jump(&clamped);
        } else {
            masm.jump(failure);
        }
    } else {
        // Remaining integer element types: store int32 directly; truncate
        // doubles (modulo 2^32) on the out-of-line path.
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, scratch);

        Label isInt32;
        masm.bind(&isInt32);
        masm.storeToTypedIntArray(type, scratch, dest);
        masm.jump(&done);

        // If the value is a double, truncate and jump back.
        // Else, jump to failure.
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, failure);
            masm.unboxDouble(value, FloatReg0);
            masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratch, failure);
            masm.jump(&isInt32);
        } else {
            masm.jump(failure);
        }
    }

    masm.bind(&done);
}

template void
StoreToTypedArray(JSContext* cx, MacroAssembler& masm, Scalar::Type type,
                  const ValueOperand& value, const Address& dest, Register scratch,
                  Label* failure);

template void
StoreToTypedArray(JSContext* cx, MacroAssembler& masm, Scalar::Type type,
                  const ValueOperand& value, const BaseIndex& dest, Register scratch,
                  Label* failure);
1281 :
1282 : //
1283 : // In_Fallback
1284 : //
1285 :
// Fallback VM path for JSOP_IN: optionally attach a CacheIR HasProp stub,
// then evaluate |key in obj| and return the boolean result.
static bool
DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub_,
             HandleValue key, HandleValue objValue, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIn_Fallback*> stub(frame, stub_);

    FallbackICSpew(cx, stub, "In");

    // |in| requires an object rhs; anything else is a TypeError.
    if (!objValue.isObject()) {
        ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, objValue, nullptr);
        return false;
    }

    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        RootedScript script(cx, frame->script());
        jsbytecode* pc = stub->icEntry()->pc(script);

        ICStubEngine engine = ICStubEngine::Baseline;
        HasPropIRGenerator gen(cx, script, pc, CacheKind::In, stub->state().mode(), key, objValue);
        bool attached = false;
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub)
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
        }
        if (!attached)
            stub->state().trackNotAttached();
    }

    RootedObject obj(cx, &objValue.toObject());
    bool cond = false;
    if (!OperatorIn(cx, key, obj, &cond))
        return false;
    res.setBoolean(cond);

    return true;
}
1328 :
typedef bool (*DoInFallbackFn)(JSContext*, BaselineFrame*, ICIn_Fallback*, HandleValue,
                               HandleValue, MutableHandleValue);
// Trampoline descriptor; PopValues(2) drops the two decompiler-sync values.
static const VMFunction DoInFallbackInfo =
    FunctionInfo<DoInFallbackFn>(DoInFallback, "DoInFallback", TailCall, PopValues(2));

// Emit the JSOP_IN fallback trampoline: sync both operands for the
// decompiler, push the VM call arguments, and tail-call DoInFallback.
bool
ICIn_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoInFallbackInfo, masm);
}
1353 :
1354 : //
1355 : // HasOwn_Fallback
1356 : //
1357 :
1358 : static bool
1359 0 : DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICHasOwn_Fallback* stub_,
1360 : HandleValue keyValue, HandleValue objValue, MutableHandleValue res)
1361 : {
1362 : // This fallback stub may trigger debug mode toggling.
1363 0 : DebugModeOSRVolatileStub<ICIn_Fallback*> stub(frame, stub_);
1364 :
1365 0 : FallbackICSpew(cx, stub, "HasOwn");
1366 :
1367 0 : if (stub->state().maybeTransition())
1368 0 : stub->discardStubs(cx);
1369 :
1370 0 : if (stub->state().canAttachStub()) {
1371 0 : RootedScript script(cx, frame->script());
1372 0 : jsbytecode* pc = stub->icEntry()->pc(script);
1373 :
1374 0 : ICStubEngine engine = ICStubEngine::Baseline;
1375 : HasPropIRGenerator gen(cx, script, pc, CacheKind::HasOwn,
1376 0 : stub->state().mode(), keyValue, objValue);
1377 0 : bool attached = false;
1378 0 : if (gen.tryAttachStub()) {
1379 0 : ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
1380 0 : engine, script, stub, &attached);
1381 0 : if (newStub)
1382 0 : JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
1383 : }
1384 0 : if (!attached)
1385 0 : stub->state().trackNotAttached();
1386 : }
1387 :
1388 : bool found;
1389 0 : if (!HasOwnProperty(cx, objValue, keyValue, &found))
1390 0 : return false;
1391 :
1392 0 : res.setBoolean(found);
1393 0 : return true;
1394 : }
1395 :
typedef bool (*DoHasOwnFallbackFn)(JSContext*, BaselineFrame*, ICHasOwn_Fallback*, HandleValue,
                                   HandleValue, MutableHandleValue);
// Trampoline descriptor; PopValues(2) drops the two decompiler-sync values.
static const VMFunction DoHasOwnFallbackInfo =
    FunctionInfo<DoHasOwnFallbackFn>(DoHasOwnFallback, "DoHasOwnFallback", TailCall, PopValues(2));

// Emit the HasOwn fallback trampoline: sync both operands for the
// decompiler, push the VM call arguments, and tail-call DoHasOwnFallback.
bool
ICHasOwn_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoHasOwnFallbackInfo, masm);
}
1420 :
1421 :
1422 : //
1423 : // GetName_Fallback
1424 : //
1425 :
// Fallback VM path for JSOP_GETNAME/JSOP_GETGNAME: optionally attach a
// CacheIR GetName stub, perform the environment lookup, monitor the result
// type, and attach a type monitor stub.
static bool
DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub_,
                  HandleObject envChain, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetName_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetName(%s)", CodeName[JSOp(*pc)]);

    MOZ_ASSERT(op == JSOP_GETNAME || op == JSOP_GETGNAME);

    RootedPropertyName name(cx, script->getName(pc));
    bool attached = false;

    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        ICStubEngine engine = ICStubEngine::Baseline;
        GetNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub)
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
        }
        if (!attached)
            stub->state().trackNotAttached();
    }

    static_assert(JSOP_GETGNAME_LENGTH == JSOP_GETNAME_LENGTH,
                  "Otherwise our check for JSOP_TYPEOF isn't ok");
    // A following JSOP_TYPEOF changes semantics: undefined names must not
    // throw on the TypeOf path.
    if (JSOp(pc[JSOP_GETGNAME_LENGTH]) == JSOP_TYPEOF) {
        if (!GetEnvironmentName<GetNameMode::TypeOf>(cx, envChain, name, res))
            return false;
    } else {
        if (!GetEnvironmentName<GetNameMode::Normal>(cx, envChain, name, res))
            return false;
    }

    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
    TypeScript::Monitor(cx, script, pc, types, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    if (!attached)
        stub->noteUnoptimizableAccess();
    return true;
}
1484 :
typedef bool (*DoGetNameFallbackFn)(JSContext*, BaselineFrame*, ICGetName_Fallback*,
                                    HandleObject, MutableHandleValue);
// Trampoline descriptor; no PopValues needed — GetName pushes no operands.
static const VMFunction DoGetNameFallbackInfo =
    FunctionInfo<DoGetNameFallbackFn>(DoGetNameFallback, "DoGetNameFallback", TailCall);

// Emit the GetName fallback trampoline: push the environment chain (in
// R0.scratchReg), the stub, and the frame payload, then tail-call.
bool
ICGetName_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    masm.push(R0.scratchReg());
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoGetNameFallbackInfo, masm);
}
1504 :
1505 : //
1506 : // BindName_Fallback
1507 : //
1508 :
// Fallback VM path for JSOP_BINDNAME/JSOP_BINDGNAME: optionally attach a
// CacheIR BindName stub, then resolve the environment object that the name
// binds on. Note: no DebugModeOSRVolatileStub here — the raw |stub| pointer
// is not used after any call that could toggle debug mode.
static bool
DoBindNameFallback(JSContext* cx, BaselineFrame* frame, ICBindName_Fallback* stub,
                   HandleObject envChain, MutableHandleValue res)
{
    jsbytecode* pc = stub->icEntry()->pc(frame->script());
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "BindName(%s)", CodeName[JSOp(*pc)]);

    MOZ_ASSERT(op == JSOP_BINDNAME || op == JSOP_BINDGNAME);

    RootedPropertyName name(cx, frame->script()->getName(pc));

    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        bool attached = false;
        RootedScript script(cx, frame->script());
        BindNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, script, stub,
                                                        &attached);
            if (newStub)
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
        }
        if (!attached)
            stub->state().trackNotAttached();
    }

    RootedObject scope(cx);
    if (!LookupNameUnqualified(cx, name, envChain, &scope))
        return false;

    res.setObject(*scope);
    return true;
}
1546 :
typedef bool (*DoBindNameFallbackFn)(JSContext*, BaselineFrame*, ICBindName_Fallback*,
                                     HandleObject, MutableHandleValue);
// Trampoline descriptor; no operands are synced, so no PopValues.
static const VMFunction DoBindNameFallbackInfo =
    FunctionInfo<DoBindNameFallbackFn>(DoBindNameFallback, "DoBindNameFallback", TailCall);

// Emit the BindName fallback trampoline: push the environment chain (in
// R0.scratchReg), the stub, and the frame payload, then tail-call.
bool
ICBindName_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    masm.push(R0.scratchReg());
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoBindNameFallbackInfo, masm);
}
1566 :
1567 : //
1568 : // GetIntrinsic_Fallback
1569 : //
1570 :
// Fallback VM path for JSOP_GETINTRINSIC: look up the intrinsic value, then
// attach a constant-loading stub since the result never changes.
static bool
DoGetIntrinsicFallback(JSContext* cx, BaselineFrame* frame, ICGetIntrinsic_Fallback* stub_,
                       MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetIntrinsic(%s)", CodeName[JSOp(*pc)]);

    MOZ_ASSERT(op == JSOP_GETINTRINSIC);

    if (!GetIntrinsicOperation(cx, pc, res))
        return false;

    // An intrinsic operation will always produce the same result, so only
    // needs to be monitored once. Attach a stub to load the resulting constant
    // directly.

    TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    JitSpew(JitSpew_BaselineIC, " Generating GetIntrinsic optimized stub");
    ICGetIntrinsic_Constant::Compiler compiler(cx, res);
    ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    return true;
}
1607 :
typedef bool (*DoGetIntrinsicFallbackFn)(JSContext*, BaselineFrame*, ICGetIntrinsic_Fallback*,
                                         MutableHandleValue);
// Trampoline descriptor for the GetIntrinsic fallback.
static const VMFunction DoGetIntrinsicFallbackInfo =
    FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback, "DoGetIntrinsicFallback",
                                           TailCall);

// Emit the GetIntrinsic fallback trampoline: only the stub and frame payload
// are needed (no operands).
bool
ICGetIntrinsic_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoGetIntrinsicFallbackInfo, masm);
}

// Optimized stub attached by DoGetIntrinsicFallback: load the cached
// constant value straight out of the stub and return it.
bool
ICGetIntrinsic_Constant::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    masm.loadValue(Address(ICStubReg, ICGetIntrinsic_Constant::offsetOfValue()), R0);

    EmitReturnFromIC(masm);
    return true;
}
1637 :
1638 : //
1639 : // SetProp_Fallback
1640 : //
1641 :
// Fallback VM path for the SETPROP family of bytecodes. Mirrors
// DoSetElemFallback: try to attach a CacheIR stub, perform the store for the
// specific opcode, then retry with an add-slot stub using the pre-store
// shape/group.
static bool
DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_, Value* stack,
                  HandleValue lhs, HandleValue rhs)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetProp_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetProp(%s)", CodeName[op]);

    MOZ_ASSERT(op == JSOP_SETPROP ||
               op == JSOP_STRICTSETPROP ||
               op == JSOP_SETNAME ||
               op == JSOP_STRICTSETNAME ||
               op == JSOP_SETGNAME ||
               op == JSOP_STRICTSETGNAME ||
               op == JSOP_INITPROP ||
               op == JSOP_INITLOCKEDPROP ||
               op == JSOP_INITHIDDENPROP ||
               op == JSOP_SETALIASEDVAR ||
               op == JSOP_INITALIASEDLEXICAL ||
               op == JSOP_INITGLEXICAL);

    // Aliased-var ops encode the name via an environment coordinate rather
    // than a name atom operand.
    RootedPropertyName name(cx);
    if (op == JSOP_SETALIASEDVAR || op == JSOP_INITALIASEDLEXICAL)
        name = EnvironmentCoordinateName(cx->caches().envCoordinateNameCache, script, pc);
    else
        name = script->getName(pc);
    RootedId id(cx, NameToId(name));

    RootedObject obj(cx, ToObjectFromStack(cx, lhs));
    if (!obj)
        return false;
    // Capture pre-store shape/group for the add-slot attach attempt below.
    RootedShape oldShape(cx, obj->maybeShape());
    RootedObjectGroup oldGroup(cx, JSObject::getGroup(cx, obj));
    if (!oldGroup)
        return false;

    if (obj->is<UnboxedPlainObject>()) {
        MOZ_ASSERT(!oldShape);
        if (UnboxedExpandoObject* expando = obj->as<UnboxedPlainObject>().maybeExpando())
            oldShape = expando->lastProperty();
    }

    // There are some reasons we can fail to attach a stub that are temporary.
    // We want to avoid calling noteUnoptimizableAccess() if the reason we
    // failed to attach a stub is one of those temporary reasons, since we might
    // end up attaching a stub for the exact same access later.
    bool isTemporarilyUnoptimizable = false;

    bool attached = false;
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    // First attach attempt: stubs for existing-property stores.
    if (stub->state().canAttachStub()) {
        RootedValue idVal(cx, StringValue(name));
        SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state().mode(),
                               &isTemporarilyUnoptimizable, lhs, idVal, rhs);
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, frame->script(),
                                                        stub, &attached);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");

                SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());

                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Updated()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);
            }
        }
    }

    // Perform the actual store for this opcode.
    if (op == JSOP_INITPROP ||
        op == JSOP_INITLOCKEDPROP ||
        op == JSOP_INITHIDDENPROP)
    {
        if (!InitPropertyOperation(cx, op, obj, id, rhs))
            return false;
    } else if (op == JSOP_SETNAME ||
               op == JSOP_STRICTSETNAME ||
               op == JSOP_SETGNAME ||
               op == JSOP_STRICTSETGNAME)
    {
        if (!SetNameOperation(cx, script, pc, obj, rhs))
            return false;
    } else if (op == JSOP_SETALIASEDVAR || op == JSOP_INITALIASEDLEXICAL) {
        obj->as<EnvironmentObject>().setAliasedBinding(cx, EnvironmentCoordinate(pc), name, rhs);
    } else if (op == JSOP_INITGLEXICAL) {
        RootedValue v(cx, rhs);
        LexicalEnvironmentObject* lexicalEnv;
        if (script->hasNonSyntacticScope())
            lexicalEnv = &NearestEnclosingExtensibleLexicalEnvironment(frame->environmentChain());
        else
            lexicalEnv = &cx->global()->lexicalEnvironment();
        InitGlobalLexicalOperation(cx, lexicalEnv, script, pc, v);
    } else {
        MOZ_ASSERT(op == JSOP_SETPROP || op == JSOP_STRICTSETPROP);

        ObjectOpResult result;
        if (!SetProperty(cx, obj, id, rhs, lhs, result) ||
            !result.checkStrictErrorOrWarning(cx, obj, id, op == JSOP_STRICTSETPROP))
        {
            return false;
        }
    }

    // Overwrite the LHS on the stack (pushed for the decompiler) with the RHS.
    MOZ_ASSERT(stack[1] == lhs);
    stack[1] = rhs;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (attached)
        return true;

    // The SetProperty call might have entered this IC recursively, so try
    // to transition.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    // Second attach attempt: add-slot stub using the pre-store shape/group.
    if (stub->state().canAttachStub()) {
        RootedValue idVal(cx, StringValue(name));
        SetPropIRGenerator gen(cx, script, pc, CacheKind::SetProp, stub->state().mode(),
                               &isTemporarilyUnoptimizable, lhs, idVal, rhs);
        if (gen.tryAttachAddSlotStub(oldGroup, oldShape)) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, frame->script(),
                                                        stub, &attached);
            if (newStub) {
                if (gen.shouldNotePreliminaryObjectStub())
                    newStub->toCacheIR_Updated()->notePreliminaryObject();
                else if (gen.shouldUnlinkPreliminaryObjectStubs())
                    StripPreliminaryObjectStubs(cx, stub);

                JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
                SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
            }
        } else {
            gen.trackNotAttached();
        }
        if (!attached && !isTemporarilyUnoptimizable)
            stub->state().trackNotAttached();
    }

    if (!attached && !isTemporarilyUnoptimizable)
        stub->noteUnoptimizableAccess();

    return true;
}
1798 :
// Signature of the C++ fallback entry point that the SetProp IC tail-calls
// into when no optimized stub handled the operation.
typedef bool (*DoSetPropFallbackFn)(JSContext*, BaselineFrame*, ICSetProp_Fallback*, Value*,
                                    HandleValue, HandleValue);
// VMFunction descriptor for DoSetPropFallback. TailCall: invoked via tail
// call from the fallback stub. PopValues(1): one extra Value (pushed for the
// expression decompiler — see generateStubCode below) is popped on return.
static const VMFunction DoSetPropFallbackInfo =
    FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, "DoSetPropFallback", TailCall,
                                      PopValues(1));
1804 :
// Emit the fallback stub code for SetProp ICs: sync the stack for the
// expression decompiler, push the (stack-pointer, rhs, lhs, stub, frame)
// arguments, and tail-call into DoSetPropFallback. Also binds the bailout
// resume point used when Ion frames are rewritten onto Baseline.
// On entry: R0 = object/lhs value, R1 = rhs value.
bool
ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);
    MOZ_ASSERT(R0 == JSReturnOperand);

    // Restore the return address into the tail-call register so the VM call
    // below can be made as a tail call.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    // Overwrite the RHS value on top of the stack with the object, then push
    // the RHS in R1 on top of that.
    masm.storeValue(R0, Address(masm.getStackPointer(), 0));
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);

    // Push pointer to stack values, so that the stub can overwrite the object
    // (pushed for the decompiler) with the RHS.
    masm.computeEffectiveAddress(Address(masm.getStackPointer(), 2 * sizeof(Value)),
                                 R0.scratchReg());
    masm.push(R0.scratchReg());

    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    if (!tailCallVM(DoSetPropFallbackInfo, masm))
        return false;

    // This is the resume point used when bailout rewrites call stack to undo
    // Ion inlined frames. The return address pushed onto reconstructed stack
    // will point here.
    assumeStubFrame(masm);
    // Record the offset; postGenerateStubCode registers the final address
    // with the JitCompartment.
    bailoutReturnOffset_.bind(masm.currentOffset());

    leaveStubFrame(masm, true);
    EmitReturnFromIC(masm);

    return true;
}
1846 :
1847 : void
1848 57 : ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
1849 : {
1850 57 : BailoutReturnStub kind = BailoutReturnStub::SetProp;
1851 57 : void* address = code->raw() + bailoutReturnOffset_.offset();
1852 57 : cx->compartment()->jitCompartment()->initBailoutReturnAddr(address, getKey(), kind);
1853 57 : }
1854 :
1855 : //
1856 : // Call_Fallback
1857 : //
1858 :
1859 : static bool
1860 270 : TryAttachFunApplyStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsbytecode* pc,
1861 : HandleValue thisv, uint32_t argc, Value* argv, bool* attached)
1862 : {
1863 270 : if (argc != 2)
1864 242 : return true;
1865 :
1866 28 : if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
1867 0 : return true;
1868 56 : RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
1869 :
1870 28 : bool isScripted = target->hasJITCode();
1871 :
1872 : // right now, only handle situation where second argument is |arguments|
1873 28 : if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
1874 0 : if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
1875 0 : JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedApplyArguments stub");
1876 :
1877 : ICCall_ScriptedApplyArguments::Compiler compiler(
1878 0 : cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
1879 0 : ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
1880 0 : if (!newStub)
1881 0 : return false;
1882 :
1883 0 : stub->addNewStub(newStub);
1884 0 : *attached = true;
1885 0 : return true;
1886 : }
1887 :
1888 : // TODO: handle FUNAPPLY for native targets.
1889 : }
1890 :
1891 28 : if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
1892 25 : if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
1893 0 : JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedApplyArray stub");
1894 :
1895 : ICCall_ScriptedApplyArray::Compiler compiler(
1896 0 : cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
1897 0 : ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
1898 0 : if (!newStub)
1899 0 : return false;
1900 :
1901 0 : stub->addNewStub(newStub);
1902 0 : *attached = true;
1903 0 : return true;
1904 : }
1905 : }
1906 28 : return true;
1907 : }
1908 :
1909 : static bool
1910 2 : TryAttachFunCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsbytecode* pc,
1911 : HandleValue thisv, bool* attached)
1912 : {
1913 : // Try to attach a stub for Function.prototype.call with scripted |this|.
1914 :
1915 2 : *attached = false;
1916 2 : if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
1917 0 : return true;
1918 4 : RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
1919 :
1920 : // Attach a stub if the script can be Baseline-compiled. We do this also
1921 : // if the script is not yet compiled to avoid attaching a CallNative stub
1922 : // that handles everything, even after the callee becomes hot.
1923 4 : if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
1924 2 : !stub->hasStub(ICStub::Call_ScriptedFunCall))
1925 : {
1926 2 : JitSpew(JitSpew_BaselineIC, " Generating Call_ScriptedFunCall stub");
1927 :
1928 : ICCall_ScriptedFunCall::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
1929 4 : script->pcToOffset(pc));
1930 2 : ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
1931 2 : if (!newStub)
1932 0 : return false;
1933 :
1934 2 : *attached = true;
1935 2 : stub->addNewStub(newStub);
1936 2 : return true;
1937 : }
1938 :
1939 0 : return true;
1940 : }
1941 :
1942 : // Check if target is a native SIMD operation which returns a SIMD type.
1943 : // If so, set res to a template object matching the SIMD type produced and return true.
// See the comment above: returns true (and fills |res|) only when |target|
// is an inlinable SIMD native whose result is itself a SIMD value; scalar-
// producing operations and non-SIMD natives return false.
static bool
GetTemplateObjectForSimd(JSContext* cx, JSFunction* target, MutableHandleObject res)
{
    // Only natives registered as inlinable carry the info we need.
    const JSJitInfo* jitInfo = target->jitInfo();
    if (!jitInfo || jitInfo->type() != JSJitInfo::InlinableNative)
        return false;

    // Check if this is a native inlinable SIMD operation.
    SimdType ctrlType;
    switch (jitInfo->inlinableNative) {
      case InlinableNative::SimdInt8x16: ctrlType = SimdType::Int8x16; break;
      case InlinableNative::SimdUint8x16: ctrlType = SimdType::Uint8x16; break;
      case InlinableNative::SimdInt16x8: ctrlType = SimdType::Int16x8; break;
      case InlinableNative::SimdUint16x8: ctrlType = SimdType::Uint16x8; break;
      case InlinableNative::SimdInt32x4: ctrlType = SimdType::Int32x4; break;
      case InlinableNative::SimdUint32x4: ctrlType = SimdType::Uint32x4; break;
      case InlinableNative::SimdFloat32x4: ctrlType = SimdType::Float32x4; break;
      case InlinableNative::SimdBool8x16: ctrlType = SimdType::Bool8x16; break;
      case InlinableNative::SimdBool16x8: ctrlType = SimdType::Bool16x8; break;
      case InlinableNative::SimdBool32x4: ctrlType = SimdType::Bool32x4; break;
      // This is not an inlinable SIMD operation.
      default: return false;
    }

    // The controlling type is not necessarily the return type.
    // Check the actual operation.
    SimdOperation simdOp = SimdOperation(jitInfo->nativeOp);
    SimdType retType;

    switch(simdOp) {
      case SimdOperation::Fn_allTrue:
      case SimdOperation::Fn_anyTrue:
      case SimdOperation::Fn_extractLane:
        // These operations return a scalar. No template object needed.
        return false;

      case SimdOperation::Fn_lessThan:
      case SimdOperation::Fn_lessThanOrEqual:
      case SimdOperation::Fn_equal:
      case SimdOperation::Fn_notEqual:
      case SimdOperation::Fn_greaterThan:
      case SimdOperation::Fn_greaterThanOrEqual:
        // These operations return a boolean vector with the same shape as the
        // controlling type.
        retType = GetBooleanSimdType(ctrlType);
        break;

      default:
        // All other operations return the controlling type.
        retType = ctrlType;
        break;
    }

    // Create a template object based on retType.
    RootedGlobalObject global(cx, cx->global());
    Rooted<SimdTypeDescr*> descr(cx, GlobalObject::getOrCreateSimdTypeDescr(cx, global, retType));
    res.set(cx->compartment()->jitCompartment()->getSimdTemplateObjectFor(cx, descr));
    return true;
}
2003 :
2004 : static void
2005 13 : EnsureArrayGroupAnalyzed(JSContext* cx, JSObject* obj)
2006 : {
2007 13 : if (PreliminaryObjectArrayWithTemplate* objects = obj->group()->maybePreliminaryObjects())
2008 0 : objects->maybeAnalyze(cx, obj->group(), /* forceAnalyze = */ true);
2009 13 : }
2010 :
2011 : static bool
2012 1081 : GetTemplateObjectForNative(JSContext* cx, HandleFunction target, const CallArgs& args,
2013 : MutableHandleObject res, bool* skipAttach)
2014 : {
2015 1081 : Native native = target->native();
2016 :
2017 : // Check for natives to which template objects can be attached. This is
2018 : // done to provide templates to Ion for inlining these natives later on.
2019 :
2020 1081 : if (native == ArrayConstructor || native == array_construct) {
2021 : // Note: the template array won't be used if its length is inaccurately
2022 : // computed here. (We allocate here because compilation may occur on a
2023 : // separate thread where allocation is impossible.)
2024 6 : size_t count = 0;
2025 6 : if (args.length() != 1)
2026 2 : count = args.length();
2027 4 : else if (args.length() == 1 && args[0].isInt32() && args[0].toInt32() >= 0)
2028 4 : count = args[0].toInt32();
2029 :
2030 6 : if (count <= ArrayObject::EagerAllocationMaxLength) {
2031 6 : ObjectGroup* group = ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array);
2032 6 : if (!group)
2033 0 : return false;
2034 6 : if (group->maybePreliminaryObjects()) {
2035 0 : *skipAttach = true;
2036 0 : return true;
2037 : }
2038 :
2039 : // With this and other array templates, analyze the group so that
2040 : // we don't end up with a template whose structure might change later.
2041 6 : res.set(NewFullyAllocatedArrayForCallingAllocationSite(cx, count, TenuredObject));
2042 6 : if (!res)
2043 0 : return false;
2044 6 : EnsureArrayGroupAnalyzed(cx, res);
2045 6 : return true;
2046 : }
2047 : }
2048 :
2049 1075 : if (args.length() == 1) {
2050 550 : size_t len = 0;
2051 :
2052 550 : if (args[0].isInt32() && args[0].toInt32() >= 0)
2053 44 : len = args[0].toInt32();
2054 :
2055 550 : if (!TypedArrayObject::GetTemplateObjectForNative(cx, native, len, res))
2056 0 : return false;
2057 550 : if (res)
2058 0 : return true;
2059 : }
2060 :
2061 1075 : if (native == js::array_slice) {
2062 1 : if (args.thisv().isObject()) {
2063 1 : RootedObject obj(cx, &args.thisv().toObject());
2064 1 : if (!obj->isSingleton()) {
2065 1 : if (obj->group()->maybePreliminaryObjects()) {
2066 0 : *skipAttach = true;
2067 0 : return true;
2068 : }
2069 1 : res.set(NewFullyAllocatedArrayTryReuseGroup(cx, obj, 0, TenuredObject));
2070 1 : if (!res)
2071 0 : return false;
2072 1 : EnsureArrayGroupAnalyzed(cx, res);
2073 1 : return true;
2074 : }
2075 : }
2076 : }
2077 :
2078 2150 : if (native == js::intrinsic_StringSplitString && args.length() == 2 && args[0].isString() &&
2079 1076 : args[1].isString())
2080 : {
2081 1 : ObjectGroup* group = ObjectGroup::callingAllocationSiteGroup(cx, JSProto_Array);
2082 1 : if (!group)
2083 0 : return false;
2084 1 : if (group->maybePreliminaryObjects()) {
2085 0 : *skipAttach = true;
2086 0 : return true;
2087 : }
2088 :
2089 1 : res.set(NewFullyAllocatedArrayForCallingAllocationSite(cx, 0, TenuredObject));
2090 1 : if (!res)
2091 0 : return false;
2092 1 : EnsureArrayGroupAnalyzed(cx, res);
2093 1 : return true;
2094 : }
2095 :
2096 1073 : if (native == StringConstructor) {
2097 0 : RootedString emptyString(cx, cx->runtime()->emptyString);
2098 0 : res.set(StringObject::create(cx, emptyString, /* proto = */ nullptr, TenuredObject));
2099 0 : return !!res;
2100 : }
2101 :
2102 1073 : if (native == obj_create && args.length() == 1 && args[0].isObjectOrNull()) {
2103 4 : RootedObject proto(cx, args[0].toObjectOrNull());
2104 2 : res.set(ObjectCreateImpl(cx, proto, TenuredObject));
2105 2 : return !!res;
2106 : }
2107 :
2108 1071 : if (native == js::intrinsic_NewArrayIterator) {
2109 17 : res.set(NewArrayIteratorObject(cx, TenuredObject));
2110 17 : return !!res;
2111 : }
2112 :
2113 1054 : if (native == js::intrinsic_NewStringIterator) {
2114 0 : res.set(NewStringIteratorObject(cx, TenuredObject));
2115 0 : return !!res;
2116 : }
2117 :
2118 1054 : if (JitSupportsSimd() && GetTemplateObjectForSimd(cx, target, res))
2119 0 : return !!res;
2120 :
2121 1054 : return true;
2122 : }
2123 :
2124 : static bool
2125 4 : GetTemplateObjectForClassHook(JSContext* cx, JSNative hook, CallArgs& args,
2126 : MutableHandleObject templateObject)
2127 : {
2128 4 : if (hook == TypedObject::construct) {
2129 0 : Rooted<TypeDescr*> descr(cx, &args.callee().as<TypeDescr>());
2130 0 : templateObject.set(TypedObject::createZeroed(cx, descr, 1, gc::TenuredHeap));
2131 0 : return !!templateObject;
2132 : }
2133 :
2134 4 : if (hook == SimdTypeDescr::call && JitSupportsSimd()) {
2135 0 : Rooted<SimdTypeDescr*> descr(cx, &args.callee().as<SimdTypeDescr>());
2136 0 : templateObject.set(cx->compartment()->jitCompartment()->getSimdTemplateObjectFor(cx, descr));
2137 0 : return !!templateObject;
2138 : }
2139 :
2140 4 : return true;
2141 : }
2142 :
2143 : static bool
2144 4787 : IsOptimizableConstStringSplit(const Value& callee, int argc, Value* args)
2145 : {
2146 4787 : if (argc != 2 || !args[0].isString() || !args[1].isString())
2147 4594 : return false;
2148 :
2149 193 : if (!args[0].toString()->isAtom() || !args[1].toString()->isAtom())
2150 89 : return false;
2151 :
2152 104 : if (!callee.isObject() || !callee.toObject().is<JSFunction>())
2153 60 : return false;
2154 :
2155 44 : JSFunction& calleeFun = callee.toObject().as<JSFunction>();
2156 44 : if (!calleeFun.isNative() || calleeFun.native() != js::intrinsic_StringSplitString)
2157 40 : return false;
2158 :
2159 4 : return true;
2160 : }
2161 :
// Try to attach an optimized call stub for the call site at |pc|. Handles,
// in order: class hooks on non-function callees, scripted callees (with an
// optional constructor template object), and native callees (including the
// fun_apply/fun_call special cases). Sets |*handled| when the site has been
// dealt with (a stub was attached, or attachment was deliberately deferred);
// returns false only on failure (e.g. OOM).
static bool
TryAttachCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsbytecode* pc,
                  JSOp op, uint32_t argc, Value* vp, bool constructing, bool isSpread,
                  bool createSingleton, bool* handled)
{
    bool isSuper = op == JSOP_SUPERCALL || op == JSOP_SPREADSUPERCALL;

    // Singleton-result allocations and eval are never optimized here.
    if (createSingleton || op == JSOP_EVAL || op == JSOP_STRICTEVAL)
        return true;

    if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    RootedValue callee(cx, vp[0]);
    RootedValue thisv(cx, vp[1]);

    // Don't attach an optimized call stub if we could potentially attach an
    // optimized ConstStringSplit stub.
    if (stub->numOptimizedStubs() == 0 && IsOptimizableConstStringSplit(callee, argc, vp + 2))
        return true;

    stub->unlinkStubsWithKind(cx, ICStub::Call_ConstStringSplit);

    if (!callee.isObject())
        return true;

    RootedObject obj(cx, &callee.toObject());
    if (!obj->is<JSFunction>()) {
        // Try to attach a stub for a call/construct hook on the object.
        // Ignore proxies, which are special cased by callHook/constructHook.
        if (obj->is<ProxyObject>())
            return true;
        if (JSNative hook = constructing ? obj->constructHook() : obj->callHook()) {
            if (op != JSOP_FUNAPPLY && !isSpread && !createSingleton) {
                RootedObject templateObject(cx);
                CallArgs args = CallArgsFromVp(argc, vp);
                if (!GetTemplateObjectForClassHook(cx, hook, args, &templateObject))
                    return false;

                JitSpew(JitSpew_BaselineIC, "  Generating Call_ClassHook stub");
                ICCall_ClassHook::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                                    obj->getClass(), hook, templateObject,
                                                    script->pcToOffset(pc), constructing);
                ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
                if (!newStub)
                    return false;

                stub->addNewStub(newStub);
                *handled = true;
                return true;
            }
        }
        return true;
    }

    RootedFunction fun(cx, &obj->as<JSFunction>());

    // --- Scripted callee path. ---
    if (fun->hasScript()) {
        // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
        // MagicArguments may escape the frame through them.
        if (op == JSOP_FUNAPPLY)
            return true;

        // If callee is not an interpreted constructor, we have to throw.
        if (constructing && !fun->isConstructor())
            return true;

        // Likewise, if the callee is a class constructor, we have to throw.
        if (!constructing && fun->isClassConstructor())
            return true;

        if (!fun->hasJITCode()) {
            // Don't treat this as an unoptimizable case, as we'll add a stub
            // when the callee becomes hot.
            *handled = true;
            return true;
        }

        // Check if this stub chain has already generalized scripted calls.
        if (stub->scriptedStubsAreGeneralized()) {
            JitSpew(JitSpew_BaselineIC, "  Chain already has generalized scripted call stub!");
            return true;
        }

        if (stub->scriptedStubCount() >= ICCall_Fallback::MAX_SCRIPTED_STUBS) {
            // Create a Call_AnyScripted stub.
            JitSpew(JitSpew_BaselineIC, "  Generating Call_AnyScripted stub (cons=%s, spread=%s)",
                    constructing ? "yes" : "no", isSpread ? "yes" : "no");
            ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                            constructing, isSpread, script->pcToOffset(pc));
            ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
            if (!newStub)
                return false;

            // Before adding new stub, unlink all previous Call_Scripted.
            stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);

            // Add new generalized stub.
            stub->addNewStub(newStub);
            *handled = true;
            return true;
        }

        // Keep track of the function's |prototype| property in type
        // information, for use during Ion compilation.
        if (IsIonEnabled(cx))
            EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));

        // Remember the template object associated with any script being called
        // as a constructor, for later use during Ion compilation. This is unsound
        // for super(), as a single callsite can have multiple possible prototype object
        // created (via different newTargets)
        RootedObject templateObject(cx);
        if (constructing && !isSuper) {
            // If we are calling a constructor for which the new script
            // properties analysis has not been performed yet, don't attach a
            // stub. After the analysis is performed, CreateThisForFunction may
            // start returning objects with a different type, and the Ion
            // compiler will get confused.

            // Only attach a stub if the function already has a prototype and
            // we can look it up without causing side effects.
            RootedObject newTarget(cx, &vp[2 + argc].toObject());
            RootedValue protov(cx);
            if (!GetPropertyPure(cx, newTarget, NameToId(cx->names().prototype), protov.address())) {
                JitSpew(JitSpew_BaselineIC, "  Can't purely lookup function prototype");
                return true;
            }

            if (protov.isObject()) {
                TaggedProto proto(&protov.toObject());
                ObjectGroup* group = ObjectGroup::defaultNewGroup(cx, nullptr, proto, newTarget);
                if (!group)
                    return false;

                if (group->newScript() && !group->newScript()->analyzed()) {
                    JitSpew(JitSpew_BaselineIC, "  Function newScript has not been analyzed");

                    // This is temporary until the analysis is perfomed, so
                    // don't treat this as unoptimizable.
                    *handled = true;
                    return true;
                }
            }

            JSObject* thisObject = CreateThisForFunction(cx, fun, newTarget, TenuredObject);
            if (!thisObject)
                return false;

            if (thisObject->is<PlainObject>() || thisObject->is<UnboxedPlainObject>())
                templateObject = thisObject;
        }

        JitSpew(JitSpew_BaselineIC,
                "  Generating Call_Scripted stub (fun=%p, %s:%" PRIuSIZE ", cons=%s, spread=%s)",
                fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
                constructing ? "yes" : "no", isSpread ? "yes" : "no");
        ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                        fun, templateObject,
                                        constructing, isSpread, script->pcToOffset(pc));
        ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *handled = true;
        return true;
    }

    // --- Native callee path. ---
    if (fun->isNative() && (!constructing || (constructing && fun->isConstructor()))) {
        // Generalized native call stubs are not here yet!
        MOZ_ASSERT(!stub->nativeStubsAreGeneralized());

        // Check for JSOP_FUNAPPLY
        if (op == JSOP_FUNAPPLY) {
            if (fun->native() == fun_apply)
                return TryAttachFunApplyStub(cx, stub, script, pc, thisv, argc, vp + 2, handled);

            // Don't try to attach a "regular" optimized call stubs for FUNAPPLY ops,
            // since MagicArguments may escape through them.
            return true;
        }

        if (op == JSOP_FUNCALL && fun->native() == fun_call) {
            if (!TryAttachFunCallStub(cx, stub, script, pc, thisv, handled))
                return false;
            if (*handled)
                return true;
        }

        if (stub->nativeStubCount() >= ICCall_Fallback::MAX_NATIVE_STUBS) {
            JitSpew(JitSpew_BaselineIC,
                    "  Too many Call_Native stubs. TODO: add Call_AnyNative!");
            return true;
        }

        if (fun->native() == intrinsic_IsSuspendedStarGenerator) {
            // This intrinsic only appears in self-hosted code.
            MOZ_ASSERT(op != JSOP_NEW);
            MOZ_ASSERT(argc == 1);
            JitSpew(JitSpew_BaselineIC, "  Generating Call_IsSuspendedStarGenerator stub");

            ICCall_IsSuspendedStarGenerator::Compiler compiler(cx);
            ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
            if (!newStub)
                return false;

            stub->addNewStub(newStub);
            *handled = true;
            return true;
        }

        RootedObject templateObject(cx);
        if (MOZ_LIKELY(!isSpread && !isSuper)) {
            bool skipAttach = false;
            CallArgs args = CallArgsFromVp(argc, vp);
            if (!GetTemplateObjectForNative(cx, fun, args, &templateObject, &skipAttach))
                return false;
            if (skipAttach) {
                *handled = true;
                return true;
            }
            MOZ_ASSERT_IF(templateObject, !templateObject->group()->maybePreliminaryObjects());
        }

        bool ignoresReturnValue = false;
        if (op == JSOP_CALL_IGNORES_RV && fun->isNative()) {
            const JSJitInfo* jitInfo = fun->jitInfo();
            ignoresReturnValue = jitInfo && jitInfo->type() == JSJitInfo::IgnoresReturnValueNative;
        }

        JitSpew(JitSpew_BaselineIC, "  Generating Call_Native stub (fun=%p, cons=%s, spread=%s)",
                fun.get(), constructing ? "yes" : "no", isSpread ? "yes" : "no");
        ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                         fun, templateObject, constructing, ignoresReturnValue,
                                         isSpread, script->pcToOffset(pc));
        ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *handled = true;
        return true;
    }

    // Callee shape we do not optimize (e.g. constructing a non-constructor
    // native); stay on the fallback path.
    return true;
}
2412 :
2413 : static bool
2414 5 : CopyArray(JSContext* cx, HandleObject obj, MutableHandleValue result)
2415 : {
2416 5 : uint32_t length = GetAnyBoxedOrUnboxedArrayLength(obj);
2417 5 : JSObject* nobj = NewFullyAllocatedArrayTryReuseGroup(cx, obj, length, TenuredObject);
2418 5 : if (!nobj)
2419 0 : return false;
2420 5 : EnsureArrayGroupAnalyzed(cx, nobj);
2421 5 : CopyAnyBoxedOrUnboxedDenseElements(cx, nobj, obj, 0, 0, length);
2422 :
2423 5 : result.setObject(*nobj);
2424 5 : return true;
2425 : }
2426 :
2427 : static bool
2428 1114 : TryAttachConstStringSplit(JSContext* cx, ICCall_Fallback* stub, HandleScript script,
2429 : uint32_t argc, HandleValue callee, Value* vp, jsbytecode* pc,
2430 : HandleValue res, bool* attached)
2431 : {
2432 1114 : if (stub->numOptimizedStubs() != 0)
2433 30 : return true;
2434 :
2435 1084 : Value* args = vp + 2;
2436 :
2437 : // String.prototype.split will not yield a constructable.
2438 1084 : if (JSOp(*pc) == JSOP_NEW)
2439 32 : return true;
2440 :
2441 1052 : if (!IsOptimizableConstStringSplit(callee, argc, args))
2442 1050 : return true;
2443 :
2444 2 : MOZ_ASSERT(callee.isObject());
2445 2 : MOZ_ASSERT(callee.toObject().is<JSFunction>());
2446 :
2447 4 : RootedString str(cx, args[0].toString());
2448 4 : RootedString sep(cx, args[1].toString());
2449 4 : RootedObject obj(cx, &res.toObject());
2450 4 : RootedValue arr(cx);
2451 :
2452 : // Copy the array before storing in stub.
2453 2 : if (!CopyArray(cx, obj, &arr))
2454 0 : return false;
2455 :
2456 : // Atomize all elements of the array.
2457 4 : RootedObject arrObj(cx, &arr.toObject());
2458 2 : uint32_t initLength = GetAnyBoxedOrUnboxedArrayLength(arrObj);
2459 5 : for (uint32_t i = 0; i < initLength; i++) {
2460 3 : JSAtom* str = js::AtomizeString(cx, GetAnyBoxedOrUnboxedDenseElement(arrObj, i).toString());
2461 3 : if (!str)
2462 0 : return false;
2463 :
2464 3 : if (!SetAnyBoxedOrUnboxedDenseElement(cx, arrObj, i, StringValue(str))) {
2465 : // The value could not be stored to an unboxed dense element.
2466 0 : return true;
2467 : }
2468 : }
2469 :
2470 : ICCall_ConstStringSplit::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
2471 4 : script->pcToOffset(pc), str, sep, arr);
2472 2 : ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
2473 2 : if (!newStub)
2474 0 : return false;
2475 :
2476 2 : stub->addNewStub(newStub);
2477 2 : *attached = true;
2478 2 : return true;
2479 : }
2480 :
// Fallback entry point for all (non-spread) call ops. Attempts to attach an
// optimized stub, performs the actual call/construct/eval, monitors the
// result type, and finally tries the CacheIR and ConstStringSplit attachment
// paths that need the call's result.
static bool
DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, uint32_t argc,
               Value* vp, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCall_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "Call(%s)", CodeName[op]);

    MOZ_ASSERT(argc == GET_ARGC(pc));
    bool constructing = (op == JSOP_NEW || op == JSOP_SUPERCALL);
    bool ignoresReturnValue = (op == JSOP_CALL_IGNORES_RV);

    // Ensure vp array is rooted - we may GC in here.
    // Layout: callee, this, args..., plus newTarget when constructing.
    size_t numValues = argc + 2 + constructing;
    AutoArrayRooter vpRoot(cx, numValues, vp);

    CallArgs callArgs = CallArgsFromSp(argc + constructing, vp + numValues, constructing,
                                       ignoresReturnValue);
    RootedValue callee(cx, vp[0]);

    // Handle funapply with JSOP_ARGUMENTS
    if (op == JSOP_FUNAPPLY && argc == 2 && callArgs[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        if (!GuardFunApplyArgumentsOptimization(cx, frame, callArgs))
            return false;
    }

    CallIRGenerator gen(cx, script, pc, stub->state().mode(), argc,
                        callee, callArgs.thisv(),
                        HandleValueArray::fromMarkedLocation(argc, vp+2));
    bool optimizeAfterCall = false;
    CallIRGenerator::OptStrategy optStrategy = gen.getOptStrategy(&optimizeAfterCall);

    // Try attaching a call stub, if the CallIRGenerator has determined that this
    // operation cannot be optimized after the call.
    bool handled = false;
    if (!optimizeAfterCall) {
        bool createSingleton = ObjectGroup::useSingletonForNewObject(cx, script, pc);
        if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, false,
                               createSingleton, &handled))
        {
            return false;
        }
    }

    // Perform the actual operation: construct, direct eval, or plain call.
    if (constructing) {
        if (!ConstructFromStack(cx, callArgs))
            return false;
        res.set(callArgs.rval());
    } else if ((op == JSOP_EVAL || op == JSOP_STRICTEVAL) &&
               frame->environmentChain()->global().valueIsEval(callee))
    {
        if (!DirectEval(cx, callArgs.get(0), res))
            return false;
    } else {
        MOZ_ASSERT(op == JSOP_CALL ||
                   op == JSOP_CALL_IGNORES_RV ||
                   op == JSOP_CALLITER ||
                   op == JSOP_FUNCALL ||
                   op == JSOP_FUNAPPLY ||
                   op == JSOP_EVAL ||
                   op == JSOP_STRICTEVAL);
        if (op == JSOP_CALLITER && callee.isPrimitive()) {
            MOZ_ASSERT(argc == 0, "thisv must be on top of the stack");
            ReportValueError(cx, JSMSG_NOT_ITERABLE, -1, callArgs.thisv(), nullptr);
            return false;
        }

        if (!CallFromStack(cx, callArgs))
            return false;

        res.set(callArgs.rval());
    }

    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
    TypeScript::Monitor(cx, script, pc, types, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    // Post-call CacheIR attachment, for operations the generator chose to
    // optimize only after observing the call's result.
    if (optimizeAfterCall && !handled && optStrategy != CallIRGenerator::OptStrategy::None) {
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        ICStubEngine::Baseline, script, stub,
                                                        &handled);
            if (newStub) {
                JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
            }
        }
    }

    if (!handled) {
        // If 'callee' is a potential Call_ConstStringSplit, try to attach an
        // optimized ConstStringSplit stub. Note that vp[0] now holds the return value
        // instead of the callee, so we pass the callee as well.
        if (!TryAttachConstStringSplit(cx, stub, script, argc, callee, vp, pc, res, &handled))
            return false;
    }

    if (!handled)
        stub->noteUnoptimizableCall();
    return true;
}
2592 :
// Fallback path for the spread-call ops (JSOP_SPREADCALL, JSOP_SPREADNEW,
// JSOP_SPREADSUPERCALL and the spread-eval variants): tries to attach an
// optimized call stub, performs the spread call through the VM, and
// type-monitors the result.
//
// |vp| layout: [callee, thisv, args-array, (newTarget if constructing)].
// Returns false on exception; |res| receives the call's return value.
static bool
DoSpreadCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, Value* vp,
                     MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCall_Fallback*> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode* pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    bool constructing = (op == JSOP_SPREADNEW || op == JSOP_SPREADSUPERCALL);
    FallbackICSpew(cx, stub, "SpreadCall(%s)", CodeName[op]);

    // Ensure vp array is rooted - we may GC in here.
    AutoArrayRooter vpRoot(cx, 3 + constructing, vp);

    RootedValue callee(cx, vp[0]);
    RootedValue thisv(cx, vp[1]);
    RootedValue arr(cx, vp[2]);
    RootedValue newTarget(cx, constructing ? vp[3] : NullValue());

    // Try attaching a call stub. The spread-eval ops are never optimized.
    bool handled = false;
    if (op != JSOP_SPREADEVAL && op != JSOP_STRICTSPREADEVAL &&
        !TryAttachCallStub(cx, stub, script, pc, op, 1, vp, constructing, true, false,
                           &handled))
    {
        return false;
    }

    if (!SpreadCallOperation(cx, script, pc, thisv, callee, arr, newTarget, res))
        return false;

    // Check if debug mode toggling made the stub invalid. If so, the stub
    // must not be touched anymore, but the call itself succeeded.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
    if (!stub->addMonitorStubForValue(cx, frame, types, res))
        return false;

    // Record the failure to optimize so the stub can eventually give up.
    if (!handled)
        stub->noteUnoptimizableCall();
    return true;
}
2639 :
2640 : void
2641 230 : ICCallStubCompiler::pushCallArguments(MacroAssembler& masm, AllocatableGeneralRegisterSet regs,
2642 : Register argcReg, bool isJitCall, bool isConstructing)
2643 : {
2644 230 : MOZ_ASSERT(!regs.has(argcReg));
2645 :
2646 : // Account for new.target
2647 230 : Register count = regs.takeAny();
2648 :
2649 230 : masm.move32(argcReg, count);
2650 :
2651 : // If we are setting up for a jitcall, we have to align the stack taking
2652 : // into account the args and newTarget. We could also count callee and |this|,
2653 : // but it's a waste of stack space. Because we want to keep argcReg unchanged,
2654 : // just account for newTarget initially, and add the other 2 after assuring
2655 : // allignment.
2656 230 : if (isJitCall) {
2657 55 : if (isConstructing)
2658 2 : masm.add32(Imm32(1), count);
2659 : } else {
2660 175 : masm.add32(Imm32(2 + isConstructing), count);
2661 : }
2662 :
2663 : // argPtr initially points to the last argument.
2664 230 : Register argPtr = regs.takeAny();
2665 230 : masm.moveStackPtrTo(argPtr);
2666 :
2667 : // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
2668 : // return address, old frame pointer and stub reg.
2669 230 : masm.addPtr(Imm32(STUB_FRAME_SIZE), argPtr);
2670 :
2671 : // Align the stack such that the JitFrameLayout is aligned on the
2672 : // JitStackAlignment.
2673 230 : if (isJitCall) {
2674 55 : masm.alignJitStackBasedOnNArgs(count);
2675 :
2676 : // Account for callee and |this|, skipped earlier
2677 55 : masm.add32(Imm32(2), count);
2678 : }
2679 :
2680 : // Push all values, starting at the last one.
2681 460 : Label loop, done;
2682 230 : masm.bind(&loop);
2683 230 : masm.branchTest32(Assembler::Zero, count, count, &done);
2684 : {
2685 230 : masm.pushValue(Address(argPtr, 0));
2686 230 : masm.addPtr(Imm32(sizeof(Value)), argPtr);
2687 :
2688 230 : masm.sub32(Imm32(1), count);
2689 230 : masm.jump(&loop);
2690 : }
2691 230 : masm.bind(&done);
2692 230 : }
2693 :
2694 : void
2695 2 : ICCallStubCompiler::guardSpreadCall(MacroAssembler& masm, Register argcReg, Label* failure,
2696 : bool isConstructing)
2697 : {
2698 4 : masm.unboxObject(Address(masm.getStackPointer(),
2699 2 : isConstructing * sizeof(Value) + ICStackValueOffset), argcReg);
2700 2 : masm.loadPtr(Address(argcReg, NativeObject::offsetOfElements()), argcReg);
2701 2 : masm.load32(Address(argcReg, ObjectElements::offsetOfLength()), argcReg);
2702 :
2703 : // Limit actual argc to something reasonable (huge number of arguments can
2704 : // blow the stack limit).
2705 : static_assert(ICCall_Scripted::MAX_ARGS_SPREAD_LENGTH <= ARGS_LENGTH_MAX,
2706 : "maximum arguments length for optimized stub should be <= ARGS_LENGTH_MAX");
2707 4 : masm.branch32(Assembler::Above, argcReg, Imm32(ICCall_Scripted::MAX_ARGS_SPREAD_LENGTH),
2708 2 : failure);
2709 2 : }
2710 :
// Push the arguments for a spread call: copies the elements of the args array
// (in reverse order), then newTarget (if constructing), then |this| and the
// callee. For JIT calls the stack is first aligned on JitStackAlignment.
// Assumes a stub frame has been entered (offsets are relative to
// STUB_FRAME_SIZE / BaselineFrameReg).
void
ICCallStubCompiler::pushSpreadCallArguments(MacroAssembler& masm,
                                            AllocatableGeneralRegisterSet regs,
                                            Register argcReg, bool isJitCall,
                                            bool isConstructing)
{
    // Pull the array off the stack before aligning.
    Register startReg = regs.takeAny();
    masm.unboxObject(Address(masm.getStackPointer(),
                             (isConstructing * sizeof(Value)) + STUB_FRAME_SIZE), startReg);
    masm.loadPtr(Address(startReg, NativeObject::offsetOfElements()), startReg);

    // Align the stack such that the JitFrameLayout is aligned on the
    // JitStackAlignment.
    if (isJitCall) {
        // When constructing, newTarget is pushed too, so align on argc + 1;
        // argcReg itself must stay unmodified, hence the extra register.
        Register alignReg = argcReg;
        if (isConstructing) {
            alignReg = regs.takeAny();
            masm.movePtr(argcReg, alignReg);
            masm.addPtr(Imm32(1), alignReg);
        }
        masm.alignJitStackBasedOnNArgs(alignReg);
        if (isConstructing) {
            MOZ_ASSERT(alignReg != argcReg);
            regs.add(alignReg);
        }
    }

    // Push newTarget, if necessary
    if (isConstructing)
        masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE));

    // Push arguments: set up endReg to point to &array[argc]
    Register endReg = regs.takeAny();
    masm.movePtr(argcReg, endReg);
    static_assert(sizeof(Value) == 8, "Value must be 8 bytes");
    masm.lshiftPtr(Imm32(3), endReg);
    masm.addPtr(startReg, endReg);

    // Copying pre-decrements endReg by 8 until startReg is reached
    Label copyDone;
    Label copyStart;
    masm.bind(&copyStart);
    masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
    masm.subPtr(Imm32(sizeof(Value)), endReg);
    masm.pushValue(Address(endReg, 0));
    masm.jump(&copyStart);
    masm.bind(&copyDone);

    regs.add(startReg);
    regs.add(endReg);

    // Push the callee and |this|. They are still at fixed offsets from the
    // frame pointer; the offsets skip the array (and newTarget if present).
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + (1 + isConstructing) * sizeof(Value)));
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + (2 + isConstructing) * sizeof(Value)));
}
2767 :
2768 : Register
2769 0 : ICCallStubCompiler::guardFunApply(MacroAssembler& masm, AllocatableGeneralRegisterSet regs,
2770 : Register argcReg, bool checkNative, FunApplyThing applyThing,
2771 : Label* failure)
2772 : {
2773 : // Ensure argc == 2
2774 0 : masm.branch32(Assembler::NotEqual, argcReg, Imm32(2), failure);
2775 :
2776 : // Stack looks like:
2777 : // [..., CalleeV, ThisV, Arg0V, Arg1V <MaybeReturnReg>]
2778 :
2779 0 : Address secondArgSlot(masm.getStackPointer(), ICStackValueOffset);
2780 0 : if (applyThing == FunApply_MagicArgs) {
2781 : // Ensure that the second arg is magic arguments.
2782 0 : masm.branchTestMagic(Assembler::NotEqual, secondArgSlot, failure);
2783 :
2784 : // Ensure that this frame doesn't have an arguments object.
2785 0 : masm.branchTest32(Assembler::NonZero,
2786 0 : Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
2787 : Imm32(BaselineFrame::HAS_ARGS_OBJ),
2788 0 : failure);
2789 :
2790 : // Limit the length to something reasonable.
2791 0 : masm.branch32(Assembler::Above,
2792 0 : Address(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs()),
2793 : Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH),
2794 0 : failure);
2795 : } else {
2796 0 : MOZ_ASSERT(applyThing == FunApply_Array);
2797 :
2798 0 : AllocatableGeneralRegisterSet regsx = regs;
2799 :
2800 : // Ensure that the second arg is an array.
2801 0 : ValueOperand secondArgVal = regsx.takeAnyValue();
2802 0 : masm.loadValue(secondArgSlot, secondArgVal);
2803 :
2804 0 : masm.branchTestObject(Assembler::NotEqual, secondArgVal, failure);
2805 0 : Register secondArgObj = masm.extractObject(secondArgVal, ExtractTemp1);
2806 :
2807 0 : regsx.add(secondArgVal);
2808 0 : regsx.takeUnchecked(secondArgObj);
2809 :
2810 0 : masm.branchTestObjClass(Assembler::NotEqual, secondArgObj, regsx.getAny(),
2811 0 : &ArrayObject::class_, failure);
2812 :
2813 : // Get the array elements and ensure that initializedLength == length
2814 0 : masm.loadPtr(Address(secondArgObj, NativeObject::offsetOfElements()), secondArgObj);
2815 :
2816 0 : Register lenReg = regsx.takeAny();
2817 0 : masm.load32(Address(secondArgObj, ObjectElements::offsetOfLength()), lenReg);
2818 :
2819 : masm.branch32(Assembler::NotEqual,
2820 0 : Address(secondArgObj, ObjectElements::offsetOfInitializedLength()),
2821 0 : lenReg, failure);
2822 :
2823 : // Limit the length to something reasonable (huge number of arguments can
2824 : // blow the stack limit).
2825 0 : masm.branch32(Assembler::Above, lenReg,
2826 : Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH),
2827 0 : failure);
2828 :
2829 : // Ensure no holes. Loop through values in array and make sure none are magic.
2830 : // Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value))
2831 : JS_STATIC_ASSERT(sizeof(Value) == 8);
2832 0 : masm.lshiftPtr(Imm32(3), lenReg);
2833 0 : masm.addPtr(secondArgObj, lenReg);
2834 :
2835 0 : Register start = secondArgObj;
2836 0 : Register end = lenReg;
2837 0 : Label loop;
2838 0 : Label endLoop;
2839 0 : masm.bind(&loop);
2840 0 : masm.branchPtr(Assembler::AboveOrEqual, start, end, &endLoop);
2841 0 : masm.branchTestMagic(Assembler::Equal, Address(start, 0), failure);
2842 0 : masm.addPtr(Imm32(sizeof(Value)), start);
2843 0 : masm.jump(&loop);
2844 0 : masm.bind(&endLoop);
2845 : }
2846 :
2847 : // Stack now confirmed to be like:
2848 : // [..., CalleeV, ThisV, Arg0V, MagicValue(Arguments), <MaybeReturnAddr>]
2849 :
2850 : // Load the callee, ensure that it's fun_apply
2851 0 : ValueOperand val = regs.takeAnyValue();
2852 0 : Address calleeSlot(masm.getStackPointer(), ICStackValueOffset + (3 * sizeof(Value)));
2853 0 : masm.loadValue(calleeSlot, val);
2854 :
2855 0 : masm.branchTestObject(Assembler::NotEqual, val, failure);
2856 0 : Register callee = masm.extractObject(val, ExtractTemp1);
2857 :
2858 0 : masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
2859 0 : failure);
2860 0 : masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
2861 :
2862 0 : masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(fun_apply), failure);
2863 :
2864 : // Load the |thisv|, ensure that it's a scripted function with a valid baseline or ion
2865 : // script, or a native function.
2866 0 : Address thisSlot(masm.getStackPointer(), ICStackValueOffset + (2 * sizeof(Value)));
2867 0 : masm.loadValue(thisSlot, val);
2868 :
2869 0 : masm.branchTestObject(Assembler::NotEqual, val, failure);
2870 0 : Register target = masm.extractObject(val, ExtractTemp1);
2871 0 : regs.add(val);
2872 0 : regs.takeUnchecked(target);
2873 :
2874 0 : masm.branchTestObjClass(Assembler::NotEqual, target, regs.getAny(), &JSFunction::class_,
2875 0 : failure);
2876 :
2877 0 : if (checkNative) {
2878 0 : masm.branchIfInterpreted(target, failure);
2879 : } else {
2880 0 : Register temp = regs.takeAny();
2881 0 : masm.branchIfFunctionHasNoScript(target, failure);
2882 : masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
2883 0 : callee, temp, failure);
2884 0 : masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), temp);
2885 0 : masm.loadBaselineOrIonRaw(temp, temp, failure);
2886 0 : regs.add(temp);
2887 : }
2888 0 : return target;
2889 : }
2890 :
2891 : void
2892 0 : ICCallStubCompiler::pushCallerArguments(MacroAssembler& masm, AllocatableGeneralRegisterSet regs)
2893 : {
2894 : // Initialize copyReg to point to start caller arguments vector.
2895 : // Initialize argcReg to poitn to the end of it.
2896 0 : Register startReg = regs.takeAny();
2897 0 : Register endReg = regs.takeAny();
2898 0 : masm.loadPtr(Address(BaselineFrameReg, 0), startReg);
2899 0 : masm.loadPtr(Address(startReg, BaselineFrame::offsetOfNumActualArgs()), endReg);
2900 0 : masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), startReg);
2901 0 : masm.alignJitStackBasedOnNArgs(endReg);
2902 0 : masm.lshiftPtr(Imm32(ValueShift), endReg);
2903 0 : masm.addPtr(startReg, endReg);
2904 :
2905 : // Copying pre-decrements endReg by 8 until startReg is reached
2906 0 : Label copyDone;
2907 0 : Label copyStart;
2908 0 : masm.bind(©Start);
2909 0 : masm.branchPtr(Assembler::Equal, endReg, startReg, ©Done);
2910 0 : masm.subPtr(Imm32(sizeof(Value)), endReg);
2911 0 : masm.pushValue(Address(endReg, 0));
2912 0 : masm.jump(©Start);
2913 0 : masm.bind(©Done);
2914 0 : }
2915 :
2916 : void
2917 0 : ICCallStubCompiler::pushArrayArguments(MacroAssembler& masm, Address arrayVal,
2918 : AllocatableGeneralRegisterSet regs)
2919 : {
2920 : // Load start and end address of values to copy.
2921 : // guardFunApply has already gauranteed that the array is packed and contains
2922 : // no holes.
2923 0 : Register startReg = regs.takeAny();
2924 0 : Register endReg = regs.takeAny();
2925 0 : masm.extractObject(arrayVal, startReg);
2926 0 : masm.loadPtr(Address(startReg, NativeObject::offsetOfElements()), startReg);
2927 0 : masm.load32(Address(startReg, ObjectElements::offsetOfInitializedLength()), endReg);
2928 0 : masm.alignJitStackBasedOnNArgs(endReg);
2929 0 : masm.lshiftPtr(Imm32(ValueShift), endReg);
2930 0 : masm.addPtr(startReg, endReg);
2931 :
2932 : // Copying pre-decrements endReg by 8 until startReg is reached
2933 0 : Label copyDone;
2934 0 : Label copyStart;
2935 0 : masm.bind(©Start);
2936 0 : masm.branchPtr(Assembler::Equal, endReg, startReg, ©Done);
2937 0 : masm.subPtr(Imm32(sizeof(Value)), endReg);
2938 0 : masm.pushValue(Address(endReg, 0));
2939 0 : masm.jump(©Start);
2940 0 : masm.bind(©Done);
2941 0 : }
2942 :
// VMFunction trampoline glue: lets the fallback stub code generated below
// call into DoCallFallback / DoSpreadCallFallback via callVM.
typedef bool (*DoCallFallbackFn)(JSContext*, BaselineFrame*, ICCall_Fallback*,
                                 uint32_t, Value*, MutableHandleValue);
static const VMFunction DoCallFallbackInfo =
    FunctionInfo<DoCallFallbackFn>(DoCallFallback, "DoCallFallback");

typedef bool (*DoSpreadCallFallbackFn)(JSContext*, BaselineFrame*, ICCall_Fallback*,
                                       Value*, MutableHandleValue);
static const VMFunction DoSpreadCallFallbackInfo =
    FunctionInfo<DoSpreadCallFallbackFn>(DoSpreadCallFallback, "DoSpreadCallFallback");
2952 :
// Generate the machine code for the call fallback stub. It sets up a stub
// frame, re-pushes the call's values in the order the VM expects, and calls
// into DoCallFallback (or DoSpreadCallFallback for spread calls). For
// non-spread calls it also emits the bailout resume point used when Ion
// inlined frames are rewritten onto the Baseline stack.
bool
ICCall_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    MOZ_ASSERT(R0 == JSReturnOperand);

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.

    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    if (MOZ_UNLIKELY(isSpread_)) {
        // Push a stub frame so that we can perform a non-tail call.
        enterStubFrame(masm, R1.scratchReg());

        // Use BaselineFrameReg instead of BaselineStackReg, because
        // BaselineFrameReg and BaselineStackReg hold the same value just after
        // calling enterStubFrame.

        // newTarget
        if (isConstructing_)
            masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE));

        // array
        uint32_t valueOffset = isConstructing_;
        masm.pushValue(Address(BaselineFrameReg, valueOffset++ * sizeof(Value) + STUB_FRAME_SIZE));

        // this
        masm.pushValue(Address(BaselineFrameReg, valueOffset++ * sizeof(Value) + STUB_FRAME_SIZE));

        // callee
        masm.pushValue(Address(BaselineFrameReg, valueOffset++ * sizeof(Value) + STUB_FRAME_SIZE));

        // Pass the pointer to the copied values (|vp|) and the stub itself.
        masm.push(masm.getStackPointer());
        masm.push(ICStubReg);

        PushStubPayload(masm, R0.scratchReg());

        if (!callVM(DoSpreadCallFallbackInfo, masm))
            return false;

        leaveStubFrame(masm);
        EmitReturnFromIC(masm);

        // SPREADCALL is not yet supported in Ion, so do not generate asmcode for
        // bailout.
        return true;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    regs.take(R0.scratchReg()); // argc.

    pushCallArguments(masm, regs, R0.scratchReg(), /* isJitCall = */ false, isConstructing_);

    // Pass |vp|, argc and the stub to DoCallFallback.
    masm.push(masm.getStackPointer());
    masm.push(R0.scratchReg());
    masm.push(ICStubReg);

    PushStubPayload(masm, R0.scratchReg());

    if (!callVM(DoCallFallbackInfo, masm))
        return false;

    leaveStubFrame(masm);
    EmitReturnFromIC(masm);

    // This is the resume point used when bailout rewrites call stack to undo
    // Ion inlined frames. The return address pushed onto reconstructed stack
    // will point here.
    assumeStubFrame(masm);
    bailoutReturnOffset_.bind(masm.currentOffset());

    // Load passed-in ThisV into R1 just in case it's needed. Need to do this before
    // we leave the stub frame since that info will be lost.
    // Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
    masm.loadValue(Address(masm.getStackPointer(), 3 * sizeof(size_t)), R1);

    leaveStubFrame(masm, true);

    // If this is a |constructing| call, if the callee returns a non-object, we replace it with
    // the |this| object passed in.
    if (isConstructing_) {
        MOZ_ASSERT(JSReturnOperand == R0);
        Label skipThisReplace;

        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
        masm.moveValue(R1, R0);
#ifdef DEBUG
        // The replacement value (the original |this|) must itself be an
        // object; anything else indicates a broken constructing call.
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
        masm.assumeUnreachable("Failed to return object in constructing call.");
#endif
        masm.bind(&skipThisReplace);
    }

    // At this point, ICStubReg points to the ICCall_Fallback stub, which is NOT
    // a MonitoredStub, but rather a MonitoredFallbackStub. To use EmitEnterTypeMonitorIC,
    // first load the ICTypeMonitor_Fallback stub into ICStubReg. Then, use
    // EmitEnterTypeMonitorIC with a custom struct offset.
    masm.loadPtr(Address(ICStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 ICStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
3061 :
3062 : void
3063 103 : ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
3064 : {
3065 103 : if (MOZ_UNLIKELY(isSpread_))
3066 3 : return;
3067 :
3068 100 : void* address = code->raw() + bailoutReturnOffset_.offset();
3069 100 : BailoutReturnStub kind = isConstructing_ ? BailoutReturnStub::New
3070 100 : : BailoutReturnStub::Call;
3071 100 : cx->compartment()->jitCompartment()->initBailoutReturnAddr(address, getKey(), kind);
3072 : }
3073 :
// VMFunction wrapper used by constructing scripted-call stubs to allocate the
// |this| object (via CreateThis) before entering the callee.
typedef bool (*CreateThisFn)(JSContext* cx, HandleObject callee, HandleObject newTarget,
                             MutableHandleValue rval);
static const VMFunction CreateThisInfoBaseline =
    FunctionInfo<CreateThisFn>(CreateThis, "CreateThis");
3078 :
// Generate the optimized stub for calling a scripted function (the
// Call_Scripted / Call_AnyScripted family, including spread and constructing
// variants). Guards on the callee, optionally allocates |this| via CreateThis
// when constructing, re-pushes the arguments for the JIT calling convention,
// handles argument underflow through the arguments rectifier, and finally
// jumps into the callee's Baseline/Ion code.
bool
ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(ICTailCallReg);

    Register argcReg = R0.scratchReg();
    MOZ_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(ICTailCallReg);

    // For spread calls, replace argcReg with the array length and guard it.
    if (isSpread_)
        guardSpreadCall(masm, argcReg, &failure, isConstructing_);

    // Load the callee in R1, accounting for newTarget, if necessary
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, [newTarget] +ICStackValueOffset+ ]
    if (isSpread_) {
        unsigned skipToCallee = (2 + isConstructing_) * sizeof(Value);
        masm.loadValue(Address(masm.getStackPointer(), skipToCallee + ICStackValueOffset), R1);
    } else {
        // Account for newTarget, if necessary
        unsigned nonArgsSkip = (1 + isConstructing_) * sizeof(Value);
        BaseValueIndex calleeSlot(masm.getStackPointer(), argcReg, ICStackValueOffset + nonArgsSkip);
        masm.loadValue(calleeSlot, R1);
    }
    regs.take(R1);

    // Ensure callee is an object.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee is a function.
    Register callee = masm.extractObject(R1, ExtractTemp0);

    // If calling a specific script, check if the script matches. Otherwise, ensure that
    // callee function is scripted. Leave calleeScript in |callee| reg.
    if (callee_) {
        MOZ_ASSERT(kind == ICStub::Call_Scripted);

        // Check if the object matches this callee.
        Address expectedCallee(ICStubReg, ICCall_Scripted::offsetOfCallee());
        masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);

        // Guard against relazification.
        masm.branchIfFunctionHasNoScript(callee, &failure);
    } else {
        // Ensure the object is a function.
        masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                                &failure);
        if (isConstructing_) {
            masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure);
        } else {
            masm.branchIfFunctionHasNoScript(callee, &failure);
            // Class constructors must not be called without |new|.
            masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor, callee,
                                    regs.getAny(), &failure);
        }
    }

    // Load the JSScript.
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    // Load the start of the target JitCode.
    Register code;
    if (!isConstructing_) {
        code = regs.takeAny();
        masm.loadBaselineOrIonRaw(callee, code, &failure);
    } else {
        // When constructing, the code pointer is (re)loaded after CreateThis
        // below; here we only guard that compiled code exists.
        Address scriptCode(callee, JSScript::offsetOfBaselineOrIonRaw());
        masm.branchPtr(Assembler::Equal, scriptCode, ImmPtr(nullptr), &failure);
    }

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(ICTailCallReg);

    Label failureLeaveStubFrame;

    if (isConstructing_) {
        // Save argc before call.
        masm.push(argcReg);

        // Stack now looks like:
        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, NewTarget, StubFrameHeader, ArgC ]
        masm.loadValue(Address(masm.getStackPointer(), STUB_FRAME_SIZE + sizeof(size_t)), R1);
        masm.push(masm.extractObject(R1, ExtractTemp0));

        // Load and push the callee as the second CreateThis argument.
        if (isSpread_) {
            masm.loadValue(Address(masm.getStackPointer(),
                                   3 * sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t) +
                                   sizeof(JSObject*)),
                           R1);
        } else {
            BaseValueIndex calleeSlot2(masm.getStackPointer(), argcReg,
                                       2 * sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t) +
                                       sizeof(JSObject*));
            masm.loadValue(calleeSlot2, R1);
        }
        masm.push(masm.extractObject(R1, ExtractTemp0));
        if (!callVM(CreateThisInfoBaseline, masm))
            return false;

        // Return of CreateThis must be an object or uninitialized.
#ifdef DEBUG
        Label createdThisOK;
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &createdThisOK);
        masm.branchTestMagic(Assembler::Equal, JSReturnOperand, &createdThisOK);
        masm.assumeUnreachable("The return of CreateThis must be an object or uninitialized.");
        masm.bind(&createdThisOK);
#endif

        // Reset the register set from here on in.
        MOZ_ASSERT(JSReturnOperand == R0);
        regs = availableGeneralRegs(0);
        regs.take(R0);
        regs.take(ArgumentsRectifierReg);
        argcReg = regs.takeAny();

        // Restore saved argc so we can use it to calculate the address to save
        // the resulting this object to.
        masm.pop(argcReg);

        // Save "this" value back into pushed arguments on stack. R0 can be clobbered after that.
        // Stack now looks like:
        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, [NewTarget], StubFrameHeader ]
        if (isSpread_) {
            masm.storeValue(R0, Address(masm.getStackPointer(),
                                        (1 + isConstructing_) * sizeof(Value) + STUB_FRAME_SIZE));
        } else {
            BaseValueIndex thisSlot(masm.getStackPointer(), argcReg,
                                    STUB_FRAME_SIZE + isConstructing_ * sizeof(Value));
            masm.storeValue(R0, thisSlot);
        }

        // Restore the stub register from the baseline stub frame.
        masm.loadPtr(Address(masm.getStackPointer(), STUB_FRAME_SAVED_STUB_OFFSET), ICStubReg);

        // Reload callee script. Note that a GC triggered by CreateThis may
        // have destroyed the callee BaselineScript and IonScript. CreateThis is
        // safely repeatable though, so in this case we just leave the stub frame
        // and jump to the next stub.

        // Just need to load the script now.
        if (isSpread_) {
            unsigned skipForCallee = (2 + isConstructing_) * sizeof(Value);
            masm.loadValue(Address(masm.getStackPointer(), skipForCallee + STUB_FRAME_SIZE), R0);
        } else {
            // Account for newTarget, if necessary
            unsigned nonArgsSkip = (1 + isConstructing_) * sizeof(Value);
            BaseValueIndex calleeSlot3(masm.getStackPointer(), argcReg, nonArgsSkip + STUB_FRAME_SIZE);
            masm.loadValue(calleeSlot3, R0);
        }
        callee = masm.extractObject(R0, ExtractTemp0);
        regs.add(R0);
        regs.takeUnchecked(callee);
        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

        code = regs.takeAny();
        masm.loadBaselineOrIonRaw(callee, code, &failureLeaveStubFrame);

        // Release callee register, but don't add ExtractTemp0 back into the pool
        // ExtractTemp0 is used later, and if it's allocated to some other register at that
        // point, it will get clobbered when used.
        if (callee != ExtractTemp0)
            regs.add(callee);

        if (canUseTailCallReg)
            regs.addUnchecked(ICTailCallReg);
    }
    Register scratch = regs.takeAny();

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    if (isSpread_)
        pushSpreadCallArguments(masm, regs, argcReg, /* isJitCall = */ true, isConstructing_);
    else
        pushCallArguments(masm, regs, argcReg, /* isJitCall = */ true, isConstructing_);

    // The callee is on top of the stack. Pop and unbox it.
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);
    callee = masm.extractObject(val, ExtractTemp0);

    EmitBaselineCreateStubFrameDescriptor(masm, scratch, JitFrameLayout::Size());

    // Note that we use Push, not push, so that callJit will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.PushCalleeToken(callee, isConstructing_);
    masm.Push(scratch);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        MOZ_ASSERT(ArgumentsRectifierReg != code);
        MOZ_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode* argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier();

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.movePtr(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);
    masm.callJit(code);

    // If this is a constructing call, and the callee returns a non-object, replace it with
    // the |this| object passed in.
    if (isConstructing_) {
        Label skipThisReplace;
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);

        // Current stack: [ Padding?, ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
        // However, we can't use this ThisVal, because it hasn't been traced. We need to use
        // The ThisVal higher up the stack:
        // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME...,
        //                  Padding?, ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]

        // Restore the BaselineFrameReg based on the frame descriptor.
        //
        // BaselineFrameReg = BaselineStackReg
        //                  + sizeof(Descriptor) + sizeof(Callee) + sizeof(ActualArgc)
        //                  + stubFrameSize(Descriptor)
        //                  - sizeof(ICStubReg) - sizeof(BaselineFrameReg)
        Address descriptorAddr(masm.getStackPointer(), 0);
        masm.loadPtr(descriptorAddr, BaselineFrameReg);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), BaselineFrameReg);
        masm.addPtr(Imm32((3 - 2) * sizeof(size_t)), BaselineFrameReg);
        masm.addStackPtrTo(BaselineFrameReg);

        // Load the number of arguments present before the stub frame.
        Register argcReg = JSReturnOperand.scratchReg();
        if (isSpread_) {
            // Account for the Array object.
            masm.move32(Imm32(1), argcReg);
        } else {
            Address argcAddr(masm.getStackPointer(), 2 * sizeof(size_t));
            masm.loadPtr(argcAddr, argcReg);
        }

        // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME..., <-- BaselineFrameReg
        //                  Padding?, ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
        //
        // &ThisVal = BaselineFrameReg + argc * sizeof(Value) + STUB_FRAME_SIZE + sizeof(Value)
        // This last sizeof(Value) accounts for the newTarget on the end of the arguments vector
        // which is not reflected in actualArgc
        BaseValueIndex thisSlotAddr(BaselineFrameReg, argcReg, STUB_FRAME_SIZE + sizeof(Value));
        masm.loadValue(thisSlotAddr, JSReturnOperand);
#ifdef DEBUG
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
        masm.assumeUnreachable("Return of constructing call should be an object.");
#endif
        masm.bind(&skipThisReplace);
    }

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Leave stub frame and restore argc for the next stub.
    assumeStubFrame(masm);
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);
    if (argcReg != R0.scratchReg())
        masm.movePtr(argcReg, R0.scratchReg());

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3363 :
// VM-call wrapper used by the ICCall_ConstStringSplit stub below: clones the
// cached template array so every hit of the stub returns a fresh result object.
typedef bool (*CopyArrayFn)(JSContext*, HandleObject, MutableHandleValue);
static const VMFunction CopyArrayInfo = FunctionInfo<CopyArrayFn>(CopyArray, "CopyArray");
3366 :
// Specialized Call IC for String.prototype.split with constant operands: if
// the callee is the intrinsic_StringSplitString native and both the string and
// the separator are pointer-identical to the ones cached in the stub, copy the
// pre-computed template array instead of re-running the split.
bool
ICCall_ConstStringSplit::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // Stack Layout: [ ..., CalleeVal, ThisVal, strVal, sepVal, +ICStackValueOffset+ ]
    static const size_t SEP_DEPTH = 0;
    static const size_t STR_DEPTH = sizeof(Value);
    static const size_t CALLEE_DEPTH = 3 * sizeof(Value);

    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
    Label failureRestoreArgc;
#ifdef DEBUG
    // This stub is only attached to str.split(sep) call sites, so argc must
    // be exactly 2; anything else is a stub-attachment bug.
    Label twoArg;
    Register argcReg = R0.scratchReg();
    masm.branch32(Assembler::Equal, argcReg, Imm32(2), &twoArg);
    masm.assumeUnreachable("Expected argc == 2");
    masm.bind(&twoArg);
#endif
    Register scratchReg = regs.takeAny();

    // Guard that callee is native function js::intrinsic_StringSplitString.
    {
        Address calleeAddr(masm.getStackPointer(), ICStackValueOffset + CALLEE_DEPTH);
        ValueOperand calleeVal = regs.takeAnyValue();

        // Ensure that callee is an object.
        masm.loadValue(calleeAddr, calleeVal);
        masm.branchTestObject(Assembler::NotEqual, calleeVal, &failureRestoreArgc);

        // Ensure that callee is a function.
        Register calleeObj = masm.extractObject(calleeVal, ExtractTemp0);
        masm.branchTestObjClass(Assembler::NotEqual, calleeObj, scratchReg,
                                &JSFunction::class_, &failureRestoreArgc);

        // Ensure that callee's function impl is the native intrinsic_StringSplitString.
        masm.loadPtr(Address(calleeObj, JSFunction::offsetOfNativeOrScript()), scratchReg);
        masm.branchPtr(Assembler::NotEqual, scratchReg, ImmPtr(js::intrinsic_StringSplitString),
                       &failureRestoreArgc);

        regs.add(calleeVal);
    }

    // Guard sep.
    {
        // Ensure that sep is a string.
        Address sepAddr(masm.getStackPointer(), ICStackValueOffset + SEP_DEPTH);
        ValueOperand sepVal = regs.takeAnyValue();

        masm.loadValue(sepAddr, sepVal);
        masm.branchTestString(Assembler::NotEqual, sepVal, &failureRestoreArgc);

        // Pointer-identity compare against the separator recorded when the
        // stub was attached.
        Register sep = masm.extractString(sepVal, ExtractTemp0);
        masm.branchPtr(Assembler::NotEqual, Address(ICStubReg, offsetOfExpectedSep()),
                       sep, &failureRestoreArgc);
        regs.add(sepVal);
    }

    // Guard str.
    {
        // Ensure that str is a string.
        Address strAddr(masm.getStackPointer(), ICStackValueOffset + STR_DEPTH);
        ValueOperand strVal = regs.takeAnyValue();

        masm.loadValue(strAddr, strVal);
        masm.branchTestString(Assembler::NotEqual, strVal, &failureRestoreArgc);

        // Pointer-identity compare, same as the separator guard above.
        Register str = masm.extractString(strVal, ExtractTemp0);
        masm.branchPtr(Assembler::NotEqual, Address(ICStubReg, offsetOfExpectedStr()),
                       str, &failureRestoreArgc);
        regs.add(strVal);
    }

    // Main stub body.
    {
        Register paramReg = regs.takeAny();

        // Push arguments.
        enterStubFrame(masm, scratchReg);
        masm.loadPtr(Address(ICStubReg, offsetOfTemplateObject()), paramReg);
        masm.push(paramReg);

        // Clone the template array via the CopyArray VM call (CopyArrayInfo).
        if (!callVM(CopyArrayInfo, masm))
            return false;
        leaveStubFrame(masm);
        regs.add(paramReg);
    }

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Guard failure path: restore argc (2) for the next stub in the chain.
    masm.bind(&failureRestoreArgc);
    masm.move32(Imm32(2), R0.scratchReg());
    EmitStubGuardFailure(masm);
    return true;
}
3464 :
// Inlined implementation of the self-hosted IsSuspendedStarGenerator
// intrinsic: sets R0 to true iff the single argument is a StarGeneratorObject
// whose yield/await index slot holds an int32 below YIELD_AND_AWAIT_INDEX_CLOSING.
// This stub never falls back; any non-matching input just returns false.
bool
ICCall_IsSuspendedStarGenerator::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // The IsSuspendedStarGenerator intrinsic is only called in self-hosted
    // code, so it's safe to assume we have a single argument and the callee
    // is our intrinsic.

    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    // Load the argument.
    Address argAddr(masm.getStackPointer(), ICStackValueOffset);
    ValueOperand argVal = regs.takeAnyValue();
    masm.loadValue(argAddr, argVal);

    // Check if it's an object.
    Label returnFalse;
    Register genObj = regs.takeAny();
    masm.branchTestObject(Assembler::NotEqual, argVal, &returnFalse);
    masm.unboxObject(argVal, genObj);

    // Check if it's a StarGeneratorObject.
    Register scratch = regs.takeAny();
    masm.branchTestObjClass(Assembler::NotEqual, genObj, scratch, &StarGeneratorObject::class_,
                            &returnFalse);

    // If the yield index slot holds an int32 value < YIELD_AND_AWAIT_INDEX_CLOSING,
    // the generator is suspended.
    // (argVal is reused here as a scratch ValueOperand for the slot load.)
    masm.loadValue(Address(genObj, GeneratorObject::offsetOfYieldAndAwaitIndexSlot()), argVal);
    masm.branchTestInt32(Assembler::NotEqual, argVal, &returnFalse);
    masm.unboxInt32(argVal, scratch);
    masm.branch32(Assembler::AboveOrEqual, scratch,
                  Imm32(StarGeneratorObject::YIELD_AND_AWAIT_INDEX_CLOSING),
                  &returnFalse);

    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&returnFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);
    return true;
}
3509 :
// Call IC stub for a known native (C++) function. Guards that the callee is
// the exact JSFunction cached in the stub, then builds a fake exit frame and
// invokes the native through the ABI with (cx, argc, vp).
bool
ICCall_Native::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(ICTailCallReg);

    if (isSpread_)
        guardSpreadCall(masm, argcReg, &failure, isConstructing_);

    // Load the callee in R1. For a spread call the callee sits at a fixed
    // depth; otherwise it is above the argc dynamic arguments.
    if (isSpread_) {
        unsigned skipToCallee = (2 + isConstructing_) * sizeof(Value);
        masm.loadValue(Address(masm.getStackPointer(), skipToCallee + ICStackValueOffset), R1);
    } else {
        unsigned nonArgsSlots = (1 + isConstructing_) * sizeof(Value);
        BaseValueIndex calleeSlot(masm.getStackPointer(), argcReg, ICStackValueOffset + nonArgsSlots);
        masm.loadValue(calleeSlot, R1);
    }
    regs.take(R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee matches this stub's callee.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    Address expectedCallee(ICStubReg, ICCall_Native::offsetOfCallee());
    masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);

    regs.add(R1);
    regs.takeUnchecked(callee);

    // Push a stub frame so that we can perform a non-tail call.
    // Note that this leaves the return address in TailCallReg.
    enterStubFrame(masm, regs.getAny());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    if (isSpread_)
        pushSpreadCallArguments(masm, regs, argcReg, /* isJitCall = */ false, isConstructing_);
    else
        pushCallArguments(masm, regs, argcReg, /* isJitCall = */ false, isConstructing_);


    // Native functions have the signature:
    //
    //    bool (*)(JSContext*, unsigned, Value* vp)
    //
    // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Initialize vp: the stack pointer currently points at the copied
    // callee/|this|/args vector pushed above.
    Register vpReg = regs.takeAny();
    masm.moveStackPtrTo(vpReg);

    // Construct a native exit frame.
    masm.push(argcReg);

    Register scratch = regs.takeAny();
    EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());
    masm.push(scratch);
    masm.push(ICTailCallReg);
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrameForNative(scratch, scratch, isConstructing_);

    // Execute call.
    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(argcReg);
    masm.passABIArg(vpReg);

#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them, so we store the redirected pointer in the
    // stub and use that instead of the original one.
    masm.callWithABI(Address(ICStubReg, ICCall_Native::offsetOfNative()));
#else
    if (ignoresReturnValue_) {
        // Call the rval-ignoring variant of the native, found via the
        // function's JSJitInfo.
        masm.loadPtr(Address(callee, JSFunction::offsetOfJitInfo()), callee);
        masm.callWithABI(Address(callee, JSJitInfo::offsetOfIgnoresReturnValueNative()));
    } else {
        masm.callWithABI(Address(callee, JSFunction::offsetOfNativeOrScript()));
    }
#endif

    // Test for failure: a false return from the native means an exception is
    // pending, so jump straight to the exception tail.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the return value into R0.
    masm.loadValue(Address(masm.getStackPointer(), NativeExitFrameLayout::offsetOfResult()), R0);

    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3616 :
// Call IC stub for objects called via their class call/construct hook (e.g.
// proxies). Guards on the callee's Class rather than on function identity,
// then invokes the hook stored in the stub with the native (cx, argc, vp) ABI.
bool
ICCall_ClassHook::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(ICTailCallReg);

    // Load the callee in R1 (it sits above the argc dynamic arguments).
    unsigned nonArgSlots = (1 + isConstructing_) * sizeof(Value);
    BaseValueIndex calleeSlot(masm.getStackPointer(), argcReg, ICStackValueOffset + nonArgSlots);
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure the callee's class matches the one in this stub.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    Register scratch = regs.takeAny();
    masm.loadObjClass(callee, scratch);
    masm.branchPtr(Assembler::NotEqual,
                   Address(ICStubReg, ICCall_ClassHook::offsetOfClass()),
                   scratch, &failure);

    regs.add(R1);
    regs.takeUnchecked(callee);

    // Push a stub frame so that we can perform a non-tail call.
    // Note that this leaves the return address in TailCallReg.
    enterStubFrame(masm, regs.getAny());

    // Temporarily lend |scratch| to the argument-pushing helper.
    regs.add(scratch);
    pushCallArguments(masm, regs, argcReg, /* isJitCall = */ false, isConstructing_);
    regs.take(scratch);

    masm.checkStackAlignment();

    // Native functions have the signature:
    //
    //    bool (*)(JSContext*, unsigned, Value* vp)
    //
    // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Initialize vp.
    Register vpReg = regs.takeAny();
    masm.moveStackPtrTo(vpReg);

    // Construct a native exit frame.
    masm.push(argcReg);

    EmitBaselineCreateStubFrameDescriptor(masm, scratch, ExitFrameLayout::Size());
    masm.push(scratch);
    masm.push(ICTailCallReg);
    masm.loadJSContext(scratch);
    masm.enterFakeExitFrameForNative(scratch, scratch, isConstructing_);

    // Execute call: the hook pointer is loaded from the stub itself.
    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(argcReg);
    masm.passABIArg(vpReg);
    masm.callWithABI(Address(ICStubReg, ICCall_ClassHook::offsetOfNative()));

    // Test for failure: false return means an exception is pending.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the return value into R0.
    masm.loadValue(Address(masm.getStackPointer(), NativeExitFrameLayout::offsetOfResult()), R0);

    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3701 :
// Call IC stub for f.apply(thisv, array) where |f| is a scripted function:
// guards the fun_apply pattern, spills the array elements onto the stack as
// call arguments, and jumps into the target's JIT code (via the arguments
// rectifier when too few arguments were supplied).
bool
ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(ICTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_Array, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //      BaselineFrameReg -------------------.
    //                                          v
    //      [..., fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader]

    // Push all array elements onto the stack:
    Address arrayVal(BaselineFrameReg, STUB_FRAME_SIZE);
    pushArrayArguments(masm, arrayVal, regs);

    // Stack now looks like:
    //      BaselineFrameReg -------------------.
    //                                          v
    //      [..., fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitBaselineCreateStubFrameDescriptor(masm, scratch, JitFrameLayout::Size());

    // Reload argc from length of array (its elements' initialized length).
    masm.extractObject(arrayVal, argcReg);
    masm.loadPtr(Address(argcReg, NativeObject::offsetOfElements()), argcReg);
    masm.load32(Address(argcReg, ObjectElements::offsetOfInitializedLength()), argcReg);

    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, nullptr);

    // Handle arguments underflow (fewer actual args than the callee's nargs).
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        MOZ_ASSERT(ArgumentsRectifierReg != target);
        MOZ_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode* argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier();

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.movePtr(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // Do call
    masm.callJit(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3805 :
// Call IC stub for f.apply(thisv, arguments) where |f| is a scripted function
// and |arguments| is the magic lazy-arguments value: forwards the current
// frame's actual arguments to the target's JIT code (via the arguments
// rectifier on underflow).
bool
ICCall_ScriptedApplyArguments::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(ICTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_MagicArgs, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //      [..., fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader]

    // Push all arguments supplied to caller function onto the stack.
    pushCallerArguments(masm, regs);

    // Stack now looks like:
    //      BaselineFrameReg -------------------.
    //                                          v
    //      [..., fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitBaselineCreateStubFrameDescriptor(masm, scratch, JitFrameLayout::Size());

    // Reload argc from the caller frame's numActualArgs.
    masm.loadPtr(Address(BaselineFrameReg, 0), argcReg);
    masm.loadPtr(Address(argcReg, BaselineFrame::offsetOfNumActualArgs()), argcReg);
    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, nullptr);

    // Handle arguments underflow (fewer actual args than the callee's nargs).
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        MOZ_ASSERT(ArgumentsRectifierReg != target);
        MOZ_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode* argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier();

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.movePtr(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // Do call
    masm.callJit(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3903 :
// Call IC stub for f.call(thisv, args...) where |f| (the original |this| of
// fun_call) is a scripted function with JIT code: shifts the arguments down by
// one (dropping the fun_call callee) and calls |f| directly.
bool
ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(ICTailCallReg);

    Register argcReg = R0.scratchReg();
    MOZ_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(ICTailCallReg);

    // Load the callee in R1.
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
    BaseValueIndex calleeSlot(masm.getStackPointer(), argcReg, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    // Ensure callee is fun_call.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    Register callee = masm.extractObject(R1, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(fun_call), &failure);

    // Ensure |this| is a scripted function with JIT code.
    BaseIndex thisSlot(masm.getStackPointer(), argcReg, TimesEight, ICStackValueOffset);
    masm.loadValue(thisSlot, R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    callee = masm.extractObject(R1, ExtractTemp0);

    // Class constructors cannot be invoked without |new|, so fail the guard.
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.branchIfFunctionHasNoScript(callee, &failure);
    masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
                            callee, regs.getAny(), &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    // Load the start of the target JitCode.
    Register code = regs.takeAny();
    masm.loadBaselineOrIonRaw(callee, code, &failure);

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(ICTailCallReg);

    // Decrement argc if argc > 0. If argc == 0, push |undefined| as |this|.
    Label zeroArgs, done;
    masm.branchTest32(Assembler::Zero, argcReg, argcReg, &zeroArgs);

    // Avoid the copy of the callee (function.call).
    masm.sub32(Imm32(1), argcReg);

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.

    pushCallArguments(masm, regs, argcReg, /* isJitCall = */ true);

    // Pop scripted callee (the original |this|).
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);

    masm.jump(&done);
    masm.bind(&zeroArgs);

    // Copy scripted callee (the original |this|).
    Address thisSlotFromStubFrame(BaselineFrameReg, STUB_FRAME_SIZE);
    masm.loadValue(thisSlotFromStubFrame, val);

    // Align the stack.
    masm.alignJitStackBasedOnNArgs(0);

    // Store the new |this|.
    masm.pushValue(UndefinedValue());

    masm.bind(&done);

    // Unbox scripted callee.
    callee = masm.extractObject(val, ExtractTemp0);

    Register scratch = regs.takeAny();
    EmitBaselineCreateStubFrameDescriptor(masm, scratch, JitFrameLayout::Size());

    // Note that we use Push, not push, so that callJit will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow (fewer actual args than the callee's nargs).
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        MOZ_ASSERT(ArgumentsRectifierReg != code);
        MOZ_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode* argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier();

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.movePtr(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);
    masm.callJit(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4033 :
4034 : static bool
4035 0 : DoubleValueToInt32ForSwitch(Value* v)
4036 : {
4037 0 : double d = v->toDouble();
4038 0 : int32_t truncated = int32_t(d);
4039 0 : if (d != double(truncated))
4040 0 : return false;
4041 :
4042 0 : v->setInt32(truncated);
4043 0 : return true;
4044 : }
4045 :
4046 : bool
4047 6 : ICTableSwitch::Compiler::generateStubCode(MacroAssembler& masm)
4048 : {
4049 6 : MOZ_ASSERT(engine_ == Engine::Baseline);
4050 :
4051 12 : Label isInt32, notInt32, outOfRange;
4052 6 : Register scratch = R1.scratchReg();
4053 :
4054 6 : masm.branchTestInt32(Assembler::NotEqual, R0, ¬Int32);
4055 :
4056 6 : Register key = masm.extractInt32(R0, ExtractTemp0);
4057 :
4058 6 : masm.bind(&isInt32);
4059 :
4060 6 : masm.load32(Address(ICStubReg, offsetof(ICTableSwitch, min_)), scratch);
4061 6 : masm.sub32(scratch, key);
4062 : masm.branch32(Assembler::BelowOrEqual,
4063 6 : Address(ICStubReg, offsetof(ICTableSwitch, length_)), key, &outOfRange);
4064 :
4065 6 : masm.loadPtr(Address(ICStubReg, offsetof(ICTableSwitch, table_)), scratch);
4066 6 : masm.loadPtr(BaseIndex(scratch, key, ScalePointer), scratch);
4067 :
4068 6 : EmitChangeICReturnAddress(masm, scratch);
4069 6 : EmitReturnFromIC(masm);
4070 :
4071 6 : masm.bind(¬Int32);
4072 :
4073 6 : masm.branchTestDouble(Assembler::NotEqual, R0, &outOfRange);
4074 6 : if (cx->runtime()->jitSupportsFloatingPoint) {
4075 6 : masm.unboxDouble(R0, FloatReg0);
4076 :
4077 : // N.B. -0 === 0, so convert -0 to a 0 int32.
4078 6 : masm.convertDoubleToInt32(FloatReg0, key, &outOfRange, /* negativeZeroCheck = */ false);
4079 : } else {
4080 : // Pass pointer to double value.
4081 0 : masm.pushValue(R0);
4082 0 : masm.moveStackPtrTo(R0.scratchReg());
4083 :
4084 0 : masm.setupUnalignedABICall(scratch);
4085 0 : masm.passABIArg(R0.scratchReg());
4086 0 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, DoubleValueToInt32ForSwitch));
4087 :
4088 : // If the function returns |true|, the value has been converted to
4089 : // int32.
4090 0 : masm.movePtr(ReturnReg, scratch);
4091 0 : masm.popValue(R0);
4092 0 : masm.branchIfFalseBool(scratch, &outOfRange);
4093 0 : masm.unboxInt32(R0, key);
4094 : }
4095 6 : masm.jump(&isInt32);
4096 :
4097 6 : masm.bind(&outOfRange);
4098 :
4099 6 : masm.loadPtr(Address(ICStubReg, offsetof(ICTableSwitch, defaultTarget_)), scratch);
4100 :
4101 6 : EmitChangeICReturnAddress(masm, scratch);
4102 6 : EmitReturnFromIC(masm);
4103 12 : return true;
4104 : }
4105 :
4106 : ICStub*
4107 11 : ICTableSwitch::Compiler::getStub(ICStubSpace* space)
4108 : {
4109 11 : JitCode* code = getStubCode();
4110 11 : if (!code)
4111 0 : return nullptr;
4112 :
4113 11 : jsbytecode* pc = pc_;
4114 11 : pc += JUMP_OFFSET_LEN;
4115 11 : int32_t low = GET_JUMP_OFFSET(pc);
4116 11 : pc += JUMP_OFFSET_LEN;
4117 11 : int32_t high = GET_JUMP_OFFSET(pc);
4118 11 : int32_t length = high - low + 1;
4119 11 : pc += JUMP_OFFSET_LEN;
4120 :
4121 11 : void** table = (void**) space->alloc(sizeof(void*) * length);
4122 11 : if (!table) {
4123 0 : ReportOutOfMemory(cx);
4124 0 : return nullptr;
4125 : }
4126 :
4127 11 : jsbytecode* defaultpc = pc_ + GET_JUMP_OFFSET(pc_);
4128 :
4129 58 : for (int32_t i = 0; i < length; i++) {
4130 47 : int32_t off = GET_JUMP_OFFSET(pc);
4131 47 : if (off)
4132 47 : table[i] = pc_ + off;
4133 : else
4134 0 : table[i] = defaultpc;
4135 47 : pc += JUMP_OFFSET_LEN;
4136 : }
4137 :
4138 11 : return newStub<ICTableSwitch>(space, code, table, low, length, defaultpc);
4139 : }
4140 :
4141 : void
4142 11 : ICTableSwitch::fixupJumpTable(JSScript* script, BaselineScript* baseline)
4143 : {
4144 11 : defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode*) defaultTarget_);
4145 :
4146 58 : for (int32_t i = 0; i < length_; i++)
4147 47 : table_[i] = baseline->nativeCodeForPC(script, (jsbytecode*) table_[i]);
4148 11 : }
4149 :
4150 : //
4151 : // IteratorNew_Fallback
4152 : //
4153 :
// Fallback handler for the iterator-new IC: calls into the VM to create an
// iterator for |value| using the flags encoded in the bytecode, and returns
// the iterator object in |res|.
static bool
DoIteratorNewFallback(JSContext* cx, BaselineFrame* frame, ICIteratorNew_Fallback* stub,
                      HandleValue value, MutableHandleValue res)
{
    jsbytecode* pc = stub->icEntry()->pc(frame->script());
    FallbackICSpew(cx, stub, "IteratorNew");

    uint8_t flags = GET_UINT8(pc);
    // Hand |value| to ValueToIterator through |res| (it doubles as the
    // rooted in/out operand of the call).
    res.set(value);
    RootedObject iterobj(cx, ValueToIterator(cx, flags, res));
    if (!iterobj)
        return false;
    res.setObject(*iterobj);
    return true;
}
4169 :
// Tail-call VM wrapper for DoIteratorNewFallback. PopValues(1) discards the
// extra copy of the operand that the stub pushes for the decompiler.
typedef bool (*DoIteratorNewFallbackFn)(JSContext*, BaselineFrame*, ICIteratorNew_Fallback*,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoIteratorNewFallbackInfo =
    FunctionInfo<DoIteratorNewFallbackFn>(DoIteratorNewFallback, "DoIteratorNewFallback",
                                          TailCall, PopValues(1));
4175 :
// Fallback stub: tail-calls DoIteratorNewFallback with (frame, stub, value).
bool
ICIteratorNew_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler. This extra copy of R0 is dropped by the
    // PopValues(1) in DoIteratorNewFallbackInfo.
    masm.pushValue(R0);

    // Push the VM-call arguments in reverse order: value, stub, frame.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoIteratorNewFallbackInfo, masm);
}
4192 :
4193 : //
4194 : // IteratorMore_Fallback
4195 : //
4196 :
// Fallback handler for the iterator-more IC: advances the iterator via
// IteratorMore, records whether a non-string result was seen, and attaches
// an optimized IteratorMore_Native stub for native property iterators.
static bool
DoIteratorMoreFallback(JSContext* cx, BaselineFrame* frame, ICIteratorMore_Fallback* stub_,
                       HandleObject iterObj, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIteratorMore_Fallback*> stub(frame, stub_);

    FallbackICSpew(cx, stub, "IteratorMore");

    if (!IteratorMore(cx, iterObj, res))
        return false;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Property iterators normally yield strings; remember if this one didn't,
    // so later code knows it cannot assume string results.
    if (!res.isMagic(JS_NO_ITER_VALUE) && !res.isString())
        stub->setHasNonStringResult();

    // Attach the fast native-iterator stub once per fallback stub.
    if (iterObj->is<PropertyIteratorObject>() &&
        !stub->hasStub(ICStub::IteratorMore_Native))
    {
        ICIteratorMore_Native::Compiler compiler(cx);
        ICStub* newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
    }

    return true;
}
4228 :
// VM wrapper for DoIteratorMoreFallback (tail-called from the stub).
typedef bool (*DoIteratorMoreFallbackFn)(JSContext*, BaselineFrame*, ICIteratorMore_Fallback*,
                                         HandleObject, MutableHandleValue);
static const VMFunction DoIteratorMoreFallbackInfo =
    FunctionInfo<DoIteratorMoreFallbackFn>(DoIteratorMoreFallback, "DoIteratorMoreFallback",
                                           TailCall);

// Emit the fallback stub: unbox the iterator object from R0 and tail-call
// DoIteratorMoreFallback.
bool
ICIteratorMore_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Arguments in reverse order: iterator object, stub, frame payload.
    masm.unboxObject(R0, R0.scratchReg());
    masm.push(R0.scratchReg());
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoIteratorMoreFallbackInfo, masm);
}
4249 :
4250 : //
4251 : // IteratorMore_Native
4252 : //
4253 :
// Optimized iterator-more stub for native PropertyIteratorObjects: read the
// next property name directly from the NativeIterator without a VM call.
bool
ICIteratorMore_Native::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    AllocatableGeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    // Guard the object is a PropertyIteratorObject, then grab its
    // NativeIterator from the private slot.
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    // for-each style iteration produces values, not names; bail to the
    // fallback for that case.
    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // If props_cursor < props_end, load the next string and advance the cursor.
    // Else, return MagicValue(JS_NO_ITER_VALUE).
    Label iterDone;
    Address cursorAddr(nativeIterator, offsetof(NativeIterator, props_cursor));
    Address cursorEndAddr(nativeIterator, offsetof(NativeIterator, props_end));
    masm.loadPtr(cursorAddr, scratch);
    masm.branchPtr(Assembler::BelowOrEqual, cursorEndAddr, scratch, &iterDone);

    // Get next string.
    masm.loadPtr(Address(scratch, 0), scratch);

    // Increase the cursor.
    masm.addPtr(Imm32(sizeof(JSString*)), cursorAddr);

    masm.tagValue(JSVAL_TYPE_STRING, scratch, R0);
    EmitReturnFromIC(masm);

    masm.bind(&iterDone);
    masm.moveValue(MagicValue(JS_NO_ITER_VALUE), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4300 :
4301 : //
4302 : // IteratorClose_Fallback
4303 : //
4304 :
4305 : static bool
4306 72 : DoIteratorCloseFallback(JSContext* cx, ICIteratorClose_Fallback* stub, HandleValue iterValue)
4307 : {
4308 72 : FallbackICSpew(cx, stub, "IteratorClose");
4309 :
4310 144 : RootedObject iteratorObject(cx, &iterValue.toObject());
4311 144 : return CloseIterator(cx, iteratorObject);
4312 : }
4313 :
// VM wrapper for DoIteratorCloseFallback (tail-called from the stub).
typedef bool (*DoIteratorCloseFallbackFn)(JSContext*, ICIteratorClose_Fallback*, HandleValue);
static const VMFunction DoIteratorCloseFallbackInfo =
    FunctionInfo<DoIteratorCloseFallbackFn>(DoIteratorCloseFallback, "DoIteratorCloseFallback",
                                            TailCall);

// Emit the fallback stub: tail-call DoIteratorCloseFallback with the iterator
// value in R0. Note: no frame payload is pushed — the callee takes no frame.
bool
ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(ICStubReg);

    return tailCallVM(DoIteratorCloseFallbackInfo, masm);
}
4331 :
4332 : //
4333 : // InstanceOf_Fallback
4334 : //
4335 :
// Try to attach an optimized ICInstanceOf_Function stub for |fun| as the RHS
// of instanceof. Each early "return true" below declines to optimize without
// reporting an error; *attached is set only on success.
static bool
TryAttachInstanceOfStub(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub,
                        HandleFunction fun, bool* attached)
{
    MOZ_ASSERT(!*attached);
    // Bound functions route instanceof through the target; don't optimize.
    if (fun->isBoundFunction())
        return true;

    // If the user has supplied their own @@hasInstance method we shouldn't
    // clobber it.
    if (!js::FunctionHasDefaultHasInstance(fun, cx->wellKnownSymbols()))
        return true;

    // Refuse to optimize any function whose [[Prototype]] isn't
    // Function.prototype.
    if (!fun->hasStaticPrototype() || fun->hasUncacheableProto())
        return true;

    Value funProto = cx->global()->getPrototype(JSProto_Function);
    if (funProto.isObject() && fun->staticPrototype() != &funProto.toObject())
        return true;

    // The stub guards on the function's shape and reads .prototype straight
    // out of a dynamic slot, so it must be a plain data property.
    Shape* shape = fun->lookupPure(cx->names().prototype);
    if (!shape || !shape->hasSlot() || !shape->hasDefaultGetter())
        return true;

    uint32_t slot = shape->slot();
    MOZ_ASSERT(fun->numFixedSlots() == 0, "Stub code relies on this");

    if (!fun->getSlot(slot).isObject())
        return true;

    JSObject* protoObject = &fun->getSlot(slot).toObject();

    JitSpew(JitSpew_BaselineIC, "  Generating InstanceOf(Function) stub");
    ICInstanceOf_Function::Compiler compiler(cx, fun->lastProperty(), protoObject, slot);
    ICStub* newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    *attached = true;
    return true;
}
4380 :
// Fallback for the instanceof IC: evaluate |lhs instanceof rhs| via
// HasInstance and try to attach an optimized stub for plain function RHS.
static bool
DoInstanceOfFallback(JSContext* cx, BaselineFrame* frame, ICInstanceOf_Fallback* stub,
                     HandleValue lhs, HandleValue rhs, MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "InstanceOf");

    if (!rhs.isObject()) {
        ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, nullptr);
        return false;
    }

    RootedObject obj(cx, &rhs.toObject());
    bool cond = false;
    if (!HasInstance(cx, obj, lhs, &cond))
        return false;

    res.setBoolean(cond);

    // Only function RHS is optimizable; anything else marks the access as
    // unoptimizable.
    if (!obj->is<JSFunction>()) {
        stub->noteUnoptimizableAccess();
        return true;
    }

    // For functions, keep track of the |prototype| property in type information,
    // for use during Ion compilation.
    EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));

    if (stub->numOptimizedStubs() >= ICInstanceOf_Fallback::MAX_OPTIMIZED_STUBS)
        return true;

    RootedFunction fun(cx, &obj->as<JSFunction>());
    bool attached = false;
    if (!TryAttachInstanceOfStub(cx, frame, stub, fun, &attached))
        return false;
    if (!attached)
        stub->noteUnoptimizableAccess();
    return true;
}
4419 :
// VM wrapper for DoInstanceOfFallback; PopValues(2) discards the two values
// synced below for the decompiler.
typedef bool (*DoInstanceOfFallbackFn)(JSContext*, BaselineFrame*, ICInstanceOf_Fallback*,
                                       HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoInstanceOfFallbackInfo =
    FunctionInfo<DoInstanceOfFallbackFn>(DoInstanceOfFallback, "DoInstanceOfFallback", TailCall,
                                         PopValues(2));

// Emit the fallback stub: tail-call DoInstanceOfFallback with lhs in R0 and
// rhs in R1.
bool
ICInstanceOf_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Arguments in reverse order: rhs, lhs, stub, frame payload.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoInstanceOfFallbackInfo, masm);
}
4444 :
// Optimized instanceof stub for a plain function RHS: guard the function's
// shape and cached .prototype object, then walk the LHS proto chain inline.
bool
ICInstanceOf_Function::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    Label failure;

    // Ensure RHS is an object.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    Register rhsObj = masm.extractObject(R1, ExtractTemp0);

    // Allow using R1's type register as scratch. We have to restore it when
    // we want to jump to the next stub.
    Label failureRestoreR1;
    AllocatableGeneralRegisterSet regs(availableGeneralRegs(1));
    regs.takeUnchecked(rhsObj);

    Register scratch1 = regs.takeAny();
    Register scratch2 = regs.takeAny();

    // Shape guard.
    masm.loadPtr(Address(ICStubReg, ICInstanceOf_Function::offsetOfShape()), scratch1);
    masm.branchTestObjShape(Assembler::NotEqual, rhsObj, scratch1, &failureRestoreR1);

    // Guard on the .prototype object. The slot index is a dynamic-slot index
    // (TryAttachInstanceOfStub asserted numFixedSlots() == 0).
    masm.loadPtr(Address(rhsObj, NativeObject::offsetOfSlots()), scratch1);
    masm.load32(Address(ICStubReg, ICInstanceOf_Function::offsetOfSlot()), scratch2);
    BaseValueIndex prototypeSlot(scratch1, scratch2);
    masm.branchTestObject(Assembler::NotEqual, prototypeSlot, &failureRestoreR1);
    masm.unboxObject(prototypeSlot, scratch1);
    masm.branchPtr(Assembler::NotEqual,
                   Address(ICStubReg, ICInstanceOf_Function::offsetOfPrototypeObject()),
                   scratch1, &failureRestoreR1);

    // If LHS is a primitive, return false.
    Label returnFalse, returnTrue;
    masm.branchTestObject(Assembler::NotEqual, R0, &returnFalse);

    // LHS is an object. Load its proto.
    masm.unboxObject(R0, scratch2);
    masm.loadObjProto(scratch2, scratch2);

    {
        // Walk the proto chain until we either reach the target object,
        // nullptr or LazyProto.
        Label loop;
        masm.bind(&loop);

        masm.branchPtr(Assembler::Equal, scratch2, scratch1, &returnTrue);
        masm.branchTestPtr(Assembler::Zero, scratch2, scratch2, &returnFalse);

        // A lazy proto needs the VM to resolve it; fall back to the next stub.
        MOZ_ASSERT(uintptr_t(TaggedProto::LazyProto) == 1);
        masm.branchPtr(Assembler::Equal, scratch2, ImmWord(1), &failureRestoreR1);

        masm.loadObjProto(scratch2, scratch2);
        masm.jump(&loop);
    }

    EmitReturnFromIC(masm);

    masm.bind(&returnFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&returnTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Re-box the RHS object into R1 before handing off to the next stub,
    // since its type register was used as scratch above.
    masm.bind(&failureRestoreR1);
    masm.tagValue(JSVAL_TYPE_OBJECT, rhsObj, R1);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4520 :
4521 : //
4522 : // TypeOf_Fallback
4523 : //
4524 :
// Fallback for the typeof IC: compute typeof |val|, returning the type-name
// string in |res|, and try to attach a CacheIR stub for the observed case.
static bool
DoTypeOfFallback(JSContext* cx, BaselineFrame* frame, ICTypeOf_Fallback* stub, HandleValue val,
                 MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "TypeOf");

    // Discard existing stubs if the IC state machine asks for a transition.
    if (stub->state().maybeTransition())
        stub->discardStubs(cx);

    if (stub->state().canAttachStub()) {
        RootedScript script(cx, frame->script());
        jsbytecode* pc = stub->icEntry()->pc(script);

        ICStubEngine engine = ICStubEngine::Baseline;
        TypeOfIRGenerator gen(cx, script, pc, stub->state().mode(), val);
        bool attached = false;
        if (gen.tryAttachStub()) {
            ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
                                                        engine, script, stub, &attached);
            if (newStub)
                JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
        }
        // Record the failure so the state machine can back off.
        if (!attached)
            stub->state().trackNotAttached();
    }

    JSType type = js::TypeOfValue(val);
    RootedString string(cx, TypeName(type, cx->names()));
    res.setString(string);
    return true;
}
4556 :
// VM wrapper for DoTypeOfFallback (tail-called from the stub).
typedef bool (*DoTypeOfFallbackFn)(JSContext*, BaselineFrame* frame, ICTypeOf_Fallback*,
                                   HandleValue, MutableHandleValue);
static const VMFunction DoTypeOfFallbackInfo =
    FunctionInfo<DoTypeOfFallbackFn>(DoTypeOfFallback, "DoTypeOfFallback", TailCall);

// Emit the fallback stub: tail-call DoTypeOfFallback with the operand in R0.
bool
ICTypeOf_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    // Arguments in reverse order: value, stub, frame payload.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoTypeOfFallbackInfo, masm);
}
4575 :
// Fallback for the retsub IC: translate the bytecode offset in |val| into a
// native resume address, and attach an ICRetSub_Resume stub caching that
// offset -> address mapping.
static bool
DoRetSubFallback(JSContext* cx, BaselineFrame* frame, ICRetSub_Fallback* stub,
                 HandleValue val, uint8_t** resumeAddr)
{
    FallbackICSpew(cx, stub, "RetSub");

    // |val| is the bytecode offset where we should resume.

    MOZ_ASSERT(val.isInt32());
    MOZ_ASSERT(val.toInt32() >= 0);

    JSScript* script = frame->script();
    uint32_t offset = uint32_t(val.toInt32());

    *resumeAddr = script->baselineScript()->nativeCodeForPC(script, script->offsetToPC(offset));

    // The resume address is already set above, so hitting the stub cap is
    // not an error — we just skip attaching another optimized stub.
    if (stub->numOptimizedStubs() >= ICRetSub_Fallback::MAX_OPTIMIZED_STUBS)
        return true;

    // Attach an optimized stub for this pc offset.
    JitSpew(JitSpew_BaselineIC, "  Generating RetSub stub for pc offset %u", offset);
    ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
    ICStub* optStub = compiler.getStub(compiler.getStubSpace(script));
    if (!optStub)
        return false;

    stub->addNewStub(optStub);
    return true;
}
4605 :
// VM wrapper for DoRetSubFallback. Not a tail call: the stub needs the
// returned resume address to patch its own return address.
typedef bool(*DoRetSubFallbackFn)(JSContext* cx, BaselineFrame*, ICRetSub_Fallback*,
                                  HandleValue, uint8_t**);
static const VMFunction DoRetSubFallbackInfo =
    FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback, "DoRetSubFallback");

// VM wrapper used to rethrow an exception value from retsub stubs.
typedef bool (*ThrowFn)(JSContext*, HandleValue);
static const VMFunction ThrowInfoBaseline =
    FunctionInfo<ThrowFn>(js::Throw, "ThrowInfoBaseline", TailCall);

// Emit the retsub fallback stub. On entry, R0 is a boolean "throwing" flag;
// when true, R1 holds an exception to rethrow, otherwise R1 holds the pc
// offset to resume at.
bool
ICRetSub_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // If R0 is BooleanValue(true), rethrow R1.
    Label rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);
    {
        // Call a stub to get the native code address for the pc offset in R1.
        AllocatableGeneralRegisterSet regs(availableGeneralRegs(0));
        regs.take(R1);
        regs.takeUnchecked(ICTailCallReg);
        Register scratch = regs.getAny();

        enterStubFrame(masm, scratch);

        masm.pushValue(R1);
        masm.push(ICStubReg);
        pushStubPayload(masm, scratch);

        if (!callVM(DoRetSubFallbackInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Jump to the resume address by rewriting the IC return address.
        EmitChangeICReturnAddress(masm, ReturnReg);
        EmitReturnFromIC(masm);
    }

    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    return tailCallVM(ThrowInfoBaseline, masm);
}
4650 :
// Optimized retsub stub: if the pc offset in R1 matches the one cached in the
// stub, resume directly at the cached native address without a VM call.
bool
ICRetSub_Resume::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    // If R0 is BooleanValue(true), rethrow R1.
    Label fail, rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);

    // R1 is the pc offset. Ensure it matches this stub's offset.
    Register offset = masm.extractInt32(R1, ExtractTemp0);
    masm.branch32(Assembler::NotEqual,
                  Address(ICStubReg, ICRetSub_Resume::offsetOfPCOffset()),
                  offset,
                  &fail);

    // pc offset matches, resume at the target pc.
    masm.loadPtr(Address(ICStubReg, ICRetSub_Resume::offsetOfAddr()), R0.scratchReg());
    EmitChangeICReturnAddress(masm, R0.scratchReg());
    EmitReturnFromIC(masm);

    // Rethrow the Value stored in R1.
    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    if (!tailCallVM(ThrowInfoBaseline, masm))
        return false;

    // Offset mismatch: fall through to the next stub in the chain.
    masm.bind(&fail);
    EmitStubGuardFailure(masm);
    return true;
}
4683 :
// Trivial IC stub constructors: each stores its stub kind, jit code, and the
// guarded object/group/value in the corresponding member.

ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode* stubCode, JSObject* obj)
  : ICStub(TypeMonitor_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeMonitor_ObjectGroup::ICTypeMonitor_ObjectGroup(JitCode* stubCode, ObjectGroup* group)
  : ICStub(TypeMonitor_ObjectGroup, stubCode),
    group_(group)
{ }

ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode* stubCode, JSObject* obj)
  : ICStub(TypeUpdate_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeUpdate_ObjectGroup::ICTypeUpdate_ObjectGroup(JitCode* stubCode, ObjectGroup* group)
  : ICStub(TypeUpdate_ObjectGroup, stubCode),
    group_(group)
{ }

ICGetIntrinsic_Constant::ICGetIntrinsic_Constant(JitCode* stubCode, const Value& value)
  : ICStub(GetIntrinsic_Constant, stubCode),
    value_(value)
{ }

ICGetIntrinsic_Constant::~ICGetIntrinsic_Constant()
{ }

// |slot| is the dynamic-slot index of the function's .prototype property;
// |prototypeObj| is the cached prototype object guarded by the stub.
ICInstanceOf_Function::ICInstanceOf_Function(JitCode* stubCode, Shape* shape,
                                             JSObject* prototypeObj, uint32_t slot)
  : ICStub(InstanceOf_Function, stubCode),
    shape_(shape),
    prototypeObj_(prototypeObj),
    slot_(slot)
{ }
4719 :
ICCall_Scripted::ICCall_Scripted(JitCode* stubCode, ICStub* firstMonitorStub,
                                 JSFunction* callee, JSObject* templateObject,
                                 uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
    callee_(callee),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{ }

// Duplicate an existing stub into |space|, reusing its jit code but chaining
// onto |firstMonitorStub|. Returns nullptr on OOM.
/* static */ ICCall_Scripted*
ICCall_Scripted::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                       ICCall_Scripted& other)
{
    return New<ICCall_Scripted>(cx, space, other.jitCode(), firstMonitorStub, other.callee_,
                                other.templateObject_, other.pcOffset_);
}

// Same cloning scheme for the any-scripted-callee variant.
/* static */ ICCall_AnyScripted*
ICCall_AnyScripted::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                          ICCall_AnyScripted& other)
{
    return New<ICCall_AnyScripted>(cx, space, other.jitCode(), firstMonitorStub, other.pcOffset_);
}
4743 :
ICCall_Native::ICCall_Native(JitCode* stubCode, ICStub* firstMonitorStub,
                             JSFunction* callee, JSObject* templateObject,
                             uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
    callee_(callee),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{
#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them. To make this work, we store the redirected
    // pointer in the stub.
    native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void*, callee->native()),
                                                Args_General3);
#endif
}

// Duplicate an existing native-call stub into |space|; nullptr on OOM.
/* static */ ICCall_Native*
ICCall_Native::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                     ICCall_Native& other)
{
    return New<ICCall_Native>(cx, space, other.jitCode(), firstMonitorStub, other.callee_,
                              other.templateObject_, other.pcOffset_);
}
4768 :
ICCall_ClassHook::ICCall_ClassHook(JitCode* stubCode, ICStub* firstMonitorStub,
                                   const Class* clasp, Native native,
                                   JSObject* templateObject, uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_ClassHook, stubCode, firstMonitorStub),
    clasp_(clasp),
    native_(JS_FUNC_TO_DATA_PTR(void*, native)),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{
#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them. To make this work, we store the redirected
    // pointer in the stub.
    native_ = Simulator::RedirectNativeFunction(native_, Args_General3);
#endif
}

// Duplicate a class-hook stub. The ctor is given a nullptr hook so it does
// not re-redirect under the simulator; the (possibly already redirected)
// native pointer is copied over afterwards instead.
/* static */ ICCall_ClassHook*
ICCall_ClassHook::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                        ICCall_ClassHook& other)
{
    ICCall_ClassHook* res = New<ICCall_ClassHook>(cx, space, other.jitCode(), firstMonitorStub,
                                                  other.clasp(), nullptr, other.templateObject_,
                                                  other.pcOffset_);
    if (res)
        res->native_ = other.native();
    return res;
}
4797 :
// Clone helpers for the remaining call-stub variants: copy the stub into
// |space|, reusing its jit code and pc offset; nullptr on OOM.

/* static */ ICCall_ScriptedApplyArray*
ICCall_ScriptedApplyArray::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                                 ICCall_ScriptedApplyArray& other)
{
    return New<ICCall_ScriptedApplyArray>(cx, space, other.jitCode(), firstMonitorStub,
                                          other.pcOffset_);
}

/* static */ ICCall_ScriptedApplyArguments*
ICCall_ScriptedApplyArguments::Clone(JSContext* cx,
                                     ICStubSpace* space,
                                     ICStub* firstMonitorStub,
                                     ICCall_ScriptedApplyArguments& other)
{
    return New<ICCall_ScriptedApplyArguments>(cx, space, other.jitCode(), firstMonitorStub,
                                              other.pcOffset_);
}

/* static */ ICCall_ScriptedFunCall*
ICCall_ScriptedFunCall::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
                              ICCall_ScriptedFunCall& other)
{
    return New<ICCall_ScriptedFunCall>(cx, space, other.jitCode(), firstMonitorStub,
                                       other.pcOffset_);
}
4823 :
4824 : //
4825 : // Rest_Fallback
4826 : //
4827 :
4828 547 : static bool DoRestFallback(JSContext* cx, BaselineFrame* frame, ICRest_Fallback* stub,
4829 : MutableHandleValue res)
4830 : {
4831 547 : unsigned numFormals = frame->numFormalArgs() - 1;
4832 547 : unsigned numActuals = frame->numActualArgs();
4833 547 : unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
4834 547 : Value* rest = frame->argv() + numFormals;
4835 :
4836 547 : JSObject* obj = ObjectGroup::newArrayObject(cx, rest, numRest, GenericObject,
4837 547 : ObjectGroup::NewArrayKind::UnknownIndex);
4838 547 : if (!obj)
4839 0 : return false;
4840 547 : res.setObject(*obj);
4841 547 : return true;
4842 : }
4843 :
// VM wrapper for DoRestFallback (tail-called from the stub).
typedef bool (*DoRestFallbackFn)(JSContext*, BaselineFrame*, ICRest_Fallback*,
                                 MutableHandleValue);
static const VMFunction DoRestFallbackInfo =
    FunctionInfo<DoRestFallbackFn>(DoRestFallback, "DoRestFallback", TailCall);

// Emit the fallback stub: tail-call DoRestFallback. No value operands — the
// callee reads the arguments directly from the frame.
bool
ICRest_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
{
    MOZ_ASSERT(engine_ == Engine::Baseline);

    EmitRestoreTailCallReg(masm);

    masm.push(ICStubReg);
    pushStubPayload(masm, R0.scratchReg());

    return tailCallVM(DoRestFallbackInfo, masm);
}
4861 :
4862 : } // namespace jit
4863 : } // namespace js
|