Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/Bailouts.h"
8 : #include "jit/JitCompartment.h"
9 : #include "jit/JitFrames.h"
10 : #include "jit/Linker.h"
11 : #ifdef JS_ION_PERF
12 : # include "jit/PerfSpewer.h"
13 : #endif
14 : #include "jit/VMFunctions.h"
15 : #include "jit/x64/SharedICHelpers-x64.h"
16 : #include "vtune/VTuneWrapper.h"
17 :
18 : #include "jit/MacroAssembler-inl.h"
19 :
20 : using namespace js;
21 : using namespace js::jit;
22 :
23 : using mozilla::IsPowerOfTwo;
24 :
// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
// Used below when dumping the full machine state for invalidation/bailout.
static const LiveRegisterSet AllRegs =
    LiveRegisterSet(GeneralRegisterSet(Registers::AllMask),
                    FloatRegisterSet(FloatRegisters::AllMask));
30 :
31 : // Generates a trampoline for calling Jit compiled code from a C++ function.
32 : // The trampoline use the EnterJitCode signature, with the standard x64 fastcall
33 : // calling convention.
34 : JitCode*
35 8 : JitRuntime::generateEnterJIT(JSContext* cx, EnterJitType type)
36 : {
37 16 : MacroAssembler masm(cx);
38 8 : masm.assertStackAlignment(ABIStackAlignment, -int32_t(sizeof(uintptr_t)) /* return address */);
39 :
40 8 : const Register reg_code = IntArgReg0;
41 8 : const Register reg_argc = IntArgReg1;
42 8 : const Register reg_argv = IntArgReg2;
43 8 : MOZ_ASSERT(OsrFrameReg == IntArgReg3);
44 :
45 : #if defined(_WIN64)
46 : const Address token = Address(rbp, 16 + ShadowStackSpace);
47 : const Operand scopeChain = Operand(rbp, 24 + ShadowStackSpace);
48 : const Operand numStackValuesAddr = Operand(rbp, 32 + ShadowStackSpace);
49 : const Operand result = Operand(rbp, 40 + ShadowStackSpace);
50 : #else
51 8 : const Register token = IntArgReg4;
52 8 : const Register scopeChain = IntArgReg5;
53 8 : const Operand numStackValuesAddr = Operand(rbp, 16 + ShadowStackSpace);
54 8 : const Operand result = Operand(rbp, 24 + ShadowStackSpace);
55 : #endif
56 :
57 : // Save old stack frame pointer, set new stack frame pointer.
58 8 : masm.push(rbp);
59 8 : masm.mov(rsp, rbp);
60 :
61 : // Save non-volatile registers. These must be saved by the trampoline, rather
62 : // than by the JIT'd code, because they are scanned by the conservative scanner.
63 8 : masm.push(rbx);
64 8 : masm.push(r12);
65 8 : masm.push(r13);
66 8 : masm.push(r14);
67 8 : masm.push(r15);
68 : #if defined(_WIN64)
69 : masm.push(rdi);
70 : masm.push(rsi);
71 :
72 : // 16-byte aligment for vmovdqa
73 : masm.subq(Imm32(16 * 10 + 8), rsp);
74 :
75 : masm.vmovdqa(xmm6, Operand(rsp, 16 * 0));
76 : masm.vmovdqa(xmm7, Operand(rsp, 16 * 1));
77 : masm.vmovdqa(xmm8, Operand(rsp, 16 * 2));
78 : masm.vmovdqa(xmm9, Operand(rsp, 16 * 3));
79 : masm.vmovdqa(xmm10, Operand(rsp, 16 * 4));
80 : masm.vmovdqa(xmm11, Operand(rsp, 16 * 5));
81 : masm.vmovdqa(xmm12, Operand(rsp, 16 * 6));
82 : masm.vmovdqa(xmm13, Operand(rsp, 16 * 7));
83 : masm.vmovdqa(xmm14, Operand(rsp, 16 * 8));
84 : masm.vmovdqa(xmm15, Operand(rsp, 16 * 9));
85 : #endif
86 :
87 : // Save arguments passed in registers needed after function call.
88 8 : masm.push(result);
89 :
90 : // Remember stack depth without padding and arguments.
91 8 : masm.mov(rsp, r14);
92 :
93 : // Remember number of bytes occupied by argument vector
94 8 : masm.mov(reg_argc, r13);
95 :
96 : // if we are constructing, that also needs to include newTarget
97 : {
98 16 : Label noNewTarget;
99 16 : masm.branchTest32(Assembler::Zero, token, Imm32(CalleeToken_FunctionConstructing),
100 8 : &noNewTarget);
101 :
102 8 : masm.addq(Imm32(1), r13);
103 :
104 8 : masm.bind(&noNewTarget);
105 : }
106 :
107 8 : masm.shll(Imm32(3), r13); // r13 = argc * sizeof(Value)
108 : static_assert(sizeof(Value) == 1 << 3, "Constant is baked in assembly code");
109 :
110 : // Guarantee stack alignment of Jit frames.
111 : //
112 : // This code compensates for the offset created by the copy of the vector of
113 : // arguments, such that the jit frame will be aligned once the return
114 : // address is pushed on the stack.
115 : //
116 : // In the computation of the offset, we omit the size of the JitFrameLayout
117 : // which is pushed on the stack, as the JitFrameLayout size is a multiple of
118 : // the JitStackAlignment.
119 8 : masm.mov(rsp, r12);
120 8 : masm.subq(r13, r12);
121 : static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
122 : "No need to consider the JitFrameLayout for aligning the stack");
123 8 : masm.andl(Imm32(JitStackAlignment - 1), r12);
124 8 : masm.subq(r12, rsp);
125 :
126 : /***************************************************************
127 : Loop over argv vector, push arguments onto stack in reverse order
128 : ***************************************************************/
129 :
130 : // r13 still stores the number of bytes in the argument vector.
131 8 : masm.addq(reg_argv, r13); // r13 points above last argument or newTarget
132 :
133 : // while r13 > rdx, push arguments.
134 : {
135 16 : Label header, footer;
136 8 : masm.bind(&header);
137 :
138 8 : masm.cmpPtr(r13, reg_argv);
139 8 : masm.j(AssemblerX86Shared::BelowOrEqual, &footer);
140 :
141 8 : masm.subq(Imm32(8), r13);
142 8 : masm.push(Operand(r13, 0));
143 8 : masm.jmp(&header);
144 :
145 8 : masm.bind(&footer);
146 : }
147 :
148 : // Push the number of actual arguments. |result| is used to store the
149 : // actual number of arguments without adding an extra argument to the enter
150 : // JIT.
151 8 : masm.movq(result, reg_argc);
152 8 : masm.unboxInt32(Operand(reg_argc, 0), reg_argc);
153 8 : masm.push(reg_argc);
154 :
155 : // Push the callee token.
156 8 : masm.push(token);
157 :
158 : /*****************************************************************
159 : Push the number of bytes we've pushed so far on the stack and call
160 : *****************************************************************/
161 8 : masm.subq(rsp, r14);
162 :
163 : // Create a frame descriptor.
164 8 : masm.makeFrameDescriptor(r14, JitFrame_Entry, JitFrameLayout::Size());
165 8 : masm.push(r14);
166 :
167 8 : CodeLabel returnLabel;
168 8 : CodeLabel oomReturnLabel;
169 8 : if (type == EnterJitBaseline) {
170 : // Handle OSR.
171 4 : AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
172 4 : regs.takeUnchecked(OsrFrameReg);
173 4 : regs.take(rbp);
174 4 : regs.take(reg_code);
175 :
176 : // Ensure that |scratch| does not end up being JSReturnOperand.
177 : // Do takeUnchecked because on Win64/x64, reg_code (IntArgReg0) and JSReturnOperand are
178 : // the same (rcx). See bug 849398.
179 4 : regs.takeUnchecked(JSReturnOperand);
180 4 : Register scratch = regs.takeAny();
181 :
182 8 : Label notOsr;
183 4 : masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, ¬Osr);
184 :
185 4 : Register numStackValues = regs.takeAny();
186 4 : masm.movq(numStackValuesAddr, numStackValues);
187 :
188 : // Push return address
189 4 : masm.mov(returnLabel.patchAt(), scratch);
190 4 : masm.push(scratch);
191 :
192 : // Push previous frame pointer.
193 4 : masm.push(rbp);
194 :
195 : // Reserve frame.
196 4 : Register framePtr = rbp;
197 4 : masm.subPtr(Imm32(BaselineFrame::Size()), rsp);
198 4 : masm.mov(rsp, framePtr);
199 :
200 : #ifdef XP_WIN
201 : // Can't push large frames blindly on windows. Touch frame memory incrementally.
202 : masm.mov(numStackValues, scratch);
203 : masm.lshiftPtr(Imm32(3), scratch);
204 : masm.subPtr(scratch, framePtr);
205 : {
206 : masm.movePtr(rsp, scratch);
207 : masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
208 :
209 : Label touchFrameLoop;
210 : Label touchFrameLoopEnd;
211 : masm.bind(&touchFrameLoop);
212 : masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
213 : masm.store32(Imm32(0), Address(scratch, 0));
214 : masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
215 : masm.jump(&touchFrameLoop);
216 : masm.bind(&touchFrameLoopEnd);
217 : }
218 : masm.mov(rsp, framePtr);
219 : #endif
220 :
221 : // Reserve space for locals and stack values.
222 4 : Register valuesSize = regs.takeAny();
223 4 : masm.mov(numStackValues, valuesSize);
224 4 : masm.shll(Imm32(3), valuesSize);
225 4 : masm.subPtr(valuesSize, rsp);
226 :
227 : // Enter exit frame.
228 4 : masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), valuesSize);
229 4 : masm.makeFrameDescriptor(valuesSize, JitFrame_BaselineJS, ExitFrameLayout::Size());
230 4 : masm.push(valuesSize);
231 4 : masm.push(Imm32(0)); // Fake return address.
232 : // No GC things to mark, push a bare token.
233 4 : masm.loadJSContext(scratch);
234 4 : masm.enterFakeExitFrame(scratch, scratch, ExitFrameLayoutBareToken);
235 :
236 4 : regs.add(valuesSize);
237 :
238 4 : masm.push(framePtr);
239 4 : masm.push(reg_code);
240 :
241 4 : masm.setupUnalignedABICall(scratch);
242 4 : masm.passABIArg(framePtr); // BaselineFrame
243 4 : masm.passABIArg(OsrFrameReg); // InterpreterFrame
244 4 : masm.passABIArg(numStackValues);
245 4 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, jit::InitBaselineFrameForOsr));
246 :
247 4 : masm.pop(reg_code);
248 4 : masm.pop(framePtr);
249 :
250 4 : MOZ_ASSERT(reg_code != ReturnReg);
251 :
252 8 : Label error;
253 4 : masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), rsp);
254 4 : masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
255 4 : masm.branchIfFalseBool(ReturnReg, &error);
256 :
257 : // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
258 : // if profiler instrumentation is enabled.
259 : {
260 8 : Label skipProfilingInstrumentation;
261 4 : Register realFramePtr = numStackValues;
262 4 : AbsoluteAddress addressOfEnabled(cx->runtime()->geckoProfiler().addressOfEnabled());
263 8 : masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
264 4 : &skipProfilingInstrumentation);
265 4 : masm.lea(Operand(framePtr, sizeof(void*)), realFramePtr);
266 4 : masm.profilerEnterFrame(realFramePtr, scratch);
267 4 : masm.bind(&skipProfilingInstrumentation);
268 : }
269 :
270 4 : masm.jump(reg_code);
271 :
272 : // OOM: load error value, discard return address and previous frame
273 : // pointer and return.
274 4 : masm.bind(&error);
275 4 : masm.mov(framePtr, rsp);
276 4 : masm.addPtr(Imm32(2 * sizeof(uintptr_t)), rsp);
277 4 : masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
278 4 : masm.mov(oomReturnLabel.patchAt(), scratch);
279 4 : masm.jump(scratch);
280 :
281 4 : masm.bind(¬Osr);
282 4 : masm.movq(scopeChain, R1.scratchReg());
283 : }
284 :
285 : // The call will push the return address on the stack, thus we check that
286 : // the stack would be aligned once the call is complete.
287 8 : masm.assertStackAlignment(JitStackAlignment, sizeof(uintptr_t));
288 :
289 : // Call function.
290 8 : masm.callJitNoProfiler(reg_code);
291 :
292 8 : if (type == EnterJitBaseline) {
293 : // Baseline OSR will return here.
294 4 : masm.use(returnLabel.target());
295 4 : masm.addCodeLabel(returnLabel);
296 4 : masm.use(oomReturnLabel.target());
297 4 : masm.addCodeLabel(oomReturnLabel);
298 : }
299 :
300 : // Pop arguments and padding from stack.
301 8 : masm.pop(r14); // Pop and decode descriptor.
302 8 : masm.shrq(Imm32(FRAMESIZE_SHIFT), r14);
303 8 : masm.addq(r14, rsp); // Remove arguments.
304 :
305 : /*****************************************************************
306 : Place return value where it belongs, pop all saved registers
307 : *****************************************************************/
308 8 : masm.pop(r12); // vp
309 8 : masm.storeValue(JSReturnOperand, Operand(r12, 0));
310 :
311 : // Restore non-volatile registers.
312 : #if defined(_WIN64)
313 : masm.vmovdqa(Operand(rsp, 16 * 0), xmm6);
314 : masm.vmovdqa(Operand(rsp, 16 * 1), xmm7);
315 : masm.vmovdqa(Operand(rsp, 16 * 2), xmm8);
316 : masm.vmovdqa(Operand(rsp, 16 * 3), xmm9);
317 : masm.vmovdqa(Operand(rsp, 16 * 4), xmm10);
318 : masm.vmovdqa(Operand(rsp, 16 * 5), xmm11);
319 : masm.vmovdqa(Operand(rsp, 16 * 6), xmm12);
320 : masm.vmovdqa(Operand(rsp, 16 * 7), xmm13);
321 : masm.vmovdqa(Operand(rsp, 16 * 8), xmm14);
322 : masm.vmovdqa(Operand(rsp, 16 * 9), xmm15);
323 :
324 : masm.addq(Imm32(16 * 10 + 8), rsp);
325 :
326 : masm.pop(rsi);
327 : masm.pop(rdi);
328 : #endif
329 8 : masm.pop(r15);
330 8 : masm.pop(r14);
331 8 : masm.pop(r13);
332 8 : masm.pop(r12);
333 8 : masm.pop(rbx);
334 :
335 : // Restore frame pointer and return.
336 8 : masm.pop(rbp);
337 8 : masm.ret();
338 :
339 16 : Linker linker(masm);
340 8 : JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
341 :
342 : #ifdef JS_ION_PERF
343 : writePerfSpewerJitCodeProfile(code, "EnterJIT");
344 : #endif
345 : #ifdef MOZ_VTUNE
346 8 : vtune::MarkStub(code, "EnterJIT");
347 : #endif
348 :
349 16 : return code;
350 : }
351 :
// Generates the invalidation thunk: dumps the full machine state, calls
// jit::InvalidationBailout to build bailout info for the invalidated Ion
// frame, then strips the dead frame and jumps to the shared bailout tail.
// Returns the new JitCode, or nullptr on OOM (callers check linker result).
JitCode*
JitRuntime::generateInvalidator(JSContext* cx)
{
    AutoJitContextAlloc ajca(cx);
    MacroAssembler masm(cx);

    // See explanatory comment in x86's JitRuntime::generateInvalidator.

    // Discard the return address pushed by the invalidation epilogue.
    masm.addq(Imm32(sizeof(uintptr_t)), rsp);

    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    masm.movq(rsp, rax); // Argument to jit::InvalidationBailout.

    // Make space for InvalidationBailout's frameSize outparam.
    masm.reserveStack(sizeof(size_t));
    masm.movq(rsp, rbx);

    // Make space for InvalidationBailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void*));
    masm.movq(rsp, r9);

    masm.setupUnalignedABICall(rdx);
    masm.passABIArg(rax);
    masm.passABIArg(rbx);
    masm.passABIArg(r9);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, InvalidationBailout));

    masm.pop(r9); // Get the bailoutInfo outparam.
    masm.pop(rbx); // Get the frameSize outparam.

    // Pop the machine state and the dead frame.
    // rbx holds frameSize, so this removes the register dump plus the
    // invalidated frame in one lea.
    masm.lea(Operand(rsp, rbx, TimesOne, sizeof(InvalidationBailoutStack)), rsp);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "Invalidator");
#endif

    return code;
}
403 :
404 : JitCode*
405 4 : JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
406 : {
407 : // Do not erase the frame pointer in this function.
408 :
409 8 : MacroAssembler masm(cx);
410 : // Caller:
411 : // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- rsp
412 : // '--- #r8 ---'
413 :
414 : // ArgumentsRectifierReg contains the |nargs| pushed onto the current frame.
415 : // Including |this|, there are (|nargs| + 1) arguments to copy.
416 4 : MOZ_ASSERT(ArgumentsRectifierReg == r8);
417 :
418 : // Add |this|, in the counter of known arguments.
419 4 : masm.addl(Imm32(1), r8);
420 :
421 : // Load |nformals| into %rcx.
422 4 : masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfCalleeToken()), rax);
423 4 : masm.mov(rax, rcx);
424 4 : masm.andq(Imm32(uint32_t(CalleeTokenMask)), rcx);
425 4 : masm.movzwl(Operand(rcx, JSFunction::offsetOfNargs()), rcx);
426 :
427 : // Stash another copy in r11, since we are going to do destructive operations
428 : // on rcx
429 4 : masm.mov(rcx, r11);
430 :
431 : static_assert(CalleeToken_FunctionConstructing == 1,
432 : "Ensure that we can use the constructing bit to count the value");
433 4 : masm.mov(rax, rdx);
434 4 : masm.andq(Imm32(uint32_t(CalleeToken_FunctionConstructing)), rdx);
435 :
436 : // Including |this|, and |new.target|, there are (|nformals| + 1 + isConstructing)
437 : // arguments to push to the stack. Then we push a JitFrameLayout. We
438 : // compute the padding expressed in the number of extra |undefined| values
439 : // to push on the stack.
440 : static_assert(sizeof(JitFrameLayout) % JitStackAlignment == 0,
441 : "No need to consider the JitFrameLayout for aligning the stack");
442 : static_assert(JitStackAlignment % sizeof(Value) == 0,
443 : "Ensure that we can pad the stack by pushing extra UndefinedValue");
444 : static_assert(IsPowerOfTwo(JitStackValueAlignment),
445 : "must have power of two for masm.andl to do its job");
446 :
447 4 : masm.addl(Imm32(JitStackValueAlignment - 1 /* for padding */ + 1 /* for |this| */), rcx);
448 4 : masm.addl(rdx, rcx);
449 4 : masm.andl(Imm32(~(JitStackValueAlignment - 1)), rcx);
450 :
451 : // Load the number of |undefined|s to push into %rcx.
452 4 : masm.subq(r8, rcx);
453 :
454 : // Caller:
455 : // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- rsp <- r9
456 : // '------ #r8 -------'
457 : //
458 : // Rectifier frame:
459 : // [undef] [undef] [undef] [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]]
460 : // '------- #rcx --------' '------ #r8 -------'
461 :
462 : // Copy the number of actual arguments
463 4 : masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfNumActualArgs()), rdx);
464 :
465 4 : masm.moveValue(UndefinedValue(), r10);
466 :
467 4 : masm.movq(rsp, r9); // Save %rsp.
468 :
469 : // Push undefined. (including the padding)
470 : {
471 8 : Label undefLoopTop;
472 4 : masm.bind(&undefLoopTop);
473 :
474 4 : masm.push(r10);
475 4 : masm.subl(Imm32(1), rcx);
476 4 : masm.j(Assembler::NonZero, &undefLoopTop);
477 : }
478 :
479 : // Get the topmost argument.
480 : static_assert(sizeof(Value) == 8, "TimesEight is used to skip arguments");
481 :
482 : // | - sizeof(Value)| is used to put rcx such that we can read the last
483 : // argument, and not the value which is after.
484 4 : BaseIndex b = BaseIndex(r9, r8, TimesEight, sizeof(RectifierFrameLayout) - sizeof(Value));
485 4 : masm.lea(Operand(b), rcx);
486 :
487 : // Copy & Push arguments, |nargs| + 1 times (to include |this|).
488 : {
489 8 : Label copyLoopTop;
490 :
491 4 : masm.bind(©LoopTop);
492 4 : masm.push(Operand(rcx, 0x0));
493 4 : masm.subq(Imm32(sizeof(Value)), rcx);
494 4 : masm.subl(Imm32(1), r8);
495 4 : masm.j(Assembler::NonZero, ©LoopTop);
496 : }
497 :
498 : // if constructing, copy newTarget
499 : {
500 8 : Label notConstructing;
501 :
502 8 : masm.branchTest32(Assembler::Zero, rax, Imm32(CalleeToken_FunctionConstructing),
503 4 : ¬Constructing);
504 :
505 : // thisFrame[numFormals] = prevFrame[argc]
506 4 : ValueOperand newTarget(r10);
507 :
508 : // +1 for |this|. We want vp[argc], so don't subtract 1
509 4 : BaseIndex newTargetSrc(r9, rdx, TimesEight, sizeof(RectifierFrameLayout) + sizeof(Value));
510 4 : masm.loadValue(newTargetSrc, newTarget);
511 :
512 : // Again, 1 for |this|
513 4 : BaseIndex newTargetDest(rsp, r11, TimesEight, sizeof(Value));
514 4 : masm.storeValue(newTarget, newTargetDest);
515 :
516 4 : masm.bind(¬Constructing);
517 : }
518 :
519 :
520 : // Caller:
521 : // [arg2] [arg1] [this] [[argc] [callee] [descr] [raddr]] <- r9
522 : //
523 : //
524 : // Rectifier frame:
525 : // [undef] [undef] [undef] [arg2] [arg1] [this] <- rsp [[argc] [callee] [descr] [raddr]]
526 : //
527 :
528 : // Construct descriptor.
529 4 : masm.subq(rsp, r9);
530 4 : masm.makeFrameDescriptor(r9, JitFrame_Rectifier, JitFrameLayout::Size());
531 :
532 : // Construct JitFrameLayout.
533 4 : masm.push(rdx); // numActualArgs
534 4 : masm.push(rax); // callee token
535 4 : masm.push(r9); // descriptor
536 :
537 : // Call the target function.
538 : // Note that this code assumes the function is JITted.
539 4 : masm.andq(Imm32(uint32_t(CalleeTokenMask)), rax);
540 4 : masm.loadPtr(Address(rax, JSFunction::offsetOfNativeOrScript()), rax);
541 4 : masm.loadBaselineOrIonRaw(rax, rax, nullptr);
542 4 : uint32_t returnOffset = masm.callJitNoProfiler(rax);
543 :
544 : // Remove the rectifier frame.
545 4 : masm.pop(r9); // r9 <- descriptor with FrameType.
546 4 : masm.shrq(Imm32(FRAMESIZE_SHIFT), r9);
547 4 : masm.pop(r11); // Discard calleeToken.
548 4 : masm.pop(r11); // Discard numActualArgs.
549 4 : masm.addq(r9, rsp); // Discard pushed arguments.
550 :
551 4 : masm.ret();
552 :
553 8 : Linker linker(masm);
554 4 : JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
555 :
556 : #ifdef JS_ION_PERF
557 : writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
558 : #endif
559 : #ifdef MOZ_VTUNE
560 4 : vtune::MarkStub(code, "ArgumentsRectifier");
561 : #endif
562 :
563 4 : if (returnAddrOut)
564 4 : *returnAddrOut = (void*)(code->raw() + returnOffset);
565 8 : return code;
566 : }
567 :
// Spills the complete machine state (GPRs + FP registers) onto the stack in
// RegisterDump layout and leaves the dump's base address in |spArg|, ready to
// be passed as the first argument to a bailout VM function.
static void
PushBailoutFrame(MacroAssembler& masm, Register spArg)
{
    // Push registers such that we can access them from [base + code].
    if (JitSupportsSimd()) {
        masm.PushRegsInMask(AllRegs);
    } else {
        // When SIMD isn't supported, PushRegsInMask reduces the set of float
        // registers to be double-sized, while the RegisterDump expects each of
        // the float registers to have the maximal possible size
        // (Simd128DataSize). To work around this, we just spill the double
        // registers by hand here, using the register dump offset directly.
        for (GeneralRegisterBackwardIterator iter(AllRegs.gprs()); iter.more(); ++iter)
            masm.Push(*iter);

        masm.reserveStack(sizeof(RegisterDump::FPUArray));
        for (FloatRegisterBackwardIterator iter(AllRegs.fpus()); iter.more(); ++iter) {
            FloatRegister reg = *iter;
            Address spillAddress(StackPointer, reg.getRegisterDumpOffsetInBytes());
            masm.storeDouble(reg, spillAddress);
        }
    }

    // Get the stack pointer into a register, pre-alignment.
    masm.movq(rsp, spArg);
}
594 :
// Emits the body of the bailout handler: dump registers, call jit::Bailout to
// construct BailoutInfo, unwind the bailout frame plus the bailed-out Ion
// frame, and jump to the shared bailout tail with the info pointer in r9.
// |frameClass| is unused on x64 (no bailout tables); see generateBailoutTable.
static void
GenerateBailoutThunk(JSContext* cx, MacroAssembler& masm, uint32_t frameClass)
{
    PushBailoutFrame(masm, r8);

    // Make space for Bailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void*));
    masm.movq(rsp, r9);

    // Call the bailout function.
    masm.setupUnalignedABICall(rax);
    masm.passABIArg(r8);
    masm.passABIArg(r9);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, Bailout));

    masm.pop(r9); // Get the bailoutInfo outparam.

    // Stack is:
    //     [frame]
    //     snapshotOffset
    //     frameSize
    //     [bailoutFrame]
    //
    // Remove both the bailout frame and the topmost Ion frame's stack.
    static const uint32_t BailoutDataSize = sizeof(RegisterDump);
    masm.addq(Imm32(BailoutDataSize), rsp);
    // rcx <- frameSize; then drop frameSize itself plus the frame.
    masm.pop(rcx);
    masm.lea(Operand(rsp, rcx, TimesOne, sizeof(void*)), rsp);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
    JitCode* bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);
}
628 :
// Bailout tables are an x86/ARM mechanism; on x64 bailouts always go through
// the single handler generated by generateBailoutHandler, so reaching this is
// a bug.
JitCode*
JitRuntime::generateBailoutTable(JSContext* cx, uint32_t frameClass)
{
    MOZ_CRASH("x64 does not use bailout tables");
}
634 :
// Links the single x64 bailout handler stub (see GenerateBailoutThunk).
// NO_FRAME_SIZE_CLASS_ID is passed since x64 has no bailout tables.
JitCode*
JitRuntime::generateBailoutHandler(JSContext* cx)
{
    MacroAssembler masm;
    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "BailoutHandler");
#endif

    return code;
}
653 :
// Generates (and memoizes in functionWrappers_) a stub that bridges from JIT
// code into the C++ VM function described by |f|: it links an exit frame,
// marshals the explicit arguments from the JIT stack into the native ABI,
// reserves/loads the optional outparam, calls f.wrapped, tests the failure
// convention of f.failType(), and returns popping the exit frame plus the
// caller-pushed argument slots. Returns nullptr on OOM.
JitCode*
JitRuntime::generateVMWrapper(JSContext* cx, const VMFunction& f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    // Each VMFunction gets exactly one wrapper per runtime; reuse if present.
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();

    // Generate a separated code for the wrapper.
    MacroAssembler masm;

    // Avoid conflicts with argument registers while discarding the result after
    // the function call.
    AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask);

    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                  "Wrapper register set must be a superset of Volatile register set");

    // The context is the first argument.
    Register cxreg = IntArgReg0;
    regs.take(cxreg);

    // Stack is:
    //    ... frame ...
    //  +12 [args]
    //  +8  descriptor
    //  +0  returnAddress
    //
    // We're aligned to an exit frame, so link it up.
    masm.loadJSContext(cxreg);
    masm.enterExitFrame(cxreg, regs.getAny(), &f);

    // Save the current stack pointer as the base for copying arguments.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = r10;
        regs.take(argsBase);
        masm.lea(Operand(rsp, ExitFrameLayout::SizeWithFooter()), argsBase);
    }

    // Reserve space for the outparameter.
    // NOTE(review): |esp| below appears to be this assembler's alias for the
    // 64-bit stack pointer (used interchangeably with rsp here) — confirm
    // against Registers-x64 definitions.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(Value));
        masm.movq(esp, outReg);
        break;

      case Type_Handle:
        outReg = regs.takeAny();
        masm.PushEmptyRooted(f.outParamRootType);
        masm.movq(esp, outReg);
        break;

      case Type_Int32:
      case Type_Bool:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(int32_t));
        masm.movq(esp, outReg);
        break;

      case Type_Double:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(double));
        masm.movq(esp, outReg);
        break;

      case Type_Pointer:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(uintptr_t));
        masm.movq(esp, outReg);
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    if (!generateTLEnterVM(cx, masm, f))
        return nullptr;

    masm.setupUnalignedABICall(regs.getAny());
    masm.passABIArg(cxreg);

    size_t argDisp = 0;

    // Copy arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            if (f.argPassedInFloatReg(explicitArg))
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            else
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(void*);
            break;
          case VMFunction::WordByRef:
            // Pass the *address* of the stack slot (e.g. for Handle args).
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(void*);
            break;
          case VMFunction::DoubleByValue:
          case VMFunction::DoubleByRef:
            MOZ_CRASH("NYI: x64 callVM should not be used with 128bits values.");
        }
    }

    // Copy the implicit outparam, if any.
    if (outReg != InvalidReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    if (!generateTLExitVM(cx, masm, f))
        return nullptr;

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        // Null object pointer signals failure.
        masm.branchTestPtr(Assembler::Zero, rax, rax, masm.failureLabel());
        break;
      case Type_Bool:
        // False return signals failure.
        masm.testb(rax, rax);
        masm.j(Assembler::Zero, masm.failureLabel());
        break;
      default:
        MOZ_CRASH("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        break;

      case Type_Value:
        masm.loadValue(Address(esp, 0), JSReturnOperand);
        masm.freeStack(sizeof(Value));
        break;

      case Type_Int32:
        masm.load32(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(int32_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(int32_t));
        break;

      case Type_Double:
        MOZ_ASSERT(cx->runtime()->jitSupportsFloatingPoint);
        masm.loadDouble(Address(esp, 0), ReturnDoubleReg);
        masm.freeStack(sizeof(double));
        break;

      case Type_Pointer:
        masm.loadPtr(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }
    masm.leaveExitFrame();
    // Pop the exit frame plus the argument slots the JIT caller pushed.
    masm.retn(Imm32(sizeof(ExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(void*) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    JitCode* wrapper = linker.newCode<NoGC>(cx, OTHER_CODE);
    if (!wrapper)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(wrapper, "VMWrapper");
#endif

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
    // use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

    return wrapper;
}
846 :
// Generates the GC pre-write-barrier stub for values of the given MIRType.
// The stub saves all volatile registers, calls the marking function with
// (runtime, cell) — the cell arriving in PreBarrierReg — then restores state
// and returns, so JIT callers can treat it as having no visible effect.
JitCode*
JitRuntime::generatePreBarrier(JSContext* cx, MIRType type)
{
    MacroAssembler masm;

    // Only volatile registers need saving: the stub obeys the C++ ABI.
    LiveRegisterSet regs =
        LiveRegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                        FloatRegisterSet(FloatRegisters::VolatileMask));
    masm.PushRegsInMask(regs);

    MOZ_ASSERT(PreBarrierReg == rdx);
    masm.mov(ImmPtr(cx->runtime()), rcx);

    masm.setupUnalignedABICall(rax);
    masm.passABIArg(rcx);
    masm.passABIArg(rdx);
    masm.callWithABI(IonMarkFunction(type));

    masm.PopRegsInMask(regs);
    masm.ret();

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "PreBarrier");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "PreBarrier");
#endif

    return code;
}
880 :
// VMFunction descriptor for jit::HandleDebugTrap, called by the debug trap
// handler stub below via the standard VM-wrapper mechanism.
typedef bool (*HandleDebugTrapFn)(JSContext*, BaselineFrame*, uint8_t*, bool*);
static const VMFunction HandleDebugTrapInfo =
    FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap, "HandleDebugTrap");
884 :
// Generates the stub Baseline calls at debugger breakpoints/step points. It
// enters a stub frame, calls the HandleDebugTrap VM function with the return
// address and BaselineFrame pointer, then either resumes at the current pc
// (returned false) or performs a forced return from the JS frame (true).
JitCode*
JitRuntime::generateDebugTrapHandler(JSContext* cx)
{
    MacroAssembler masm;
#ifndef JS_USE_LINK_REGISTER
    // The first value contains the return addres,
    // which we pull into ICTailCallReg for tail calls.
    masm.setFramePushed(sizeof(intptr_t));
#endif

    Register scratch1 = rax;
    Register scratch2 = rcx;
    Register scratch3 = rdx;

    // Load the return address in scratch1.
    masm.loadPtr(Address(rsp, 0), scratch1);

    // Load BaselineFrame pointer in scratch2.
    masm.mov(rbp, scratch2);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is marked
    // during GC.
    masm.movePtr(ImmPtr(nullptr), ICStubReg);
    EmitBaselineEnterStubFrame(masm, scratch3);

    JitCode* code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    masm.push(scratch1);
    masm.push(scratch2);
    EmitBaselineCallVM(code, masm);

    EmitBaselineLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
    masm.ret();

    masm.bind(&forcedReturn);
    // NOTE(review): |ebp| here appears to alias the 64-bit frame pointer
    // (rbp is used interchangeably two lines below) — confirm against the
    // Registers-x64 definitions.
    masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.mov(rbp, rsp);
    masm.pop(rbp);

    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
    // is set to the correct caller frame.
    {
        Label skipProfilingInstrumentation;
        AbsoluteAddress addressOfEnabled(cx->runtime()->geckoProfiler().addressOfEnabled());
        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
        masm.profilerExitFrame();
        masm.bind(&skipProfilingInstrumentation);
    }

    masm.ret();

    Linker linker(masm);
    JitCode* codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}
959 :
960 : JitCode*
961 4 : JitRuntime::generateExceptionTailStub(JSContext* cx, void* handler)
962 : {
963 8 : MacroAssembler masm;
964 :
965 4 : masm.handleFailureWithHandlerTail(handler);
966 :
967 8 : Linker linker(masm);
968 4 : JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
969 :
970 : #ifdef JS_ION_PERF
971 : writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
972 : #endif
973 : #ifdef MOZ_VTUNE
974 4 : vtune::MarkStub(code, "ExceptionTailStub");
975 : #endif
976 :
977 8 : return code;
978 : }
979 :
980 : JitCode*
981 4 : JitRuntime::generateBailoutTailStub(JSContext* cx)
982 : {
983 8 : MacroAssembler masm;
984 :
985 4 : masm.generateBailoutTail(rdx, r9);
986 :
987 8 : Linker linker(masm);
988 4 : JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
989 :
990 : #ifdef JS_ION_PERF
991 : writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
992 : #endif
993 : #ifdef MOZ_VTUNE
994 4 : vtune::MarkStub(code, "BailoutTailStub");
995 : #endif
996 :
997 8 : return code;
998 : }
999 :
// Generates the stub executed when profiling is enabled and a jit frame is
// about to return: it walks up one frame (or through stub/rectifier frames)
// and updates the activation's lastProfilingFrame/lastProfilingCallSite
// fields before performing the actual ret().
JitCode*
JitRuntime::generateProfilerExitFrameTailStub(JSContext* cx)
{
    MacroAssembler masm;

    Register scratch1 = r8;
    Register scratch2 = r9;
    Register scratch3 = r10;
    Register scratch4 = r11;

    //
    // The code generated below expects that the current stack pointer points
    // to an Ion or Baseline frame, at the state it would be immediately
    // before a ret(). Thus, after this stub's business is done, it executes
    // a ret() and returns directly to the caller script, on behalf of the
    // callee script that jumped to this code.
    //
    // Thus the expected stack is:
    //
    //                                   StackPointer ----+
    //                                                    v
    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
    // MEM-HI                                       MEM-LOW
    //
    //
    // The generated jitcode is responsible for overwriting the
    // jitActivation->lastProfilingFrame field with a pointer to the previous
    // Ion or Baseline jit-frame that was pushed before this one. It is also
    // responsible for overwriting jitActivation->lastProfilingCallSite with
    // the return address into that frame. The frame could either be an
    // immediate "caller" frame, or it could be a frame in a previous
    // JitActivation (if the current frame was entered from C++, and the C++
    // was entered by some caller jit-frame further down the stack).
    //
    // So this jitcode is responsible for "walking up" the jit stack, finding
    // the previous Ion or Baseline JS frame, and storing its address and the
    // return address into the appropriate fields on the current jitActivation.
    //
    // There are a fixed number of different path types that can lead to the
    // current frame, which is either a baseline or ion frame:
    //
    // <Baseline-Or-Ion>
    // ^
    // |
    // ^--- Ion
    // |
    // ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Argument Rectifier
    // |    ^
    // |    |
    // |    ^--- Ion
    // |    |
    // |    ^--- Baseline Stub <---- Baseline
    // |
    // ^--- Entry Frame (From C++)
    //
    // Load the current profiling activation so we can update its
    // lastProfilingFrame / lastProfilingCallSite fields below.
    Register actReg = scratch4;
    masm.loadJSContext(actReg);
    masm.loadPtr(Address(actReg, offsetof(JSContext, profilingActivation_)), actReg);

    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());

#ifdef DEBUG
    // Ensure that the frame we are exiting is the current lastProfilingFrame.
    // (A null stored frame is also accepted.)
    {
        masm.loadPtr(lastProfilingFrame, scratch1);
        Label checkOk;
        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
        masm.assumeUnreachable(
            "Mismatch between stored lastProfilingFrame and current stack pointer.");
        masm.bind(&checkOk);
    }
#endif

    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);

    // Going into the conditionals, we will have:
    //      FrameDescriptor.size in scratch1
    //      FrameDescriptor.type in scratch2
    masm.movePtr(scratch1, scratch2);
    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
    masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch2);

    // Handling of each case is dependent on FrameDescriptor.type
    Label handle_IonJS;
    Label handle_BaselineStub;
    Label handle_Rectifier;
    Label handle_IonICCall;
    Label handle_Entry;
    Label end; // NOTE(review): declared but never bound or referenced.

    // BaselineJS frames share the IonJS handling: both store the return
    // address and compute the caller frame the same way.
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonICCall), &handle_IonICCall);
    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);

    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");

    //
    // JitFrame_IonJS
    //
    // Stack layout:
    //                  ...
    //                  Ion-Descriptor
    //     Prev-FP ---> Ion-ReturnAddr
    //                  ... previous frame data ... |- Descriptor.Size
    //                  ... arguments ...           |
    //                  ActualArgc          |
    //                  CalleeToken         |- JitFrameLayout::Size()
    //                  Descriptor          |
    //        FP -----> ReturnAddr          |
    //
    masm.bind(&handle_IonJS);
    {
        // returning directly to an IonJS frame. Store return addr to frame
        // in lastProfilingCallSite.
        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        // Store return frame in lastProfilingFrame.
        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_BaselineStub
    //
    // Look past the stub and store the frame pointer to
    // the baselineJS frame prior to it.
    //
    // Stack layout:
    //              ...
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-PrevFramePointer
    //      |       ... BL-FrameData ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
    //              ... arguments ...           |
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    // We take advantage of the fact that the stub frame saves the frame
    // pointer pointing to the baseline frame, so a bunch of calculation can
    // be avoided.
    //
    masm.bind(&handle_BaselineStub);
    {
        BaseIndex stubFrameReturnAddr(StackPointer, scratch1, TimesOne,
                                      JitFrameLayout::Size() +
                                      BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch2);
        masm.storePtr(scratch2, lastProfilingCallSite);

        BaseIndex stubFrameSavedFramePtr(StackPointer, scratch1, TimesOne,
                                         JitFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
        masm.addPtr(Imm32(sizeof(void*)), scratch2); // Skip past BL-PrevFramePtr
        masm.storePtr(scratch2, lastProfilingFrame);
        masm.ret();
    }


    //
    // JitFrame_Rectifier
    //
    // The rectifier frame can be preceded by either an IonJS or a
    // BaselineStub frame.
    //
    // Stack layout if caller of rectifier was Ion:
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- Rect-Descriptor.Size
    //              < COMMON LAYOUT >
    //
    // Stack layout if caller of rectifier was Baseline:
    //
    //              BL-Descriptor
    // Prev-FP ---> BL-ReturnAddr
    //      +-----> BL-SavedFramePointer
    //      |       ... baseline frame data ...
    //      |       BLStub-Descriptor
    //      |       BLStub-ReturnAddr
    //      |       BLStub-StubPointer          |
    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
    //              ... args to rectifier ...   |
    //              < COMMON LAYOUT >
    //
    // Common stack layout:
    //
    //              ActualArgc          |
    //              CalleeToken         |- RectifierFrameLayout::Size()
    //              Rect-Descriptor     |
    //              Rect-ReturnAddr     |
    //              ... rectifier data & args ... |- Descriptor.Size
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    //
    masm.bind(&handle_Rectifier);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);

        // Now |scratch1| contains Rect-Descriptor.Size
        // and |scratch2| points to Rectifier frame
        // and |scratch3| contains Rect-Descriptor.Type

        // Check for either Ion or BaselineStub frame.
        Label handle_Rectifier_BaselineStub;
        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
                      &handle_Rectifier_BaselineStub);

        // Handle Rectifier <- IonJS
        // scratch3 := RectFrame[ReturnAddr]
        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
        masm.lea(Operand(scratch2, scratch1, TimesOne, RectifierFrameLayout::Size()), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();

        // Handle Rectifier <- BaselineStub <- BaselineJS
        masm.bind(&handle_Rectifier_BaselineStub);
#ifdef DEBUG
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
            masm.bind(&checkOk);
        }
#endif
        BaseIndex stubFrameReturnAddr(scratch2, scratch1, TimesOne,
                                      RectifierFrameLayout::Size() +
                                      BaselineStubFrameLayout::offsetOfReturnAddress());
        masm.loadPtr(stubFrameReturnAddr, scratch3);
        masm.storePtr(scratch3, lastProfilingCallSite);

        BaseIndex stubFrameSavedFramePtr(scratch2, scratch1, TimesOne,
                                         RectifierFrameLayout::Size() - (2 * sizeof(void*)));
        masm.loadPtr(stubFrameSavedFramePtr, scratch3);
        masm.addPtr(Imm32(sizeof(void*)), scratch3);
        masm.storePtr(scratch3, lastProfilingFrame);
        masm.ret();
    }

    // JitFrame_IonICCall
    //
    // The caller is always an IonJS frame.
    //
    //              Ion-Descriptor
    //              Ion-ReturnAddr
    //              ... ion frame data ... |- CallFrame-Descriptor.Size
    //              StubCode               |
    //              ICCallFrame-Descriptor |- IonICCallFrameLayout::Size()
    //              ICCallFrame-ReturnAddr |
    //              ... call frame data & args ... |- Descriptor.Size
    //              ActualArgc          |
    //              CalleeToken         |- JitFrameLayout::Size()
    //              Descriptor          |
    //    FP -----> ReturnAddr          |
    masm.bind(&handle_IonICCall);
    {
        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);

        // scratch3 := ICCallFrame-Descriptor.Size
        masm.loadPtr(Address(scratch2, IonICCallFrameLayout::offsetOfDescriptor()), scratch3);
#ifdef DEBUG
        // Assert previous frame is an IonJS frame.
        masm.movePtr(scratch3, scratch1);
        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch1);
        {
            Label checkOk;
            masm.branch32(Assembler::Equal, scratch1, Imm32(JitFrame_IonJS), &checkOk);
            masm.assumeUnreachable("IonICCall frame must be preceded by IonJS frame");
            masm.bind(&checkOk);
        }
#endif
        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch3);

        // lastProfilingCallSite := ICCallFrame-ReturnAddr
        masm.loadPtr(Address(scratch2, IonICCallFrameLayout::offsetOfReturnAddress()), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);

        // lastProfilingFrame := ICCallFrame + ICCallFrame-Descriptor.Size +
        //                       IonICCallFrameLayout::Size()
        masm.lea(Operand(scratch2, scratch3, TimesOne, IonICCallFrameLayout::Size()), scratch1);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    //
    // JitFrame_Entry
    //
    // If at an entry frame, store null into both fields.
    //
    masm.bind(&handle_Entry);
    {
        masm.movePtr(ImmPtr(nullptr), scratch1);
        masm.storePtr(scratch1, lastProfilingCallSite);
        masm.storePtr(scratch1, lastProfilingFrame);
        masm.ret();
    }

    Linker linker(masm);
    JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
#endif
#ifdef MOZ_VTUNE
    vtune::MarkStub(code, "ProfilerExitFrameStub");
#endif

    return code;
}
|