Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : *
4 : * Copyright 2014 Mozilla Foundation
5 : *
6 : * Licensed under the Apache License, Version 2.0 (the "License");
7 : * you may not use this file except in compliance with the License.
8 : * You may obtain a copy of the License at
9 : *
10 : * http://www.apache.org/licenses/LICENSE-2.0
11 : *
12 : * Unless required by applicable law or agreed to in writing, software
13 : * distributed under the License is distributed on an "AS IS" BASIS,
14 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 : * See the License for the specific language governing permissions and
16 : * limitations under the License.
17 : */
18 :
19 : #include "wasm/WasmFrameIterator.h"
20 :
21 : #include "wasm/WasmInstance.h"
22 :
23 : #include "jit/MacroAssembler-inl.h"
24 :
25 : using namespace js;
26 : using namespace js::jit;
27 : using namespace js::wasm;
28 :
29 : using mozilla::DebugOnly;
30 : using mozilla::Swap;
31 :
32 : /*****************************************************************************/
33 : // FrameIterator implementation
34 :
// Default-construct a "done" iterator: all cursor state is null, so done()
// is immediately true. Serves as the end/sentinel value for iteration.
FrameIterator::FrameIterator()
  : activation_(nullptr),
    code_(nullptr),
    callsite_(nullptr),
    codeRange_(nullptr),
    fp_(nullptr),
    unwind_(Unwind::False),
    unwoundAddressOfReturnAddress_(nullptr)
{
    MOZ_ASSERT(done());
}
46 :
47 0 : FrameIterator::FrameIterator(WasmActivation* activation, Unwind unwind)
48 : : activation_(activation),
49 : code_(nullptr),
50 : callsite_(nullptr),
51 : codeRange_(nullptr),
52 0 : fp_(activation->exitFP()),
53 0 : unwind_(unwind)
54 : {
55 0 : MOZ_ASSERT(fp_);
56 :
57 : // Normally, execution exits wasm code via an exit stub which sets exitFP to
58 : // the exit stub's frame. Thus, in this case, we want to start iteration at
59 : // the caller of the exit frame, whose Code, CodeRange and CallSite are
60 : // indicated by the returnAddress of the exit stub's frame.
61 :
62 0 : if (!activation->interrupted()) {
63 0 : popFrame();
64 0 : MOZ_ASSERT(!done());
65 0 : return;
66 : }
67 :
68 : // When asynchronously interrupted, exitFP is set to the interrupted frame
69 : // itself and so we do not want to skip it. Instead, we can recover the
70 : // Code and CodeRange from the WasmActivation, which are set when control
71 : // flow was interrupted. There is no CallSite (b/c the interrupt was async),
72 : // but this is fine because CallSite is only used for line number for which
73 : // we can use the beginning of the function from the CodeRange instead.
74 :
75 0 : code_ = activation_->compartment()->wasm.lookupCode(activation->unwindPC());
76 0 : MOZ_ASSERT(code_);
77 0 : MOZ_ASSERT(&fp_->tls->instance->code() == code_);
78 :
79 0 : codeRange_ = code_->lookupRange(activation->unwindPC());
80 0 : MOZ_ASSERT(codeRange_->kind() == CodeRange::Function);
81 :
82 0 : MOZ_ASSERT(!done());
83 : }
84 :
85 : bool
86 3186 : FrameIterator::done() const
87 : {
88 3186 : MOZ_ASSERT(!!fp_ == !!code_);
89 3186 : MOZ_ASSERT(!!fp_ == !!codeRange_);
90 3186 : return !fp_;
91 : }
92 :
// Advance to the caller's frame, optionally unwinding activation state.
void
FrameIterator::operator++()
{
    MOZ_ASSERT(!done());

    // When the iterator is set to Unwind::True, each time the iterator pops a
    // frame, the WasmActivation is updated so that the just-popped frame
    // is no longer visible. This is necessary since Debugger::onLeaveFrame is
    // called before popping each frame and, once onLeaveFrame is called for a
    // given frame, that frame must not be visible to subsequent stack iteration
    // (or it could be added as a "new" frame just as it becomes garbage).
    // When the frame is "interrupted", then exitFP is included in the callstack
    // (otherwise, it is skipped, as explained above). So to unwind the
    // innermost frame, we just clear the interrupt state.

    if (unwind_ == Unwind::True) {
        if (activation_->interrupted())
            activation_->finishInterrupt();
        activation_->unwindExitFP(fp_);
    }

    popFrame();
}
116 :
// Step the cursor from the current frame to its caller, refreshing code_,
// codeRange_ and callsite_ from the caller's return address, or transitioning
// to the done() state when the outermost frame has been popped.
void
FrameIterator::popFrame()
{
    Frame* prevFP = fp_;
    fp_ = prevFP->callerFP;

    if (!fp_) {
        // Walked off the outermost wasm frame: clear all cursor state so
        // done() becomes true.
        code_ = nullptr;
        codeRange_ = nullptr;
        callsite_ = nullptr;

        if (unwind_ == Unwind::True) {
            // Record where the final return address lives so the caller of
            // the unwind (see unwoundAddressOfReturnAddress()) can resume.
            activation_->unwindExitFP(nullptr);
            unwoundAddressOfReturnAddress_ = &prevFP->returnAddress;
        }

        MOZ_ASSERT(done());
        return;
    }

    // The caller's pc is the return address stored in the popped frame.
    void* returnAddress = prevFP->returnAddress;

    code_ = activation_->compartment()->wasm.lookupCode(returnAddress);
    MOZ_ASSERT(code_);

    codeRange_ = code_->lookupRange(returnAddress);
    MOZ_ASSERT(codeRange_->kind() == CodeRange::Function);

    callsite_ = code_->lookupCallSite(returnAddress);
    MOZ_ASSERT(callsite_);

    MOZ_ASSERT(!done());
}
150 :
151 : const char*
152 0 : FrameIterator::filename() const
153 : {
154 0 : MOZ_ASSERT(!done());
155 0 : return code_->metadata().filename.get();
156 : }
157 :
158 : const char16_t*
159 0 : FrameIterator::displayURL() const
160 : {
161 0 : MOZ_ASSERT(!done());
162 0 : return code_->metadata().displayURL();
163 : }
164 :
165 : bool
166 0 : FrameIterator::mutedErrors() const
167 : {
168 0 : MOZ_ASSERT(!done());
169 0 : return code_->metadata().mutedErrors();
170 : }
171 :
172 : JSAtom*
173 0 : FrameIterator::functionDisplayAtom() const
174 : {
175 0 : MOZ_ASSERT(!done());
176 :
177 0 : JSContext* cx = activation_->cx();
178 0 : JSAtom* atom = instance()->getFuncAtom(cx, codeRange_->funcIndex());
179 0 : if (!atom) {
180 0 : cx->clearPendingException();
181 0 : return cx->names().empty;
182 : }
183 :
184 0 : return atom;
185 : }
186 :
187 : unsigned
188 0 : FrameIterator::lineOrBytecode() const
189 : {
190 0 : MOZ_ASSERT(!done());
191 0 : MOZ_ASSERT_IF(!callsite_, activation_->interrupted());
192 0 : return callsite_ ? callsite_->lineOrBytecode() : codeRange_->funcLineOrBytecode();
193 : }
194 :
195 : Instance*
196 0 : FrameIterator::instance() const
197 : {
198 0 : MOZ_ASSERT(!done());
199 0 : return fp_->tls->instance;
200 : }
201 :
202 : void**
203 0 : FrameIterator::unwoundAddressOfReturnAddress() const
204 : {
205 0 : MOZ_ASSERT(done());
206 0 : MOZ_ASSERT(unwind_ == Unwind::True);
207 0 : MOZ_ASSERT(unwoundAddressOfReturnAddress_);
208 0 : return unwoundAddressOfReturnAddress_;
209 : }
210 :
211 : bool
212 0 : FrameIterator::debugEnabled() const
213 : {
214 0 : MOZ_ASSERT(!done());
215 :
216 : // Only non-imported functions can have debug frames.
217 : //
218 : // Metadata::debugEnabled is only set if debugging is actually enabled (both
219 : // requested, and available via baseline compilation), and Tier::Debug code
220 : // will be available.
221 0 : return code_->metadata().debugEnabled &&
222 0 : codeRange_->funcIndex() >= code_->metadata(Tier::Debug).funcImports.length();
223 : }
224 :
225 : DebugFrame*
226 0 : FrameIterator::debugFrame() const
227 : {
228 0 : MOZ_ASSERT(!done());
229 0 : MOZ_ASSERT(debugEnabled());
230 0 : return reinterpret_cast<DebugFrame*>((uint8_t*)fp_ - DebugFrame::offsetOfFrame());
231 : }
232 :
233 : const CallSite*
234 0 : FrameIterator::debugTrapCallsite() const
235 : {
236 0 : MOZ_ASSERT(!done());
237 0 : MOZ_ASSERT(callsite_);
238 0 : MOZ_ASSERT(debugEnabled());
239 0 : MOZ_ASSERT(callsite_->kind() == CallSite::EnterFrame ||
240 : callsite_->kind() == CallSite::LeaveFrame ||
241 : callsite_->kind() == CallSite::Breakpoint);
242 0 : return callsite_;
243 : }
244 :
245 : /*****************************************************************************/
246 : // Prologue/epilogue code generation
247 :
// These constants reflect statically-determined offsets in the
// prologue/epilogue. The offsets are dynamically asserted during code
// generation.
//
// Pushed* values are byte offsets from the start of the prologue at which
// the corresponding push has completed; Popped* values are byte distances
// from the ret instruction at which the corresponding pop has completed
// (see GenerateCallablePrologue/GenerateCallableEpilogue below, which
// MOZ_ASSERT these against masm.currentOffset()).
#if defined(JS_CODEGEN_X64)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedTLS = 2;
static const unsigned PushedExitReason = 4;
static const unsigned PushedFP = 5;
static const unsigned SetFP = 8;
static const unsigned PoppedFP = 4;
static const unsigned PoppedExitReason = 2;
#elif defined(JS_CODEGEN_X86)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedTLS = 1;
static const unsigned PushedExitReason = 3;
static const unsigned PushedFP = 4;
static const unsigned SetFP = 6;
static const unsigned PoppedFP = 2;
static const unsigned PoppedExitReason = 1;
#elif defined(JS_CODEGEN_ARM)
// On ARM/MIPS the return address starts in a register (lr/ra), so there is
// an additional pre-push state, BeforePushRetAddr.
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4;
static const unsigned PushedTLS = 8;
static const unsigned PushedExitReason = 16;
static const unsigned PushedFP = 20;
static const unsigned SetFP = 24;
static const unsigned PoppedFP = 8;
static const unsigned PoppedExitReason = 4;
#elif defined(JS_CODEGEN_ARM64)
// NOTE(review): the ARM64 values look like placeholders (mostly zero) —
// presumably this port is not fully supported here; confirm before relying
// on unwinding on ARM64.
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 0;
static const unsigned PushedTLS = 1;
static const unsigned PushedExitReason = 2;
static const unsigned PushedFP = 0;
static const unsigned SetFP = 0;
static const unsigned PoppedFP = 0;
static const unsigned PoppedExitReason = 0;
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4;
static const unsigned PushedTLS = 8;
static const unsigned PushedExitReason = 12;
static const unsigned PushedFP = 16;
static const unsigned SetFP = 20;
static const unsigned PoppedFP = 8;
static const unsigned PoppedExitReason = 4;
#elif defined(JS_CODEGEN_NONE)
// No-codegen build: values are never asserted against real code.
static const unsigned PushedRetAddr = 0;
static const unsigned PushedTLS = 1;
static const unsigned PushedExitReason = 2;
static const unsigned PushedFP = 0;
static const unsigned SetFP = 0;
static const unsigned PoppedFP = 0;
static const unsigned PoppedExitReason = 0;
#else
# error "Unknown architecture!"
#endif
305 :
// Push the return address at the start of the prologue on architectures
// where the call instruction leaves it in a register (ARM: lr, MIPS: ra).
// |entry| is the offset recorded at the start of the prologue, used only to
// assert the statically-known BeforePushRetAddr offset.
static void
PushRetAddr(MacroAssembler& masm, unsigned entry)
{
#if defined(JS_CODEGEN_ARM)
    MOZ_ASSERT(masm.currentOffset() - entry == BeforePushRetAddr);
    masm.push(lr);
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    MOZ_ASSERT(masm.currentOffset() - entry == BeforePushRetAddr);
    masm.push(ra);
#else
    // The x86/x64 call instruction pushes the return address.
#endif
}
319 :
// Emit code to load the current WasmActivation into |dest|. The chain is
// TLS -> JSContext -> innermost Activation -> its prev; |dest| is clobbered
// at each step, so the load order is significant.
static void
LoadActivation(MacroAssembler& masm, Register dest)
{
    // WasmCall pushes a WasmActivation and an inactive JitActivation. The
    // JitActivation only becomes active when calling into JS from wasm.
    masm.loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, addressOfContext)), dest);
    masm.loadPtr(Address(dest, 0), dest);
    masm.loadPtr(Address(dest, JSContext::offsetOfActivation()), dest);
    masm.loadPtr(Address(dest, Activation::offsetOfPrev()), dest);
}
330 :
// Emit the shared callable prologue: push return address, TLS, exit reason
// and caller fp, then point fp at the new Frame. For non-None exit reasons,
// also publish fp as the activation's exitFP. *entry receives the offset of
// the first prologue instruction.
static void
GenerateCallablePrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
                         uint32_t* entry)
{
    // ProfilingFrameIterator needs to know the offsets of several key
    // instructions from entry. To save space, we make these offsets static
    // constants and assert that they match the actual codegen below. On ARM,
    // this requires AutoForbidPools to prevent a constant pool from being
    // randomly inserted between two instructions.
    {
#if defined(JS_CODEGEN_ARM)
        AutoForbidPools afp(&masm, /* number of instructions in scope = */ 7);
#endif
        *entry = masm.currentOffset();

        // Each push below must land exactly at its statically-declared
        // offset (see the Pushed*/SetFP constants above); StartUnwinding
        // relies on these offsets to interpret a pc inside the prologue.
        PushRetAddr(masm, *entry);
        MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
        masm.push(WasmTlsReg);
        MOZ_ASSERT_IF(!masm.oom(), PushedTLS == masm.currentOffset() - *entry);
        masm.push(Imm32(reason.encode()));
        MOZ_ASSERT_IF(!masm.oom(), PushedExitReason == masm.currentOffset() - *entry);
        masm.push(FramePointer);
        MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
        masm.moveStackPtrTo(FramePointer);
        MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
    }

    if (!reason.isNone()) {
        Register act = ABINonArgReg0;

        // Native callers expect the native ABI, which assume that non-saved
        // registers are preserved. Explicitly preserve the act register
        // in that case.
        if (reason.isNative() && !act.volatile_())
            masm.Push(act);

        // Record this frame as the activation's exitFP so stack iteration
        // can find it while we are outside wasm code.
        LoadActivation(masm, act);
        masm.wasmAssertNonExitInvariants(act);
        masm.storePtr(FramePointer, Address(act, WasmActivation::offsetOfExitFP()));

        if (reason.isNative() && !act.volatile_())
            masm.Pop(act);
    }

    // Finally reserve the function's local stack space.
    if (framePushed)
        masm.subFromStackPtr(Imm32(framePushed));
}
378 :
// Emit the shared callable epilogue, the exact inverse of
// GenerateCallablePrologue: free local stack space, clear exitFP (for exit
// stubs), then pop fp, exit reason and TLS before returning. *ret receives
// the offset of the ret instruction.
static void
GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
                         uint32_t* ret)
{
    if (framePushed)
        masm.addToStackPtr(Imm32(framePushed));

    if (!reason.isNone()) {
        Register act = ABINonArgReturnReg0;

        // See comment in GenerateCallablePrologue.
        if (reason.isNative() && !act.volatile_())
            masm.Push(act);

        // Returning to wasm: clear exitFP so the frame is no longer
        // considered an exit frame.
        LoadActivation(masm, act);
        masm.storePtr(ImmWord(0), Address(act, WasmActivation::offsetOfExitFP()));

#ifdef DEBUG
        // Check the passed exitReason is the same as the one on entry.
        // Do it here rather than in the pop sequence to not perturbate the
        // static stack structure in debug vs optimized mode.
        Register scratch = act;
        size_t exitReasonSlot = 1 + (reason.isNative() && !scratch.volatile_() ? 1 : 0);
        masm.load32(Address(masm.getStackPointer(), exitReasonSlot * sizeof(void*)), scratch);
        Label ok;
        masm.branch32(Assembler::Condition::Equal, scratch, Imm32(reason.encode()), &ok);
        masm.breakpoint();
        masm.bind(&ok);
#endif

        if (reason.isNative() && !act.volatile_())
            masm.Pop(act);
    }

    // Forbid pools for the same reason as described in GenerateCallablePrologue.
#if defined(JS_CODEGEN_ARM)
    AutoForbidPools afp(&masm, /* number of instructions in scope = */ 7);
#endif

    // There is an important ordering constraint here: fp must be repointed to
    // the caller's frame before any field of the frame currently pointed to by
    // fp is popped: asynchronous signal handlers (which use stack space
    // starting at sp) could otherwise clobber these fields while they are still
    // accessible via fp (fp fields are read during frame iteration which is
    // *also* done asynchronously).

    masm.pop(FramePointer);
    DebugOnly<uint32_t> poppedFP = masm.currentOffset();

    // Pop the exit reason to WasmTlsReg; it's going to be clobbered just
    // thereafter to store the real value of WasmTlsReg.
    masm.pop(WasmTlsReg);
    DebugOnly<uint32_t> poppedExitReason = masm.currentOffset();

    masm.pop(WasmTlsReg);
    *ret = masm.currentOffset();
    masm.ret();

    // The pop offsets must match the statically-declared Popped* constants,
    // measured backwards from the ret instruction.
    MOZ_ASSERT_IF(!masm.oom(), PoppedFP == *ret - poppedFP);
    MOZ_ASSERT_IF(!masm.oom(), PoppedExitReason == *ret - poppedExitReason);
}
440 :
// Emit a wasm function's prologue: first the table entry (which checks the
// caller-supplied signature id for indirect calls), then the normal entry
// which runs the common callable prologue. Records both offsets in *offsets.
void
wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
                               FuncOffsets* offsets)
{
    // Flush pending pools so they do not get dumped between the 'begin' and
    // 'normalEntry' offsets since the difference must be less than UINT8_MAX
    // to be stored in CodeRange::funcBeginToNormalEntry_.
    masm.flushBuffer();
    masm.haltingAlign(CodeAlignment);

    // Generate table entry:
    offsets->begin = masm.currentOffset();
    BytecodeOffset trapOffset(0); // ignored by masm.wasmEmitTrapOutOfLineCode
    TrapDesc trap(trapOffset, Trap::IndirectCallBadSig, masm.framePushed());
    switch (sigId.kind()) {
      case SigIdDesc::Kind::Global: {
        // Signature id is stored in global data; compare against the one
        // passed in WasmTableCallSigReg and trap on mismatch.
        Register scratch = WasmTableCallScratchReg;
        masm.loadWasmGlobalPtr(sigId.globalDataOffset(), scratch);
        masm.branchPtr(Assembler::Condition::NotEqual, WasmTableCallSigReg, scratch, trap);
        break;
      }
      case SigIdDesc::Kind::Immediate: {
        // Small signatures are encoded directly as an immediate.
        masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, Imm32(sigId.immediate()), trap);
        break;
      }
      case SigIdDesc::Kind::None:
        break;
    }

    // The table entry might have generated a small constant pool in case of
    // immediate comparison.
    masm.flushBuffer();

    // Generate normal entry:
    masm.nopAlign(CodeAlignment);
    GenerateCallablePrologue(masm, framePushed, ExitReason::None(), &offsets->normalEntry);

    masm.setFramePushed(framePushed);
}
480 :
481 : void
482 0 : wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets)
483 : {
484 0 : MOZ_ASSERT(masm.framePushed() == framePushed);
485 0 : GenerateCallableEpilogue(masm, framePushed, ExitReason::None(), &offsets->ret);
486 0 : masm.setFramePushed(0);
487 0 : }
488 :
489 : void
490 0 : wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
491 : CallableOffsets* offsets)
492 : {
493 0 : masm.haltingAlign(CodeAlignment);
494 0 : GenerateCallablePrologue(masm, framePushed, reason, &offsets->begin);
495 0 : masm.setFramePushed(framePushed);
496 0 : }
497 :
498 : void
499 0 : wasm::GenerateExitEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
500 : CallableOffsets* offsets)
501 : {
502 : // Inverse of GenerateExitPrologue:
503 0 : MOZ_ASSERT(masm.framePushed() == framePushed);
504 0 : GenerateCallableEpilogue(masm, framePushed, reason, &offsets->ret);
505 0 : masm.setFramePushed(0);
506 0 : }
507 :
508 : /*****************************************************************************/
509 : // ProfilingFrameIterator
510 :
511 0 : ProfilingFrameIterator::ProfilingFrameIterator()
512 : : activation_(nullptr),
513 : code_(nullptr),
514 : codeRange_(nullptr),
515 : callerFP_(nullptr),
516 : callerPC_(nullptr),
517 : stackAddress_(nullptr),
518 0 : exitReason_(ExitReason::Fixed::None)
519 : {
520 0 : MOZ_ASSERT(done());
521 0 : }
522 :
523 0 : ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation)
524 : : activation_(&activation),
525 : code_(nullptr),
526 : codeRange_(nullptr),
527 : callerFP_(nullptr),
528 : callerPC_(nullptr),
529 : stackAddress_(nullptr),
530 0 : exitReason_(ExitReason::Fixed::None)
531 : {
532 0 : initFromExitFP();
533 0 : }
534 :
535 : static inline void
536 0 : AssertMatchesCallSite(const WasmActivation& activation, void* callerPC, Frame* callerFP)
537 : {
538 : #ifdef DEBUG
539 0 : const Code* code = activation.compartment()->wasm.lookupCode(callerPC);
540 0 : MOZ_ASSERT(code);
541 :
542 0 : const CodeRange* callerCodeRange = code->lookupRange(callerPC);
543 0 : MOZ_ASSERT(callerCodeRange);
544 :
545 0 : if (callerCodeRange->kind() == CodeRange::Entry) {
546 0 : MOZ_ASSERT(callerFP == nullptr);
547 0 : return;
548 : }
549 :
550 0 : const CallSite* callsite = code->lookupCallSite(callerPC);
551 0 : MOZ_ASSERT(callsite);
552 : #endif
553 : }
554 :
// Initialize cursor state from the activation's exitFP: decode the pretend
// innermost exit-reason frame, then start real unwinding at exitFP's caller.
void
ProfilingFrameIterator::initFromExitFP()
{
    Frame* fp = activation_->exitFP();
    void* pc = fp->returnAddress;

    // The iterator inserts a pretend innermost frame for ExitReasons.
    // This allows the variety of exit reasons to show up in the callstack.
    exitReason_ = ExitReason::Decode(fp->encodedExitReason);

    stackAddress_ = fp;

    code_ = activation_->compartment()->wasm.lookupCode(pc);
    MOZ_ASSERT(code_);

    codeRange_ = code_->lookupRange(pc);
    MOZ_ASSERT(codeRange_);

    // Since we don't have the pc for fp, start unwinding at the caller of fp.
    // This means that the innermost frame is skipped. This is fine because:
    //  - for import exit calls, the innermost frame is a thunk, so the first
    //    frame that shows up is the function calling the import;
    //  - for Math and other builtin calls as well as interrupts, we note the absence
    //    of an exit reason and inject a fake "builtin" frame; and
    //  - for async interrupts, we just accept that we'll lose the innermost frame.
    switch (codeRange_->kind()) {
      case CodeRange::Entry:
        // exitFP's caller was the C++ entry trampoline: iteration is over
        // after the pretend exit-reason frame.
        callerPC_ = nullptr;
        callerFP_ = nullptr;
        break;
      case CodeRange::Function:
        // Skip the innermost function frame (see comment above) and start
        // from its caller.
        fp = fp->callerFP;
        callerPC_ = fp->returnAddress;
        callerFP_ = fp->callerFP;
        AssertMatchesCallSite(*activation_, callerPC_, callerFP_);
        break;
      case CodeRange::ImportJitExit:
      case CodeRange::ImportInterpExit:
      case CodeRange::BuiltinThunk:
      case CodeRange::TrapExit:
      case CodeRange::DebugTrap:
      case CodeRange::Inline:
      case CodeRange::Throw:
      case CodeRange::Interrupt:
      case CodeRange::FarJumpIsland:
        // A return address into an exit frame must point at a function or
        // the entry trampoline; anything else indicates stack corruption.
        MOZ_CRASH("Unexpected CodeRange kind");
    }

    MOZ_ASSERT(!done());
}
605 :
// Given a register snapshot taken at an arbitrary instruction inside wasm
// (or builtin-thunk) code, compute a consistent (code, codeRange, fp, pc)
// unwind state. Returns false if the pc is outside wasm/thunk code or in a
// stub whose stack cannot be unwound (throw/interrupt). *unwoundCaller is
// set to true when the returned state already describes the CALLER of the
// interrupted frame (pc inside a prologue/epilogue), false when it
// describes the interrupted frame itself.
bool
js::wasm::StartUnwinding(const WasmActivation& activation, const RegisterState& registers,
                         UnwindState* unwindState, bool* unwoundCaller)
{
    // Shorthands.
    uint8_t* const pc = (uint8_t*) registers.pc;
    Frame* const fp = (Frame*) registers.fp;
    void** const sp = (void**) registers.sp;

    // Get the CodeRange describing pc and the base address to which the
    // CodeRange is relative. If the pc is not in a wasm module or a builtin
    // thunk, then execution must be entering from or leaving to the C++ caller
    // that pushed the WasmActivation.
    const CodeRange* codeRange;
    uint8_t* codeBase;
    const Code* code = activation.compartment()->wasm.lookupCode(pc);
    if (code) {
        const CodeSegment* codeSegment;
        codeRange = code->lookupRange(pc, &codeSegment);
        codeBase = codeSegment->base();
    } else if (!LookupBuiltinThunk(pc, &codeRange, &codeBase)) {
        return false;
    }

    // When the pc is inside the prologue/epilogue, the innermost call's Frame
    // is not complete and thus fp points to the second-to-innermost call's
    // Frame. Since fp can only tell you about its caller, naively unwinding
    // while pc is in the prologue/epilogue would skip the second-to-innermost
    // call. To avoid this problem, we use the static structure of the code in
    // the prologue and epilogue to do the Right Thing.
    uint32_t offsetInCode = pc - codeBase;
    MOZ_ASSERT(offsetInCode >= codeRange->begin());
    MOZ_ASSERT(offsetInCode < codeRange->end());

    // Compute the offset of the pc from the (normal) entry of the code range.
    // The stack state of the pc for the entire table-entry is equivalent to
    // that of the first pc of the normal-entry. Thus, we can simplify the below
    // case analysis by redirecting all pc-in-table-entry cases to the
    // pc-at-normal-entry case.
    uint32_t offsetFromEntry;
    if (codeRange->isFunction()) {
        if (offsetInCode < codeRange->funcNormalEntry())
            offsetFromEntry = 0;
        else
            offsetFromEntry = offsetInCode - codeRange->funcNormalEntry();
    } else {
        offsetFromEntry = offsetInCode - codeRange->begin();
    }

    // Most cases end up unwinding to the caller state; not unwinding is the
    // exception here.
    *unwoundCaller = true;

    Frame* fixedFP = nullptr;
    void* fixedPC = nullptr;
    switch (codeRange->kind()) {
      case CodeRange::Function:
      case CodeRange::FarJumpIsland:
      case CodeRange::ImportJitExit:
      case CodeRange::ImportInterpExit:
      case CodeRange::BuiltinThunk:
      case CodeRange::TrapExit:
        // The branches below walk the statically-known prologue/epilogue
        // layout (see the Pushed*/Popped* constants and
        // GenerateCallablePrologue/Epilogue) to find where the caller's pc
        // currently lives: in a register, at a known sp slot, or in the
        // (now complete) Frame.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
        if (offsetFromEntry == BeforePushRetAddr || codeRange->isThunk()) {
            // The return address is still in lr and fp holds the caller's fp.
            fixedPC = (uint8_t*) registers.lr;
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else
#endif
        if (offsetFromEntry == PushedRetAddr || codeRange->isThunk()) {
            // The return address has been pushed on the stack but fp still
            // points to the caller's fp.
            fixedPC = sp[0];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetFromEntry >= PushedTLS && offsetFromEntry < PushedExitReason) {
            // The return address and caller's TLS have been pushed on the
            // stack; fp is still the caller's fp.
            fixedPC = sp[1];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetFromEntry == PushedExitReason) {
            // The return address, caller's TLS and exit reason have been
            // pushed on the stack; fp is still the caller's fp.
            fixedPC = sp[2];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetFromEntry == PushedFP) {
            // The full Frame has been pushed; fp is still the caller's fp.
            MOZ_ASSERT(fp == reinterpret_cast<Frame*>(sp)->callerFP);
            fixedPC = reinterpret_cast<Frame*>(sp)->returnAddress;
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetInCode == codeRange->ret() - PoppedFP) {
            // The fixedFP field of the Frame has been popped into fp, but the
            // exit reason hasn't been popped yet.
            fixedPC = sp[2];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetInCode == codeRange->ret() - PoppedExitReason) {
            // The fixedFP field of the Frame has been popped into fp, and the
            // exit reason has been popped.
            fixedPC = sp[1];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else if (offsetInCode == codeRange->ret()) {
            // Both the TLS and fixedFP fields have been popped and fp now
            // points to the caller's frame.
            fixedPC = sp[0];
            fixedFP = fp;
            AssertMatchesCallSite(activation, fixedPC, fixedFP);
        } else {
            // Not in the prologue/epilogue.
            fixedPC = pc;
            fixedFP = fp;
            *unwoundCaller = false;
            AssertMatchesCallSite(activation, fp->returnAddress, fp->callerFP);
            break;
        }
        break;
      case CodeRange::DebugTrap:
      case CodeRange::Inline:
        // Inline code stubs execute after the prologue/epilogue have completed
        // so pc/fp contains the right values here.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(activation, fp->returnAddress, fp->callerFP);
        break;
      case CodeRange::Entry:
        // The entry trampoline is the final frame in an WasmActivation. The entry
        // trampoline also doesn't GeneratePrologue/Epilogue so we can't use
        // the general unwinding logic above.
        break;
      case CodeRange::Throw:
        // The throw stub executes a small number of instructions before popping
        // the entire activation. To simplify testing, we simply pretend throw
        // stubs have already popped the entire stack.
        return false;
      case CodeRange::Interrupt:
        // When the PC is in the async interrupt stub, the fp may be garbage and
        // so we cannot blindly unwind it. Since the percent of time spent in
        // the interrupt stub is extremely small, just ignore the stack.
        return false;
    }

    unwindState->code = code;
    unwindState->codeRange = codeRange;
    unwindState->fp = fixedFP;
    unwindState->pc = fixedPC;
    return true;
}
759 :
760 0 : ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation,
761 0 : const RegisterState& state)
762 : : activation_(&activation),
763 : code_(nullptr),
764 : codeRange_(nullptr),
765 : callerFP_(nullptr),
766 : callerPC_(nullptr),
767 : stackAddress_(nullptr),
768 0 : exitReason_(ExitReason::Fixed::None)
769 : {
770 : // In the case of ImportJitExit, the fp register may be temporarily
771 : // clobbered on return from Ion so always use activation.fp when it is set.
772 0 : if (activation.exitFP()) {
773 0 : initFromExitFP();
774 0 : return;
775 : }
776 :
777 : bool unwoundCaller;
778 0 : UnwindState unwindState;
779 0 : if (!StartUnwinding(*activation_, state, &unwindState, &unwoundCaller)) {
780 0 : MOZ_ASSERT(done());
781 0 : return;
782 : }
783 :
784 0 : if (unwoundCaller) {
785 0 : callerFP_ = unwindState.fp;
786 0 : callerPC_ = unwindState.pc;
787 : } else {
788 0 : callerFP_ = unwindState.fp->callerFP;
789 0 : callerPC_ = unwindState.fp->returnAddress;
790 : }
791 :
792 0 : code_ = unwindState.code;
793 0 : codeRange_ = unwindState.codeRange;
794 0 : stackAddress_ = state.sp;
795 0 : MOZ_ASSERT(!done());
796 : }
797 :
// Advance to the next (outer) frame: first consume the pretend exit-reason
// frame if present, then walk up via callerFP_/callerPC_.
void
ProfilingFrameIterator::operator++()
{
    if (!exitReason_.isNone()) {
        // The pretend innermost exit-reason frame is consumed first; the
        // real innermost wasm frame (codeRange_) is reported next.
        MOZ_ASSERT(codeRange_);
        exitReason_ = ExitReason::None();
        MOZ_ASSERT(!done());
        return;
    }

    if (!callerPC_) {
        // Already at the outermost frame: clear the cursor so done() holds.
        MOZ_ASSERT(!callerFP_);
        codeRange_ = nullptr;
        MOZ_ASSERT(done());
        return;
    }

    code_ = activation_->compartment()->wasm.lookupCode(callerPC_);
    MOZ_ASSERT(code_);

    codeRange_ = code_->lookupRange(callerPC_);
    MOZ_ASSERT(codeRange_);

    switch (codeRange_->kind()) {
      case CodeRange::Entry:
        // Reached the entry trampoline: the next ++ will finish iteration.
        MOZ_ASSERT(callerFP_ == nullptr);
        callerPC_ = nullptr;
        break;
      case CodeRange::Function:
      case CodeRange::ImportJitExit:
      case CodeRange::ImportInterpExit:
      case CodeRange::BuiltinThunk:
      case CodeRange::TrapExit:
      case CodeRange::DebugTrap:
      case CodeRange::Inline:
      case CodeRange::FarJumpIsland:
        // Ordinary frame: hop to the caller recorded in the Frame.
        stackAddress_ = callerFP_;
        callerPC_ = callerFP_->returnAddress;
        AssertMatchesCallSite(*activation_, callerPC_, callerFP_->callerFP);
        callerFP_ = callerFP_->callerFP;
        break;
      case CodeRange::Interrupt:
      case CodeRange::Throw:
        // These stubs never appear as a caller (StartUnwinding bails out of
        // them before iteration begins).
        MOZ_CRASH("code range doesn't have frame");
    }

    MOZ_ASSERT(!done());
}
846 :
// Map the SymbolicAddress of a thunked builtin to the human-readable label
// that ProfilingFrameIterator::label() reports for frames executing inside
// that builtin's thunk. Must only be called for addresses for which
// NeedsBuiltinThunk() returns true.
static const char*
ThunkedNativeToDescription(SymbolicAddress func)
{
    MOZ_ASSERT(NeedsBuiltinThunk(func));
    switch (func) {
      // These builtins are not thunked; reaching one of these cases means
      // this switch has fallen out of sync with NeedsBuiltinThunk().
      case SymbolicAddress::HandleExecutionInterrupt:
      case SymbolicAddress::HandleDebugTrap:
      case SymbolicAddress::HandleThrow:
      case SymbolicAddress::ReportTrap:
      case SymbolicAddress::ReportOutOfBounds:
      case SymbolicAddress::ReportUnalignedAccess:
      case SymbolicAddress::CallImport_Void:
      case SymbolicAddress::CallImport_I32:
      case SymbolicAddress::CallImport_I64:
      case SymbolicAddress::CallImport_F64:
      case SymbolicAddress::CoerceInPlace_ToInt32:
      case SymbolicAddress::CoerceInPlace_ToNumber:
        MOZ_ASSERT(!NeedsBuiltinThunk(func), "not in sync with NeedsBuiltinThunk");
        break;
      case SymbolicAddress::ToInt32:
        return "call to asm.js native ToInt32 coercion (in wasm)";
      case SymbolicAddress::DivI64:
        return "call to native i64.div_s (in wasm)";
      case SymbolicAddress::UDivI64:
        return "call to native i64.div_u (in wasm)";
      case SymbolicAddress::ModI64:
        return "call to native i64.rem_s (in wasm)";
      case SymbolicAddress::UModI64:
        return "call to native i64.rem_u (in wasm)";
      case SymbolicAddress::TruncateDoubleToUint64:
        return "call to native i64.trunc_u/f64 (in wasm)";
      case SymbolicAddress::TruncateDoubleToInt64:
        return "call to native i64.trunc_s/f64 (in wasm)";
      case SymbolicAddress::Uint64ToDouble:
        return "call to native f64.convert_u/i64 (in wasm)";
      case SymbolicAddress::Uint64ToFloat32:
        return "call to native f32.convert_u/i64 (in wasm)";
      case SymbolicAddress::Int64ToDouble:
        return "call to native f64.convert_s/i64 (in wasm)";
      case SymbolicAddress::Int64ToFloat32:
        return "call to native f32.convert_s/i64 (in wasm)";
#if defined(JS_CODEGEN_ARM)
      // On ARM, i32 division and the atomics are also implemented via
      // thunked native calls.
      case SymbolicAddress::aeabi_idivmod:
        return "call to native i32.div_s (in wasm)";
      case SymbolicAddress::aeabi_uidivmod:
        return "call to native i32.div_u (in wasm)";
      case SymbolicAddress::AtomicCmpXchg:
        return "call to native atomic compare exchange (in wasm)";
      case SymbolicAddress::AtomicXchg:
        return "call to native atomic exchange (in wasm)";
      case SymbolicAddress::AtomicFetchAdd:
        return "call to native atomic fetch add (in wasm)";
      case SymbolicAddress::AtomicFetchSub:
        return "call to native atomic fetch sub (in wasm)";
      case SymbolicAddress::AtomicFetchAnd:
        return "call to native atomic fetch and (in wasm)";
      case SymbolicAddress::AtomicFetchOr:
        return "call to native atomic fetch or (in wasm)";
      case SymbolicAddress::AtomicFetchXor:
        return "call to native atomic fetch xor (in wasm)";
#endif
      case SymbolicAddress::ModD:
        return "call to asm.js native f64 % (mod)";
      case SymbolicAddress::SinD:
        return "call to asm.js native f64 Math.sin";
      case SymbolicAddress::CosD:
        return "call to asm.js native f64 Math.cos";
      case SymbolicAddress::TanD:
        return "call to asm.js native f64 Math.tan";
      case SymbolicAddress::ASinD:
        return "call to asm.js native f64 Math.asin";
      case SymbolicAddress::ACosD:
        return "call to asm.js native f64 Math.acos";
      case SymbolicAddress::ATanD:
        return "call to asm.js native f64 Math.atan";
      case SymbolicAddress::CeilD:
        return "call to native f64.ceil (in wasm)";
      case SymbolicAddress::CeilF:
        return "call to native f32.ceil (in wasm)";
      case SymbolicAddress::FloorD:
        return "call to native f64.floor (in wasm)";
      case SymbolicAddress::FloorF:
        return "call to native f32.floor (in wasm)";
      case SymbolicAddress::TruncD:
        return "call to native f64.trunc (in wasm)";
      case SymbolicAddress::TruncF:
        return "call to native f32.trunc (in wasm)";
      case SymbolicAddress::NearbyIntD:
        return "call to native f64.nearest (in wasm)";
      case SymbolicAddress::NearbyIntF:
        return "call to native f32.nearest (in wasm)";
      case SymbolicAddress::ExpD:
        return "call to asm.js native f64 Math.exp";
      case SymbolicAddress::LogD:
        return "call to asm.js native f64 Math.log";
      case SymbolicAddress::PowD:
        return "call to asm.js native f64 Math.pow";
      case SymbolicAddress::ATan2D:
        return "call to asm.js native f64 Math.atan2";
      case SymbolicAddress::GrowMemory:
        return "call to native grow_memory (in wasm)";
      case SymbolicAddress::CurrentMemory:
        return "call to native current_memory (in wasm)";
      case SymbolicAddress::Limit:
        break;
    }
    // Unknown/sentinel value: return a placeholder rather than crashing,
    // since this string is only used for profiler labels.
    return "?";
}
955 :
956 : const char*
957 0 : ProfilingFrameIterator::label() const
958 : {
959 0 : MOZ_ASSERT(!done());
960 :
961 : // Use the same string for both time inside and under so that the two
962 : // entries will be coalesced by the profiler.
963 : //
964 : // NB: these labels are parsed for location by
965 : // devtools/client/performance/modules/logic/frame-utils.js
966 : static const char* importJitDescription = "fast FFI trampoline (in wasm)";
967 : static const char* importInterpDescription = "slow FFI trampoline (in wasm)";
968 : static const char* builtinNativeDescription = "fast FFI trampoline to native (in wasm)";
969 : static const char* trapDescription = "trap handling (in wasm)";
970 : static const char* debugTrapDescription = "debug trap handling (in wasm)";
971 :
972 0 : if (!exitReason_.isFixed())
973 0 : return ThunkedNativeToDescription(exitReason_.symbolic());
974 :
975 0 : switch (exitReason_.fixed()) {
976 : case ExitReason::Fixed::None:
977 0 : break;
978 : case ExitReason::Fixed::ImportJit:
979 0 : return importJitDescription;
980 : case ExitReason::Fixed::ImportInterp:
981 0 : return importInterpDescription;
982 : case ExitReason::Fixed::BuiltinNative:
983 0 : return builtinNativeDescription;
984 : case ExitReason::Fixed::Trap:
985 0 : return trapDescription;
986 : case ExitReason::Fixed::DebugTrap:
987 0 : return debugTrapDescription;
988 : }
989 :
990 0 : switch (codeRange_->kind()) {
991 0 : case CodeRange::Function: return code_->profilingLabel(codeRange_->funcIndex());
992 0 : case CodeRange::Entry: return "entry trampoline (in wasm)";
993 0 : case CodeRange::ImportJitExit: return importJitDescription;
994 0 : case CodeRange::BuiltinThunk: return builtinNativeDescription;
995 0 : case CodeRange::ImportInterpExit: return importInterpDescription;
996 0 : case CodeRange::TrapExit: return trapDescription;
997 0 : case CodeRange::DebugTrap: return debugTrapDescription;
998 0 : case CodeRange::Inline: return "inline stub (in wasm)";
999 0 : case CodeRange::FarJumpIsland: return "interstitial (in wasm)";
1000 : case CodeRange::Throw: MOZ_FALLTHROUGH;
1001 0 : case CodeRange::Interrupt: MOZ_CRASH("does not have a frame");
1002 : }
1003 :
1004 0 : MOZ_CRASH("bad code range kind");
1005 : }
1006 :
1007 : void
1008 22 : wasm::TraceActivations(JSContext* cx, const CooperatingContext& target, JSTracer* trc)
1009 : {
1010 128 : for (ActivationIterator iter(cx, target); !iter.done(); ++iter) {
1011 106 : if (iter.activation()->isWasm()) {
1012 0 : for (FrameIterator fi(iter.activation()->asWasm()); !fi.done(); ++fi)
1013 0 : fi.instance()->trace(trc);
1014 : }
1015 : }
1016 22 : }
1017 :
1018 : Instance*
1019 0 : wasm::LookupFaultingInstance(WasmActivation* activation, void* pc, void* fp)
1020 : {
1021 : // Assume bug-caused faults can be raised at any PC and apply the logic of
1022 : // ProfilingFrameIterator to reject any pc outside the (post-prologue,
1023 : // pre-epilogue) body of a wasm function. This is exhaustively tested by the
1024 : // simulators which call this function at every load/store before even
1025 : // knowing whether there is a fault.
1026 :
1027 0 : const Code* code = activation->compartment()->wasm.lookupCode(pc);
1028 0 : if (!code)
1029 0 : return nullptr;
1030 :
1031 : const CodeSegment* codeSegment;
1032 0 : const CodeRange* codeRange = code->lookupRange(pc, &codeSegment);
1033 0 : if (!codeRange || !codeRange->isFunction())
1034 0 : return nullptr;
1035 :
1036 0 : size_t offsetInModule = ((uint8_t*)pc) - codeSegment->base();
1037 0 : if (offsetInModule < codeRange->funcNormalEntry() + SetFP)
1038 0 : return nullptr;
1039 0 : if (offsetInModule >= codeRange->ret() - PoppedFP)
1040 0 : return nullptr;
1041 :
1042 0 : Instance* instance = reinterpret_cast<Frame*>(fp)->tls->instance;
1043 0 : MOZ_RELEASE_ASSERT(&instance->code() == code);
1044 0 : return instance;
1045 : }
1046 :
1047 : WasmActivation*
1048 0 : wasm::ActivationIfInnermost(JSContext* cx)
1049 : {
1050 : // WasmCall pushes both an outer WasmActivation and an inner JitActivation
1051 : // that only becomes active when calling JIT code.
1052 0 : Activation* act = cx->activation();
1053 0 : while (act && act->isJit() && !act->asJit()->isActive())
1054 0 : act = act->prev();
1055 0 : if (!act || !act->isWasm())
1056 0 : return nullptr;
1057 0 : return act->asWasm();
1058 : }
1059 :
1060 : bool
1061 0 : wasm::InCompiledCode(void* pc)
1062 : {
1063 0 : JSContext* cx = TlsContext.get();
1064 0 : if (!cx)
1065 0 : return false;
1066 :
1067 0 : MOZ_RELEASE_ASSERT(!cx->handlingSegFault);
1068 :
1069 0 : if (cx->compartment()->wasm.lookupCode(pc))
1070 0 : return true;
1071 :
1072 : const CodeRange* codeRange;
1073 : uint8_t* codeBase;
1074 0 : return LookupBuiltinThunk(pc, &codeRange, &codeBase);
1075 : }
|