/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 *
 * Copyright 2014 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmFrameIter.h"

#include "jit/JitFrames.h"
#include "vm/JitActivation.h"  // js::jit::JitActivation
#include "vm/JSContext.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmStubs.h"
#include "wasm/WasmTlsData.h"

#include "jit/MacroAssembler-inl.h"

using namespace js;
using namespace js::jit;
using namespace js::wasm;

using mozilla::DebugOnly;
using mozilla::Maybe;

/*****************************************************************************/
// WasmFrameIter implementation

WasmFrameIter::WasmFrameIter(JitActivation* activation, wasm::Frame* fp)
    : activation_(activation),
      code_(nullptr),
      codeRange_(nullptr),
      lineOrBytecode_(0),
      fp_(fp ? fp : activation->wasmExitFP()),
      tls_(nullptr),
      unwoundIonCallerFP_(nullptr),
      unwoundIonFrameType_(jit::FrameType(-1)),
      unwind_(Unwind::False),
      unwoundAddressOfReturnAddress_(nullptr),
      resumePCinCurrentFrame_(nullptr) {
  MOZ_ASSERT(fp_);
  tls_ = GetNearestEffectiveTls(fp_);

  // When the stack is captured during a trap (viz., to create the .stack
  // for an Error object), use the pc/bytecode information captured by the
  // signal handler in the runtime. Take care not to use this trap unwind
  // state for wasm frames in the middle of a JitActivation, i.e., wasm frames
  // that called into JIT frames before the trap.

  if (activation->isWasmTrapping() && fp_ == activation->wasmExitFP()) {
    const TrapData& trapData = activation->wasmTrapData();
    void* unwoundPC = trapData.unwoundPC;

    code_ = &tls_->instance->code();
    MOZ_ASSERT(code_ == LookupCode(unwoundPC));

    codeRange_ = code_->lookupFuncRange(unwoundPC);
    MOZ_ASSERT(codeRange_);

    lineOrBytecode_ = trapData.bytecodeOffset;

    MOZ_ASSERT(!done());
    return;
  }

  // Otherwise, execution exits wasm code via an exit stub which sets exitFP
  // to the exit stub's frame. Thus, in this case, we want to start iteration
  // at the caller of the exit frame, whose Code, CodeRange and CallSite are
  // indicated by the returnAddress of the exit stub's frame. If the caller
  // was Ion, we can just skip the wasm frames.

  popFrame();
  MOZ_ASSERT(!done() || unwoundIonCallerFP_);
}
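
// Illustrative sketch (not part of this file, assuming the fp parameter is
// defaulted to nullptr in the header): clients walk a wasm activation by
// constructing an iterator at the exit FP and advancing until done():
//
//   for (WasmFrameIter iter(activation); !iter.done(); ++iter) {
//     uint32_t func = iter.funcIndex();
//     unsigned offset = iter.lineOrBytecode();
//     // ... record (func, offset) for the stack trace ...
//   }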

bool WasmFrameIter::done() const {
  MOZ_ASSERT(!!fp_ == !!code_);
  MOZ_ASSERT(!!fp_ == !!codeRange_);
  return !fp_;
}

void WasmFrameIter::operator++() {
  MOZ_ASSERT(!done());

  // When the iterator is set to unwind, each time the iterator pops a frame,
  // the JitActivation is updated so that the just-popped frame is no longer
  // visible. This is necessary since Debugger::onLeaveFrame is called before
  // popping each frame and, once onLeaveFrame is called for a given frame,
  // that frame must not be visible to subsequent stack iteration (or it
  // could be added as a "new" frame just as it becomes garbage).  When the
  // frame is trapping, then exitFP is included in the callstack (otherwise,
  // it is skipped, as explained above). So to unwind the innermost frame, we
  // just clear the trapping state.

  if (unwind_ == Unwind::True) {
    if (activation_->isWasmTrapping()) {
      activation_->finishWasmTrap();
    }
    activation_->setWasmExitFP(fp_);
  }

  popFrame();
}

void WasmFrameIter::popFrame() {
  if (fp_->callerIsExitOrJitEntryFP()) {
    // We run into a frame pointer which has the low bit set,
    // indicating this is a direct call from the jit into the wasm
    // function's body. The call stack resembles this at this point:
    //
    // |---------------------|
    // |      JIT FRAME      |
    // | JIT FAKE EXIT FRAME | <-- tagged fp_->callerFP_
    // |      WASM FRAME     | <-- fp_
    // |---------------------|
    //
    // fp_->callerFP_ points to the fake exit frame set up by the jit caller,
    // and the return-address-to-fp is in JIT code, thus doesn't belong to any
    // wasm instance's code (in particular, there's no associated CodeRange).
    // Mark the frame as such and untag FP.
    MOZ_ASSERT(!LookupCode(fp_->returnAddress()));

    unwoundIonCallerFP_ = fp_->jitEntryCaller();
    unwoundIonFrameType_ = FrameType::Exit;

    if (unwind_ == Unwind::True) {
      activation_->setJSExitFP(unwoundIonCallerFP());
      unwoundAddressOfReturnAddress_ = fp_->addressOfReturnAddress();
    }

    fp_ = nullptr;
    code_ = nullptr;
    codeRange_ = nullptr;

    MOZ_ASSERT(done());
    return;
  }

  Frame* prevFP = fp_;
  fp_ = fp_->wasmCaller();
  resumePCinCurrentFrame_ = prevFP->returnAddress();

  if (!fp_) {
    code_ = nullptr;
    codeRange_ = nullptr;

    if (unwind_ == Unwind::True) {
      // We're exiting via the interpreter entry; we can safely reset
      // exitFP.
      activation_->setWasmExitFP(nullptr);
      unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();
    }

    MOZ_ASSERT(done());
    return;
  }

  void* returnAddress = prevFP->returnAddress();
  code_ = LookupCode(returnAddress, &codeRange_);
  MOZ_ASSERT(codeRange_);

  if (codeRange_->isJitEntry()) {
    // This wasm function has been called through the generic JIT entry by
    // a JIT caller, so the call stack resembles this:
    //
    // |---------------------|
    // |      JIT FRAME      |
    // |  JSJIT TO WASM EXIT | <-- fp_
    // |    WASM JIT ENTRY   | <-- prevFP (already unwound)
    // |      WASM FRAME     | (already unwound)
    // |---------------------|
    //
    // The next value of FP is just a regular jit frame used as a marker to
    // know that we should transition to a JSJit frame iterator.
    unwoundIonCallerFP_ = reinterpret_cast<uint8_t*>(fp_);
    unwoundIonFrameType_ = FrameType::JSJitToWasm;

    fp_ = nullptr;
    code_ = nullptr;
    codeRange_ = nullptr;

    if (unwind_ == Unwind::True) {
      activation_->setJSExitFP(unwoundIonCallerFP());
      unwoundAddressOfReturnAddress_ = prevFP->addressOfReturnAddress();
    }

    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange_->kind() == CodeRange::Function);

  const CallSite* callsite = code_->lookupCallSite(returnAddress);
  MOZ_ASSERT(callsite);

  if (callsite->mightBeCrossInstance()) {
    tls_ = ExtractCallerTlsFromFrameWithTls(prevFP);
  }

  MOZ_ASSERT(code_ == &tls()->instance->code());
  lineOrBytecode_ = callsite->lineOrBytecode();

  MOZ_ASSERT(!done());
}

const char* WasmFrameIter::filename() const {
  MOZ_ASSERT(!done());
  return code_->metadata().filename.get();
}

const char16_t* WasmFrameIter::displayURL() const {
  MOZ_ASSERT(!done());
  return code_->metadata().displayURL();
}

bool WasmFrameIter::mutedErrors() const {
  MOZ_ASSERT(!done());
  return code_->metadata().mutedErrors();
}

JSAtom* WasmFrameIter::functionDisplayAtom() const {
  MOZ_ASSERT(!done());

  JSContext* cx = activation_->cx();
  JSAtom* atom = instance()->getFuncDisplayAtom(cx, codeRange_->funcIndex());
  if (!atom) {
    cx->clearPendingException();
    return cx->names().empty;
  }

  return atom;
}

unsigned WasmFrameIter::lineOrBytecode() const {
  MOZ_ASSERT(!done());
  return lineOrBytecode_;
}

uint32_t WasmFrameIter::funcIndex() const {
  MOZ_ASSERT(!done());
  return codeRange_->funcIndex();
}

unsigned WasmFrameIter::computeLine(uint32_t* column) const {
  if (instance()->isAsmJS()) {
    if (column) {
      *column = 1;
    }
    return lineOrBytecode_;
  }

  // As a terrible hack to avoid changing the tons of places that pass around
  // (url, line, column) tuples to instead pass around a Variant that
  // stores a (url, func-index, bytecode-offset) tuple for wasm frames,
  // wasm stuffs its tuple into the existing (url, line, column) tuple,
  // tagging the high bit of the column to indicate "this is a wasm frame".
  // When knowing clients see this bit, they shall render the tuple
  // (url, line, column|bit) as "url:wasm-function[column]:0xline" according
  // to the WebAssembly Web API's Developer-Facing Display Conventions.
  //   https://webassembly.github.io/spec/web-api/index.html#conventions
  // The wasm bytecode offset continues to be passed as the JS line to avoid
  // breaking existing devtools code written when this used to be the case.
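  //
  // For example (illustrative, not from the spec): a frame in function
  // index 3 at bytecode offset 0x41 in "t.wasm" is reported with
  // line = 0x41 and column = (3 | ColumnBit); a knowing client strips
  // ColumnBit and renders "t.wasm:wasm-function[3]:0x41".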

  MOZ_ASSERT(!(codeRange_->funcIndex() & ColumnBit));
  if (column) {
    *column = codeRange_->funcIndex() | ColumnBit;
  }
  return lineOrBytecode_;
}

Instance* WasmFrameIter::instance() const {
  MOZ_ASSERT(!done());
  return tls_->instance;
}

void** WasmFrameIter::unwoundAddressOfReturnAddress() const {
  MOZ_ASSERT(done());
  MOZ_ASSERT(unwind_ == Unwind::True);
  MOZ_ASSERT(unwoundAddressOfReturnAddress_);
  return unwoundAddressOfReturnAddress_;
}

bool WasmFrameIter::debugEnabled() const {
  MOZ_ASSERT(!done());

  // Only non-imported functions can have debug frames.
  //
  // Metadata::debugEnabled is only set if debugging is actually enabled (both
  // requested, and available via baseline compilation), and Tier::Debug code
  // will be available.
  return code_->metadata().debugEnabled &&
         codeRange_->funcIndex() >=
             code_->metadata(Tier::Debug).funcImports.length();
}

DebugFrame* WasmFrameIter::debugFrame() const {
  MOZ_ASSERT(!done());
  return DebugFrame::from(fp_);
}

jit::FrameType WasmFrameIter::unwoundIonFrameType() const {
  MOZ_ASSERT(unwoundIonCallerFP_);
  MOZ_ASSERT(unwoundIonFrameType_ != jit::FrameType(-1));
  return unwoundIonFrameType_;
}

uint8_t* WasmFrameIter::resumePCinCurrentFrame() const {
  if (resumePCinCurrentFrame_) {
    return resumePCinCurrentFrame_;
  }
  MOZ_ASSERT(activation_->isWasmTrapping());
  // The next instruction is the instruction following the trap instruction.
  return (uint8_t*)activation_->wasmTrapData().resumePC;
}

/*****************************************************************************/
// Prologue/epilogue code generation

// These constants reflect statically-determined offsets in the
// prologue/epilogue. The offsets are dynamically asserted during code
// generation.
#if defined(JS_CODEGEN_X64)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 4;
static const unsigned PoppedFP = 0;
#elif defined(JS_CODEGEN_X86)
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 3;
static const unsigned PoppedFP = 0;
#elif defined(JS_CODEGEN_ARM)
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 4;
static const unsigned PushedFP = 8;
static const unsigned SetFP = 12;
static const unsigned PoppedFP = 0;
#elif defined(JS_CODEGEN_ARM64)
// On ARM64 we do not use push or pop; the prologues and epilogues are
// structured differently due to restrictions on SP alignment.  Even so,
// PushedRetAddr and PushedFP are used in some restricted contexts
// and must be superficially meaningful.
static const unsigned BeforePushRetAddr = 0;
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 12;
static const unsigned SetFP = 16;
static const unsigned PoppedFP = 4;
static_assert(BeforePushRetAddr == 0, "Required by StartUnwinding");
static_assert(PushedFP > PushedRetAddr, "Required by StartUnwinding");
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
static const unsigned PushedRetAddr = 8;
static const unsigned PushedFP = 12;
static const unsigned SetFP = 16;
static const unsigned PoppedFP = 4;
#elif defined(JS_CODEGEN_NONE)
// Synthetic values to satisfy asserts and avoid compiler warnings.
static const unsigned PushedRetAddr = 0;
static const unsigned PushedFP = 1;
static const unsigned SetFP = 2;
static const unsigned PoppedFP = 3;
#else
#  error "Unknown architecture!"
#endif
static constexpr unsigned SetJitEntryFP = PushedRetAddr + SetFP - PushedFP;
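
// Worked example (illustrative): on x64, SetJitEntryFP = 0 + 4 - 1 = 3.  The
// jit entry prologue pushes no frame pointer, so FP is set SetFP - PushedFP
// bytes (the size of the move-SP-to-FP instruction) after the return address
// is pushed; GenerateJitEntryPrologue below asserts exactly this offset.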

static void LoadActivation(MacroAssembler& masm, const Register& dest) {
  // WasmCall pushes a JitActivation.
  masm.loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, cx)), dest);
  masm.loadPtr(Address(dest, JSContext::offsetOfActivation()), dest);
}

void wasm::SetExitFP(MacroAssembler& masm, ExitReason reason,
                     Register scratch) {
  MOZ_ASSERT(!reason.isNone());

  LoadActivation(masm, scratch);

  masm.store32(
      Imm32(reason.encode()),
      Address(scratch, JitActivation::offsetOfEncodedWasmExitReason()));

  masm.orPtr(Imm32(ExitOrJitEntryFPTag), FramePointer);
  masm.storePtr(FramePointer,
                Address(scratch, JitActivation::offsetOfPackedExitFP()));
  masm.andPtr(Imm32(int32_t(~ExitOrJitEntryFPTag)), FramePointer);
}
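
// Illustrative sketch (not part of this file): assuming ExitOrJitEntryFPTag
// is the frame pointer's low bit, as the tagging code above implies, a frame
// pointer value of 0x7fff0040 is published to packedExitFP as 0x7fff0041 and
// restored to 0x7fff0040 afterwards, so readers can test
//
//   bool exiting = uintptr_t(packedExitFP) & ExitOrJitEntryFPTag;
//
// to distinguish "wasm is exiting through a stub" from an untagged wasm fp.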

void wasm::ClearExitFP(MacroAssembler& masm, Register scratch) {
  LoadActivation(masm, scratch);
  masm.storePtr(ImmWord(0x0),
                Address(scratch, JitActivation::offsetOfPackedExitFP()));
  masm.store32(
      Imm32(0x0),
      Address(scratch, JitActivation::offsetOfEncodedWasmExitReason()));
}

static void GenerateCallablePrologue(MacroAssembler& masm, uint32_t* entry) {
  masm.setFramePushed(0);

  // ProfilingFrameIterator needs to know the offsets of several key
  // instructions from entry. To save space, we make these offsets static
  // constants and assert that they match the actual codegen below. On ARM,
  // this requires AutoForbidPoolsAndNops to prevent a constant pool from being
  // randomly inserted between two instructions.

  // The size of the prologue is constrained to be no larger than the difference
  // between WasmCheckedTailEntryOffset and WasmCheckedCallEntryOffset; to
  // conserve code space / avoid excessive padding, this difference is made as
  // tight as possible.

#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
  {
    *entry = masm.currentOffset();

    masm.subFromStackPtr(Imm32(sizeof(Frame)));
    masm.storePtr(ra, Address(StackPointer, Frame::returnAddressOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.storePtr(FramePointer, Address(StackPointer, Frame::callerFPOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#elif defined(JS_CODEGEN_ARM64)
  {
    // We do not use the PseudoStackPointer.  However, we may be called in a
    // context -- compilation using Ion -- in which the PseudoStackPointer is
    // in use.  Rather than risk confusion in the uses of `masm` here, let's
    // just switch in the real SP, do what we need to do, and restore the
    // existing setting afterwards.
    const vixl::Register stashedSPreg = masm.GetStackPointer64();
    masm.SetStackPointer64(vixl::sp);

    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 4);

    *entry = masm.currentOffset();

    masm.Sub(sp, sp, sizeof(Frame));
    masm.Str(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.Str(ARMRegister(FramePointer, 64),
             MemOperand(sp, Frame::callerFPOffset()));
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.Mov(ARMRegister(FramePointer, 64), sp);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);

    // And restore the SP-reg setting, per comment above.
    masm.SetStackPointer64(stashedSPreg);
  }
#else
  {
#  if defined(JS_CODEGEN_ARM)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 3);

    *entry = masm.currentOffset();

    static_assert(BeforePushRetAddr == 0);
    masm.push(lr);
#  else
    *entry = masm.currentOffset();
    // The x86/x64 call instruction pushes the return address.
#  endif

    MOZ_ASSERT_IF(!masm.oom(), PushedRetAddr == masm.currentOffset() - *entry);
    masm.push(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), PushedFP == masm.currentOffset() - *entry);
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(), SetFP == masm.currentOffset() - *entry);
  }
#endif
}
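
// Illustrative sketch of what the prologue above emits on x64 (the offsets
// are exactly what the PushedRetAddr/PushedFP/SetFP constants assert):
//
//   entry+0:              ; return address already pushed by the caller's call
//   entry+0: push rbp     ; 1 byte  -> PushedFP == 1
//   entry+1: mov rbp, rsp ; 3 bytes -> SetFP == 4
//
// The instruction encodings are assumptions about the assembler's output;
// only the asserted offsets are guaranteed.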

static void GenerateCallableEpilogue(MacroAssembler& masm, unsigned framePushed,
                                     ExitReason reason, uint32_t* ret) {
  if (framePushed) {
    masm.freeStack(framePushed);
  }

  if (!reason.isNone()) {
    ClearExitFP(masm, ABINonArgReturnVolatileReg);
  }

  DebugOnly<uint32_t> poppedFP;

#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)

  masm.loadPtr(Address(StackPointer, Frame::callerFPOffset()), FramePointer);
  poppedFP = masm.currentOffset();
  masm.loadPtr(Address(StackPointer, Frame::returnAddressOffset()), ra);

  *ret = masm.currentOffset();
  masm.as_jr(ra);
  masm.addToStackPtr(Imm32(sizeof(Frame)));

#elif defined(JS_CODEGEN_ARM64)

  // See comment at equivalent place in |GenerateCallablePrologue| above.
  const vixl::Register stashedSPreg = masm.GetStackPointer64();
  masm.SetStackPointer64(vixl::sp);

  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 5);

  masm.Ldr(ARMRegister(FramePointer, 64),
           MemOperand(sp, Frame::callerFPOffset()));
  poppedFP = masm.currentOffset();

  masm.Ldr(ARMRegister(lr, 64), MemOperand(sp, Frame::returnAddressOffset()));
  *ret = masm.currentOffset();

  masm.Add(sp, sp, sizeof(Frame));

  // Reinitialise PSP from SP. This is less than elegant because the prologue
  // operates on the raw stack pointer SP and does not keep the PSP in sync.
  // We can't use initPseudoStackPtr here because we just set up masm to not
  // use it.  Hence we have to do it "by hand".
  masm.Mov(PseudoStackPointer64, vixl::sp);

  masm.Ret(ARMRegister(lr, 64));

  // See comment at equivalent place in |GenerateCallablePrologue| above.
  masm.SetStackPointer64(stashedSPreg);

#else
  // Forbid pools for the same reason as described in GenerateCallablePrologue.
#  if defined(JS_CODEGEN_ARM)
  AutoForbidPoolsAndNops afp(&masm, /* number of instructions in scope = */ 6);
#  endif

  // There is an important ordering constraint here: fp must be repointed to
  // the caller's frame before any field of the frame currently pointed to by
  // fp is popped: asynchronous signal handlers (which use stack space
  // starting at sp) could otherwise clobber these fields while they are still
  // accessible via fp (fp fields are read during frame iteration which is
  // *also* done asynchronously).

  masm.pop(FramePointer);
  poppedFP = masm.currentOffset();

  *ret = masm.currentOffset();
  masm.ret();

#endif

  MOZ_ASSERT_IF(!masm.oom(), PoppedFP == *ret - poppedFP);
}

void wasm::GenerateFunctionPrologue(MacroAssembler& masm,
                                    const TypeIdDesc& funcTypeId,
                                    const Maybe<uint32_t>& tier1FuncIndex,
                                    FuncOffsets* offsets) {
  // These constants reflect statically-determined offsets between a function's
  // checked call entry and the checked tail's entry, see diagram below.  The
  // Entry is a call target, so must have CodeAlignment, but the TailEntry is
  // only a jump target from a stub.
  //
  // The CheckedCallEntryOffset is normally zero.
  //
  // CheckedTailEntryOffset > CheckedCallEntryOffset, and if CPSIZE is the size
  // of the callable prologue then TailEntryOffset - CallEntryOffset >= CPSIZE.
  // It is a goal to keep that difference as small as possible to reduce the
  // amount of padding inserted in the prologue.
  static_assert(WasmCheckedCallEntryOffset % CodeAlignment == 0,
                "code aligned");
  static_assert(WasmCheckedTailEntryOffset > WasmCheckedCallEntryOffset);

  // Flush pending pools so they do not get dumped between the 'begin' and
  // 'uncheckedCallEntry' offsets since the difference must be less than
  // UINT8_MAX to be stored in CodeRange::funcbeginToUncheckedCallEntry_.
  // (Pending pools can be large.)
  masm.flushBuffer();
  masm.haltingAlign(CodeAlignment);

  // We are going to generate the next code layout:
  // ---------------------------------------------
  // checked call entry:    callable prologue
  // checked tail entry:    check signature
  //                        jump functionBody
  // unchecked call entry:  callable prologue
  //                        functionBody
  // -----------------------------------------------
  // checked call entry - used for call_indirect when we have to check the
  // signature.
  //
  // checked tail entry - used by indirect call trampolines which already
  // had pushed Frame on the callee's behalf.
  //
  // unchecked call entry - used for regular direct same-instance calls.

  Label functionBody;

  // Generate checked call entry. The BytecodeOffset of the trap is fixed up to
  // be the bytecode offset of the callsite by JitActivation::startWasmTrap.
  offsets->begin = masm.currentOffset();
  MOZ_ASSERT_IF(!masm.oom(), masm.currentOffset() - offsets->begin ==
                                 WasmCheckedCallEntryOffset);
  uint32_t dummy;
  GenerateCallablePrologue(masm, &dummy);

  // Check that we did not overshoot the space budget for the prologue.
  MOZ_ASSERT_IF(!masm.oom(), masm.currentOffset() - offsets->begin <=
                                 WasmCheckedTailEntryOffset);

  // Pad to WasmCheckedTailEntryOffset.  Don't use nopAlign because the target
  // offset is not necessarily a power of two.  The expected number of NOPs here
  // is very small.
  while (masm.currentOffset() - offsets->begin < WasmCheckedTailEntryOffset) {
    masm.nop();
  }

  // Signature check starts at WasmCheckedTailEntryOffset.
  MOZ_ASSERT_IF(!masm.oom(), masm.currentOffset() - offsets->begin ==
                                 WasmCheckedTailEntryOffset);
  switch (funcTypeId.kind()) {
    case TypeIdDescKind::Global: {
      Register scratch = WasmTableCallScratchReg0;
      masm.loadWasmGlobalPtr(funcTypeId.globalDataOffset(), scratch);
      masm.branchPtr(Assembler::Condition::Equal, WasmTableCallSigReg, scratch,
                     &functionBody);
      masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
      break;
    }
    case TypeIdDescKind::Immediate: {
      masm.branch32(Assembler::Condition::Equal, WasmTableCallSigReg,
                    Imm32(funcTypeId.immediate()), &functionBody);
      masm.wasmTrap(Trap::IndirectCallBadSig, BytecodeOffset(0));
      break;
    }
    case TypeIdDescKind::None:
      masm.jump(&functionBody);
      break;
  }

  // The preceding code may have generated a small constant pool to support the
  // comparison in the signature check.  But if we flush the pool here we will
  // also force the creation of an unused branch veneer in the pool for the jump
  // to functionBody from the signature check on some platforms, thus needlessly
  // inflating the size of the prologue.
  //
  // On no supported platform that uses a pool (arm, arm64) is there any risk at
  // present of that branch or other elements in the pool going out of range
  // while we're generating the following padding and prologue, therefore no
  // pool elements will be emitted in the prologue, therefore it is safe not to
  // flush here.
  //
  // We assert that this holds at runtime by comparing the expected entry offset
  // to the recorded ditto; if they are not the same then
  // GenerateCallablePrologue flushed a pool before the prologue code, contrary
  // to assumption.

  // Generate unchecked call entry:
  masm.nopAlign(CodeAlignment);
  DebugOnly<uint32_t> expectedEntry = masm.currentOffset();
  GenerateCallablePrologue(masm, &offsets->uncheckedCallEntry);
  MOZ_ASSERT(expectedEntry == offsets->uncheckedCallEntry);
  masm.bind(&functionBody);
#ifdef JS_CODEGEN_ARM64
  // GenerateCallablePrologue creates a prologue which operates on the raw
  // stack pointer and does not keep the PSP in sync.  So we have to resync it
  // here.  But we can't use initPseudoStackPtr here because masm may not be
  // set up to use it, depending on which compiler is in use.  Hence do it
  // "manually".
  masm.Mov(PseudoStackPointer64, vixl::sp);
#endif

  // See comment block in WasmCompile.cpp for an explanation of tiering.
  if (tier1FuncIndex) {
    Register scratch = ABINonArgReg0;
    masm.loadPtr(Address(WasmTlsReg, offsetof(TlsData, jumpTable)), scratch);
    masm.jump(Address(scratch, *tier1FuncIndex * sizeof(uintptr_t)));
  }

  offsets->tierEntry = masm.currentOffset();

  MOZ_ASSERT(masm.framePushed() == 0);
}

void wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed,
                                    FuncOffsets* offsets) {
  // Inverse of GenerateFunctionPrologue:
  MOZ_ASSERT(masm.framePushed() == framePushed);
  GenerateCallableEpilogue(masm, framePushed, ExitReason::None(),
                           &offsets->ret);
  MOZ_ASSERT(masm.framePushed() == 0);
}

void wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed,
                                ExitReason reason, CallableOffsets* offsets) {
  masm.haltingAlign(CodeAlignment);

  GenerateCallablePrologue(masm, &offsets->begin);

  // This frame will be exiting compiled code to C++ so record the fp and
  // reason in the JitActivation so the frame iterators can unwind.
  SetExitFP(masm, reason, ABINonArgReturnVolatileReg);

  MOZ_ASSERT(masm.framePushed() == 0);
  masm.reserveStack(framePushed);
}

void wasm::GenerateExitEpilogue(MacroAssembler& masm, unsigned framePushed,
                                ExitReason reason, CallableOffsets* offsets) {
  // Inverse of GenerateExitPrologue:
  MOZ_ASSERT(masm.framePushed() == framePushed);
  GenerateCallableEpilogue(masm, framePushed, reason, &offsets->ret);
  MOZ_ASSERT(masm.framePushed() == 0);
}

static void AssertNoWasmExitFPInJitExit(MacroAssembler& masm) {
  // As a general stack invariant, if Activation::packedExitFP is tagged as
  // wasm, it must point to a valid wasm::Frame. The JIT exit stub calls into
  // JIT code and thus does not really exit, thus, when entering/leaving the
  // JIT exit stub from/to normal wasm code, packedExitFP is not tagged wasm.
#ifdef DEBUG
  Register scratch = ABINonArgReturnReg0;
  LoadActivation(masm, scratch);

  Label ok;
  masm.branchTestPtr(Assembler::Zero,
                     Address(scratch, JitActivation::offsetOfPackedExitFP()),
                     Imm32(ExitOrJitEntryFPTag), &ok);
  masm.breakpoint();
  masm.bind(&ok);
#endif
}

void wasm::GenerateJitExitPrologue(MacroAssembler& masm, unsigned framePushed,
                                   CallableOffsets* offsets) {
  masm.haltingAlign(CodeAlignment);

  GenerateCallablePrologue(masm, &offsets->begin);
  AssertNoWasmExitFPInJitExit(masm);

  MOZ_ASSERT(masm.framePushed() == 0);
  masm.reserveStack(framePushed);
}

void wasm::GenerateJitExitEpilogue(MacroAssembler& masm, unsigned framePushed,
                                   CallableOffsets* offsets) {
  // Inverse of GenerateJitExitPrologue:
  MOZ_ASSERT(masm.framePushed() == framePushed);
  AssertNoWasmExitFPInJitExit(masm);
  GenerateCallableEpilogue(masm, framePushed, ExitReason::None(),
                           &offsets->ret);
  MOZ_ASSERT(masm.framePushed() == 0);
}

void wasm::GenerateJitEntryPrologue(MacroAssembler& masm, Offsets* offsets) {
  masm.haltingAlign(CodeAlignment);

  {
#if defined(JS_CODEGEN_ARM)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 2);
    offsets->begin = masm.currentOffset();
    static_assert(BeforePushRetAddr == 0);
    masm.push(lr);
#elif defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
    offsets->begin = masm.currentOffset();
    masm.push(ra);
#elif defined(JS_CODEGEN_ARM64)
    AutoForbidPoolsAndNops afp(&masm,
                               /* number of instructions in scope = */ 3);
    offsets->begin = masm.currentOffset();
    static_assert(BeforePushRetAddr == 0);
    // Subtract from SP first as SP must be aligned before offsetting.
    masm.Sub(sp, sp, 8);
    masm.storePtr(lr, Address(masm.getStackPointer(), 0));
    masm.adjustFrame(8);
#else
    // The x86/x64 call instruction pushes the return address.
    offsets->begin = masm.currentOffset();
#endif
    MOZ_ASSERT_IF(!masm.oom(),
                  PushedRetAddr == masm.currentOffset() - offsets->begin);

    // Save jit frame pointer, so unwinding from wasm to jit frames is trivial.
    masm.moveStackPtrTo(FramePointer);
    MOZ_ASSERT_IF(!masm.oom(),
                  SetJitEntryFP == masm.currentOffset() - offsets->begin);
  }

  masm.setFramePushed(0);
}

/*****************************************************************************/
// ProfilingFrameIterator

ProfilingFrameIterator::ProfilingFrameIterator()
    : code_(nullptr),
      codeRange_(nullptr),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundIonCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::None) {
  MOZ_ASSERT(done());
}

ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation)
    : code_(nullptr),
      codeRange_(nullptr),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundIonCallerFP_(nullptr),
      exitReason_(activation.wasmExitReason()) {
  initFromExitFP(activation.wasmExitFP());
}

ProfilingFrameIterator::ProfilingFrameIterator(const Frame* fp)
    : code_(nullptr),
      codeRange_(nullptr),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundIonCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::ImportJit) {
  MOZ_ASSERT(fp);
  initFromExitFP(fp);
}

static inline void AssertDirectJitCall(const void* fp) {
  // Called via an inlined fast JIT to wasm call: in this case, FP is
  // pointing in the middle of the exit frame, right before the exit
  // footer; ensure the exit frame type is the expected one.
#ifdef DEBUG
  if (Frame::isExitOrJitEntryFP(fp)) {
    fp = Frame::toJitEntryCaller(fp);
  }
  auto* jitCaller = (ExitFrameLayout*)fp;
  MOZ_ASSERT(jitCaller->footer()->type() ==
             jit::ExitFrameType::DirectWasmJitCall);
#endif
}

static inline void AssertMatchesCallSite(void* callerPC, uint8_t* callerFP) {
#ifdef DEBUG
  const CodeRange* callerCodeRange;
  const Code* code = LookupCode(callerPC, &callerCodeRange);

  if (!code) {
    AssertDirectJitCall(callerFP);
    return;
  }

  MOZ_ASSERT(callerCodeRange);

  if (callerCodeRange->isInterpEntry()) {
    MOZ_ASSERT(callerFP == nullptr);
    return;
  }

  if (callerCodeRange->isJitEntry()) {
    MOZ_ASSERT(callerFP != nullptr);
    return;
  }

  const CallSite* callsite = code->lookupCallSite(callerPC);
  MOZ_ASSERT(callsite);
#endif
}

void ProfilingFrameIterator::initFromExitFP(const Frame* fp) {
  MOZ_ASSERT(fp);
  stackAddress_ = (void*)fp;
  code_ = LookupCode(fp->returnAddress(), &codeRange_);

  if (!code_) {
    // This is a direct call from the JIT; the caller FP points to a
    // tagged JIT caller's frame.
    AssertDirectJitCall(fp->jitEntryCaller());

    unwoundIonCallerFP_ = fp->jitEntryCaller();
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange_);

  // Since we don't have the pc for fp, start unwinding at the caller of fp.
  // This means that the innermost frame is skipped. This is fine because:
  //  - for import exit calls, the innermost frame is a thunk, so the first
  //    frame that shows up is the function calling the import;
  //  - for Math and other builtin calls, we note the absence of an exit
  //    reason and inject a fake "builtin" frame.
  switch (codeRange_->kind()) {
    case CodeRange::InterpEntry:
      callerPC_ = nullptr;
      callerFP_ = nullptr;
      codeRange_ = nullptr;
      exitReason_ = ExitReason(ExitReason::Fixed::FakeInterpEntry);
      break;
    case CodeRange::JitEntry:
      callerPC_ = nullptr;
      callerFP_ = nullptr;
      unwoundIonCallerFP_ = fp->rawCaller();
      break;
    case CodeRange::Function:
      fp = fp->wasmCaller();
      callerPC_ = fp->returnAddress();
      callerFP_ = fp->rawCaller();
      AssertMatchesCallSite(callerPC_, callerFP_);
      break;
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::TrapExit:
    case CodeRange::DebugTrap:
    case CodeRange::Throw:
    case CodeRange::FarJumpIsland:
      MOZ_CRASH("Unexpected CodeRange kind");
  }

  MOZ_ASSERT(!done());
}

static bool isSignatureCheckFail(uint32_t offsetInCode,
                                 const CodeRange* codeRange) {
  if (!codeRange->isFunction()) {
    return false;
  }
  // checked call entry:    1. push Frame
  //                        2. set FP
  //                        3. signature check <--- check if we are here.
  //                        4. jump 7
  // unchecked call entry:  5. push Frame
  //                        6. set FP
  //                        7. function's code
  return offsetInCode < codeRange->funcUncheckedCallEntry() &&
         (offsetInCode - codeRange->funcCheckedCallEntry()) > SetFP;
}
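
// Worked example (illustrative): with x64's SetFP == 4, a checked call entry
// at offset C and an unchecked call entry at offset U, offsets in (C + 4, U)
// are attributed to the signature check, so a pc at C + 5 unwinds as a
// signature-check failure while a pc at C + 2 (still in the prologue) does
// not.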

const TlsData* js::wasm::GetNearestEffectiveTls(const Frame* fp) {
  while (true) {
    if (fp->callerIsExitOrJitEntryFP()) {
      // It is a direct call from JIT.
      MOZ_ASSERT(!LookupCode(fp->returnAddress()));
      return ExtractCalleeTlsFromFrameWithTls(fp);
    }

    uint8_t* returnAddress = fp->returnAddress();
    const CodeRange* codeRange = nullptr;
    const Code* code = LookupCode(returnAddress, &codeRange);
    MOZ_ASSERT(codeRange);

    if (codeRange->isEntry()) {
      return ExtractCalleeTlsFromFrameWithTls(fp);
    }

    MOZ_ASSERT(codeRange->kind() == CodeRange::Function);
    MOZ_ASSERT(code);
    const CallSite* callsite = code->lookupCallSite(returnAddress);
    if (callsite->mightBeCrossInstance()) {
      return ExtractCalleeTlsFromFrameWithTls(fp);
    }

    fp = fp->wasmCaller();
  }
}

TlsData* js::wasm::GetNearestEffectiveTls(Frame* fp) {
  return const_cast<TlsData*>(
      GetNearestEffectiveTls(const_cast<const Frame*>(fp)));
}

bool js::wasm::StartUnwinding(const RegisterState& registers,
                              UnwindState* unwindState, bool* unwoundCaller) {
  // Shorthands.
  uint8_t* const pc = (uint8_t*)registers.pc;
  void** const sp = (void**)registers.sp;

  // The frame pointer might be:
  // - in the process of tagging/untagging when calling into the JITs;
  //   make sure it's untagged;
  // - tagged by a direct JIT call;
  // - unreliable if it's not been set yet, in prologues.
  uint8_t* fp = Frame::isExitOrJitEntryFP(registers.fp)
                    ? Frame::toJitEntryCaller(registers.fp)
                    : reinterpret_cast<uint8_t*>(registers.fp);

  // Get the CodeRange describing pc and the base address to which the
  // CodeRange is relative. If the pc is not in a wasm module or a builtin
  // thunk, then execution must be entering from or leaving to the C++ caller
  // that pushed the JitActivation.
  const CodeRange* codeRange;
  uint8_t* codeBase;
  const Code* code = nullptr;

  const CodeSegment* codeSegment = LookupCodeSegment(pc, &codeRange);
  if (codeSegment) {
    code = &codeSegment->code();
    codeBase = codeSegment->base();
    MOZ_ASSERT(codeRange);
  } else if (!LookupBuiltinThunk(pc, &codeRange, &codeBase)) {
    return false;
  }

  // When the pc is inside the prologue/epilogue, the innermost call's Frame
  // is not complete and thus fp points to the second-to-innermost call's
  // Frame. Since fp can only tell you about its caller, naively unwinding
  // while pc is in the prologue/epilogue would skip the second-to-innermost
  // call. To avoid this problem, we use the static structure of the code in
  // the prologue and epilogue to do the Right Thing.
  uint32_t offsetInCode = pc - codeBase;
  MOZ_ASSERT(offsetInCode >= codeRange->begin());
  MOZ_ASSERT(offsetInCode < codeRange->end());

  // Compute the offset of the pc from the (unchecked call) entry of the code
  // range. The checked call entry and the unchecked call entry have a common
  // prefix, so a pc before the signature check in the checked call entry is
  // equivalent to a pc at the unchecked call entry. Thus, we can simplify the
  // case analysis below by redirecting all pc-in-checked-call-entry (before
  // the signature check) cases to the pc-at-unchecked-call-entry case.
  uint32_t offsetFromEntry;
  if (codeRange->isFunction()) {
    if (offsetInCode < codeRange->funcUncheckedCallEntry()) {
      offsetFromEntry = offsetInCode - codeRange->funcCheckedCallEntry();
    } else {
      offsetFromEntry = offsetInCode - codeRange->funcUncheckedCallEntry();
    }
  } else {
    offsetFromEntry = offsetInCode - codeRange->begin();
  }
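
  // Worked example (illustrative): if the checked call entry begins at code
  // offset 0x40 and the unchecked call entry at 0x60, then a pc of 0x42
  // (inside the checked prologue) and a pc of 0x62 both yield
  // offsetFromEntry == 2 and are handled identically below.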

  // Most cases end up unwinding to the caller state; not unwinding is the
  // exception here.
  *unwoundCaller = true;

  uint8_t* fixedFP = nullptr;
  void* fixedPC = nullptr;
  switch (codeRange->kind()) {
    case CodeRange::Function:
    case CodeRange::FarJumpIsland:
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::DebugTrap:
#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
      if (codeRange->isThunk()) {
        // The FarJumpIsland sequence temporarily scrambles ra.
        // Don't unwind to the caller.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      } else if (offsetFromEntry < PushedFP) {
        // On MIPS we rely on register state instead of state saved on
        // stack until the wasm::Frame is completely built.
        // On entry the return address is in ra (registers.lr) and
        // fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_ARM64)
      if (offsetFromEntry < PushedFP || codeRange->isThunk()) {
        // Constraints above ensure that this covers BeforePushRetAddr and
        // PushedRetAddr.
        //
        // On ARM64 we subtract the size of the Frame from SP and then store
        // values into the stack.  Execution can be interrupted at various
        // places in that sequence.  We rely on the register state for our
        // values.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#elif defined(JS_CODEGEN_ARM)
      if (offsetFromEntry == BeforePushRetAddr || codeRange->isThunk()) {
        // The return address is still in lr and fp holds the caller's fp.
        fixedPC = (uint8_t*)registers.lr;
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else
#endif
          if (offsetFromEntry == PushedRetAddr || codeRange->isThunk()) {
        // The return address has been pushed on the stack but fp still
        // points to the caller's fp.
        fixedPC = sp[0];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else if (offsetFromEntry == PushedFP) {
        // The full Frame has been pushed; fp is still the caller's fp.
        const auto* frame = Frame::fromUntaggedWasmExitFP(sp);
        DebugOnly<const uint8_t*> caller = frame->callerIsExitOrJitEntryFP()
                                               ? frame->jitEntryCaller()
                                               : frame->rawCaller();
        MOZ_ASSERT(caller == fp);
        fixedPC = frame->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode <= codeRange->ret()) {
        // The fixedFP field of the Frame has been loaded into fp.
        // The ra and TLS might also have been loaded, but the Frame structure
        // is still on the stack, so we can access the ra from there.
        MOZ_ASSERT(*sp == fp);
        fixedPC = Frame::fromUntaggedWasmExitFP(sp)->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#elif defined(JS_CODEGEN_ARM64)
        // The stack pointer does not move until all values have
        // been restored so several cases can be coalesced here.
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode <= codeRange->ret()) {
        fixedPC = Frame::fromUntaggedWasmExitFP(sp)->returnAddress();
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#else
      } else if (offsetInCode >= codeRange->ret() - PoppedFP &&
                 offsetInCode < codeRange->ret()) {
        // The fixedFP field of the Frame has been popped into fp.
        fixedPC = sp[1];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
      } else if (offsetInCode == codeRange->ret()) {
        // Both the TLS and fixedFP fields have been popped and fp now
        // points to the caller's frame.
        fixedPC = sp[0];
        fixedFP = fp;
        AssertMatchesCallSite(fixedPC, fixedFP);
#endif
      } else {
        if (codeRange->kind() == CodeRange::ImportJitExit) {
          // The jit exit contains a range where the value of FP can't be
          // trusted. Technically, we could recover fp from sp, but since
          // the range is so short, for now just drop the stack.
          if (offsetInCode >= codeRange->jitExitUntrustedFPStart() &&
              offsetInCode < codeRange->jitExitUntrustedFPEnd()) {
            return false;
          }
        }

        if (isSignatureCheckFail(offsetInCode, codeRange)) {
          // The Frame has been pushed and FP has been set.
          const auto* frame = Frame::fromUntaggedWasmExitFP(fp);
          fixedFP = frame->rawCaller();
          fixedPC = frame->returnAddress();
          AssertMatchesCallSite(fixedPC, fixedFP);
          break;
        }

        // Not in the prologue/epilogue.
        fixedPC = pc;
        fixedFP = fp;
        *unwoundCaller = false;
        AssertMatchesCallSite(
            Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
        break;
      }
      break;
    case CodeRange::TrapExit:
      // These code stubs execute after the prologue/epilogue have completed
      // so pc/fp contain the right values here.
      fixedPC = pc;
      fixedFP = fp;
      *unwoundCaller = false;
      AssertMatchesCallSite(Frame::fromUntaggedWasmExitFP(fp)->returnAddress(),
                            Frame::fromUntaggedWasmExitFP(fp)->rawCaller());
      break;
    case CodeRange::InterpEntry:
      // The entry trampoline is the final frame in a wasm JitActivation. The
      // entry trampoline also doesn't GeneratePrologue/Epilogue so we can't
      // use the general unwinding logic above.
      break;
    case CodeRange::JitEntry:
      // There's a jit frame above the current one; we don't care about pc
      // since the Jit entry frame is a jit frame which can be considered as
      // an exit frame.
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
    defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
      if (offsetFromEntry < PushedRetAddr) {
        // We haven't pushed the jit return address yet, thus the jit
        // frame is incomplete. During profiling frame iteration, it means
        // that the jit profiling frame iterator won't be able to unwind
        // this frame; drop it.
        return false;
      }
#endif
      fixedFP =
          offsetFromEntry < SetJitEntryFP ? reinterpret_cast<uint8_t*>(sp) : fp;
      fixedPC = nullptr;

      // On the error return path, FP might be set to FailFP. Ignore these
      // transient frames.
      if (intptr_t(fixedFP) == (FailFP & ~ExitOrJitEntryFPTag)) {
        return false;
      }
      break;
    case CodeRange::Throw:
      // The throw stub executes a small number of instructions before popping
      // the entire activation. To simplify testing, we simply pretend throw
      // stubs have already popped the entire stack.
      return false;
  }

  unwindState->code = code;
  unwindState->codeRange = codeRange;
  unwindState->fp = fixedFP;
  unwindState->pc = fixedPC;
  return true;
}
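
// Illustrative sketch (not part of this file): a sampling profiler that has
// captured a thread's register state while it executes wasm code might start
// unwinding like this, then keep walking with the recovered fp/pc:
//
//   UnwindState unwindState;
//   bool unwoundCaller;
//   if (StartUnwinding(registers, &unwindState, &unwoundCaller)) {
//     // unwindState.{code,codeRange,fp,pc} now describe a coherent frame:
//     // the caller's state if unwoundCaller, else the sampled frame itself.
//   } else {
//     // No frame is recoverable here (e.g. mid exit stub); drop the sample.
//   }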

ProfilingFrameIterator::ProfilingFrameIterator(const JitActivation& activation,
                                               const RegisterState& state)
    : code_(nullptr),
      codeRange_(nullptr),
      callerFP_(nullptr),
      callerPC_(nullptr),
      stackAddress_(nullptr),
      unwoundIonCallerFP_(nullptr),
      exitReason_(ExitReason::Fixed::None) {
  // Let wasmExitFP take precedence over StartUnwinding when it is set, since
  // during the body of an exit stub the register state may not be valid,
  // causing StartUnwinding() to abandon unwinding this activation.
  if (activation.hasWasmExitFP()) {
    exitReason_ = activation.wasmExitReason();
    initFromExitFP(activation.wasmExitFP());
    return;
  }

  bool unwoundCaller;
  UnwindState unwindState;
  if (!StartUnwinding(state, &unwindState, &unwoundCaller)) {
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(unwindState.codeRange);

  if (unwoundCaller) {
    callerFP_ = unwindState.fp;
    callerPC_ = unwindState.pc;
    // In the case of a function call, if the original FP value is tagged,
    // then we're being called through a direct JIT call (the interpreter
    // and the jit entry don't set FP's low bit). We can't observe
    // transient tagged values of FP (during wasm::SetExitFP) here because
    // StartUnwinding would not have unwound then.
    if (unwindState.codeRange->isFunction() &&
        Frame::isExitOrJitEntryFP(reinterpret_cast<uint8_t*>(state.fp))) {
      unwoundIonCallerFP_ = callerFP_;
    }
  } else {
    callerFP_ = Frame::fromUntaggedWasmExitFP(unwindState.fp)->rawCaller();
    callerPC_ = Frame::fromUntaggedWasmExitFP(unwindState.fp)->returnAddress();
    // See comment above. The only way to get a tagged FP here is if the
    // caller is a fast JIT caller which called into a wasm function.
    if (Frame::isExitOrJitEntryFP(callerFP_)) {
      MOZ_ASSERT(unwindState.codeRange->isFunction());
      unwoundIonCallerFP_ = Frame::toJitEntryCaller(callerFP_);
    }
  }

  if (unwindState.codeRange->isJitEntry()) {
    MOZ_ASSERT(!unwoundIonCallerFP_);
    unwoundIonCallerFP_ = callerFP_;
  }

  if (unwindState.codeRange->isInterpEntry()) {
    unwindState.codeRange = nullptr;
    exitReason_ = ExitReason(ExitReason::Fixed::FakeInterpEntry);
  }

  code_ = unwindState.code;
  codeRange_ = unwindState.codeRange;
  stackAddress_ = state.sp;
  MOZ_ASSERT(!done());
}

void ProfilingFrameIterator::operator++() {
  if (!exitReason_.isNone()) {
    DebugOnly<bool> wasInterpEntry = exitReason_.isInterpEntry();
    exitReason_ = ExitReason::None();
    MOZ_ASSERT((!codeRange_) == wasInterpEntry);
    MOZ_ASSERT(done() == wasInterpEntry);
    return;
  }

  if (unwoundIonCallerFP_) {
    MOZ_ASSERT(codeRange_->isFunction() || codeRange_->isJitEntry());
    callerPC_ = nullptr;
    callerFP_ = nullptr;
    codeRange_ = nullptr;
    MOZ_ASSERT(done());
    return;
  }

  if (!callerPC_) {
    MOZ_ASSERT(!callerFP_);
    codeRange_ = nullptr;
    MOZ_ASSERT(done());
    return;
  }

  if (!callerFP_) {
    MOZ_ASSERT(LookupCode(callerPC_, &codeRange_) == code_);
    MOZ_ASSERT(codeRange_->kind() == CodeRange::InterpEntry);
    exitReason_ = ExitReason(ExitReason::Fixed::FakeInterpEntry);
    codeRange_ = nullptr;
    callerPC_ = nullptr;
    MOZ_ASSERT(!done());
    return;
  }

  code_ = LookupCode(callerPC_, &codeRange_);

  if (!code_ && Frame::isExitOrJitEntryFP(callerFP_)) {
    // The parent frame is an inlined wasm call; the tagged FP points to
    // the fake exit frame.
    MOZ_ASSERT(!codeRange_);
    AssertDirectJitCall(callerFP_);
    unwoundIonCallerFP_ = Frame::toJitEntryCaller(callerFP_);
    MOZ_ASSERT(done());
    return;
  }

  MOZ_ASSERT(codeRange_);

  if (codeRange_->isJitEntry()) {
    unwoundIonCallerFP_ = callerFP_;
    MOZ_ASSERT(!done());
    return;
  }

  MOZ_ASSERT(code_ ==
             &GetNearestEffectiveTls(Frame::fromUntaggedWasmExitFP(callerFP_))
                  ->instance->code());

  switch (codeRange_->kind()) {
    case CodeRange::Function:
    case CodeRange::ImportJitExit:
    case CodeRange::ImportInterpExit:
    case CodeRange::BuiltinThunk:
    case CodeRange::TrapExit:
    case CodeRange::DebugTrap:
    case CodeRange::FarJumpIsland: {
      stackAddress_ = callerFP_;
      const auto* frame = Frame::fromUntaggedWasmExitFP(callerFP_);
      callerPC_ = frame->returnAddress();
      AssertMatchesCallSite(callerPC_, frame->rawCaller());
      callerFP_ = frame->rawCaller();
      break;
    }
    case CodeRange::InterpEntry:
      MOZ_CRASH("should have had null caller fp");
    case CodeRange::JitEntry:
      MOZ_CRASH("should have been guarded above");
    case CodeRange::Throw:
      MOZ_CRASH("code range doesn't have frame");
  }

  MOZ_ASSERT(!done());
}
1370 
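// Map a builtin callee reached through a thunk to a human-readable profiling
// label.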
static const char* ThunkedNativeToDescription(SymbolicAddress func) {
  MOZ_ASSERT(NeedsBuiltinThunk(func));
  switch (func) {
    case SymbolicAddress::HandleDebugTrap:
    case SymbolicAddress::HandleThrow:
    case SymbolicAddress::HandleTrap:
    case SymbolicAddress::CallImport_General:
    case SymbolicAddress::CoerceInPlace_ToInt32:
    case SymbolicAddress::CoerceInPlace_ToNumber:
    case SymbolicAddress::CoerceInPlace_ToBigInt:
    case SymbolicAddress::BoxValue_Anyref:
      MOZ_ASSERT(!NeedsBuiltinThunk(func),
                 "not in sync with NeedsBuiltinThunk");
      break;
    case SymbolicAddress::ToInt32:
      return "call to asm.js native ToInt32 coercion (in wasm)";
    case SymbolicAddress::DivI64:
      return "call to native i64.div_s (in wasm)";
    case SymbolicAddress::UDivI64:
      return "call to native i64.div_u (in wasm)";
    case SymbolicAddress::ModI64:
      return "call to native i64.rem_s (in wasm)";
    case SymbolicAddress::UModI64:
      return "call to native i64.rem_u (in wasm)";
    case SymbolicAddress::TruncateDoubleToUint64:
      return "call to native i64.trunc_u/f64 (in wasm)";
    case SymbolicAddress::TruncateDoubleToInt64:
      return "call to native i64.trunc_s/f64 (in wasm)";
    case SymbolicAddress::SaturatingTruncateDoubleToUint64:
      return "call to native i64.trunc_u:sat/f64 (in wasm)";
    case SymbolicAddress::SaturatingTruncateDoubleToInt64:
      return "call to native i64.trunc_s:sat/f64 (in wasm)";
    case SymbolicAddress::Uint64ToDouble:
      return "call to native f64.convert_u/i64 (in wasm)";
    case SymbolicAddress::Uint64ToFloat32:
      return "call to native f32.convert_u/i64 (in wasm)";
    case SymbolicAddress::Int64ToDouble:
      return "call to native f64.convert_s/i64 (in wasm)";
    case SymbolicAddress::Int64ToFloat32:
      return "call to native f32.convert_s/i64 (in wasm)";
#if defined(JS_CODEGEN_ARM)
    case SymbolicAddress::aeabi_idivmod:
      return "call to native i32.div_s (in wasm)";
    case SymbolicAddress::aeabi_uidivmod:
      return "call to native i32.div_u (in wasm)";
#endif
    case SymbolicAddress::AllocateBigInt:
      return "call to native Allocate<BigInt, NoGC> (in wasm)";
    case SymbolicAddress::ModD:
      return "call to asm.js native f64 % (mod)";
    case SymbolicAddress::SinD:
      return "call to asm.js native f64 Math.sin";
    case SymbolicAddress::CosD:
      return "call to asm.js native f64 Math.cos";
    case SymbolicAddress::TanD:
      return "call to asm.js native f64 Math.tan";
    case SymbolicAddress::ASinD:
      return "call to asm.js native f64 Math.asin";
    case SymbolicAddress::ACosD:
      return "call to asm.js native f64 Math.acos";
    case SymbolicAddress::ATanD:
      return "call to asm.js native f64 Math.atan";
    case SymbolicAddress::CeilD:
      return "call to native f64.ceil (in wasm)";
    case SymbolicAddress::CeilF:
      return "call to native f32.ceil (in wasm)";
    case SymbolicAddress::FloorD:
      return "call to native f64.floor (in wasm)";
    case SymbolicAddress::FloorF:
      return "call to native f32.floor (in wasm)";
    case SymbolicAddress::TruncD:
      return "call to native f64.trunc (in wasm)";
    case SymbolicAddress::TruncF:
      return "call to native f32.trunc (in wasm)";
    case SymbolicAddress::NearbyIntD:
      return "call to native f64.nearest (in wasm)";
    case SymbolicAddress::NearbyIntF:
      return "call to native f32.nearest (in wasm)";
    case SymbolicAddress::ExpD:
      return "call to asm.js native f64 Math.exp";
    case SymbolicAddress::LogD:
      return "call to asm.js native f64 Math.log";
    case SymbolicAddress::PowD:
      return "call to asm.js native f64 Math.pow";
    case SymbolicAddress::ATan2D:
      return "call to asm.js native f64 Math.atan2";
    case SymbolicAddress::MemoryGrow:
      return "call to native memory.grow (in wasm)";
    case SymbolicAddress::MemorySize:
      return "call to native memory.size (in wasm)";
    case SymbolicAddress::WaitI32:
      return "call to native i32.wait (in wasm)";
    case SymbolicAddress::WaitI64:
      return "call to native i64.wait (in wasm)";
    case SymbolicAddress::Wake:
      return "call to native wake (in wasm)";
    case SymbolicAddress::CoerceInPlace_JitEntry:
      return "out-of-line coercion for jit entry arguments (in wasm)";
    case SymbolicAddress::ReportV128JSCall:
      return "jit call to v128 wasm function";
    case SymbolicAddress::MemCopy32:
    case SymbolicAddress::MemCopyShared32:
      return "call to native memory.copy function";
    case SymbolicAddress::DataDrop:
      return "call to native data.drop function";
    case SymbolicAddress::MemFill32:
    case SymbolicAddress::MemFillShared32:
      return "call to native memory.fill function";
    case SymbolicAddress::MemInit32:
      return "call to native memory.init function";
    case SymbolicAddress::TableCopy:
      return "call to native table.copy function";
    case SymbolicAddress::TableFill:
      return "call to native table.fill function";
    case SymbolicAddress::ElemDrop:
      return "call to native elem.drop function";
    case SymbolicAddress::TableGet:
      return "call to native table.get function";
    case SymbolicAddress::TableGrow:
      return "call to native table.grow function";
    case SymbolicAddress::TableInit:
      return "call to native table.init function";
    case SymbolicAddress::TableSet:
      return "call to native table.set function";
    case SymbolicAddress::TableSize:
      return "call to native table.size function";
    case SymbolicAddress::RefFunc:
      return "call to native ref.func function";
    case SymbolicAddress::PreBarrierFiltering:
      return "call to native filtering GC prebarrier (in wasm)";
    case SymbolicAddress::PostBarrier:
      return "call to native GC postbarrier (in wasm)";
    case SymbolicAddress::PostBarrierFiltering:
      return "call to native filtering GC postbarrier (in wasm)";
    case SymbolicAddress::StructNew:
      return "call to native struct.new (in wasm)";
#if defined(ENABLE_WASM_EXCEPTIONS)
    case SymbolicAddress::ExceptionNew:
      return "call to native exception new (in wasm)";
    case SymbolicAddress::ThrowException:
      return "call to native throw exception (in wasm)";
    case SymbolicAddress::GetLocalExceptionIndex:
      return "call to native get the local index of an exn's tag (in wasm)";
    case SymbolicAddress::PushRefIntoExn:
      return "call to native that pushes a ref value into an exn (in wasm)";
#endif
    case SymbolicAddress::ArrayNew:
      return "call to native array.new (in wasm)";
    case SymbolicAddress::RefTest:
      return "call to native ref.test (in wasm)";
    case SymbolicAddress::RttSub:
      return "call to native rtt.sub (in wasm)";
    case SymbolicAddress::InlineTypedObjectClass:
      MOZ_CRASH();
#if defined(JS_CODEGEN_MIPS32)
    case SymbolicAddress::js_jit_gAtomic64Lock:
      MOZ_CRASH();
#endif
#ifdef WASM_CODEGEN_DEBUG
    case SymbolicAddress::PrintI32:
    case SymbolicAddress::PrintPtr:
    case SymbolicAddress::PrintF32:
    case SymbolicAddress::PrintF64:
    case SymbolicAddress::PrintText:
#endif
    case SymbolicAddress::Limit:
      break;
  }
  return "?";
}

const char* ProfilingFrameIterator::label() const {
  MOZ_ASSERT(!done());

  // Use the same string for time spent both inside and under a frame so
  // that the profiler will coalesce the two entries.
  // Must be kept in sync with /tools/profiler/tests/test_asm.js
  static const char importJitDescription[] = "fast exit trampoline (in wasm)";
  static const char importInterpDescription[] =
      "slow exit trampoline (in wasm)";
  static const char builtinNativeDescription[] =
      "fast exit trampoline to native (in wasm)";
  static const char trapDescription[] = "trap handling (in wasm)";
  static const char debugTrapDescription[] = "debug trap handling (in wasm)";

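  // A non-fixed exit reason names the specific thunked builtin being called.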
  if (!exitReason_.isFixed()) {
    return ThunkedNativeToDescription(exitReason_.symbolic());
  }

  switch (exitReason_.fixed()) {
    case ExitReason::Fixed::None:
      break;
    case ExitReason::Fixed::ImportJit:
      return importJitDescription;
    case ExitReason::Fixed::ImportInterp:
      return importInterpDescription;
    case ExitReason::Fixed::BuiltinNative:
      return builtinNativeDescription;
    case ExitReason::Fixed::Trap:
      return trapDescription;
    case ExitReason::Fixed::DebugTrap:
      return debugTrapDescription;
    case ExitReason::Fixed::FakeInterpEntry:
      return "slow entry trampoline (in wasm)";
  }

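  // No fixed exit reason applies, so describe the frame by its code range.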
  switch (codeRange_->kind()) {
    case CodeRange::Function:
      return code_->profilingLabel(codeRange_->funcIndex());
    case CodeRange::InterpEntry:
      MOZ_CRASH("should be an ExitReason");
    case CodeRange::JitEntry:
      return "fast entry trampoline (in wasm)";
    case CodeRange::ImportJitExit:
      return importJitDescription;
    case CodeRange::BuiltinThunk:
      return builtinNativeDescription;
    case CodeRange::ImportInterpExit:
      return importInterpDescription;
    case CodeRange::TrapExit:
      return trapDescription;
    case CodeRange::DebugTrap:
      return debugTrapDescription;
    case CodeRange::FarJumpIsland:
      return "interstitial (in wasm)";
    case CodeRange::Throw:
      MOZ_CRASH("does not have a frame");
  }

  MOZ_CRASH("bad code range kind");
}