1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2  * vim: set ts=8 sts=2 et sw=2 tw=80:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "jit/JitFrames-inl.h"
8 
9 #include "mozilla/ScopeExit.h"
10 
11 #include <algorithm>
12 
13 #include "builtin/ModuleObject.h"
14 #include "jit/BaselineFrame.h"
15 #include "jit/BaselineIC.h"
16 #include "jit/BaselineJIT.h"
17 #include "jit/Ion.h"
18 #include "jit/IonScript.h"
19 #include "jit/JitRuntime.h"
20 #include "jit/JitSpewer.h"
21 #include "jit/LIR.h"
22 #include "jit/PcScriptCache.h"
23 #include "jit/Recover.h"
24 #include "jit/Safepoints.h"
25 #include "jit/ScriptFromCalleeToken.h"
26 #include "jit/Snapshots.h"
27 #include "jit/VMFunctions.h"
28 #include "js/friend/DumpFunctions.h"  // js::DumpObject, js::DumpValue
29 #include "vm/Interpreter.h"
30 #include "vm/JSContext.h"
31 #include "vm/JSFunction.h"
32 #include "vm/JSObject.h"
33 #include "vm/JSScript.h"
34 #include "vm/TraceLogging.h"
35 #include "wasm/WasmBuiltins.h"
36 #include "wasm/WasmInstance.h"
37 
38 #include "debugger/DebugAPI-inl.h"
39 #include "jit/JSJitFrameIter-inl.h"
40 #include "vm/GeckoProfiler-inl.h"
41 #include "vm/JSScript-inl.h"
42 #include "vm/Probes-inl.h"
43 
44 namespace js {
45 namespace jit {
46 
47 // Given a slot index, returns the offset, in bytes, of that slot from a
48 // JitFrameLayout. Slot distances are uniform across architectures; however,
49 // the distance does depend on the size of the frame header.
50 static inline int32_t OffsetOfFrameSlot(int32_t slot) { return -slot; }
51 
52 static inline uint8_t* AddressOfFrameSlot(JitFrameLayout* fp, int32_t slot) {
53   return (uint8_t*)fp + OffsetOfFrameSlot(slot);
54 }
55 
56 static inline uintptr_t ReadFrameSlot(JitFrameLayout* fp, int32_t slot) {
57   return *(uintptr_t*)AddressOfFrameSlot(fp, slot);
58 }
59 
60 static inline void WriteFrameSlot(JitFrameLayout* fp, int32_t slot,
61                                   uintptr_t value) {
62   *(uintptr_t*)AddressOfFrameSlot(fp, slot) = value;
63 }
64 
65 static inline double ReadFrameDoubleSlot(JitFrameLayout* fp, int32_t slot) {
66   return *(double*)AddressOfFrameSlot(fp, slot);
67 }
68 
69 static inline float ReadFrameFloat32Slot(JitFrameLayout* fp, int32_t slot) {
70   return *(float*)AddressOfFrameSlot(fp, slot);
71 }
72 
73 static inline int32_t ReadFrameInt32Slot(JitFrameLayout* fp, int32_t slot) {
74   return *(int32_t*)AddressOfFrameSlot(fp, slot);
75 }
76 
77 static inline bool ReadFrameBooleanSlot(JitFrameLayout* fp, int32_t slot) {
78   return *(bool*)AddressOfFrameSlot(fp, slot);
79 }
80 
81 static uint32_t NumArgAndLocalSlots(const InlineFrameIterator& frame) {
82   JSScript* script = frame.script();
83   return CountArgSlots(script, frame.maybeCalleeTemplate()) + script->nfixed();
84 }
85 
86 static void CloseLiveIteratorIon(JSContext* cx,
87                                  const InlineFrameIterator& frame,
88                                  const TryNote* tn) {
89   MOZ_ASSERT(tn->kind() == TryNoteKind::ForIn ||
90              tn->kind() == TryNoteKind::Destructuring);
91 
92   bool isDestructuring = tn->kind() == TryNoteKind::Destructuring;
93   MOZ_ASSERT_IF(!isDestructuring, tn->stackDepth > 0);
94   MOZ_ASSERT_IF(isDestructuring, tn->stackDepth > 1);
95 
96   SnapshotIterator si = frame.snapshotIterator();
97 
98   // Skip stack slots until we reach the iterator object on the stack. For
99   // the destructuring case, we also need to get the "done" value.
100   uint32_t stackSlot = tn->stackDepth;
101   uint32_t adjust = isDestructuring ? 2 : 1;
102   uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - adjust;
103 
104   for (unsigned i = 0; i < skipSlots; i++) {
105     si.skip();
106   }
107 
108   MaybeReadFallback recover(cx, cx->activation()->asJit(), &frame.frame(),
109                             MaybeReadFallback::Fallback_DoNothing);
110   Value v = si.maybeRead(recover);
111   MOZ_RELEASE_ASSERT(v.isObject());
112   RootedObject iterObject(cx, &v.toObject());
113 
114   if (isDestructuring) {
115     RootedValue doneValue(cx, si.read());
116     MOZ_RELEASE_ASSERT(!doneValue.isMagic());
117     bool done = ToBoolean(doneValue);
118     // Do not call IteratorClose if the destructuring iterator is already
119     // done.
120     if (done) {
121       return;
122     }
123   }
124 
125   if (cx->isExceptionPending()) {
126     if (tn->kind() == TryNoteKind::ForIn) {
127       CloseIterator(iterObject);
128     } else {
129       IteratorCloseForException(cx, iterObject);
130     }
131   } else {
132     UnwindIteratorForUncatchableException(iterObject);
133   }
134 }
135 
136 class IonTryNoteFilter {
137   uint32_t depth_;
138 
139  public:
140   explicit IonTryNoteFilter(const InlineFrameIterator& frame) {
141     uint32_t base = NumArgAndLocalSlots(frame);
142     SnapshotIterator si = frame.snapshotIterator();
143     MOZ_ASSERT(si.numAllocations() >= base);
144     depth_ = si.numAllocations() - base;
145   }
146 
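  // A try note applies only if its stack depth is covered by the
  // snapshot's allocations.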
147   bool operator()(const TryNote* note) { return note->stackDepth <= depth_; }
148 };
149 
150 class TryNoteIterIon : public TryNoteIter<IonTryNoteFilter> {
151  public:
152   TryNoteIterIon(JSContext* cx, const InlineFrameIterator& frame)
153       : TryNoteIter(cx, frame.script(), frame.pc(), IonTryNoteFilter(frame)) {}
154 };
155 
156 static bool ShouldBailoutForDebugger(JSContext* cx,
157                                      const InlineFrameIterator& frame,
158                                      bool hitBailoutException) {
159   if (hitBailoutException) {
160     MOZ_ASSERT(!cx->isPropagatingForcedReturn());
161     return false;
162   }
163 
164   // Bail out if we're propagating a forced return, even if the realm is no
165   // longer a debuggee.
166   if (cx->isPropagatingForcedReturn()) {
167     return true;
168   }
169 
170   if (!cx->realm()->isDebuggee()) {
171     return false;
172   }
173 
174   // Bail out if there's a catchable exception and we are the debuggee of a
175   // Debugger with a live onExceptionUnwind hook.
176   if (cx->isExceptionPending() &&
177       DebugAPI::hasExceptionUnwindHook(cx->global())) {
178     return true;
179   }
180 
181   // Bail out if a Debugger has observed this frame (e.g., for onPop).
182   JitActivation* act = cx->activation()->asJit();
183   RematerializedFrame* rematFrame =
184       act->lookupRematerializedFrame(frame.frame().fp(), frame.frameNo());
185   return rematFrame && rematFrame->isDebuggee();
186 }
187 
188 static void HandleExceptionIon(JSContext* cx, const InlineFrameIterator& frame,
189                                ResumeFromException* rfe,
190                                bool* hitBailoutException) {
191   if (ShouldBailoutForDebugger(cx, frame, *hitBailoutException)) {
192     // We do the following:
193     //
194     //   1. Bailout to baseline to reconstruct a baseline frame.
195     //   2. Resume immediately into the exception tail afterwards, and
196     //      handle the exception again with the top frame now a baseline
197     //      frame.
198     //
199     // An empty exception info denotes that we're propagating an Ion
200     // exception due to debug mode, which BailoutIonToBaseline needs to
201     // know. This is because we might not be able to fully reconstruct up
202     // to the stack depth at the snapshot, as we could've thrown in the
203     // middle of a call.
204     ExceptionBailoutInfo propagateInfo;
205     if (ExceptionHandlerBailout(cx, frame, rfe, propagateInfo)) {
206       return;
207     }
208     *hitBailoutException = true;
209   }
210 
211   RootedScript script(cx, frame.script());
212 
213   for (TryNoteIterIon tni(cx, frame); !tni.done(); ++tni) {
214     const TryNote* tn = *tni;
215     switch (tn->kind()) {
216       case TryNoteKind::ForIn:
217       case TryNoteKind::Destructuring:
218         CloseLiveIteratorIon(cx, frame, tn);
219         break;
220 
221       case TryNoteKind::Catch:
222         if (cx->isExceptionPending()) {
223           // Ion can compile try-catch, but bailing out to catch
224           // exceptions is slow. Reset the warm-up counter so that if we
225           // catch many exceptions we won't Ion-compile the script.
226           script->resetWarmUpCounterToDelayIonCompilation();
227 
228           if (*hitBailoutException) {
229             break;
230           }
231 
232           // Bailout at the start of the catch block.
233           jsbytecode* catchPC = script->offsetToPC(tn->start + tn->length);
234           ExceptionBailoutInfo excInfo(frame.frameNo(), catchPC,
235                                        tn->stackDepth);
236           if (ExceptionHandlerBailout(cx, frame, rfe, excInfo)) {
237             // Record exception locations to allow scope unwinding in
238             // |FinishBailoutToBaseline|
239             MOZ_ASSERT(cx->isExceptionPending());
240             rfe->bailoutInfo->tryPC =
241                 UnwindEnvironmentToTryPc(frame.script(), tn);
242             rfe->bailoutInfo->faultPC = frame.pc();
243             return;
244           }
245 
246           *hitBailoutException = true;
247           MOZ_ASSERT(cx->isExceptionPending());
248         }
249         break;
250 
251       case TryNoteKind::ForOf:
252       case TryNoteKind::Loop:
253         break;
254 
255       // TryNoteKind::ForOfIterClose is handled internally by the try note
256       // iterator.
257       default:
258         MOZ_CRASH("Unexpected try note");
259     }
260   }
261 }
262 
263 static void OnLeaveBaselineFrame(JSContext* cx, const JSJitFrameIter& frame,
264                                  jsbytecode* pc, ResumeFromException* rfe,
265                                  bool frameOk) {
266   BaselineFrame* baselineFrame = frame.baselineFrame();
267   if (jit::DebugEpilogue(cx, baselineFrame, pc, frameOk)) {
268     rfe->kind = ResumeFromException::RESUME_FORCED_RETURN;
269     rfe->framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
270     rfe->stackPointer = reinterpret_cast<uint8_t*>(baselineFrame);
271   }
272 }
273 
274 static inline void BaselineFrameAndStackPointersFromTryNote(
275     const TryNote* tn, const JSJitFrameIter& frame, uint8_t** framePointer,
276     uint8_t** stackPointer) {
277   JSScript* script = frame.baselineFrame()->script();
278   *framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
279   *stackPointer = *framePointer - BaselineFrame::Size() -
280                   (script->nfixed() + tn->stackDepth) * sizeof(Value);
281 }
282 
283 static void SettleOnTryNote(JSContext* cx, const TryNote* tn,
284                             const JSJitFrameIter& frame, EnvironmentIter& ei,
285                             ResumeFromException* rfe, jsbytecode** pc) {
286   RootedScript script(cx, frame.baselineFrame()->script());
287 
288   // Unwind environment chain (pop block objects).
289   if (cx->isExceptionPending()) {
290     UnwindEnvironment(cx, ei, UnwindEnvironmentToTryPc(script, tn));
291   }
292 
293   // Compute base pointer and stack pointer.
294   BaselineFrameAndStackPointersFromTryNote(tn, frame, &rfe->framePointer,
295                                            &rfe->stackPointer);
296 
297   // Compute the pc.
298   *pc = script->offsetToPC(tn->start + tn->length);
299 }
300 
301 class BaselineTryNoteFilter {
302   const JSJitFrameIter& frame_;
303 
304  public:
305   explicit BaselineTryNoteFilter(const JSJitFrameIter& frame) : frame_(frame) {}
306   bool operator()(const TryNote* note) {
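  // A try note applies only if its stack depth does not exceed the frame's
  // current expression-stack depth.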
307     BaselineFrame* frame = frame_.baselineFrame();
308 
309     uint32_t numValueSlots = frame_.baselineFrameNumValueSlots();
310     MOZ_RELEASE_ASSERT(numValueSlots >= frame->script()->nfixed());
311 
312     uint32_t currDepth = numValueSlots - frame->script()->nfixed();
313     return note->stackDepth <= currDepth;
314   }
315 };
316 
317 class TryNoteIterBaseline : public TryNoteIter<BaselineTryNoteFilter> {
318  public:
319   TryNoteIterBaseline(JSContext* cx, const JSJitFrameIter& frame,
320                       jsbytecode* pc)
321       : TryNoteIter(cx, frame.script(), pc, BaselineTryNoteFilter(frame)) {}
322 };
323 
324 // Close all live iterators on a BaselineFrame due to exception unwinding. The
325 // pc parameter is updated to where the envs have been unwound.
326 static void CloseLiveIteratorsBaselineForUncatchableException(
327     JSContext* cx, const JSJitFrameIter& frame, jsbytecode* pc) {
328   for (TryNoteIterBaseline tni(cx, frame, pc); !tni.done(); ++tni) {
329     const TryNote* tn = *tni;
330     switch (tn->kind()) {
331       case TryNoteKind::ForIn: {
332         uint8_t* framePointer;
333         uint8_t* stackPointer;
334         BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
335                                                  &stackPointer);
336         Value iterValue(*(Value*)stackPointer);
337         RootedObject iterObject(cx, &iterValue.toObject());
338         UnwindIteratorForUncatchableException(iterObject);
339         break;
340       }
341 
342       default:
343         break;
344     }
345   }
346 }
347 
348 static bool ProcessTryNotesBaseline(JSContext* cx, const JSJitFrameIter& frame,
349                                     EnvironmentIter& ei,
350                                     ResumeFromException* rfe, jsbytecode** pc) {
351   MOZ_ASSERT(frame.baselineFrame()->runningInInterpreter(),
352              "Caller must ensure frame is an interpreter frame");
353 
354   RootedScript script(cx, frame.baselineFrame()->script());
355 
356   for (TryNoteIterBaseline tni(cx, frame, *pc); !tni.done(); ++tni) {
357     const TryNote* tn = *tni;
358 
359     MOZ_ASSERT(cx->isExceptionPending());
360     switch (tn->kind()) {
361       case TryNoteKind::Catch: {
362         // If we're closing a generator, we have to skip catch
363         // blocks.
364         if (cx->isClosingGenerator()) {
365           break;
366         }
367 
368         SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
369 
370         // Ion can compile try-catch, but bailing out to catch
371         // exceptions is slow. Reset the warm-up counter so that if we
372         // catch many exceptions we won't Ion-compile the script.
373         script->resetWarmUpCounterToDelayIonCompilation();
374 
375         // Resume at the start of the catch block.
376         const BaselineInterpreter& interp =
377             cx->runtime()->jitRuntime()->baselineInterpreter();
378         frame.baselineFrame()->setInterpreterFields(*pc);
379         rfe->kind = ResumeFromException::RESUME_CATCH;
380         rfe->target = interp.interpretOpAddr().value;
381         return true;
382       }
383 
384       case TryNoteKind::Finally: {
385         SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
386 
387         const BaselineInterpreter& interp =
388             cx->runtime()->jitRuntime()->baselineInterpreter();
389         frame.baselineFrame()->setInterpreterFields(*pc);
390         rfe->kind = ResumeFromException::RESUME_FINALLY;
391         rfe->target = interp.interpretOpAddr().value;
392 
393         // Drop the exception instead of leaking cross compartment data.
394         if (!cx->getPendingException(
395                 MutableHandleValue::fromMarkedLocation(&rfe->exception))) {
396           rfe->exception = UndefinedValue();
397         }
398         cx->clearPendingException();
399         return true;
400       }
401 
402       case TryNoteKind::ForIn: {
403         uint8_t* framePointer;
404         uint8_t* stackPointer;
405         BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
406                                                  &stackPointer);
407         Value iterValue(*reinterpret_cast<Value*>(stackPointer));
408         JSObject* iterObject = &iterValue.toObject();
409         CloseIterator(iterObject);
410         break;
411       }
412 
413       case TryNoteKind::Destructuring: {
414         uint8_t* framePointer;
415         uint8_t* stackPointer;
416         BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
417                                                  &stackPointer);
418         // Note: if this ever changes, also update the
419         // TryNoteKind::Destructuring code in WarpBuilder.cpp!
420         RootedValue doneValue(cx, *(reinterpret_cast<Value*>(stackPointer)));
421         MOZ_RELEASE_ASSERT(!doneValue.isMagic());
422         bool done = ToBoolean(doneValue);
423         if (!done) {
424           Value iterValue(*(reinterpret_cast<Value*>(stackPointer) + 1));
425           RootedObject iterObject(cx, &iterValue.toObject());
426           if (!IteratorCloseForException(cx, iterObject)) {
427             SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
428             return false;
429           }
430         }
431         break;
432       }
433 
434       case TryNoteKind::ForOf:
435       case TryNoteKind::Loop:
436         break;
437 
438       // TryNoteKind::ForOfIterClose is handled internally by the try note
439       // iterator.
440       default:
441         MOZ_CRASH("Invalid try note");
442     }
443   }
444   return true;
445 }
446 
447 static void HandleExceptionBaseline(JSContext* cx, JSJitFrameIter& frame,
448                                     CommonFrameLayout* prevFrame,
449                                     ResumeFromException* rfe) {
450   MOZ_ASSERT(frame.isBaselineJS());
451   MOZ_ASSERT(prevFrame);
452 
453   jsbytecode* pc;
454   frame.baselineScriptAndPc(nullptr, &pc);
455 
456   // Ensure the BaselineFrame is an interpreter frame. This is easy to do and
457   // simplifies the code below and interaction with DebugModeOSR.
458   //
459   // Note that we never return to this frame via the previous frame's return
460   // address. We could set the return address to nullptr to ensure it's never
461   // used, but the profiler expects a non-null return value for its JitCode map
462   // lookup so we have to use an address in the interpreter code instead.
463   if (!frame.baselineFrame()->runningInInterpreter()) {
464     const BaselineInterpreter& interp =
465         cx->runtime()->jitRuntime()->baselineInterpreter();
466     uint8_t* retAddr = interp.codeRaw();
467     BaselineFrame* baselineFrame = frame.baselineFrame();
468 
469     // Suppress profiler sampling while we fix up the frame to ensure the
470     // sampler thread doesn't see an inconsistent state.
471     AutoSuppressProfilerSampling suppressProfilerSampling(cx);
472     baselineFrame->switchFromJitToInterpreterForExceptionHandler(cx, pc);
473     prevFrame->setReturnAddress(retAddr);
474 
475     // Ensure the current iterator's resumePCInCurrentFrame_ isn't used
476     // anywhere.
477     frame.setResumePCInCurrentFrame(nullptr);
478   }
479 
480   bool frameOk = false;
481   RootedScript script(cx, frame.baselineFrame()->script());
482 
483   if (script->hasScriptCounts()) {
484     PCCounts* counts = script->getThrowCounts(pc);
485     // If we failed to allocate, then skip the increment and continue to
486     // handle the exception.
487     if (counts) {
488       counts->numExec()++;
489     }
490   }
491 
492   bool hasTryNotes = !script->trynotes().empty();
493 
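// Control returns here if the debugger's onExceptionUnwind hook clears the
// pending exception, or if closing a destructuring iterator below raises a
// new exception.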
494 again:
495   if (cx->isExceptionPending()) {
496     if (!cx->isClosingGenerator()) {
497       if (!DebugAPI::onExceptionUnwind(cx, frame.baselineFrame())) {
498         if (!cx->isExceptionPending()) {
499           goto again;
500         }
501       }
502       // Ensure that the debugger hasn't returned 'true' while clearing the
503       // exception state.
504       MOZ_ASSERT(cx->isExceptionPending());
505     }
506 
507     if (hasTryNotes) {
508       EnvironmentIter ei(cx, frame.baselineFrame(), pc);
509       if (!ProcessTryNotesBaseline(cx, frame, ei, rfe, &pc)) {
510         goto again;
511       }
512       if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME) {
513         // No need to increment the PCCounts execution count here,
514         // as the interpreter increments any PCCounts if present.
515         MOZ_ASSERT_IF(script->hasScriptCounts(), script->maybeGetPCCounts(pc));
516         return;
517       }
518     }
519 
520     frameOk = HandleClosingGeneratorReturn(cx, frame.baselineFrame(), frameOk);
521   } else {
522     if (hasTryNotes) {
523       CloseLiveIteratorsBaselineForUncatchableException(cx, frame, pc);
524     }
525 
526     // We may be propagating a forced return from a debugger hook function.
527     if (MOZ_UNLIKELY(cx->isPropagatingForcedReturn())) {
528       cx->clearPropagatingForcedReturn();
529       frameOk = true;
530     }
531   }
532 
533   OnLeaveBaselineFrame(cx, frame, pc, rfe, frameOk);
534 }
535 
536 static void* GetLastProfilingFrame(ResumeFromException* rfe) {
537   switch (rfe->kind) {
538     case ResumeFromException::RESUME_ENTRY_FRAME:
539     case ResumeFromException::RESUME_WASM:
540       return nullptr;
541 
542     // The following all return into baseline frames.
543     case ResumeFromException::RESUME_CATCH:
544     case ResumeFromException::RESUME_FINALLY:
545     case ResumeFromException::RESUME_FORCED_RETURN:
546       return rfe->framePointer + BaselineFrame::FramePointerOffset;
547 
548     // When resuming into a bailed-out ion frame, use the bailout info to
549     // find the frame we are resuming into.
550     case ResumeFromException::RESUME_BAILOUT:
551       return rfe->bailoutInfo->incomingStack;
552   }
553 
554   MOZ_CRASH("Invalid ResumeFromException type!");
555   return nullptr;
556 }
557 
558 void HandleExceptionWasm(JSContext* cx, wasm::WasmFrameIter* iter,
559                          ResumeFromException* rfe) {
560   MOZ_ASSERT(cx->activation()->asJit()->hasWasmExitFP());
561   wasm::HandleThrow(cx, *iter, rfe);
562   MOZ_ASSERT(iter->done());
563 }
564 
565 void HandleException(ResumeFromException* rfe) {
566   JSContext* cx = TlsContext.get();
567   TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);
568 
569 #ifdef DEBUG
570   cx->runtime()->jitRuntime()->clearDisallowArbitraryCode();
571 #endif
572 
573   auto resetProfilerFrame = mozilla::MakeScopeExit([=] {
574     if (!cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
575             cx->runtime())) {
576       return;
577     }
578 
579     MOZ_ASSERT(cx->jitActivation == cx->profilingActivation());
580 
581     void* lastProfilingFrame = GetLastProfilingFrame(rfe);
582     cx->jitActivation->setLastProfilingFrame(lastProfilingFrame);
583   });
584 
585   rfe->kind = ResumeFromException::RESUME_ENTRY_FRAME;
586 
587   JitSpew(JitSpew_IonInvalidate, "handling exception");
588 
589   JitActivation* activation = cx->activation()->asJit();
590 
591 #ifdef CHECK_OSIPOINT_REGISTERS
592   if (JitOptions.checkOsiPointRegisters) {
593     activation->setCheckRegs(false);
594   }
595 #endif
596 
597   JitFrameIter iter(cx->activation()->asJit(),
598                     /* mustUnwindActivation = */ true);
599   CommonFrameLayout* prevJitFrame = nullptr;
600   while (!iter.done()) {
601     if (iter.isWasm()) {
602       prevJitFrame = nullptr;
603       HandleExceptionWasm(cx, &iter.asWasm(), rfe);
604       // If a wasm try-catch handler is found, we can immediately jump to it
605       // and quit iterating through the stack.
606       if (rfe->kind == ResumeFromException::RESUME_WASM_CATCH) {
607         return;
608       }
609       if (!iter.done()) {
610         ++iter;
611       }
612       continue;
613     }
614 
615     JSJitFrameIter& frame = iter.asJSJit();
616 
617     // JIT code can enter same-compartment realms, so reset cx->realm to
618     // this frame's realm.
619     if (frame.isScripted()) {
620       cx->setRealmForJitExceptionHandler(iter.realm());
621     }
622 
623     if (frame.isIonJS()) {
624       // Search each inlined frame for live iterator objects, and close
625       // them.
626       InlineFrameIterator frames(cx, &frame);
627 
628       // Invalidation state will be the same for all inlined scripts in the
629       // frame.
630       IonScript* ionScript = nullptr;
631       bool invalidated = frame.checkInvalidation(&ionScript);
632 
633 #ifdef JS_TRACE_LOGGING
634       if (logger && cx->realm()->isDebuggee() && logger->enabled()) {
635         logger->disable(/* force = */ true,
636                         "Forcefully disabled tracelogger, due to "
637                         "throwing an exception with an active Debugger "
638                         "in IonMonkey.");
639       }
640 #endif
641 
642       // If we hit OOM or overrecursion while bailing out, we don't
643       // attempt to bail out a second time for this Ion frame. Just unwind
644       // and continue at the next frame.
645       bool hitBailoutException = false;
646       for (;;) {
647         HandleExceptionIon(cx, frames, rfe, &hitBailoutException);
648 
649         if (rfe->kind == ResumeFromException::RESUME_BAILOUT) {
650           if (invalidated) {
651             ionScript->decrementInvalidationCount(
652                 cx->runtime()->defaultFreeOp());
653           }
654           return;
655         }
656 
657         MOZ_ASSERT(rfe->kind == ResumeFromException::RESUME_ENTRY_FRAME);
658 
659         // When profiling, each frame popped needs a notification that
660         // the function has exited, so invoke the probe that a function
661         // is exiting.
662 
663         JSScript* script = frames.script();
664         probes::ExitScript(cx, script, script->function(),
665                            /* popProfilerFrame = */ false);
666         if (!frames.more()) {
667           TraceLogStopEvent(logger, TraceLogger_IonMonkey);
668           TraceLogStopEvent(logger, TraceLogger_Scripts);
669           break;
670         }
671         ++frames;
672       }
673 
674       // Remove left-over state which might have been needed for bailout.
675       activation->removeIonFrameRecovery(frame.jsFrame());
676       activation->removeRematerializedFrame(frame.fp());
677 
678       // If invalidated, decrement the number of frames remaining on the
679       // stack for the given IonScript.
680       if (invalidated) {
681         ionScript->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
682       }
683 
684     } else if (frame.isBaselineJS()) {
685       HandleExceptionBaseline(cx, frame, prevJitFrame, rfe);
686 
687       if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME &&
688           rfe->kind != ResumeFromException::RESUME_FORCED_RETURN) {
689         return;
690       }
691 
692       TraceLogStopEvent(logger, TraceLogger_Baseline);
693       TraceLogStopEvent(logger, TraceLogger_Scripts);
694 
695       // Unwind profiler pseudo-stack
696       JSScript* script = frame.script();
697       probes::ExitScript(cx, script, script->function(),
698                          /* popProfilerFrame = */ false);
699 
700       if (rfe->kind == ResumeFromException::RESUME_FORCED_RETURN) {
701         return;
702       }
703     }
704 
705     prevJitFrame = frame.current();
706     ++iter;
707   }
708 
709   // Wasm sets its own value of SP in HandleExceptionWasm.
710   if (iter.isJSJit()) {
711     rfe->stackPointer = iter.asJSJit().fp();
712   }
713 }
714 
715 // Turns a JitFrameLayout into an ExitFrameLayout. Note that it has to be a
716 // bare exit frame so it's ignored by TraceJitExitFrame.
717 void EnsureBareExitFrame(JitActivation* act, JitFrameLayout* frame) {
718   ExitFrameLayout* exitFrame = reinterpret_cast<ExitFrameLayout*>(frame);
719 
720   if (act->jsExitFP() == (uint8_t*)frame) {
721     // If we already called this function for the current frame, do
722     // nothing.
723     MOZ_ASSERT(exitFrame->isBareExit());
724     return;
725   }
726 
727 #ifdef DEBUG
728   JSJitFrameIter iter(act);
729   while (!iter.isScripted()) {
730     ++iter;
731   }
732   MOZ_ASSERT(iter.current() == frame, "|frame| must be the top JS frame");
733 
734   MOZ_ASSERT(!!act->jsExitFP());
735   MOZ_ASSERT((uint8_t*)exitFrame->footer() >= act->jsExitFP(),
736              "Must have space for ExitFooterFrame before jsExitFP");
737 #endif
738 
739   act->setJSExitFP((uint8_t*)frame);
740   exitFrame->footer()->setBareExitFrame();
741   MOZ_ASSERT(exitFrame->isBareExit());
742 }
743 
744 JSScript* MaybeForwardedScriptFromCalleeToken(CalleeToken token) {
745   switch (GetCalleeTokenTag(token)) {
746     case CalleeToken_Script:
747       return MaybeForwarded(CalleeTokenToScript(token));
748     case CalleeToken_Function:
749     case CalleeToken_FunctionConstructing: {
750       JSFunction* fun = MaybeForwarded(CalleeTokenToFunction(token));
751       return MaybeForwarded(fun)->nonLazyScript();
752     }
753   }
754   MOZ_CRASH("invalid callee token tag");
755 }
756 
757 CalleeToken TraceCalleeToken(JSTracer* trc, CalleeToken token) {
758   switch (CalleeTokenTag tag = GetCalleeTokenTag(token)) {
759     case CalleeToken_Function:
760     case CalleeToken_FunctionConstructing: {
761       JSFunction* fun = CalleeTokenToFunction(token);
762       TraceRoot(trc, &fun, "jit-callee");
763       return CalleeToToken(fun, tag == CalleeToken_FunctionConstructing);
764     }
765     case CalleeToken_Script: {
766       JSScript* script = CalleeTokenToScript(token);
767       TraceRoot(trc, &script, "jit-script");
768       return CalleeToToken(script);
769     }
770     default:
771       MOZ_CRASH("unknown callee token type");
772   }
773 }
774 
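// Resolve a safepoint slot entry to an address: stack slots live below the
// JitFrameLayout, while argument slots are offsets from argv().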
775 uintptr_t* JitFrameLayout::slotRef(SafepointSlotEntry where) {
776   if (where.stack) {
777     return (uintptr_t*)((uint8_t*)this - where.slot);
778   }
779   return (uintptr_t*)((uint8_t*)argv() + where.slot);
780 }
781 
782 #ifdef JS_NUNBOX32
783 static inline uintptr_t ReadAllocation(const JSJitFrameIter& frame,
784                                        const LAllocation* a) {
785   if (a->isGeneralReg()) {
786     Register reg = a->toGeneralReg()->reg();
787     return frame.machineState().read(reg);
788   }
789   return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));
790 }
791 #endif
792 
793 static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
794                                   JitFrameLayout* layout) {
795   // Trace |this| and any extra actual arguments for an Ion frame. Tracing
796   // of formal arguments is taken care of by the frame's safepoint/snapshot,
797   // except when the script might have lazy arguments or rest, in which case
798   // we trace them as well. We also have to trace formals if we have a
799   // LazyLink frame or an InterpreterStub frame or a special JSJit to wasm
800   // frame (since wasm doesn't use snapshots).
801 
802   if (!CalleeTokenIsFunction(layout->calleeToken())) {
803     return;
804   }
805 
806   size_t nargs = layout->numActualArgs();
807   size_t nformals = 0;
808 
809   JSFunction* fun = CalleeTokenToFunction(layout->calleeToken());
810   if (frame.type() != FrameType::JSJitToWasm &&
811       !frame.isExitFrameLayout<CalledFromJitExitFrameLayout>() &&
812       !fun->nonLazyScript()->mayReadFrameArgsDirectly()) {
813     nformals = fun->nargs();
814   }
815 
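  // new.target, if present, is stored after the larger of the actual and
  // formal argument counts.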
816   size_t newTargetOffset = std::max(nargs, fun->nargs());
817 
818   Value* argv = layout->argv();
819 
820   // Trace |this|.
821   TraceRoot(trc, argv, "ion-thisv");
822 
823   // Trace actual arguments beyond the formals. Note + 1 for thisv.
824   for (size_t i = nformals + 1; i < nargs + 1; i++) {
825     TraceRoot(trc, &argv[i], "ion-argv");
826   }
827 
828   // Always trace the new.target from the frame. It's not in the snapshots.
829   // +1 to pass |this|
830   if (CalleeTokenIsConstructing(layout->calleeToken())) {
831     TraceRoot(trc, &argv[1 + newTargetOffset], "ion-newTarget");
832   }
833 }
834 
835 #ifdef JS_NUNBOX32
836 static inline void WriteAllocation(const JSJitFrameIter& frame,
837                                    const LAllocation* a, uintptr_t value) {
838   if (a->isGeneralReg()) {
839     Register reg = a->toGeneralReg()->reg();
840     frame.machineState().write(reg, value);
841   } else {
842     *frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
843   }
844 }
845 #endif
846 
847 static void TraceIonJSFrame(JSTracer* trc, const JSJitFrameIter& frame) {
848   JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
849 
850   layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
851 
852   IonScript* ionScript = nullptr;
853   if (frame.checkInvalidation(&ionScript)) {
854     // This frame has been invalidated, meaning that its IonScript is no
855     // longer reachable through the callee token (JSFunction/JSScript->ion
856     // is now nullptr or recompiled). Manually trace it here.
857     ionScript->trace(trc);
858   } else {
859     ionScript = frame.ionScriptFromCalleeToken();
860   }
861 
862   TraceThisAndArguments(trc, frame, frame.jsFrame());
863 
864   const SafepointIndex* si =
865       ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
866 
867   SafepointReader safepoint(ionScript, si);
868 
869   // Scan through slots which contain pointers (or on punboxing systems,
870   // actual values).
871   SafepointSlotEntry entry;
872 
873   while (safepoint.getGcSlot(&entry)) {
874     uintptr_t* ref = layout->slotRef(entry);
875     TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(ref),
876                             "ion-gc-slot");
877   }
878 
879   uintptr_t* spill = frame.spillBase();
880   LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
881   LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
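  // Walk the register spill area backwards from the spill base, tracing
  // spilled GC pointers and boxed Values.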
882   for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
883        iter.more(); ++iter) {
884     --spill;
885     if (gcRegs.has(*iter)) {
886       TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill),
887                               "ion-gc-spill");
888     } else if (valueRegs.has(*iter)) {
889       TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
890     }
891   }
892 
893 #ifdef JS_PUNBOX64
894   while (safepoint.getValueSlot(&entry)) {
895     Value* v = (Value*)layout->slotRef(entry);
896     TraceRoot(trc, v, "ion-gc-slot");
897   }
898 #else
899   LAllocation type, payload;
900   while (safepoint.getNunboxSlot(&type, &payload)) {
901     JSValueTag tag = JSValueTag(ReadAllocation(frame, &type));
902     uintptr_t rawPayload = ReadAllocation(frame, &payload);
903 
904     Value v = Value::fromTagAndPayload(tag, rawPayload);
905     TraceRoot(trc, &v, "ion-torn-value");
906 
907     if (v != Value::fromTagAndPayload(tag, rawPayload)) {
908       // GC moved the value, replace the stored payload.
909       rawPayload = v.toNunboxPayload();
910       WriteAllocation(frame, &payload, rawPayload);
911     }
912   }
913 #endif
914 }
915 
916 static void TraceBailoutFrame(JSTracer* trc, const JSJitFrameIter& frame) {
917   JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
918 
919   layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
920 
921   // We have to trace the list of actual arguments, as only formal arguments
922   // are represented in the Snapshot.
923   TraceThisAndArguments(trc, frame, frame.jsFrame());
924 
925   // Under a bailout, we do not have a Safepoint to iterate over only GC things.
926   // Thus we use a SnapshotIterator to trace all the locations which would be
927   // used to reconstruct the Baseline frame.
928   //
929   // Note that at the time where this function is called, we have not yet
930   // started to reconstruct baseline frames.
931 
932   // The vector of recover instructions is already traced as part of the
933   // JitActivation.
934   SnapshotIterator snapIter(frame,
935                             frame.activation()->bailoutData()->machineState());
936 
937   // For each instruction, we read the allocations without evaluating the
938   // recover instruction or reconstructing the frame. We are only interested in
939   // tracing the readable allocations.
940   while (true) {
941     while (snapIter.moreAllocations()) {
942       snapIter.traceAllocation(trc);
943     }
944 
945     if (!snapIter.moreInstructions()) {
946       break;
947     }
948     snapIter.nextInstruction();
949   }
950 }
951 
952 static void UpdateIonJSFrameForMinorGC(JSRuntime* rt,
953                                        const JSJitFrameIter& frame) {
954   // Minor GCs may move slots/elements allocated in the nursery. Update
955   // any slots/elements pointers stored in this frame.
956 
957   JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
958 
959   IonScript* ionScript = nullptr;
960   if (frame.checkInvalidation(&ionScript)) {
961     // This frame has been invalidated, meaning that its IonScript is no
962     // longer reachable through the callee token (JSFunction/JSScript->ion
963     // is now nullptr or recompiled).
964   } else {
965     ionScript = frame.ionScriptFromCalleeToken();
966   }
967 
968   Nursery& nursery = rt->gc.nursery();
969 
970   const SafepointIndex* si =
971       ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
972   SafepointReader safepoint(ionScript, si);
973 
974   LiveGeneralRegisterSet slotsRegs = safepoint.slotsOrElementsSpills();
975   uintptr_t* spill = frame.spillBase();
976   for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
977        iter.more(); ++iter) {
978     --spill;
979     if (slotsRegs.has(*iter)) {
980       nursery.forwardBufferPointer(spill);
981     }
982   }
983 
984   // Skip to the right place in the safepoint
985   SafepointSlotEntry entry;
986   while (safepoint.getGcSlot(&entry)) {
987   }
988 
989 #ifdef JS_PUNBOX64
990   while (safepoint.getValueSlot(&entry)) {
991   }
992 #else
993   LAllocation type, payload;
994   while (safepoint.getNunboxSlot(&type, &payload)) {
995   }
996 #endif
997 
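  // Forward any nursery-allocated slots/elements pointers stored in frame
  // slots.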
998   while (safepoint.getSlotsOrElementsSlot(&entry)) {
999     nursery.forwardBufferPointer(layout->slotRef(entry));
1000   }
1001 }
1002 
1003 static void TraceBaselineStubFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1004   // Trace the ICStub pointer stored in the stub frame. This is necessary
1005   // so that we don't destroy the stub code after unlinking the stub.
1006 
1007   MOZ_ASSERT(frame.type() == FrameType::BaselineStub);
1008   JitStubFrameLayout* layout = (JitStubFrameLayout*)frame.fp();
1009 
1010   if (ICStub* stub = layout->maybeStubPtr()) {
1011     if (stub->isFallback()) {
1012       // Fallback stubs use runtime-wide trampoline code we don't need to trace.
1013       MOZ_ASSERT(stub->usesTrampolineCode());
1014     } else {
1015       MOZ_ASSERT(stub->toCacheIRStub()->makesGCCalls());
1016       stub->toCacheIRStub()->trace(trc);
1017     }
1018   }
1019 }
1020 
1021 static void TraceIonICCallFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1022   MOZ_ASSERT(frame.type() == FrameType::IonICCall);
1023   IonICCallFrameLayout* layout = (IonICCallFrameLayout*)frame.fp();
1024   TraceRoot(trc, layout->stubCode(), "ion-ic-call-code");
1025 }
1026 
1027 #if defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32)
1028 uint8_t* alignDoubleSpill(uint8_t* pointer) {
1029   uintptr_t address = reinterpret_cast<uintptr_t>(pointer);
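  // Round the address down to the ABI stack alignment.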
1030   address &= ~(uintptr_t(ABIStackAlignment) - 1);
1031   return reinterpret_cast<uint8_t*>(address);
1032 }
1033 #endif
1034 
1035 #ifdef JS_CODEGEN_MIPS32
1036 static void TraceJitExitFrameCopiedArguments(JSTracer* trc,
1037                                              const VMFunctionData* f,
1038                                              ExitFooterFrame* footer) {
1039   uint8_t* doubleArgs = footer->alignedForABI();
1040   if (f->outParam == Type_Handle) {
1041     doubleArgs -= sizeof(Value);
1042   }
1043   doubleArgs -= f->doubleByRefArgs() * sizeof(double);
1044 
1045   for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
1046     if (f->argProperties(explicitArg) == VMFunctionData::DoubleByRef) {
1047       // Arguments with double size can only have RootValue type.
1048       if (f->argRootType(explicitArg) == VMFunctionData::RootValue) {
1049         TraceRoot(trc, reinterpret_cast<Value*>(doubleArgs), "ion-vm-args");
1050       } else {
1051         MOZ_ASSERT(f->argRootType(explicitArg) == VMFunctionData::RootNone);
1052       }
1053       doubleArgs += sizeof(double);
1054     }
1055   }
1056 }
1057 #else
1058 static void TraceJitExitFrameCopiedArguments(JSTracer* trc,
1059                                              const VMFunctionData* f,
1060                                              ExitFooterFrame* footer) {
1061   // This is a no-op on other platforms.
1062 }
1063 #endif
1064 
1065 static void TraceJitExitFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1066   ExitFooterFrame* footer = frame.exitFrame()->footer();
1067 
1068   // This corresponds to the case where we have built a fake exit frame that
1069   // handles a native function call. We need to trace the argument
1070   // vector of the function call, and also new.target if it was a constructing
1071   // call.
1072   if (frame.isExitFrameLayout<NativeExitFrameLayout>()) {
1073     NativeExitFrameLayout* native =
1074         frame.exitFrame()->as<NativeExitFrameLayout>();
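    // The vp array holds the callee and |this| in addition to the actual
    // arguments, hence the + 2.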
1075     size_t len = native->argc() + 2;
1076     Value* vp = native->vp();
1077     TraceRootRange(trc, len, vp, "ion-native-args");
1078     if (frame.isExitFrameLayout<ConstructNativeExitFrameLayout>()) {
1079       TraceRoot(trc, vp + len, "ion-native-new-target");
1080     }
1081     return;
1082   }
1083 
1084   if (frame.isExitFrameLayout<IonOOLNativeExitFrameLayout>()) {
1085     IonOOLNativeExitFrameLayout* oolnative =
1086         frame.exitFrame()->as<IonOOLNativeExitFrameLayout>();
1087     TraceRoot(trc, oolnative->stubCode(), "ion-ool-native-code");
1088     TraceRoot(trc, oolnative->vp(), "iol-ool-native-vp");
1089     size_t len = oolnative->argc() + 1;
1090     TraceRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
1091     return;
1092   }
1093 
1094   if (frame.isExitFrameLayout<IonOOLProxyExitFrameLayout>()) {
1095     IonOOLProxyExitFrameLayout* oolproxy =
1096         frame.exitFrame()->as<IonOOLProxyExitFrameLayout>();
1097     TraceRoot(trc, oolproxy->stubCode(), "ion-ool-proxy-code");
1098     TraceRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
1099     TraceRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
1100     TraceRoot(trc, oolproxy->proxy(), "ion-ool-proxy-proxy");
1101     return;
1102   }
1103 
1104   if (frame.isExitFrameLayout<IonDOMExitFrameLayout>()) {
1105     IonDOMExitFrameLayout* dom = frame.exitFrame()->as<IonDOMExitFrameLayout>();
1106     TraceRoot(trc, dom->thisObjAddress(), "ion-dom-args");
1107     if (dom->isMethodFrame()) {
1108       IonDOMMethodExitFrameLayout* method =
1109           reinterpret_cast<IonDOMMethodExitFrameLayout*>(dom);
1110       size_t len = method->argc() + 2;
1111       Value* vp = method->vp();
1112       TraceRootRange(trc, len, vp, "ion-dom-args");
1113     } else {
1114       TraceRoot(trc, dom->vp(), "ion-dom-args");
1115     }
1116     return;
1117   }
1118 
1119   if (frame.isExitFrameLayout<CalledFromJitExitFrameLayout>()) {
1120     auto* layout = frame.exitFrame()->as<CalledFromJitExitFrameLayout>();
1121     JitFrameLayout* jsLayout = layout->jsFrame();
1122     jsLayout->replaceCalleeToken(
1123         TraceCalleeToken(trc, jsLayout->calleeToken()));
1124     TraceThisAndArguments(trc, frame, jsLayout);
1125     return;
1126   }
1127 
1128   if (frame.isExitFrameLayout<DirectWasmJitCallFrameLayout>()) {
1129     // Nothing needs to be traced here at the moment -- the arguments to the
1130     // callee are traced by the callee, and the inlined caller does not push
1131     // anything else.
1132     return;
1133   }
1134 
1135   if (frame.isBareExit()) {
1136     // Nothing to trace. Fake exit frame pushed for VM functions with
1137     // nothing to trace on the stack.
1138     return;
1139   }
1140 
1141   MOZ_ASSERT(frame.exitFrame()->isWrapperExit());
1142 
1143   const VMFunctionData* f = footer->function();
1144   MOZ_ASSERT(f);
1145 
1146   // Trace arguments of the VM wrapper.
1147   uint8_t* argBase = frame.exitFrame()->argBase();
1148   for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
1149     switch (f->argRootType(explicitArg)) {
1150       case VMFunctionData::RootNone:
1151         break;
1152       case VMFunctionData::RootObject: {
1153         // Sometimes we can bake in HandleObjects to nullptr.
1154         JSObject** pobj = reinterpret_cast<JSObject**>(argBase);
1155         if (*pobj) {
1156           TraceRoot(trc, pobj, "ion-vm-args");
1157         }
1158         break;
1159       }
1160       case VMFunctionData::RootString:
1161         TraceRoot(trc, reinterpret_cast<JSString**>(argBase), "ion-vm-args");
1162         break;
1163       case VMFunctionData::RootValue:
1164         TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
1165         break;
1166       case VMFunctionData::RootId:
1167         TraceRoot(trc, reinterpret_cast<jsid*>(argBase), "ion-vm-args");
1168         break;
1169       case VMFunctionData::RootCell:
1170         TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase),
1171                                 "ion-vm-args");
1172         break;
1173       case VMFunctionData::RootBigInt:
1174         TraceRoot(trc, reinterpret_cast<JS::BigInt**>(argBase), "ion-vm-args");
1175         break;
1176     }
1177 
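    // Advance argBase past this argument: one word for word-sized arguments,
    // two words for double-sized ones.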
1178     switch (f->argProperties(explicitArg)) {
1179       case VMFunctionData::WordByValue:
1180       case VMFunctionData::WordByRef:
1181         argBase += sizeof(void*);
1182         break;
1183       case VMFunctionData::DoubleByValue:
1184       case VMFunctionData::DoubleByRef:
1185         argBase += 2 * sizeof(void*);
1186         break;
1187     }
1188   }
1189 
1190   if (f->outParam == Type_Handle) {
1191     switch (f->outParamRootType) {
1192       case VMFunctionData::RootNone:
1193         MOZ_CRASH("Handle outparam must have root type");
1194       case VMFunctionData::RootObject:
1195         TraceRoot(trc, footer->outParam<JSObject*>(), "ion-vm-out");
1196         break;
1197       case VMFunctionData::RootString:
1198         TraceRoot(trc, footer->outParam<JSString*>(), "ion-vm-out");
1199         break;
1200       case VMFunctionData::RootValue:
1201         TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
1202         break;
1203       case VMFunctionData::RootId:
1204         TraceRoot(trc, footer->outParam<jsid>(), "ion-vm-outvp");
1205         break;
1206       case VMFunctionData::RootCell:
1207         TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(),
1208                                 "ion-vm-out");
1209         break;
1210       case VMFunctionData::RootBigInt:
1211         TraceRoot(trc, footer->outParam<JS::BigInt*>(), "ion-vm-out");
1212         break;
1213     }
1214   }
1215 
1216   TraceJitExitFrameCopiedArguments(trc, f, footer);
1217 }
1218 
1219 static void TraceRectifierFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1220   // Trace thisv.
1221   //
1222   // Baseline JIT code generated as part of the ICCall_Fallback stub may use
1223   // it if we're calling a constructor that returns a primitive value.
1224   RectifierFrameLayout* layout = (RectifierFrameLayout*)frame.fp();
1225   TraceRoot(trc, &layout->argv()[0], "ion-thisv");
1226 }
1227 
1228 static void TraceJSJitToWasmFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1229   // This is doing a subset of TraceIonJSFrame, since the callee doesn't
1230   // have a script.
1231   JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
1232   layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
1233   TraceThisAndArguments(trc, frame, layout);
1234 }
1235 
1236 static void TraceJitActivation(JSTracer* trc, JitActivation* activation) {
1237 #ifdef CHECK_OSIPOINT_REGISTERS
1238   if (JitOptions.checkOsiPointRegisters) {
1239     // GC can modify spilled registers, breaking our register checks.
1240     // To handle this, we disable these checks for the current VM call
1241     // when a GC happens.
1242     activation->setCheckRegs(false);
1243   }
1244 #endif
1245 
1246   activation->traceRematerializedFrames(trc);
1247   activation->traceIonRecovery(trc);
1248 
1249   // This is used for sanity checking continuity of the sequence of wasm stack
1250   // maps as we unwind.  It has no functional purpose.
1251   uintptr_t highestByteVisitedInPrevWasmFrame = 0;
1252 
1253   for (JitFrameIter frames(activation); !frames.done(); ++frames) {
1254     if (frames.isJSJit()) {
1255       const JSJitFrameIter& jitFrame = frames.asJSJit();
1256       switch (jitFrame.type()) {
1257         case FrameType::Exit:
1258           TraceJitExitFrame(trc, jitFrame);
1259           break;
1260         case FrameType::BaselineJS:
1261           jitFrame.baselineFrame()->trace(trc, jitFrame);
1262           break;
1263         case FrameType::IonJS:
1264           TraceIonJSFrame(trc, jitFrame);
1265           break;
1266         case FrameType::BaselineStub:
1267           TraceBaselineStubFrame(trc, jitFrame);
1268           break;
1269         case FrameType::Bailout:
1270           TraceBailoutFrame(trc, jitFrame);
1271           break;
1272         case FrameType::Rectifier:
1273           TraceRectifierFrame(trc, jitFrame);
1274           break;
1275         case FrameType::IonICCall:
1276           TraceIonICCallFrame(trc, jitFrame);
1277           break;
1278         case FrameType::WasmToJSJit:
1279           // Ignore: this is a special marker used to let the
1280           // JitFrameIter know the frame above is a wasm frame, handled
1281           // in the next iteration.
1282           break;
1283         case FrameType::JSJitToWasm:
1284           TraceJSJitToWasmFrame(trc, jitFrame);
1285           break;
1286         default:
1287           MOZ_CRASH("unexpected frame type");
1288       }
1289       highestByteVisitedInPrevWasmFrame = 0; /* "unknown" */
1290     } else {
1291       MOZ_ASSERT(frames.isWasm());
1292       uint8_t* nextPC = frames.resumePCinCurrentFrame();
1293       MOZ_ASSERT(nextPC != 0);
1294       wasm::WasmFrameIter& wasmFrameIter = frames.asWasm();
1295       wasm::Instance* instance = wasmFrameIter.instance();
1296       instance->trace(trc);
1297       highestByteVisitedInPrevWasmFrame = instance->traceFrame(
1298           trc, wasmFrameIter, nextPC, highestByteVisitedInPrevWasmFrame);
1299     }
1300   }
1301 }
1302 
1303 void TraceJitActivations(JSContext* cx, JSTracer* trc) {
1304   for (JitActivationIterator activations(cx); !activations.done();
1305        ++activations) {
1306     TraceJitActivation(trc, activations->asJit());
1307   }
1308 }
1309 
1310 void UpdateJitActivationsForMinorGC(JSRuntime* rt) {
1311   MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
1312   JSContext* cx = rt->mainContextFromOwnThread();
1313   for (JitActivationIterator activations(cx); !activations.done();
1314        ++activations) {
1315     for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter) {
1316       if (iter.frame().type() == FrameType::IonJS) {
1317         UpdateIonJSFrameForMinorGC(rt, iter.frame());
1318       }
1319     }
1320   }
1321 }
1322 
1323 JSScript* GetTopJitJSScript(JSContext* cx) {
1324   JSJitFrameIter frame(cx->activation()->asJit());
1325   MOZ_ASSERT(frame.type() == FrameType::Exit);
1326   ++frame;
1327 
1328   if (frame.isBaselineStub()) {
1329     ++frame;
1330     MOZ_ASSERT(frame.isBaselineJS());
1331   }
1332 
1333   MOZ_ASSERT(frame.isScripted());
1334   return frame.script();
1335 }
1336 
1337 void GetPcScript(JSContext* cx, JSScript** scriptRes, jsbytecode** pcRes) {
1338   JitSpew(JitSpew_IonSnapshots, "Recover PC & Script from the last frame.");
1339 
1340   // Recover the return address so that we can look it up in the
1341   // PcScriptCache, as script/pc computation is expensive.
1342   JitActivationIterator actIter(cx);
1343   OnlyJSJitFrameIter it(actIter);
1344   uint8_t* retAddr;
1345   if (it.frame().isExitFrame()) {
1346     ++it;
1347 
1348     // Skip rectifier frames.
1349     if (it.frame().isRectifier()) {
1350       ++it;
1351       MOZ_ASSERT(it.frame().isBaselineStub() || it.frame().isBaselineJS() ||
1352                  it.frame().isIonJS());
1353     }
1354 
1355     // Skip Baseline/Ion stub and IC call frames.
1356     if (it.frame().isBaselineStub()) {
1357       ++it;
1358       MOZ_ASSERT(it.frame().isBaselineJS());
1359     } else if (it.frame().isIonICCall()) {
1360       ++it;
1361       MOZ_ASSERT(it.frame().isIonJS());
1362     }
1363 
1364     MOZ_ASSERT(it.frame().isBaselineJS() || it.frame().isIonJS());
1365 
1366     // Don't use the return address and the cache if the BaselineFrame is
1367     // running in the Baseline Interpreter. In this case the bytecode pc is
1368     // cheap to get, so we won't benefit from the cache, and the return address
1369     // does not map to a single bytecode pc.
1370     if (it.frame().isBaselineJS() &&
1371         it.frame().baselineFrame()->runningInInterpreter()) {
1372       it.frame().baselineScriptAndPc(scriptRes, pcRes);
1373       return;
1374     }
1375 
1376     retAddr = it.frame().resumePCinCurrentFrame();
1377   } else {
1378     MOZ_ASSERT(it.frame().isBailoutJS());
1379     retAddr = it.frame().returnAddress();
1380   }
1381 
1382   MOZ_ASSERT(retAddr);
1383 
1384   uint32_t hash = PcScriptCache::Hash(retAddr);
1385 
1386   // Lazily initialize the cache. The allocation may safely fail and will not
1387   // GC.
1388   if (MOZ_UNLIKELY(cx->ionPcScriptCache == nullptr)) {
1389     cx->ionPcScriptCache =
1390         MakeUnique<PcScriptCache>(cx->runtime()->gc.gcNumber());
1391   }
1392 
1393   if (cx->ionPcScriptCache.ref() &&
1394       cx->ionPcScriptCache->get(cx->runtime(), hash, retAddr, scriptRes,
1395                                 pcRes)) {
1396     return;
1397   }
1398 
1399   // Lookup failed: undertake expensive process to determine script and pc.
1400   if (it.frame().isIonJS() || it.frame().isBailoutJS()) {
1401     InlineFrameIterator ifi(cx, &it.frame());
1402     *scriptRes = ifi.script();
1403     *pcRes = ifi.pc();
1404   } else {
1405     MOZ_ASSERT(it.frame().isBaselineJS());
1406     it.frame().baselineScriptAndPc(scriptRes, pcRes);
1407   }
1408 
1409   // Add entry to cache.
1410   if (cx->ionPcScriptCache.ref()) {
1411     cx->ionPcScriptCache->add(hash, retAddr, *pcRes, *scriptRes);
1412   }
1413 }
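
// Illustrative usage sketch (not part of this file): VM code that needs the
// innermost script/pc pair, for example to key a cache, can simply call
// GetPcScript; the PcScriptCache lookup above keeps repeated calls cheap.
//
//   JSScript* script;
//   jsbytecode* pc;
//   jit::GetPcScript(cx, &script, &pc);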
1414 
1415 RInstructionResults::RInstructionResults(JitFrameLayout* fp)
1416     : results_(nullptr), fp_(fp), initialized_(false) {}
1417 
1418 RInstructionResults::RInstructionResults(RInstructionResults&& src)
1419     : results_(std::move(src.results_)),
1420       fp_(src.fp_),
1421       initialized_(src.initialized_) {
1422   src.initialized_ = false;
1423 }
1424 
1425 RInstructionResults& RInstructionResults::operator=(RInstructionResults&& rhs) {
1426   MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
1427   this->~RInstructionResults();
1428   new (this) RInstructionResults(std::move(rhs));
1429   return *this;
1430 }
1431 
1432 RInstructionResults::~RInstructionResults() {
1433   // results_ is freed by the UniquePtr.
1434 }
1435 
1436 bool RInstructionResults::init(JSContext* cx, uint32_t numResults) {
1437   if (numResults) {
1438     results_ = cx->make_unique<Values>();
1439     if (!results_) {
1440       return false;
1441     }
1442     if (!results_->growBy(numResults)) {
1443       ReportOutOfMemory(cx);
1444       return false;
1445     }
1446 
1447     Value guard = MagicValue(JS_ION_BAILOUT);
1448     for (size_t i = 0; i < numResults; i++) {
1449       (*results_)[i].init(guard);
1450     }
1451   }
1452 
1453   initialized_ = true;
1454   return true;
1455 }
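
// Sketch of the intended lifecycle (assuming three recover instructions):
// after init(cx, 3), results_[0..2] each hold MagicValue(JS_ION_BAILOUT);
// storeInstructionResult() then overwrites them one by one as the recover
// instructions are evaluated, and the magic guard lets it assert that each
// slot is written at most once.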
1456 
1457 bool RInstructionResults::isInitialized() const { return initialized_; }
1458 
1459 size_t RInstructionResults::length() const { return results_->length(); }
1460 
1461 JitFrameLayout* RInstructionResults::frame() const {
1462   MOZ_ASSERT(fp_);
1463   return fp_;
1464 }
1465 
1466 HeapPtr<Value>& RInstructionResults::operator[](size_t index) {
1467   return (*results_)[index];
1468 }
1469 
1470 void RInstructionResults::trace(JSTracer* trc) {
1471   // Note: The vector necessarily exists; otherwise this object would not have
1472   // been stored on the activation from which this trace function is called.
1473   TraceRange(trc, results_->length(), results_->begin(), "ion-recover-results");
1474 }
1475 
1476 SnapshotIterator::SnapshotIterator(const JSJitFrameIter& iter,
1477                                    const MachineState* machineState)
1478     : snapshot_(iter.ionScript()->snapshots(), iter.snapshotOffset(),
1479                 iter.ionScript()->snapshotsRVATableSize(),
1480                 iter.ionScript()->snapshotsListSize()),
1481       recover_(snapshot_, iter.ionScript()->recovers(),
1482                iter.ionScript()->recoversSize()),
1483       fp_(iter.jsFrame()),
1484       machine_(machineState),
1485       ionScript_(iter.ionScript()),
1486       instructionResults_(nullptr) {}
1487 
1488 SnapshotIterator::SnapshotIterator()
1489     : snapshot_(nullptr, 0, 0, 0),
1490       recover_(snapshot_, nullptr, 0),
1491       fp_(nullptr),
1492       machine_(nullptr),
1493       ionScript_(nullptr),
1494       instructionResults_(nullptr) {}
1495 
1496 uintptr_t SnapshotIterator::fromStack(int32_t offset) const {
1497   return ReadFrameSlot(fp_, offset);
1498 }
1499 
1500 static Value FromObjectPayload(uintptr_t payload) {
1501   MOZ_ASSERT(payload != 0);
1502   return ObjectValue(*reinterpret_cast<JSObject*>(payload));
1503 }
1504 
1505 static Value FromStringPayload(uintptr_t payload) {
1506   return StringValue(reinterpret_cast<JSString*>(payload));
1507 }
1508 
1509 static Value FromSymbolPayload(uintptr_t payload) {
1510   return SymbolValue(reinterpret_cast<JS::Symbol*>(payload));
1511 }
1512 
1513 static Value FromBigIntPayload(uintptr_t payload) {
1514   return BigIntValue(reinterpret_cast<JS::BigInt*>(payload));
1515 }
1516 
1517 static Value FromTypedPayload(JSValueType type, uintptr_t payload) {
1518   switch (type) {
1519     case JSVAL_TYPE_INT32:
1520       return Int32Value(payload);
1521     case JSVAL_TYPE_BOOLEAN:
1522       return BooleanValue(!!payload);
1523     case JSVAL_TYPE_STRING:
1524       return FromStringPayload(payload);
1525     case JSVAL_TYPE_SYMBOL:
1526       return FromSymbolPayload(payload);
1527     case JSVAL_TYPE_BIGINT:
1528       return FromBigIntPayload(payload);
1529     case JSVAL_TYPE_OBJECT:
1530       return FromObjectPayload(payload);
1531     default:
1532       MOZ_CRASH("unexpected type - needs payload");
1533   }
1534 }
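
// For example, FromTypedPayload(JSVAL_TYPE_BOOLEAN, 1) yields
// BooleanValue(true) and FromTypedPayload(JSVAL_TYPE_INT32, uintptr_t(42))
// yields Int32Value(42).  Doubles never reach this helper; they are handled
// by the DOUBLE_REG and TYPED_STACK double cases in allocationValue() below.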
1535 
1536 bool SnapshotIterator::allocationReadable(const RValueAllocation& alloc,
1537                                           ReadMethod rm) {
1538   // If we have to recover stores, and if we are not interested in the
1539   // default value of the instruction, then we have to check if the recover
1540   // instruction results are available.
1541   if (alloc.needSideEffect() && !(rm & RM_AlwaysDefault)) {
1542     if (!hasInstructionResults()) {
1543       return false;
1544     }
1545   }
1546 
1547   switch (alloc.mode()) {
1548     case RValueAllocation::DOUBLE_REG:
1549       return hasRegister(alloc.fpuReg());
1550 
1551     case RValueAllocation::TYPED_REG:
1552       return hasRegister(alloc.reg2());
1553 
1554 #if defined(JS_NUNBOX32)
1555     case RValueAllocation::UNTYPED_REG_REG:
1556       return hasRegister(alloc.reg()) && hasRegister(alloc.reg2());
1557     case RValueAllocation::UNTYPED_REG_STACK:
1558       return hasRegister(alloc.reg()) && hasStack(alloc.stackOffset2());
1559     case RValueAllocation::UNTYPED_STACK_REG:
1560       return hasStack(alloc.stackOffset()) && hasRegister(alloc.reg2());
1561     case RValueAllocation::UNTYPED_STACK_STACK:
1562       return hasStack(alloc.stackOffset()) && hasStack(alloc.stackOffset2());
1563 #elif defined(JS_PUNBOX64)
1564     case RValueAllocation::UNTYPED_REG:
1565       return hasRegister(alloc.reg());
1566     case RValueAllocation::UNTYPED_STACK:
1567       return hasStack(alloc.stackOffset());
1568 #endif
1569 
1570     case RValueAllocation::RECOVER_INSTRUCTION:
1571       return hasInstructionResult(alloc.index());
1572     case RValueAllocation::RI_WITH_DEFAULT_CST:
1573       return rm & RM_AlwaysDefault || hasInstructionResult(alloc.index());
1574 
1575     default:
1576       return true;
1577   }
1578 }
1579 
1580 Value SnapshotIterator::allocationValue(const RValueAllocation& alloc,
1581                                         ReadMethod rm) {
1582   switch (alloc.mode()) {
1583     case RValueAllocation::CONSTANT:
1584       return ionScript_->getConstant(alloc.index());
1585 
1586     case RValueAllocation::CST_UNDEFINED:
1587       return UndefinedValue();
1588 
1589     case RValueAllocation::CST_NULL:
1590       return NullValue();
1591 
1592     case RValueAllocation::DOUBLE_REG:
1593       return DoubleValue(fromRegister(alloc.fpuReg()));
1594 
1595     case RValueAllocation::ANY_FLOAT_REG: {
1596       union {
1597         double d;
1598         float f;
1599       } pun;
1600       MOZ_ASSERT(alloc.fpuReg().isSingle());
1601       pun.d = fromRegister(alloc.fpuReg());
1602       // The register contains the encoding of a float32. We just read
1603       // the bits without making any conversion.
1604       return Float32Value(pun.f);
1605     }
1606 
1607     case RValueAllocation::ANY_FLOAT_STACK:
1608       return Float32Value(ReadFrameFloat32Slot(fp_, alloc.stackOffset()));
1609 
1610     case RValueAllocation::TYPED_REG:
1611       return FromTypedPayload(alloc.knownType(), fromRegister(alloc.reg2()));
1612 
1613     case RValueAllocation::TYPED_STACK: {
1614       switch (alloc.knownType()) {
1615         case JSVAL_TYPE_DOUBLE:
1616           return DoubleValue(ReadFrameDoubleSlot(fp_, alloc.stackOffset2()));
1617         case JSVAL_TYPE_INT32:
1618           return Int32Value(ReadFrameInt32Slot(fp_, alloc.stackOffset2()));
1619         case JSVAL_TYPE_BOOLEAN:
1620           return BooleanValue(ReadFrameBooleanSlot(fp_, alloc.stackOffset2()));
1621         case JSVAL_TYPE_STRING:
1622           return FromStringPayload(fromStack(alloc.stackOffset2()));
1623         case JSVAL_TYPE_SYMBOL:
1624           return FromSymbolPayload(fromStack(alloc.stackOffset2()));
1625         case JSVAL_TYPE_BIGINT:
1626           return FromBigIntPayload(fromStack(alloc.stackOffset2()));
1627         case JSVAL_TYPE_OBJECT:
1628           return FromObjectPayload(fromStack(alloc.stackOffset2()));
1629         default:
1630           MOZ_CRASH("Unexpected type");
1631       }
1632     }
1633 
1634 #if defined(JS_NUNBOX32)
1635     case RValueAllocation::UNTYPED_REG_REG: {
1636       return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
1637                                       fromRegister(alloc.reg2()));
1638     }
1639 
1640     case RValueAllocation::UNTYPED_REG_STACK: {
1641       return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
1642                                       fromStack(alloc.stackOffset2()));
1643     }
1644 
1645     case RValueAllocation::UNTYPED_STACK_REG: {
1646       return Value::fromTagAndPayload(
1647           JSValueTag(fromStack(alloc.stackOffset())),
1648           fromRegister(alloc.reg2()));
1649     }
1650 
1651     case RValueAllocation::UNTYPED_STACK_STACK: {
1652       return Value::fromTagAndPayload(
1653           JSValueTag(fromStack(alloc.stackOffset())),
1654           fromStack(alloc.stackOffset2()));
1655     }
1656 #elif defined(JS_PUNBOX64)
1657     case RValueAllocation::UNTYPED_REG: {
1658       return Value::fromRawBits(fromRegister(alloc.reg()));
1659     }
1660 
1661     case RValueAllocation::UNTYPED_STACK: {
1662       return Value::fromRawBits(fromStack(alloc.stackOffset()));
1663     }
1664 #endif
1665 
1666     case RValueAllocation::RECOVER_INSTRUCTION:
1667       return fromInstructionResult(alloc.index());
1668 
1669     case RValueAllocation::RI_WITH_DEFAULT_CST:
1670       if (rm & RM_Normal && hasInstructionResult(alloc.index())) {
1671         return fromInstructionResult(alloc.index());
1672       }
1673       MOZ_ASSERT(rm & RM_AlwaysDefault);
1674       return ionScript_->getConstant(alloc.index2());
1675 
1676     default:
1677       MOZ_CRASH("huh?");
1678   }
1679 }
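
// In other words, on 64-bit (JS_PUNBOX64) targets an untyped Value is rebuilt
// from a single boxed 64-bit word, while on 32-bit (JS_NUNBOX32) targets its
// tag and payload live in two separate locations and are reassembled with
// Value::fromTagAndPayload().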
1680 
1681 const FloatRegisters::RegisterContent* SnapshotIterator::floatAllocationPointer(
1682     const RValueAllocation& alloc) const {
1683   switch (alloc.mode()) {
1684     case RValueAllocation::ANY_FLOAT_REG:
1685       return machine_->address(alloc.fpuReg());
1686 
1687     case RValueAllocation::ANY_FLOAT_STACK:
1688       return (FloatRegisters::RegisterContent*)AddressOfFrameSlot(
1689           fp_, alloc.stackOffset());
1690 
1691     default:
1692       MOZ_CRASH("Not a float allocation.");
1693   }
1694 }
1695 
1696 Value SnapshotIterator::maybeRead(const RValueAllocation& a,
1697                                   MaybeReadFallback& fallback) {
1698   if (allocationReadable(a)) {
1699     return allocationValue(a);
1700   }
1701 
1702   if (fallback.canRecoverResults()) {
1703     // Code paths which are calling maybeRead are not always capable of
1704     // returning an error code, as these code paths used to be infallible.
1705     AutoEnterOOMUnsafeRegion oomUnsafe;
1706     if (!initInstructionResults(fallback)) {
1707       oomUnsafe.crash("js::jit::SnapshotIterator::maybeRead");
1708     }
1709 
1710     if (allocationReadable(a)) {
1711       return allocationValue(a);
1712     }
1713 
1714     MOZ_ASSERT_UNREACHABLE("All allocations should be readable.");
1715   }
1716 
1717   return fallback.unreadablePlaceholder();
1718 }
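
// Illustrative caller pattern (mirroring dump() below): a caller that cannot
// recover results passes a placeholder fallback and receives that placeholder
// for any unreadable allocation.
//
//   MaybeReadFallback fallback(UndefinedValue());
//   Value v = si.maybeRead(fallback);  // UndefinedValue() if unreadable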
1719 
1720 void SnapshotIterator::writeAllocationValuePayload(
1721     const RValueAllocation& alloc, const Value& v) {
1722   MOZ_ASSERT(v.isGCThing());
1723 
1724   switch (alloc.mode()) {
1725     case RValueAllocation::CONSTANT:
1726       ionScript_->getConstant(alloc.index()) = v;
1727       break;
1728 
1729     case RValueAllocation::CST_UNDEFINED:
1730     case RValueAllocation::CST_NULL:
1731     case RValueAllocation::DOUBLE_REG:
1732     case RValueAllocation::ANY_FLOAT_REG:
1733     case RValueAllocation::ANY_FLOAT_STACK:
1734       MOZ_CRASH("Not a GC thing: Unexpected write");
1735       break;
1736 
1737     case RValueAllocation::TYPED_REG:
1738       machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
1739       break;
1740 
1741     case RValueAllocation::TYPED_STACK:
1742       switch (alloc.knownType()) {
1743         default:
1744           MOZ_CRASH("Not a GC thing: Unexpected write");
1745           break;
1746         case JSVAL_TYPE_STRING:
1747         case JSVAL_TYPE_SYMBOL:
1748         case JSVAL_TYPE_BIGINT:
1749         case JSVAL_TYPE_OBJECT:
1750           WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
1751           break;
1752       }
1753       break;
1754 
1755 #if defined(JS_NUNBOX32)
1756     case RValueAllocation::UNTYPED_REG_REG:
1757     case RValueAllocation::UNTYPED_STACK_REG:
1758       machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
1759       break;
1760 
1761     case RValueAllocation::UNTYPED_REG_STACK:
1762     case RValueAllocation::UNTYPED_STACK_STACK:
1763       WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
1764       break;
1765 #elif defined(JS_PUNBOX64)
1766     case RValueAllocation::UNTYPED_REG:
1767       machine_->write(alloc.reg(), v.asRawBits());
1768       break;
1769 
1770     case RValueAllocation::UNTYPED_STACK:
1771       WriteFrameSlot(fp_, alloc.stackOffset(), v.asRawBits());
1772       break;
1773 #endif
1774 
1775     case RValueAllocation::RECOVER_INSTRUCTION:
1776       MOZ_CRASH("Recover instructions are handled by the JitActivation.");
1777       break;
1778 
1779     case RValueAllocation::RI_WITH_DEFAULT_CST:
1780       // Assume that we are always going to be writing on the default value
1781       // while tracing.
1782       ionScript_->getConstant(alloc.index2()) = v;
1783       break;
1784 
1785     default:
1786       MOZ_CRASH("huh?");
1787   }
1788 }
1789 
1790 void SnapshotIterator::traceAllocation(JSTracer* trc) {
1791   RValueAllocation alloc = readAllocation();
1792   if (!allocationReadable(alloc, RM_AlwaysDefault)) {
1793     return;
1794   }
1795 
1796   Value v = allocationValue(alloc, RM_AlwaysDefault);
1797   if (!v.isGCThing()) {
1798     return;
1799   }
1800 
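  // Trace a copy of the value; if the tracer relocated the GC thing, write
  // the updated pointer back into the underlying register or stack slot so
  // the frame refers to the moved object.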
1801   Value copy = v;
1802   TraceRoot(trc, &v, "ion-typed-reg");
1803   if (v != copy) {
1804     MOZ_ASSERT(SameType(v, copy));
1805     writeAllocationValuePayload(alloc, v);
1806   }
1807 }
1808 
1809 const RResumePoint* SnapshotIterator::resumePoint() const {
1810   return instruction()->toResumePoint();
1811 }
1812 
1813 uint32_t SnapshotIterator::numAllocations() const {
1814   return instruction()->numOperands();
1815 }
1816 
1817 uint32_t SnapshotIterator::pcOffset() const {
1818   return resumePoint()->pcOffset();
1819 }
1820 
1821 void SnapshotIterator::skipInstruction() {
1822   MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
1823   size_t numOperands = instruction()->numOperands();
1824   for (size_t i = 0; i < numOperands; i++) {
1825     skip();
1826   }
1827   nextInstruction();
1828 }
1829 
1830 bool SnapshotIterator::initInstructionResults(MaybeReadFallback& fallback) {
1831   MOZ_ASSERT(fallback.canRecoverResults());
1832   JSContext* cx = fallback.maybeCx;
1833 
1834   // If there is only one resume point in the list of instructions, then there
1835   // is no instruction to recover, and thus no need to register any results.
1836   if (recover_.numInstructions() == 1) {
1837     return true;
1838   }
1839 
1840   JitFrameLayout* fp = fallback.frame->jsFrame();
1841   RInstructionResults* results = fallback.activation->maybeIonFrameRecovery(fp);
1842   if (!results) {
1843     AutoRealm ar(cx, fallback.frame->script());
1844 
1845     // We do not have the results yet, which means that an observable stack
1846     // slot is requested.  As we do not want to bail out every time for the
1847     // same reason, we need to recompile without optimizing away the
1848     // observable stack slots.  The script would later be recompiled to have
1849     // support for Argument objects.
1850     if (fallback.consequence == MaybeReadFallback::Fallback_Invalidate) {
1851       ionScript_->invalidate(cx, fallback.frame->script(),
1852                              /* resetUses = */ false,
1853                              "Observe recovered instruction.");
1854     }
1855 
1856     // Register the list of results on the activation.  We need to do that
1857     // before we initialize the list, so that if any recover instruction
1858     // causes a GC, the results are still properly traced by the
1859     // activation.
1860     RInstructionResults tmp(fallback.frame->jsFrame());
1861     if (!fallback.activation->registerIonFrameRecovery(std::move(tmp))) {
1862       return false;
1863     }
1864 
1865     results = fallback.activation->maybeIonFrameRecovery(fp);
1866 
1867     // Start a new snapshot at the beginning of the JSJitFrameIter.  This
1868     // SnapshotIterator is used for evaluating the content of all recover
1869     // instructions.  The result is then saved on the JitActivation.
1870     MachineState machine = fallback.frame->machineState();
1871     SnapshotIterator s(*fallback.frame, &machine);
1872     if (!s.computeInstructionResults(cx, results)) {
1873       // If the evaluation failed because of OOM, then we discard the
1874       // set of results that we have collected so far.
1875       fallback.activation->removeIonFrameRecovery(fp);
1876       return false;
1877     }
1878   }
1879 
1880   MOZ_ASSERT(results->isInitialized());
1881   MOZ_RELEASE_ASSERT(results->length() == recover_.numInstructions() - 1);
1882   instructionResults_ = results;
1883   return true;
1884 }
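
// In short, the recovery path above is: (1) optionally invalidate the Ion
// script so that we stop bailing out here for the same reason, (2) register
// an empty RInstructionResults on the activation so that a GC triggered while
// recovering still traces partial results, (3) evaluate the recover
// instructions with a fresh SnapshotIterator and store their values, and
// (4) leave the results cached on the activation, keyed by the frame pointer.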
1885 
1886 bool SnapshotIterator::computeInstructionResults(
1887     JSContext* cx, RInstructionResults* results) const {
1888   MOZ_ASSERT(!results->isInitialized());
1889   MOZ_ASSERT(recover_.numInstructionsRead() == 1);
1890 
1891   // The last instruction will always be a resume point.
1892   size_t numResults = recover_.numInstructions() - 1;
1893   if (!results->isInitialized()) {
1894     if (!results->init(cx, numResults)) {
1895       return false;
1896     }
1897 
1898     // No need to iterate over the only resume point.
1899     if (!numResults) {
1900       MOZ_ASSERT(results->isInitialized());
1901       return true;
1902     }
1903 
1904     // Avoid invoking the object metadata callback, which could try to walk the
1905     // stack while bailing out.
1906     gc::AutoSuppressGC suppressGC(cx);
1907     js::AutoSuppressAllocationMetadataBuilder suppressMetadata(cx);
1908 
1909     // Fill with the results of recover instructions.
1910     SnapshotIterator s(*this);
1911     s.instructionResults_ = results;
1912     while (s.moreInstructions()) {
1913       // Skip resume point and only interpret recover instructions.
1914       if (s.instruction()->isResumePoint()) {
1915         s.skipInstruction();
1916         continue;
1917       }
1918 
1919       if (!s.instruction()->recover(cx, s)) {
1920         return false;
1921       }
1922       s.nextInstruction();
1923     }
1924   }
1925 
1926   MOZ_ASSERT(results->isInitialized());
1927   return true;
1928 }
1929 
1930 void SnapshotIterator::storeInstructionResult(const Value& v) {
1931   uint32_t currIns = recover_.numInstructionsRead() - 1;
1932   MOZ_ASSERT((*instructionResults_)[currIns].isMagic(JS_ION_BAILOUT));
1933   (*instructionResults_)[currIns] = v;
1934 }
1935 
1936 Value SnapshotIterator::fromInstructionResult(uint32_t index) const {
1937   MOZ_ASSERT(!(*instructionResults_)[index].isMagic(JS_ION_BAILOUT));
1938   return (*instructionResults_)[index];
1939 }
1940 
1941 void SnapshotIterator::settleOnFrame() {
1942   // Check that the current instruction can still be used.
1943   MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
1944   while (!instruction()->isResumePoint()) {
1945     skipInstruction();
1946   }
1947 }
1948 
1949 void SnapshotIterator::nextFrame() {
1950   nextInstruction();
1951   settleOnFrame();
1952 }
1953 
1954 Value SnapshotIterator::maybeReadAllocByIndex(size_t index) {
1955   while (index--) {
1956     MOZ_ASSERT(moreAllocations());
1957     skip();
1958   }
1959 
1960   Value s;
1961   {
1962     // This MaybeReadFallback method cannot GC.
1963     JS::AutoSuppressGCAnalysis nogc;
1964     MaybeReadFallback fallback(UndefinedValue());
1965     s = maybeRead(fallback);
1966   }
1967 
1968   while (moreAllocations()) {
1969     skip();
1970   }
1971 
1972   return s;
1973 }
1974 
1975 InlineFrameIterator::InlineFrameIterator(JSContext* cx,
1976                                          const JSJitFrameIter* iter)
1977     : calleeTemplate_(cx),
1978       calleeRVA_(),
1979       script_(cx),
1980       pc_(nullptr),
1981       numActualArgs_(0) {
1982   resetOn(iter);
1983 }
1984 
1985 InlineFrameIterator::InlineFrameIterator(JSContext* cx,
1986                                          const InlineFrameIterator* iter)
1987     : frame_(iter ? iter->frame_ : nullptr),
1988       framesRead_(0),
1989       frameCount_(iter ? iter->frameCount_ : UINT32_MAX),
1990       calleeTemplate_(cx),
1991       calleeRVA_(),
1992       script_(cx),
1993       pc_(nullptr),
1994       numActualArgs_(0) {
1995   if (frame_) {
1996     machine_ = iter->machine_;
1997     start_ = SnapshotIterator(*frame_, &machine_);
1998 
1999     // findNextFrame will iterate to the next frame and initialize everything.
2000     // Therefore, to settle on the same frame, we report one less frame read.
2001     framesRead_ = iter->framesRead_ - 1;
2002     findNextFrame();
2003   }
2004 }
2005 
2006 void InlineFrameIterator::resetOn(const JSJitFrameIter* iter) {
2007   frame_ = iter;
2008   framesRead_ = 0;
2009   frameCount_ = UINT32_MAX;
2010 
2011   if (iter) {
2012     machine_ = iter->machineState();
2013     start_ = SnapshotIterator(*iter, &machine_);
2014     findNextFrame();
2015   }
2016 }
2017 
2018 void InlineFrameIterator::findNextFrame() {
2019   MOZ_ASSERT(more());
2020 
2021   si_ = start_;
2022 
2023   // Read the initial frame out of the C stack.
2024   calleeTemplate_ = frame_->maybeCallee();
2025   calleeRVA_ = RValueAllocation();
2026   script_ = frame_->script();
2027   MOZ_ASSERT(script_->hasBaselineScript());
2028 
2029   // Settle on the outermost frame without evaluating any instructions before
2030   // looking for a pc.
2031   si_.settleOnFrame();
2032 
2033   pc_ = script_->offsetToPC(si_.pcOffset());
2034   numActualArgs_ = 0xbadbad;
2035 
2036   // This unfortunately is O(n*m), because we must skip over outer frames
2037   // before reading inner ones.
2038 
2039   // The first time (frameCount_ == UINT32_MAX) we do not know the number of
2040   // frames that we are going to inspect.  So we iterate until there are no
2041   // more frames, in order to settle on the innermost frame and to count the
2042   // number of frames.
2043   size_t remaining = (frameCount_ != UINT32_MAX) ? frameNo() - 1 : SIZE_MAX;
2044 
2045   size_t i = 1;
2046   for (; i <= remaining && si_.moreFrames(); i++) {
2047     MOZ_ASSERT(IsIonInlinableOp(JSOp(*pc_)));
2048 
2049     // Recover the number of actual arguments from the script.
2050     if (JSOp(*pc_) != JSOp::FunApply) {
2051       numActualArgs_ = GET_ARGC(pc_);
2052     }
2053     if (JSOp(*pc_) == JSOp::FunCall) {
2054       if (numActualArgs_ > 0) {
2055         numActualArgs_--;
2056       }
2057     } else if (IsGetPropPC(pc_) || IsGetElemPC(pc_)) {
2058       numActualArgs_ = 0;
2059     } else if (IsSetPropPC(pc_)) {
2060       numActualArgs_ = 1;
2061     }
2062 
2063     if (numActualArgs_ == 0xbadbad) {
2064       MOZ_CRASH(
2065           "Couldn't deduce the number of arguments of an ionmonkey frame");
2066     }
2067 
2068     // Skip over non-argument slots, as well as |this|.
2069     bool skipNewTarget = IsConstructPC(pc_);
2070     unsigned skipCount =
2071         (si_.numAllocations() - 1) - numActualArgs_ - 1 - skipNewTarget;
2072     for (unsigned j = 0; j < skipCount; j++) {
2073       si_.skip();
2074     }
2075 
2076     // This value should correspond to the function which is being inlined.
2077     // The value must be readable to iterate over the inline frame. Most of
2078     // the time, these functions are stored as JSFunction constants, as
2079     // registers which are holding the JSFunction pointer, or as recover
2080     // instructions with a default value.
2081     Value funval = si_.readWithDefault(&calleeRVA_);
2082 
2083     // Skip extra value allocations.
2084     while (si_.moreAllocations()) {
2085       si_.skip();
2086     }
2087 
2088     si_.nextFrame();
2089 
2090     calleeTemplate_ = &funval.toObject().as<JSFunction>();
2091     script_ = calleeTemplate_->nonLazyScript();
2092     MOZ_ASSERT(script_->hasBaselineScript());
2093 
2094     pc_ = script_->offsetToPC(si_.pcOffset());
2095   }
2096 
2097   // The first time, when we do not know the number of frames, we only settle
2098   // on the last frame, and update the number of frames based on the number of
2099   // iterations that we have done.
2100   if (frameCount_ == UINT32_MAX) {
2101     MOZ_ASSERT(!si_.moreFrames());
2102     frameCount_ = i;
2103   }
2104 
2105   framesRead_++;
2106 }
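
// Roughly, for an inlined call such as f(a, b) the outer frame's resume point
// ends with the call's operands in evaluation order: ..., callee, |this|, a, b
// (plus new.target when constructing).  findNextFrame() skips ahead to the
// callee allocation, reads it to identify the inlined JSFunction, skips the
// remaining operands, and then settles on the inlined frame's own resume
// point.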
2107 
2108 JSFunction* InlineFrameIterator::callee(MaybeReadFallback& fallback) const {
2109   MOZ_ASSERT(isFunctionFrame());
2110   if (calleeRVA_.mode() == RValueAllocation::INVALID ||
2111       !fallback.canRecoverResults()) {
2112     return calleeTemplate_;
2113   }
2114 
2115   SnapshotIterator s(si_);
2116   // :TODO: Handle allocation failures from recover instruction.
2117   Value funval = s.maybeRead(calleeRVA_, fallback);
2118   return &funval.toObject().as<JSFunction>();
2119 }
2120 
2121 JSObject* InlineFrameIterator::computeEnvironmentChain(
2122     const Value& envChainValue, MaybeReadFallback& fallback,
2123     bool* hasInitialEnv) const {
2124   if (envChainValue.isObject()) {
2125     if (hasInitialEnv) {
2126       if (fallback.canRecoverResults()) {
2127         RootedObject obj(fallback.maybeCx, &envChainValue.toObject());
2128         *hasInitialEnv = isFunctionFrame() &&
2129                          callee(fallback)->needsFunctionEnvironmentObjects();
2130         return obj;
2131       }
2132       JS::AutoSuppressGCAnalysis
2133           nogc;  // If we cannot recover then we cannot GC.
2134       *hasInitialEnv = isFunctionFrame() &&
2135                        callee(fallback)->needsFunctionEnvironmentObjects();
2136     }
2137 
2138     return &envChainValue.toObject();
2139   }
2140 
2141   // Note we can hit this case even for functions with a CallObject, in case
2142   // we are walking the frame during the function prologue, before the env
2143   // chain has been initialized.
2144   if (isFunctionFrame()) {
2145     return callee(fallback)->environment();
2146   }
2147 
2148   if (isModuleFrame()) {
2149     return script()->module()->environment();
2150   }
2151 
2152   // Ion does not handle non-function scripts that have anything other than
2153   // the global on their env chain.
2154   MOZ_ASSERT(!script()->isForEval());
2155   MOZ_ASSERT(!script()->hasNonSyntacticScope());
2156   return &script()->global().lexicalEnvironment();
2157 }
2158 
2159 bool InlineFrameIterator::isFunctionFrame() const { return !!calleeTemplate_; }
2160 
2161 bool InlineFrameIterator::isModuleFrame() const { return script()->module(); }
2162 
2163 MachineState MachineState::FromBailout(RegisterDump::GPRArray& regs,
2164                                        RegisterDump::FPUArray& fpregs) {
2165   MachineState machine;
2166 
2167   for (unsigned i = 0; i < Registers::Total; i++) {
2168     machine.setRegisterLocation(Register::FromCode(i), &regs[i].r);
2169   }
2170 #ifdef JS_CODEGEN_ARM
2171   float* fbase = (float*)&fpregs[0];
2172   for (unsigned i = 0; i < FloatRegisters::TotalDouble; i++) {
2173     machine.setRegisterLocation(FloatRegister(i, FloatRegister::Double),
2174                                 &fpregs[i].d);
2175   }
2176   for (unsigned i = 0; i < FloatRegisters::TotalSingle; i++) {
2177     machine.setRegisterLocation(FloatRegister(i, FloatRegister::Single),
2178                                 (double*)&fbase[i]);
2179 #  ifdef ENABLE_WASM_SIMD
2180 #    error "More care needed here"
2181 #  endif
2182   }
2183 #elif defined(JS_CODEGEN_MIPS32)
2184   for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2185     machine.setRegisterLocation(
2186         FloatRegister::FromIndex(i, FloatRegister::Double), &fpregs[i]);
2187     machine.setRegisterLocation(
2188         FloatRegister::FromIndex(i, FloatRegister::Single), &fpregs[i]);
2189 #  ifdef ENABLE_WASM_SIMD
2190 #    error "More care needed here"
2191 #  endif
2192   }
2193 #elif defined(JS_CODEGEN_MIPS64)
2194   for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2195     machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double),
2196                                 &fpregs[i]);
2197     machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single),
2198                                 &fpregs[i]);
2199 #  ifdef ENABLE_WASM_SIMD
2200 #    error "More care needed here"
2201 #  endif
2202   }
2203 #elif defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
2204   for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2205     machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single),
2206                                 &fpregs[i]);
2207     machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double),
2208                                 &fpregs[i]);
2209     machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Simd128),
2210                                 &fpregs[i]);
2211   }
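  // Note that the Single, Double and Simd128 views of physical register i all
  // alias the same RegisterDump slot here.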
2212 #elif defined(JS_CODEGEN_ARM64)
2213   for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2214     machine.setRegisterLocation(
2215         FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Single),
2216         &fpregs[i]);
2217     machine.setRegisterLocation(
2218         FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Double),
2219         &fpregs[i]);
2220     // No SIMD support in bailouts, SIMD is internal to wasm
2221   }
2222 #elif defined(JS_CODEGEN_LOONG64)
2223   for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2224     machine.setRegisterLocation(
2225         FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Single),
2226         &fpregs[i]);
2227     machine.setRegisterLocation(
2228         FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Double),
2229         &fpregs[i]);
2230   }
2231 
2232 #elif defined(JS_CODEGEN_NONE)
2233   MOZ_CRASH();
2234 #else
2235 #  error "Unknown architecture!"
2236 #endif
2237   return machine;
2238 }
2239 
2240 bool InlineFrameIterator::isConstructing() const {
2241   // Skip the current frame and look at the caller's.
2242   if (more()) {
2243     InlineFrameIterator parent(TlsContext.get(), this);
2244     ++parent;
2245 
2246     // In the case of a JS frame, look up the pc from the snapshot.
2247     JSOp parentOp = JSOp(*parent.pc());
2248 
2249     // Inlined Getters and Setters are never constructing.
2250     if (IsIonInlinableGetterOrSetterOp(parentOp)) {
2251       return false;
2252     }
2253 
2254     MOZ_ASSERT(IsInvokeOp(parentOp) && !IsSpreadOp(parentOp));
2255 
2256     return IsConstructOp(parentOp);
2257   }
2258 
2259   return frame_->isConstructing();
2260 }
2261 
2262 void SnapshotIterator::warnUnreadableAllocation() {
2263   fprintf(stderr,
2264           "Warning! Tried to access unreadable value allocation (possible "
2265           "f.arguments).\n");
2266 }
2267 
2268 struct DumpOp {
2269   explicit DumpOp(unsigned int i) : i_(i) {}
2270 
2271   unsigned int i_;
2272   void operator()(const Value& v) {
2273     fprintf(stderr, "  actual (arg %u): ", i_);
2274 #if defined(DEBUG) || defined(JS_JITSPEW)
2275     DumpValue(v);
2276 #else
2277     fprintf(stderr, "?\n");
2278 #endif
2279     i_++;
2280   }
2281 };
2282 
2283 void InlineFrameIterator::dump() const {
2284   MaybeReadFallback fallback(UndefinedValue());
2285 
2286   if (more()) {
2287     fprintf(stderr, " JS frame (inlined)\n");
2288   } else {
2289     fprintf(stderr, " JS frame\n");
2290   }
2291 
2292   bool isFunction = false;
2293   if (isFunctionFrame()) {
2294     isFunction = true;
2295     fprintf(stderr, "  callee fun: ");
2296 #if defined(DEBUG) || defined(JS_JITSPEW)
2297     DumpObject(callee(fallback));
2298 #else
2299     fprintf(stderr, "?\n");
2300 #endif
2301   } else {
2302     fprintf(stderr, "  global frame, no callee\n");
2303   }
2304 
2305   fprintf(stderr, "  file %s line %u\n", script()->filename(),
2306           script()->lineno());
2307 
2308   fprintf(stderr, "  script = %p, pc = %p\n", (void*)script(), pc());
2309   fprintf(stderr, "  current op: %s\n", CodeName(JSOp(*pc())));
2310 
2311   if (!more()) {
2312     numActualArgs();
2313   }
2314 
2315   SnapshotIterator si = snapshotIterator();
2316   fprintf(stderr, "  slots: %u\n", si.numAllocations() - 1);
2317   for (unsigned i = 0; i < si.numAllocations() - 1; i++) {
2318     if (isFunction) {
2319       if (i == 0) {
2320         fprintf(stderr, "  env chain: ");
2321       } else if (i == 1) {
2322         fprintf(stderr, "  this: ");
2323       } else if (i - 2 < calleeTemplate()->nargs()) {
2324         fprintf(stderr, "  formal (arg %u): ", i - 2);
2325       } else {
2326         if (i - 2 == calleeTemplate()->nargs() &&
2327             numActualArgs() > calleeTemplate()->nargs()) {
2328           DumpOp d(calleeTemplate()->nargs());
2329           unaliasedForEachActual(TlsContext.get(), d, ReadFrame_Overflown,
2330                                  fallback);
2331         }
2332 
2333         fprintf(stderr, "  slot %d: ", int(i - 2 - calleeTemplate()->nargs()));
2334       }
2335     } else
2336       fprintf(stderr, "  slot %u: ", i);
2337 #if defined(DEBUG) || defined(JS_JITSPEW)
2338     DumpValue(si.maybeRead(fallback));
2339 #else
2340     fprintf(stderr, "?\n");
2341 #endif
2342   }
2343 
2344   fputc('\n', stderr);
2345 }
2346 
2347 JitFrameLayout* InvalidationBailoutStack::fp() const {
2348   return (JitFrameLayout*)(sp() + ionScript_->frameSize());
2349 }
2350 
2351 void InvalidationBailoutStack::checkInvariants() const {
2352 #ifdef DEBUG
2353   JitFrameLayout* frame = fp();
2354   CalleeToken token = frame->calleeToken();
2355   MOZ_ASSERT(token);
2356 
2357   uint8_t* rawBase = ionScript()->method()->raw();
2358   uint8_t* rawLimit = rawBase + ionScript()->method()->instructionsSize();
2359   uint8_t* osiPoint = osiPointReturnAddress();
2360   MOZ_ASSERT(rawBase <= osiPoint && osiPoint <= rawLimit);
2361 #endif
2362 }
2363 
2364 void AssertJitStackInvariants(JSContext* cx) {
2365   for (JitActivationIterator activations(cx); !activations.done();
2366        ++activations) {
2367     JitFrameIter iter(activations->asJit());
2368     if (iter.isJSJit()) {
2369       JSJitFrameIter& frames = iter.asJSJit();
2370       size_t prevFrameSize = 0;
2371       size_t frameSize = 0;
2372       bool isScriptedCallee = false;
2373       for (; !frames.done(); ++frames) {
2374         size_t calleeFp = reinterpret_cast<size_t>(frames.fp());
2375         size_t callerFp = reinterpret_cast<size_t>(frames.prevFp());
2376         MOZ_ASSERT(callerFp >= calleeFp);
2377         prevFrameSize = frameSize;
2378         frameSize = callerFp - calleeFp;
2379 
2380         if (frames.isScripted() && frames.prevType() == FrameType::Rectifier) {
2381           MOZ_RELEASE_ASSERT(frameSize % JitStackAlignment == 0,
2382                              "The rectifier frame should keep the alignment");
2383 
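          // For example, a non-constructing callee with nargs() == 3 should
          // occupy 4 * sizeof(Value) (|this| plus three formals) plus
          // sizeof(JitFrameLayout) (plus the saved frame pointer on x86), and
          // the actual frame may exceed that only by rectifier padding
          // smaller than JitStackAlignment.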
2384           size_t expectedFrameSize =
2385               0
2386 #if defined(JS_CODEGEN_X86)
2387               + sizeof(void*) /* frame pointer */
2388 #endif
2389               + sizeof(Value) *
2390                     (frames.callee()->nargs() + 1 /* |this| argument */ +
2391                      frames.isConstructing() /* new.target */) +
2392               sizeof(JitFrameLayout);
2393           MOZ_RELEASE_ASSERT(frameSize >= expectedFrameSize,
2394                              "The frame is large enough to hold all arguments");
2395           MOZ_RELEASE_ASSERT(expectedFrameSize + JitStackAlignment > frameSize,
2396                              "The frame size is optimal");
2397         }
2398 
2399         if (frames.isExitFrame()) {
2400           // For the moment, we do not keep the JitStackAlignment
2401           // alignment for exit frames.
2402           frameSize -= ExitFrameLayout::Size();
2403         }
2404 
2405         if (frames.isIonJS()) {
2406           // Ideally, we should not have such a requirement, but instead
2407           // keep the alignment delta as part of the Safepoint so that we
2408           // can pad accordingly when making out-of-line calls.  In the
2409           // meantime, let us have checkpoints where we can guarantee that
2410           // everything can properly be aligned before adding complexity.
2411           MOZ_RELEASE_ASSERT(
2412               frames.ionScript()->frameSize() % JitStackAlignment == 0,
2413               "Ensure that if the Ion frame is aligned, then the spill base is "
2414               "also aligned");
2415 
2416           if (isScriptedCallee) {
2417             MOZ_RELEASE_ASSERT(prevFrameSize % JitStackAlignment == 0,
2418                                "The ion frame should keep the alignment");
2419           }
2420         }
2421 
2422         // The stack is dynamically aligned by baseline stubs before calling
2423         // any jitted code.
2424         if (frames.prevType() == FrameType::BaselineStub && isScriptedCallee) {
2425           MOZ_RELEASE_ASSERT(calleeFp % JitStackAlignment == 0,
2426                              "The baseline stub restores the stack alignment");
2427         }
2428 
2429         isScriptedCallee =
2430             frames.isScripted() || frames.type() == FrameType::Rectifier;
2431       }
2432 
2433       MOZ_RELEASE_ASSERT(
2434           JSJitFrameIter::isEntry(frames.type()),
2435           "The first frame of a Jit activation should be an entry frame");
2436       MOZ_RELEASE_ASSERT(
2437           reinterpret_cast<size_t>(frames.fp()) % JitStackAlignment == 0,
2438           "The entry frame should be properly aligned");
2439     } else {
2440       MOZ_ASSERT(iter.isWasm());
2441       wasm::WasmFrameIter& frames = iter.asWasm();
2442       while (!frames.done()) {
2443         ++frames;
2444       }
2445     }
2446   }
2447 }
2448 
2449 }  // namespace jit
2450 }  // namespace js
2451