1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "jit/JitFrames-inl.h"
8
9 #include "mozilla/SizePrintfMacros.h"
10
11 #include "jsfun.h"
12 #include "jsobj.h"
13 #include "jsscript.h"
14 #include "jsutil.h"
15
16 #include "gc/Marking.h"
17 #include "jit/BaselineDebugModeOSR.h"
18 #include "jit/BaselineFrame.h"
19 #include "jit/BaselineIC.h"
20 #include "jit/BaselineJIT.h"
21 #include "jit/Ion.h"
22 #include "jit/JitcodeMap.h"
23 #include "jit/JitCompartment.h"
24 #include "jit/JitSpewer.h"
25 #include "jit/MacroAssembler.h"
26 #include "jit/PcScriptCache.h"
27 #include "jit/Recover.h"
28 #include "jit/Safepoints.h"
29 #include "jit/Snapshots.h"
30 #include "jit/VMFunctions.h"
31 #include "vm/ArgumentsObject.h"
32 #include "vm/Debugger.h"
33 #include "vm/Interpreter.h"
34 #include "vm/SPSProfiler.h"
35 #include "vm/TraceLogging.h"
36 #include "vm/TypeInference.h"
37
38 #include "jsscriptinlines.h"
39 #include "gc/Nursery-inl.h"
40 #include "jit/JitFrameIterator-inl.h"
41 #include "vm/Debugger-inl.h"
42 #include "vm/Probes-inl.h"
43 #include "vm/TypeInference-inl.h"
44
45 namespace js {
46 namespace jit {
47
48 // Given a slot index, returns the offset, in bytes, of that slot from an
49 // JitFrameLayout. Slot distances are uniform across architectures, however,
50 // the distance does depend on the size of the frame header.
static inline int32_t
OffsetOfFrameSlot(int32_t slot)
{
    // Slot indices grow away from the frame layout in memory, so the byte
    // offset is simply the negated index.
    return -slot;
}
56
57 static inline uint8_t*
AddressOfFrameSlot(JitFrameLayout * fp,int32_t slot)58 AddressOfFrameSlot(JitFrameLayout* fp, int32_t slot)
59 {
60 return (uint8_t*) fp + OffsetOfFrameSlot(slot);
61 }
62
63 static inline uintptr_t
ReadFrameSlot(JitFrameLayout * fp,int32_t slot)64 ReadFrameSlot(JitFrameLayout* fp, int32_t slot)
65 {
66 return *(uintptr_t*) AddressOfFrameSlot(fp, slot);
67 }
68
69 static inline void
WriteFrameSlot(JitFrameLayout * fp,int32_t slot,uintptr_t value)70 WriteFrameSlot(JitFrameLayout* fp, int32_t slot, uintptr_t value)
71 {
72 *(uintptr_t*) AddressOfFrameSlot(fp, slot) = value;
73 }
74
75 static inline double
ReadFrameDoubleSlot(JitFrameLayout * fp,int32_t slot)76 ReadFrameDoubleSlot(JitFrameLayout* fp, int32_t slot)
77 {
78 return *(double*) AddressOfFrameSlot(fp, slot);
79 }
80
81 static inline float
ReadFrameFloat32Slot(JitFrameLayout * fp,int32_t slot)82 ReadFrameFloat32Slot(JitFrameLayout* fp, int32_t slot)
83 {
84 return *(float*) AddressOfFrameSlot(fp, slot);
85 }
86
87 static inline int32_t
ReadFrameInt32Slot(JitFrameLayout * fp,int32_t slot)88 ReadFrameInt32Slot(JitFrameLayout* fp, int32_t slot)
89 {
90 return *(int32_t*) AddressOfFrameSlot(fp, slot);
91 }
92
93 static inline bool
ReadFrameBooleanSlot(JitFrameLayout * fp,int32_t slot)94 ReadFrameBooleanSlot(JitFrameLayout* fp, int32_t slot)
95 {
96 return *(bool*) AddressOfFrameSlot(fp, slot);
97 }
98
// Default constructor: produces an inert iterator with no activation and no
// current frame; type_ starts as JitFrame_Exit to match the other ctors.
JitFrameIterator::JitFrameIterator()
  : current_(nullptr),
    type_(JitFrame_Exit),
    returnAddressToFp_(nullptr),
    frameSize_(0),
    cachedSafepointIndex_(nullptr),
    activation_(nullptr)
{
}
108
// Constructs an iterator positioned at the newest jit frame of |cx|'s
// current (jit) activation, starting from the runtime's jitTop.
JitFrameIterator::JitFrameIterator(JSContext* cx)
  : current_(cx->runtime()->jitTop),
    type_(JitFrame_Exit),
    returnAddressToFp_(nullptr),
    frameSize_(0),
    cachedSafepointIndex_(nullptr),
    activation_(cx->runtime()->activation()->asJit())
{
    // If the activation is mid-bailout, the machine stack no longer
    // describes the top frame; use the bailout snapshot's fp/size instead.
    if (activation_->bailoutData()) {
        current_ = activation_->bailoutData()->fp();
        frameSize_ = activation_->bailoutData()->topFrameSize();
        type_ = JitFrame_Bailout;
    } else if (activation_->isLazyLinkExitFrame()) {
        // Lazy-link stubs push a special exit frame; tag it so consumers can
        // recognize it.
        type_ = JitFrame_LazyLink;
        MOZ_ASSERT(isExitFrameLayout<LazyLinkExitFrameLayout>());
    }
}
126
// Constructs an iterator for the jit activation referenced by |activations|,
// mirroring the JSContext* constructor's bailout/lazy-link fixups.
JitFrameIterator::JitFrameIterator(const ActivationIterator& activations)
  : current_(activations.jitTop()),
    type_(JitFrame_Exit),
    returnAddressToFp_(nullptr),
    frameSize_(0),
    cachedSafepointIndex_(nullptr),
    activation_(activations->asJit())
{
    // Same special cases as the JSContext* ctor: prefer the bailout
    // snapshot's view of the top frame, and tag lazy-link exit frames.
    if (activation_->bailoutData()) {
        current_ = activation_->bailoutData()->fp();
        frameSize_ = activation_->bailoutData()->topFrameSize();
        type_ = JitFrame_Bailout;
    } else if (activation_->isLazyLinkExitFrame()) {
        type_ = JitFrame_LazyLink;
        MOZ_ASSERT(isExitFrameLayout<LazyLinkExitFrameLayout>());
    }
}
144
145 bool
checkInvalidation() const146 JitFrameIterator::checkInvalidation() const
147 {
148 IonScript* dummy;
149 return checkInvalidation(&dummy);
150 }
151
// Returns true if this frame's IonScript has been invalidated since the
// frame was pushed, storing the (possibly invalidated) IonScript in
// |*ionScriptOut|.
bool
JitFrameIterator::checkInvalidation(IonScript** ionScriptOut) const
{
    JSScript* script = this->script();
    if (isBailoutJS()) {
        // Bailing frames carry their IonScript in the activation's bailout
        // data; it is invalidated if the script no longer points at it.
        *ionScriptOut = activation_->bailoutData()->ionScript();
        return !script->hasIonScript() || script->ionScript() != *ionScriptOut;
    }

    uint8_t* returnAddr = returnAddressToFp();
    // N.B. the current IonScript is not the same as the frame's
    // IonScript if the frame has since been invalidated.
    bool invalidated = !script->hasIonScript() ||
                       !script->ionScript()->containsReturnAddress(returnAddr);
    if (!invalidated)
        return false;

    // Recover the invalidated IonScript from the data written just before
    // the return address: the word at returnAddr[-1] is an offset to a
    // patched-in IonScript pointer (see the invalidation epilogue).
    int32_t invalidationDataOffset = ((int32_t*) returnAddr)[-1];
    uint8_t* ionScriptDataOffset = returnAddr + invalidationDataOffset;
    IonScript* ionScript = (IonScript*) Assembler::GetPointer(ionScriptDataOffset);
    MOZ_ASSERT(ionScript->containsReturnAddress(returnAddr));
    *ionScriptOut = ionScript;
    return true;
}
176
177 CalleeToken
calleeToken() const178 JitFrameIterator::calleeToken() const
179 {
180 return ((JitFrameLayout*) current_)->calleeToken();
181 }
182
183 JSFunction*
callee() const184 JitFrameIterator::callee() const
185 {
186 MOZ_ASSERT(isScripted());
187 MOZ_ASSERT(isFunctionFrame());
188 return CalleeTokenToFunction(calleeToken());
189 }
190
191 JSFunction*
maybeCallee() const192 JitFrameIterator::maybeCallee() const
193 {
194 if (isScripted() && (isFunctionFrame()))
195 return callee();
196 return nullptr;
197 }
198
199 bool
isBareExit() const200 JitFrameIterator::isBareExit() const
201 {
202 if (type_ != JitFrame_Exit)
203 return false;
204 return exitFrame()->isBareExit();
205 }
206
207 bool
isFunctionFrame() const208 JitFrameIterator::isFunctionFrame() const
209 {
210 return CalleeTokenIsFunction(calleeToken());
211 }
212
213 JSScript*
script() const214 JitFrameIterator::script() const
215 {
216 MOZ_ASSERT(isScripted());
217 if (isBaselineJS())
218 return baselineFrame()->script();
219 JSScript* script = ScriptFromCalleeToken(calleeToken());
220 MOZ_ASSERT(script);
221 return script;
222 }
223
// Recovers the script and bytecode pc for a baseline frame. |scriptRes| may
// be null; |pcRes| is required.
void
JitFrameIterator::baselineScriptAndPc(JSScript** scriptRes, jsbytecode** pcRes) const
{
    MOZ_ASSERT(isBaselineJS());
    JSScript* script = this->script();
    if (scriptRes)
        *scriptRes = script;

    MOZ_ASSERT(pcRes);

    // Use the frame's override pc, if we have one. This should only happen
    // when we're in FinishBailoutToBaseline, handling an exception or toggling
    // debug mode.
    if (jsbytecode* overridePc = baselineFrame()->maybeOverridePc()) {
        *pcRes = overridePc;
        return;
    }

    // Else, there must be an ICEntry for the current return address.
    uint8_t* retAddr = returnAddressToFp();
    ICEntry& icEntry = script->baselineScript()->icEntryFromReturnAddress(retAddr);
    *pcRes = icEntry.pc(script);
}
247
248 Value*
actualArgs() const249 JitFrameIterator::actualArgs() const
250 {
251 return jsFrame()->argv() + 1;
252 }
253
// Returns the size in bytes of the header (prefix) for a frame of the given
// type. Unwound variants share the layout of their original frame kind.
static inline size_t
SizeOfFramePrefix(FrameType type)
{
    switch (type) {
      case JitFrame_Entry:
        return EntryFrameLayout::Size();
      case JitFrame_BaselineJS:
      case JitFrame_IonJS:
      case JitFrame_Bailout:
      case JitFrame_Unwound_BaselineJS:
      case JitFrame_Unwound_IonJS:
        return JitFrameLayout::Size();
      case JitFrame_BaselineStub:
      case JitFrame_Unwound_BaselineStub:
        return BaselineStubFrameLayout::Size();
      case JitFrame_IonStub:
      case JitFrame_Unwound_IonStub:
        return JitStubFrameLayout::Size();
      case JitFrame_Rectifier:
        return RectifierFrameLayout::Size();
      case JitFrame_Unwound_Rectifier:
        return IonUnwoundRectifierFrameLayout::Size();
      case JitFrame_Exit:
      case JitFrame_LazyLink:
        return ExitFrameLayout::Size();
      case JitFrame_IonAccessorIC:
      case JitFrame_Unwound_IonAccessorIC:
        return IonAccessorICFrameLayout::Size();
    }

    // Unreachable if the switch above is exhaustive over FrameType.
    MOZ_CRASH("unknown frame type");
}
286
// Computes the frame pointer of the next-older frame: current_ plus this
// frame's header size plus the caller-recorded local size.
uint8_t*
JitFrameIterator::prevFp() const
{
    size_t currentSize = SizeOfFramePrefix(type_);
    // This quick fix must be removed as soon as bug 717297 land. This is
    // needed because the descriptor size of JS-to-JS frame which is just after
    // a Rectifier frame should not change. (cf EnsureExitFrame function)
    if (isFakeExitFrame()) {
        MOZ_ASSERT(SizeOfFramePrefix(JitFrame_BaselineJS) ==
                   SizeOfFramePrefix(JitFrame_IonJS));
        currentSize = SizeOfFramePrefix(JitFrame_IonJS);
    }
    currentSize += current()->prevFrameLocalSize();
    return current_ + currentSize;
}
302
303 JitFrameIterator&
operator ++()304 JitFrameIterator::operator++()
305 {
306 MOZ_ASSERT(type_ != JitFrame_Entry);
307
308 frameSize_ = prevFrameLocalSize();
309 cachedSafepointIndex_ = nullptr;
310
311 // If the next frame is the entry frame, just exit. Don't update current_,
312 // since the entry and first frames overlap.
313 if (current()->prevType() == JitFrame_Entry) {
314 type_ = JitFrame_Entry;
315 return *this;
316 }
317
318 // Note: prevFp() needs the current type, so set it after computing the
319 // next frame.
320 uint8_t* prev = prevFp();
321 type_ = current()->prevType();
322 if (type_ == JitFrame_Unwound_IonJS)
323 type_ = JitFrame_IonJS;
324 else if (type_ == JitFrame_Unwound_IonStub)
325 type_ = JitFrame_IonStub;
326 else if (type_ == JitFrame_Unwound_BaselineJS)
327 type_ = JitFrame_BaselineJS;
328 else if (type_ == JitFrame_Unwound_BaselineStub)
329 type_ = JitFrame_BaselineStub;
330 else if (type_ == JitFrame_Unwound_IonAccessorIC)
331 type_ = JitFrame_IonAccessorIC;
332 returnAddressToFp_ = current()->returnAddress();
333 current_ = prev;
334
335 return *this;
336 }
337
// Returns the base address of the safepoint register-spill area for an Ion
// frame (registers are pushed below the frame's locals).
uintptr_t*
JitFrameIterator::spillBase() const
{
    MOZ_ASSERT(isIonJS());

    // Get the base address to where safepoint registers are spilled.
    // Out-of-line calls do not unwind the extra padding space used to
    // aggregate bailout tables, so we use frameSize instead of frameLocals,
    // which would only account for local stack slots.
    return reinterpret_cast<uintptr_t*>(fp() - ionScript()->frameSize());
}
349
// Reconstructs the register state spilled at this frame's safepoint,
// mapping each spilled GPR/FPR to its slot in the spill area.
MachineState
JitFrameIterator::machineState() const
{
    MOZ_ASSERT(isIonScripted());

    // The MachineState is used by GCs for marking call-sites.
    if (MOZ_UNLIKELY(isBailoutJS()))
        return *activation_->bailoutData()->machineState();

    SafepointReader reader(ionScript(), safepoint());
    uintptr_t* spill = spillBase();
    MachineState machine;

    // GPRs were pushed in order, so walk them backwards while decrementing
    // the spill cursor.
    for (GeneralRegisterBackwardIterator iter(reader.allGprSpills()); iter.more(); iter++)
        machine.setRegisterLocation(*iter, --spill);

    // Float spills start at the next properly-aligned address below the GPRs.
    uint8_t* spillAlign = alignDoubleSpillWithOffset(reinterpret_cast<uint8_t*>(spill), 0);

    char* floatSpill = reinterpret_cast<char*>(spillAlign);
    FloatRegisterSet fregs = reader.allFloatSpills().set();
    fregs = fregs.reduceSetForPush();
    for (FloatRegisterBackwardIterator iter(fregs); iter.more(); iter++) {
        floatSpill -= (*iter).size();
        for (uint32_t a = 0; a < (*iter).numAlignedAliased(); a++) {
            // Only say that registers that actually start here start here.
            // e.g. d0 should not start at s1, only at s0.
            FloatRegister ftmp;
            (*iter).alignedAliased(a, &ftmp);
            machine.setRegisterLocation(ftmp, (double*)floatSpill);
        }
    }

    return machine;
}
384
385 static uint32_t
NumArgAndLocalSlots(const InlineFrameIterator & frame)386 NumArgAndLocalSlots(const InlineFrameIterator& frame)
387 {
388 JSScript* script = frame.script();
389 return CountArgSlots(script, frame.maybeCalleeTemplate()) + script->nfixed();
390 }
391
392 static void
CloseLiveIteratorIon(JSContext * cx,const InlineFrameIterator & frame,uint32_t stackSlot)393 CloseLiveIteratorIon(JSContext* cx, const InlineFrameIterator& frame, uint32_t stackSlot)
394 {
395 SnapshotIterator si = frame.snapshotIterator();
396
397 // Skip stack slots until we reach the iterator object.
398 uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - 1;
399
400 for (unsigned i = 0; i < skipSlots; i++)
401 si.skip();
402
403 Value v = si.read();
404 RootedObject obj(cx, &v.toObject());
405
406 if (cx->isExceptionPending())
407 UnwindIteratorForException(cx, obj);
408 else
409 UnwindIteratorForUncatchableException(cx, obj);
410 }
411
// Functor for TryNoteIter: computes the expression-stack depth of an
// inlined Ion frame from its snapshot allocation count.
class IonFrameStackDepthOp
{
    uint32_t depth_;

  public:
    explicit IonFrameStackDepthOp(const InlineFrameIterator& frame) {
        // Total allocations minus args/locals leaves the stack depth.
        uint32_t base = NumArgAndLocalSlots(frame);
        SnapshotIterator si = frame.snapshotIterator();
        MOZ_ASSERT(si.numAllocations() >= base);
        depth_ = si.numAllocations() - base;
    }

    uint32_t operator()() { return depth_; }
};
426
// TryNoteIter specialized for (possibly inlined) Ion frames.
class TryNoteIterIon : public TryNoteIter<IonFrameStackDepthOp>
{
  public:
    TryNoteIterIon(JSContext* cx, const InlineFrameIterator& frame)
      : TryNoteIter(cx, frame.script(), frame.pc(), IonFrameStackDepthOp(frame))
    { }
};
434
435 static void
HandleExceptionIon(JSContext * cx,const InlineFrameIterator & frame,ResumeFromException * rfe,bool * overrecursed)436 HandleExceptionIon(JSContext* cx, const InlineFrameIterator& frame, ResumeFromException* rfe,
437 bool* overrecursed)
438 {
439 if (cx->compartment()->isDebuggee()) {
440 // We need to bail when there is a catchable exception, and we are the
441 // debuggee of a Debugger with a live onExceptionUnwind hook, or if a
442 // Debugger has observed this frame (e.g., for onPop).
443 bool shouldBail = Debugger::hasLiveHook(cx->global(), Debugger::OnExceptionUnwind);
444 RematerializedFrame* rematFrame = nullptr;
445 if (!shouldBail) {
446 JitActivation* act = cx->runtime()->activation()->asJit();
447 rematFrame = act->lookupRematerializedFrame(frame.frame().fp(), frame.frameNo());
448 shouldBail = rematFrame && rematFrame->isDebuggee();
449 }
450
451 if (shouldBail) {
452 // If we have an exception from within Ion and the debugger is active,
453 // we do the following:
454 //
455 // 1. Bailout to baseline to reconstruct a baseline frame.
456 // 2. Resume immediately into the exception tail afterwards, and
457 // handle the exception again with the top frame now a baseline
458 // frame.
459 //
460 // An empty exception info denotes that we're propagating an Ion
461 // exception due to debug mode, which BailoutIonToBaseline needs to
462 // know. This is because we might not be able to fully reconstruct up
463 // to the stack depth at the snapshot, as we could've thrown in the
464 // middle of a call.
465 ExceptionBailoutInfo propagateInfo;
466 uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, propagateInfo, overrecursed);
467 if (retval == BAILOUT_RETURN_OK)
468 return;
469 }
470
471 MOZ_ASSERT_IF(rematFrame, !Debugger::inFrameMaps(rematFrame));
472 }
473
474 RootedScript script(cx, frame.script());
475 if (!script->hasTrynotes())
476 return;
477
478 for (TryNoteIterIon tni(cx, frame); !tni.done(); ++tni) {
479 JSTryNote* tn = *tni;
480
481 switch (tn->kind) {
482 case JSTRY_FOR_IN: {
483 MOZ_ASSERT(JSOp(*(script->main() + tn->start + tn->length)) == JSOP_ENDITER);
484 MOZ_ASSERT(tn->stackDepth > 0);
485
486 uint32_t localSlot = tn->stackDepth;
487 CloseLiveIteratorIon(cx, frame, localSlot);
488 break;
489 }
490
491 case JSTRY_FOR_OF:
492 case JSTRY_LOOP:
493 break;
494
495 case JSTRY_CATCH:
496 if (cx->isExceptionPending()) {
497 // Ion can compile try-catch, but bailing out to catch
498 // exceptions is slow. Reset the warm-up counter so that if we
499 // catch many exceptions we won't Ion-compile the script.
500 script->resetWarmUpCounter();
501
502 // Bailout at the start of the catch block.
503 jsbytecode* catchPC = script->main() + tn->start + tn->length;
504 ExceptionBailoutInfo excInfo(frame.frameNo(), catchPC, tn->stackDepth);
505 uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, excInfo, overrecursed);
506 if (retval == BAILOUT_RETURN_OK)
507 return;
508
509 // Error on bailout clears pending exception.
510 MOZ_ASSERT(!cx->isExceptionPending());
511 }
512 break;
513
514 default:
515 MOZ_CRASH("Unexpected try note");
516 }
517 }
518 }
519
520 static void
OnLeaveBaselineFrame(JSContext * cx,const JitFrameIterator & frame,jsbytecode * pc,ResumeFromException * rfe,bool frameOk)521 OnLeaveBaselineFrame(JSContext* cx, const JitFrameIterator& frame, jsbytecode* pc,
522 ResumeFromException* rfe, bool frameOk)
523 {
524 BaselineFrame* baselineFrame = frame.baselineFrame();
525 if (jit::DebugEpilogue(cx, baselineFrame, pc, frameOk)) {
526 rfe->kind = ResumeFromException::RESUME_FORCED_RETURN;
527 rfe->framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
528 rfe->stackPointer = reinterpret_cast<uint8_t*>(baselineFrame);
529 }
530 }
531
532 static inline void
ForcedReturn(JSContext * cx,const JitFrameIterator & frame,jsbytecode * pc,ResumeFromException * rfe)533 ForcedReturn(JSContext* cx, const JitFrameIterator& frame, jsbytecode* pc,
534 ResumeFromException* rfe)
535 {
536 OnLeaveBaselineFrame(cx, frame, pc, rfe, true);
537 }
538
539 static inline void
BaselineFrameAndStackPointersFromTryNote(JSTryNote * tn,const JitFrameIterator & frame,uint8_t ** framePointer,uint8_t ** stackPointer)540 BaselineFrameAndStackPointersFromTryNote(JSTryNote* tn, const JitFrameIterator& frame,
541 uint8_t** framePointer, uint8_t** stackPointer)
542 {
543 JSScript* script = frame.baselineFrame()->script();
544 *framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
545 *stackPointer = *framePointer - BaselineFrame::Size() -
546 (script->nfixed() + tn->stackDepth) * sizeof(Value);
547 }
548
549 static void
SettleOnTryNote(JSContext * cx,JSTryNote * tn,const JitFrameIterator & frame,ScopeIter & si,ResumeFromException * rfe,jsbytecode ** pc)550 SettleOnTryNote(JSContext* cx, JSTryNote* tn, const JitFrameIterator& frame,
551 ScopeIter& si, ResumeFromException* rfe, jsbytecode** pc)
552 {
553 RootedScript script(cx, frame.baselineFrame()->script());
554
555 // Unwind scope chain (pop block objects).
556 if (cx->isExceptionPending())
557 UnwindScope(cx, si, UnwindScopeToTryPc(script, tn));
558
559 // Compute base pointer and stack pointer.
560 BaselineFrameAndStackPointersFromTryNote(tn, frame, &rfe->framePointer, &rfe->stackPointer);
561
562 // Compute the pc.
563 *pc = script->main() + tn->start + tn->length;
564 }
565
// RAII guard marking a baseline frame as handling an exception and giving
// it an override pc for the duration; both flags are cleared on exit.
struct AutoBaselineHandlingException
{
    BaselineFrame* frame;
    AutoBaselineHandlingException(BaselineFrame* frame, jsbytecode* pc)
      : frame(frame)
    {
        frame->setIsHandlingException();
        frame->setOverridePc(pc);
    }
    ~AutoBaselineHandlingException() {
        frame->unsetIsHandlingException();
        frame->clearOverridePc();
    }
};
580
// Functor for TryNoteIter: computes the expression-stack depth of a
// baseline frame from its live value slots.
class BaselineFrameStackDepthOp
{
    BaselineFrame* frame_;
  public:
    explicit BaselineFrameStackDepthOp(BaselineFrame* frame)
      : frame_(frame)
    { }
    uint32_t operator()() {
        // numValueSlots() includes the fixed slots; the remainder is the
        // expression-stack depth.
        MOZ_ASSERT(frame_->numValueSlots() >= frame_->script()->nfixed());
        return frame_->numValueSlots() - frame_->script()->nfixed();
    }
};
593
// TryNoteIter specialized for baseline frames.
class TryNoteIterBaseline : public TryNoteIter<BaselineFrameStackDepthOp>
{
  public:
    TryNoteIterBaseline(JSContext* cx, BaselineFrame* frame, jsbytecode* pc)
      : TryNoteIter(cx, frame->script(), pc, BaselineFrameStackDepthOp(frame))
    { }
};
601
602 // Close all live iterators on a BaselineFrame due to exception unwinding. The
603 // pc parameter is updated to where the scopes have been unwound to.
604 static void
CloseLiveIteratorsBaselineForUncatchableException(JSContext * cx,const JitFrameIterator & frame,jsbytecode * pc)605 CloseLiveIteratorsBaselineForUncatchableException(JSContext* cx, const JitFrameIterator& frame,
606 jsbytecode* pc)
607 {
608 for (TryNoteIterBaseline tni(cx, frame.baselineFrame(), pc); !tni.done(); ++tni) {
609 JSTryNote* tn = *tni;
610
611 if (tn->kind == JSTRY_FOR_IN) {
612 uint8_t* framePointer;
613 uint8_t* stackPointer;
614 BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer, &stackPointer);
615 Value iterValue(*(Value*) stackPointer);
616 RootedObject iterObject(cx, &iterValue.toObject());
617 UnwindIteratorForUncatchableException(cx, iterObject);
618 }
619 }
620 }
621
622 static bool
ProcessTryNotesBaseline(JSContext * cx,const JitFrameIterator & frame,ScopeIter & si,ResumeFromException * rfe,jsbytecode ** pc)623 ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, ScopeIter& si,
624 ResumeFromException* rfe, jsbytecode** pc)
625 {
626 RootedScript script(cx, frame.baselineFrame()->script());
627
628 for (TryNoteIterBaseline tni(cx, frame.baselineFrame(), *pc); !tni.done(); ++tni) {
629 JSTryNote* tn = *tni;
630
631 MOZ_ASSERT(cx->isExceptionPending());
632 switch (tn->kind) {
633 case JSTRY_CATCH: {
634 // If we're closing a legacy generator, we have to skip catch
635 // blocks.
636 if (cx->isClosingGenerator())
637 continue;
638
639 SettleOnTryNote(cx, tn, frame, si, rfe, pc);
640
641 // Ion can compile try-catch, but bailing out to catch
642 // exceptions is slow. Reset the warm-up counter so that if we
643 // catch many exceptions we won't Ion-compile the script.
644 script->resetWarmUpCounter();
645
646 // Resume at the start of the catch block.
647 rfe->kind = ResumeFromException::RESUME_CATCH;
648 rfe->target = script->baselineScript()->nativeCodeForPC(script, *pc);
649 return true;
650 }
651
652 case JSTRY_FINALLY: {
653 SettleOnTryNote(cx, tn, frame, si, rfe, pc);
654 rfe->kind = ResumeFromException::RESUME_FINALLY;
655 rfe->target = script->baselineScript()->nativeCodeForPC(script, *pc);
656 // Drop the exception instead of leaking cross compartment data.
657 if (!cx->getPendingException(MutableHandleValue::fromMarkedLocation(&rfe->exception)))
658 rfe->exception = UndefinedValue();
659 cx->clearPendingException();
660 return true;
661 }
662
663 case JSTRY_FOR_IN: {
664 uint8_t* framePointer;
665 uint8_t* stackPointer;
666 BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer, &stackPointer);
667 Value iterValue(*(Value*) stackPointer);
668 RootedObject iterObject(cx, &iterValue.toObject());
669 if (!UnwindIteratorForException(cx, iterObject)) {
670 // See comment in the JSTRY_FOR_IN case in Interpreter.cpp's
671 // ProcessTryNotes.
672 SettleOnTryNote(cx, tn, frame, si, rfe, pc);
673 MOZ_ASSERT(**pc == JSOP_ENDITER);
674 return false;
675 }
676 break;
677 }
678
679 case JSTRY_FOR_OF:
680 case JSTRY_LOOP:
681 break;
682
683 default:
684 MOZ_CRASH("Invalid try note");
685 }
686 }
687 return true;
688 }
689
690 static void
HandleExceptionBaseline(JSContext * cx,const JitFrameIterator & frame,ResumeFromException * rfe,jsbytecode * pc)691 HandleExceptionBaseline(JSContext* cx, const JitFrameIterator& frame, ResumeFromException* rfe,
692 jsbytecode* pc)
693 {
694 MOZ_ASSERT(frame.isBaselineJS());
695
696 bool frameOk = false;
697 RootedScript script(cx, frame.baselineFrame()->script());
698
699 if (script->hasScriptCounts()) {
700 PCCounts* counts = script->getThrowCounts(pc);
701 // If we failed to allocate, then skip the increment and continue to
702 // handle the exception.
703 if (counts)
704 counts->numExec()++;
705 }
706
707 // We may be propagating a forced return from the interrupt
708 // callback, which cannot easily force a return.
709 if (cx->isPropagatingForcedReturn()) {
710 cx->clearPropagatingForcedReturn();
711 ForcedReturn(cx, frame, pc, rfe);
712 return;
713 }
714
715 again:
716 if (cx->isExceptionPending()) {
717 if (!cx->isClosingGenerator()) {
718 switch (Debugger::onExceptionUnwind(cx, frame.baselineFrame())) {
719 case JSTRAP_ERROR:
720 // Uncatchable exception.
721 MOZ_ASSERT(!cx->isExceptionPending());
722 goto again;
723
724 case JSTRAP_CONTINUE:
725 case JSTRAP_THROW:
726 MOZ_ASSERT(cx->isExceptionPending());
727 break;
728
729 case JSTRAP_RETURN:
730 if (script->hasTrynotes())
731 CloseLiveIteratorsBaselineForUncatchableException(cx, frame, pc);
732 ForcedReturn(cx, frame, pc, rfe);
733 return;
734
735 default:
736 MOZ_CRASH("Invalid trap status");
737 }
738 }
739
740 if (script->hasTrynotes()) {
741 ScopeIter si(cx, frame.baselineFrame(), pc);
742 if (!ProcessTryNotesBaseline(cx, frame, si, rfe, &pc))
743 goto again;
744 if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME) {
745 // No need to increment the PCCounts number of execution here,
746 // as the interpreter increments any PCCounts if present.
747 MOZ_ASSERT_IF(script->hasScriptCounts(), script->maybeGetPCCounts(pc));
748 return;
749 }
750 }
751
752 frameOk = HandleClosingGeneratorReturn(cx, frame.baselineFrame(), frameOk);
753 frameOk = Debugger::onLeaveFrame(cx, frame.baselineFrame(), frameOk);
754 } else if (script->hasTrynotes()) {
755 CloseLiveIteratorsBaselineForUncatchableException(cx, frame, pc);
756 }
757
758 OnLeaveBaselineFrame(cx, frame, pc, rfe, frameOk);
759 }
760
// RAII guard that frees a baseline frame's debug-mode OSR info on scope
// exit, for paths that will not return to the recompile handler.
struct AutoDeleteDebugModeOSRInfo
{
    BaselineFrame* frame;
    explicit AutoDeleteDebugModeOSRInfo(BaselineFrame* frame) : frame(frame) { MOZ_ASSERT(frame); }
    ~AutoDeleteDebugModeOSRInfo() { frame->deleteDebugModeOSRInfo(); }
};
767
// RAII guard that, when profiler instrumentation is enabled, repoints the
// activation's lastProfilingFrame at the frame we are resuming into once
// exception handling completes.
struct AutoResetLastProfilerFrameOnReturnFromException
{
    JSContext* cx;
    ResumeFromException* rfe;

    AutoResetLastProfilerFrameOnReturnFromException(JSContext* cx, ResumeFromException* rfe)
      : cx(cx), rfe(rfe) {}

    ~AutoResetLastProfilerFrameOnReturnFromException() {
        if (!cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
            return;

        MOZ_ASSERT(cx->runtime()->jitActivation == cx->runtime()->profilingActivation());

        void* lastProfilingFrame = getLastProfilingFrame();
        cx->runtime()->jitActivation->setLastProfilingFrame(lastProfilingFrame);
    }

    // Maps the resume kind recorded in |rfe| to the frame pointer the
    // profiler should consider the top frame after the resume.
    void* getLastProfilingFrame() {
        switch (rfe->kind) {
          case ResumeFromException::RESUME_ENTRY_FRAME:
            return nullptr;

          // The following all return into baseline frames.
          case ResumeFromException::RESUME_CATCH:
          case ResumeFromException::RESUME_FINALLY:
          case ResumeFromException::RESUME_FORCED_RETURN:
            return rfe->framePointer + BaselineFrame::FramePointerOffset;

          // When resuming into a bailed-out ion frame, use the bailout info to
          // find the frame we are resuming into.
          case ResumeFromException::RESUME_BAILOUT:
            return rfe->bailoutInfo->incomingStack;
        }

        MOZ_CRASH("Invalid ResumeFromException type!");
        return nullptr;
    }
};
807
// Entry point of the exception tail: walks jit frames from innermost to the
// entry frame, giving Ion and Baseline frames a chance to handle the
// pending exception, notifying the Debugger and profiler, and unwinding
// jitTop as frames are popped. On return, |rfe| tells the trampoline how
// and where to resume.
void
HandleException(ResumeFromException* rfe)
{
    JSContext* cx = GetJSContextFromJitCode();
    TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());

    AutoResetLastProfilerFrameOnReturnFromException profFrameReset(cx, rfe);

    rfe->kind = ResumeFromException::RESUME_ENTRY_FRAME;

    JitSpew(JitSpew_IonInvalidate, "handling exception");

    // Clear any Ion return override that's been set.
    // This may happen if a callVM function causes an invalidation (setting the
    // override), and then fails, bypassing the bailout handlers that would
    // otherwise clear the return override.
    if (cx->runtime()->jitRuntime()->hasIonReturnOverride())
        cx->runtime()->jitRuntime()->takeIonReturnOverride();

    JitActivation* activation = cx->runtime()->activation()->asJit();

#ifdef CHECK_OSIPOINT_REGISTERS
    if (JitOptions.checkOsiPointRegisters)
        activation->setCheckRegs(false);
#endif

    // The Debugger onExceptionUnwind hook (reachable via
    // HandleExceptionBaseline below) may cause on-stack recompilation of
    // baseline scripts, which may patch return addresses on the stack. Since
    // JitFrameIterators cache the previous frame's return address when
    // iterating, we need a variant here that is automatically updated should
    // on-stack recompilation occur.
    DebugModeOSRVolatileJitFrameIterator iter(cx);
    while (!iter.isEntry()) {
        bool overrecursed = false;
        if (iter.isIonJS()) {
            // Search each inlined frame for live iterator objects, and close
            // them.
            InlineFrameIterator frames(cx, &iter);

            // Invalidation state will be the same for all inlined scripts in the frame.
            IonScript* ionScript = nullptr;
            bool invalidated = iter.checkInvalidation(&ionScript);

            for (;;) {
                HandleExceptionIon(cx, frames, rfe, &overrecursed);

                if (rfe->kind == ResumeFromException::RESUME_BAILOUT) {
                    if (invalidated)
                        ionScript->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
                    return;
                }

                MOZ_ASSERT(rfe->kind == ResumeFromException::RESUME_ENTRY_FRAME);

                // When profiling, each frame popped needs a notification that
                // the function has exited, so invoke the probe that a function
                // is exiting.

                JSScript* script = frames.script();
                probes::ExitScript(cx, script, script->functionNonDelazifying(),
                                   /* popSPSFrame = */ false);
                if (!frames.more()) {
                    TraceLogStopEvent(logger, TraceLogger_IonMonkey);
                    TraceLogStopEvent(logger, TraceLogger_Scripts);
                    break;
                }
                ++frames;
            }

            activation->removeIonFrameRecovery(iter.jsFrame());
            if (invalidated)
                ionScript->decrementInvalidationCount(cx->runtime()->defaultFreeOp());

        } else if (iter.isBaselineJS()) {
            // Set a flag on the frame to signal to DebugModeOSR that we're
            // handling an exception. Also ensure the frame has an override
            // pc. We clear the frame's override pc when we leave this block,
            // this is fine because we're either:
            //
            // (1) Going to enter a catch or finally block. We don't want to
            //     keep the old pc when we're executing JIT code.
            // (2) Going to pop the frame, either here or a forced return.
            //     In this case nothing will observe the frame's pc.
            // (3) Performing an exception bailout. In this case
            //     FinishBailoutToBaseline will set the pc to the resume pc
            //     and clear it before it returns to JIT code.
            jsbytecode* pc;
            iter.baselineScriptAndPc(nullptr, &pc);
            AutoBaselineHandlingException handlingException(iter.baselineFrame(), pc);

            HandleExceptionBaseline(cx, iter, rfe, pc);

            // If we are propagating an exception through a frame with
            // on-stack recompile info, we should free the allocated
            // RecompileInfo struct before we leave this block, as we will not
            // be returning to the recompile handler.
            AutoDeleteDebugModeOSRInfo deleteDebugModeOSRInfo(iter.baselineFrame());

            if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME &&
                rfe->kind != ResumeFromException::RESUME_FORCED_RETURN)
            {
                return;
            }

            TraceLogStopEvent(logger, TraceLogger_Baseline);
            TraceLogStopEvent(logger, TraceLogger_Scripts);

            // Unwind profiler pseudo-stack
            JSScript* script = iter.script();
            probes::ExitScript(cx, script, script->functionNonDelazifying(),
                               /* popSPSFrame = */ false);

            if (rfe->kind == ResumeFromException::RESUME_FORCED_RETURN)
                return;
        }

        JitFrameLayout* current = iter.isScripted() ? iter.jsFrame() : nullptr;

        ++iter;

        if (current) {
            // Unwind the frame by updating jitTop. This is necessary so that
            // (1) debugger exception unwind and leave frame hooks don't see this
            // frame when they use ScriptFrameIter, and (2) ScriptFrameIter does
            // not crash when accessing an IonScript that's destroyed by the
            // ionScript->decref call.
            EnsureExitFrame(current);
            cx->runtime()->jitTop = (uint8_t*)current;
        }

        if (overrecursed) {
            // We hit an overrecursion error during bailout. Report it now.
            ReportOverRecursed(cx);
        }
    }

    // Reached the entry frame without finding a handler: resume at the
    // entry frame's stack pointer with RESUME_ENTRY_FRAME.
    rfe->stackPointer = iter.fp();
}
947
948 void
EnsureExitFrame(CommonFrameLayout * frame)949 EnsureExitFrame(CommonFrameLayout* frame)
950 {
951 switch (frame->prevType()) {
952 case JitFrame_Unwound_IonJS:
953 case JitFrame_Unwound_IonStub:
954 case JitFrame_Unwound_BaselineJS:
955 case JitFrame_Unwound_BaselineStub:
956 case JitFrame_Unwound_Rectifier:
957 case JitFrame_Unwound_IonAccessorIC:
958 // Already an exit frame, nothing to do.
959 return;
960
961 case JitFrame_Entry:
962 // The previous frame type is the entry frame, so there's no actual
963 // need for an exit frame.
964 return;
965
966 case JitFrame_Rectifier:
967 // The rectifier code uses the frame descriptor to discard its stack,
968 // so modifying its descriptor size here would be dangerous. Instead,
969 // we change the frame type, and teach the stack walking code how to
970 // deal with this edge case. bug 717297 would obviate the need
971 frame->changePrevType(JitFrame_Unwound_Rectifier);
972 return;
973
974 case JitFrame_BaselineStub:
975 frame->changePrevType(JitFrame_Unwound_BaselineStub);
976 return;
977
978 case JitFrame_BaselineJS:
979 frame->changePrevType(JitFrame_Unwound_BaselineJS);
980 return;
981
982 case JitFrame_IonJS:
983 frame->changePrevType(JitFrame_Unwound_IonJS);
984 return;
985
986 case JitFrame_IonStub:
987 frame->changePrevType(JitFrame_Unwound_IonStub);
988 return;
989
990 case JitFrame_IonAccessorIC:
991 frame->changePrevType(JitFrame_Unwound_IonAccessorIC);
992 return;
993
994 case JitFrame_Exit:
995 case JitFrame_Bailout:
996 case JitFrame_LazyLink:
997 // Fall-through to MOZ_CRASH below.
998 break;
999 }
1000
1001 MOZ_CRASH("Unexpected frame type");
1002 }
1003
1004 CalleeToken
MarkCalleeToken(JSTracer * trc,CalleeToken token)1005 MarkCalleeToken(JSTracer* trc, CalleeToken token)
1006 {
1007 switch (CalleeTokenTag tag = GetCalleeTokenTag(token)) {
1008 case CalleeToken_Function:
1009 case CalleeToken_FunctionConstructing:
1010 {
1011 JSFunction* fun = CalleeTokenToFunction(token);
1012 TraceRoot(trc, &fun, "jit-callee");
1013 return CalleeToToken(fun, tag == CalleeToken_FunctionConstructing);
1014 }
1015 case CalleeToken_Script:
1016 {
1017 JSScript* script = CalleeTokenToScript(token);
1018 TraceRoot(trc, &script, "jit-script");
1019 return CalleeToToken(script);
1020 }
1021 default:
1022 MOZ_CRASH("unknown callee token type");
1023 }
1024 }
1025
1026 uintptr_t*
slotRef(SafepointSlotEntry where)1027 JitFrameLayout::slotRef(SafepointSlotEntry where)
1028 {
1029 if (where.stack)
1030 return (uintptr_t*)((uint8_t*)this - where.slot);
1031 return (uintptr_t*)((uint8_t*)argv() + where.slot);
1032 }
1033
#ifdef JS_NUNBOX32
// Read the machine word held by allocation |a| for |frame|: either from the
// recorded machine state (register allocation) or from a frame slot.
static inline uintptr_t
ReadAllocation(const JitFrameIterator& frame, const LAllocation* a)
{
    if (!a->isGeneralReg())
        return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));

    Register reg = a->toGeneralReg()->reg();
    return frame.machineState().read(reg);
}
#endif
1045
static void
MarkThisAndArguments(JSTracer* trc, const JitFrameIterator& frame)
{
    // Mark |this| and any extra actual arguments for an Ion frame. Marking of
    // formal arguments is taken care of by the frame's safepoint/snapshot,
    // except when the script might have lazy arguments or rest, in which case
    // we mark them as well. We also have to mark formals if we have a LazyLink
    // frame.

    // A lazy-link exit frame wraps the real JS frame; unwrap it so the
    // argument vector below is read from the JS frame proper.
    JitFrameLayout* layout = frame.isExitFrameLayout<LazyLinkExitFrameLayout>()
                             ? frame.exitFrame()->as<LazyLinkExitFrameLayout>()->jsFrame()
                             : frame.jsFrame();

    // Non-function callees (bare scripts) have no |this|/argv to mark here.
    if (!CalleeTokenIsFunction(layout->calleeToken()))
        return;

    size_t nargs = layout->numActualArgs();
    // Number of leading formals the snapshot already covers; 0 means we must
    // mark everything ourselves.
    size_t nformals = 0;

    JSFunction* fun = CalleeTokenToFunction(layout->calleeToken());
    // Skip the formals only when (a) this is not a LazyLink frame and (b) the
    // script cannot read its frame args directly (lazy args/rest) — otherwise
    // the snapshot does not cover them and we mark from index 0.
    if (!frame.isExitFrameLayout<LazyLinkExitFrameLayout>() &&
        !fun->nonLazyScript()->mayReadFrameArgsDirectly())
    {
        nformals = fun->nargs();
    }

    // new.target sits after max(actuals, formals) in the argument vector.
    size_t newTargetOffset = Max(nargs, fun->nargs());

    Value* argv = layout->argv();

    // Trace |this|.
    TraceRoot(trc, argv, "ion-thisv");

    // Trace actual arguments beyond the formals. Note + 1 for thisv.
    for (size_t i = nformals + 1; i < nargs + 1; i++)
        TraceRoot(trc, &argv[i], "ion-argv");

    // Always mark the new.target from the frame. It's not in the snapshots.
    // +1 to pass |this|
    if (CalleeTokenIsConstructing(layout->calleeToken()))
        TraceRoot(trc, &argv[1 + newTargetOffset], "ion-newTarget");
}
1088
#ifdef JS_NUNBOX32
// Store |value| back into allocation |a|: into the recorded machine state
// for a register allocation, otherwise into the corresponding frame slot.
static inline void
WriteAllocation(const JitFrameIterator& frame, const LAllocation* a, uintptr_t value)
{
    if (!a->isGeneralReg()) {
        *frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
        return;
    }

    Register reg = a->toGeneralReg()->reg();
    frame.machineState().write(reg, value);
}
#endif
1101
// Trace all GC things reachable from an Ion JS frame: the callee token,
// |this|/actual arguments, and every GC/value slot and spilled register
// described by the frame's safepoint.
static void
MarkIonJSFrame(JSTracer* trc, const JitFrameIterator& frame)
{
    JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

    // The callee may move during GC; write the rebuilt token back.
    layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));

    IonScript* ionScript = nullptr;
    if (frame.checkInvalidation(&ionScript)) {
        // This frame has been invalidated, meaning that its IonScript is no
        // longer reachable through the callee token (JSFunction/JSScript->ion
        // is now nullptr or recompiled). Manually trace it here.
        IonScript::Trace(trc, ionScript);
    } else {
        ionScript = frame.ionScriptFromCalleeToken();
    }

    MarkThisAndArguments(trc, frame);

    // Locate the safepoint for this frame's return address; it describes
    // which slots and spilled registers hold GC things.
    const SafepointIndex* si = ionScript->getSafepointIndex(frame.returnAddressToFp());

    SafepointReader safepoint(ionScript, si);

    // Scan through slots which contain pointers (or on punboxing systems,
    // actual values).
    SafepointSlotEntry entry;

    while (safepoint.getGcSlot(&entry)) {
        uintptr_t* ref = layout->slotRef(entry);
        TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(ref), "ion-gc-slot");
    }

    while (safepoint.getValueSlot(&entry)) {
        Value* v = (Value*)layout->slotRef(entry);
        TraceRoot(trc, v, "ion-gc-slot");
    }

    // Walk the GPR spill area from the top down, tracing any register the
    // safepoint marks as holding a GC pointer or a boxed Value.
    uintptr_t* spill = frame.spillBase();
    LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
    LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
    for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
        --spill;
        if (gcRegs.has(*iter))
            TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill), "ion-gc-spill");
        else if (valueRegs.has(*iter))
            TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
    }

#ifdef JS_NUNBOX32
    // On NUNBOX32, a Value's tag and payload may live in separate
    // allocations ("torn" values); reassemble, trace, and write back the
    // payload if the GC moved the referent. Note: the local |layout| below
    // intentionally shadows the frame layout above.
    LAllocation type, payload;
    while (safepoint.getNunboxSlot(&type, &payload)) {
        jsval_layout layout;
        layout.s.tag = (JSValueTag)ReadAllocation(frame, &type);
        layout.s.payload.uintptr = ReadAllocation(frame, &payload);

        Value v = IMPL_TO_JSVAL(layout);
        TraceRoot(trc, &v, "ion-torn-value");

        if (v != IMPL_TO_JSVAL(layout)) {
            // GC moved the value, replace the stored payload.
            layout = JSVAL_TO_IMPL(v);
            WriteAllocation(frame, &payload, layout.s.payload.uintptr);
        }
    }
#endif
}
1168
1169 static void
MarkBailoutFrame(JSTracer * trc,const JitFrameIterator & frame)1170 MarkBailoutFrame(JSTracer* trc, const JitFrameIterator& frame)
1171 {
1172 JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
1173
1174 layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));
1175
1176 // We have to mark the list of actual arguments, as only formal arguments
1177 // are represented in the Snapshot.
1178 MarkThisAndArguments(trc, frame);
1179
1180 // Under a bailout, do not have a Safepoint to only iterate over GC-things.
1181 // Thus we use a SnapshotIterator to trace all the locations which would be
1182 // used to reconstruct the Baseline frame.
1183 //
1184 // Note that at the time where this function is called, we have not yet
1185 // started to reconstruct baseline frames.
1186
1187 // The vector of recover instructions is already traced as part of the
1188 // JitActivation.
1189 SnapshotIterator snapIter(frame, frame.activation()->bailoutData()->machineState());
1190
1191 // For each instruction, we read the allocations without evaluating the
1192 // recover instruction, nor reconstructing the frame. We are only looking at
1193 // tracing readable allocations.
1194 while (true) {
1195 while (snapIter.moreAllocations())
1196 snapIter.traceAllocation(trc);
1197
1198 if (!snapIter.moreInstructions())
1199 break;
1200 snapIter.nextInstruction();
1201 };
1202
1203 }
1204
void
UpdateIonJSFrameForMinorGC(JSTracer* trc, const JitFrameIterator& frame)
{
    // Minor GCs may move slots/elements allocated in the nursery. Update
    // any slots/elements pointers stored in this frame.

    JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

    IonScript* ionScript = nullptr;
    if (frame.checkInvalidation(&ionScript)) {
        // This frame has been invalidated, meaning that its IonScript is no
        // longer reachable through the callee token (JSFunction/JSScript->ion
        // is now nullptr or recompiled).
    } else {
        ionScript = frame.ionScriptFromCalleeToken();
    }

    Nursery& nursery = trc->runtime()->gc.nursery;

    const SafepointIndex* si = ionScript->getSafepointIndex(frame.returnAddressToFp());
    SafepointReader safepoint(ionScript, si);

    // Forward any spilled registers the safepoint marks as holding
    // slots/elements pointers; the spill area is walked from the top down.
    LiveGeneralRegisterSet slotsRegs = safepoint.slotsOrElementsSpills();
    uintptr_t* spill = frame.spillBase();
    for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
        --spill;
        if (slotsRegs.has(*iter))
            nursery.forwardBufferPointer(reinterpret_cast<HeapSlot**>(spill));
    }

    // Skip to the right place in the safepoint
    // The safepoint encodes its slot lists sequentially, so the GC/value
    // (and NUNBOX32) entries must be consumed before the slots/elements
    // entries can be read.
    SafepointSlotEntry entry;
    while (safepoint.getGcSlot(&entry));
    while (safepoint.getValueSlot(&entry));
#ifdef JS_NUNBOX32
    LAllocation type, payload;
    while (safepoint.getNunboxSlot(&type, &payload));
#endif

    // Forward each frame slot that holds a slots/elements pointer.
    while (safepoint.getSlotsOrElementsSlot(&entry)) {
        HeapSlot** slots = reinterpret_cast<HeapSlot**>(layout->slotRef(entry));
        nursery.forwardBufferPointer(slots);
    }
}
1249
1250 static void
MarkJitStubFrame(JSTracer * trc,const JitFrameIterator & frame)1251 MarkJitStubFrame(JSTracer* trc, const JitFrameIterator& frame)
1252 {
1253 // Mark the ICStub pointer stored in the stub frame. This is necessary
1254 // so that we don't destroy the stub code after unlinking the stub.
1255
1256 MOZ_ASSERT(frame.type() == JitFrame_IonStub || frame.type() == JitFrame_BaselineStub);
1257 JitStubFrameLayout* layout = (JitStubFrameLayout*)frame.fp();
1258
1259 if (ICStub* stub = layout->maybeStubPtr()) {
1260 MOZ_ASSERT(ICStub::CanMakeCalls(stub->kind()));
1261 stub->trace(trc);
1262 }
1263 }
1264
1265 static void
MarkIonAccessorICFrame(JSTracer * trc,const JitFrameIterator & frame)1266 MarkIonAccessorICFrame(JSTracer* trc, const JitFrameIterator& frame)
1267 {
1268 MOZ_ASSERT(frame.type() == JitFrame_IonAccessorIC);
1269 IonAccessorICFrameLayout* layout = (IonAccessorICFrameLayout*)frame.fp();
1270 TraceRoot(trc, layout->stubCode(), "ion-ic-accessor-code");
1271 }
1272
// Compute the [min, end) address range of JIT stack memory for this
// activation, used by callers that need to scan the raw stack.
void
JitActivationIterator::jitStackRange(uintptr_t*& min, uintptr_t*& end)
{
    JitFrameIterator frames(*this);

    if (frames.isFakeExitFrame()) {
        // Fake exit frames have no footer; the frame pointer itself is the
        // lowest interesting address.
        min = reinterpret_cast<uintptr_t*>(frames.fp());
    } else {
        ExitFrameLayout* exitFrame = frames.exitFrame();
        ExitFooterFrame* footer = exitFrame->footer();
        const VMFunction* f = footer->function();
        if (exitFrame->isWrapperExit() && f->outParam == Type_Handle) {
            // The out-param of a VM wrapper lives below the footer; start
            // the range at the out-param storage so it is included.
            switch (f->outParamRootType) {
              case VMFunction::RootNone:
                MOZ_CRASH("Handle outparam must have root type");
              case VMFunction::RootObject:
              case VMFunction::RootString:
              case VMFunction::RootPropertyName:
              case VMFunction::RootFunction:
              case VMFunction::RootCell:
                // These are all handles to GCThing pointers.
                min = reinterpret_cast<uintptr_t*>(footer->outParam<void*>());
                break;
              case VMFunction::RootValue:
                min = reinterpret_cast<uintptr_t*>(footer->outParam<Value>());
                break;
            }
        } else {
            min = reinterpret_cast<uintptr_t*>(footer);
        }
    }

    // Walk to the oldest frame; its previous frame pointer bounds the range.
    while (!frames.done())
        ++frames;

    end = reinterpret_cast<uintptr_t*>(frames.prevFp());
}
1310
#ifdef JS_CODEGEN_MIPS32
// Align |pointer| down to ABIStackAlignment after subtracting |offset|.
// Used to locate the double-spill area below an exit frame footer.
uint8_t*
alignDoubleSpillWithOffset(uint8_t* pointer, int32_t offset)
{
    uint32_t address = reinterpret_cast<uint32_t>(pointer);
    address = (address - offset) & ~(ABIStackAlignment - 1);
    return reinterpret_cast<uint8_t*>(address);
}

// On MIPS32, double arguments passed by reference to a VM wrapper are copied
// into an aligned spill area below the footer; trace the Values among them.
static void
MarkJitExitFrameCopiedArguments(JSTracer* trc, const VMFunction* f, ExitFooterFrame* footer)
{
    // Reconstruct the start of the copied-doubles area: below the footer,
    // past the (optional) Handle out-param, aligned for doubles.
    uint8_t* doubleArgs = reinterpret_cast<uint8_t*>(footer);
    doubleArgs = alignDoubleSpillWithOffset(doubleArgs, sizeof(intptr_t));
    if (f->outParam == Type_Handle)
        doubleArgs -= sizeof(Value);
    doubleArgs -= f->doubleByRefArgs() * sizeof(double);

    for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
        if (f->argProperties(explicitArg) == VMFunction::DoubleByRef) {
            // Arguments with double size can only have RootValue type.
            if (f->argRootType(explicitArg) == VMFunction::RootValue)
                TraceRoot(trc, reinterpret_cast<Value*>(doubleArgs), "ion-vm-args");
            else
                MOZ_ASSERT(f->argRootType(explicitArg) == VMFunction::RootNone);
            doubleArgs += sizeof(double);
        }
    }
}
#else
static void
MarkJitExitFrameCopiedArguments(JSTracer* trc, const VMFunction* f, ExitFooterFrame* footer)
{
    // This is NO-OP on other platforms.
}
#endif
1347
// Trace all GC things reachable from an exit frame. Each exit-frame layout
// stores a different set of roots (native call args, OOL IC state, DOM call
// args, VM wrapper args/out-params), dispatched on the layout type below.
static void
MarkJitExitFrame(JSTracer* trc, const JitFrameIterator& frame)
{
    // Ignore fake exit frames created by EnsureExitFrame.
    if (frame.isFakeExitFrame())
        return;

    ExitFooterFrame* footer = frame.exitFrame()->footer();

    // Mark the code of the code handling the exit path. This is needed because
    // invalidated script are no longer marked because data are erased by the
    // invalidation and relocation data are no longer reliable. So the VM
    // wrapper or the invalidation code may be GC if no JitCode keep reference
    // on them.
    MOZ_ASSERT(uintptr_t(footer->jitCode()) != uintptr_t(-1));

    // This corresponds to the case where we have build a fake exit frame which
    // handles the case of a native function call. We need to mark the argument
    // vector of the function call, and also new.target if it was a constructing
    // call.
    if (frame.isExitFrameLayout<NativeExitFrameLayout>()) {
        NativeExitFrameLayout* native = frame.exitFrame()->as<NativeExitFrameLayout>();
        // +2 covers the callee and |this| slots preceding the arguments.
        size_t len = native->argc() + 2;
        Value* vp = native->vp();
        TraceRootRange(trc, len, vp, "ion-native-args");
        if (frame.isExitFrameLayout<ConstructNativeExitFrameLayout>())
            TraceRoot(trc, vp + len, "ion-native-new-target");
        return;
    }

    if (frame.isExitFrameLayout<IonOOLNativeExitFrameLayout>()) {
        IonOOLNativeExitFrameLayout* oolnative =
            frame.exitFrame()->as<IonOOLNativeExitFrameLayout>();
        TraceRoot(trc, oolnative->stubCode(), "ion-ool-native-code");
        TraceRoot(trc, oolnative->vp(), "iol-ool-native-vp");
        // +1 covers |this| preceding the arguments.
        size_t len = oolnative->argc() + 1;
        TraceRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
        return;
    }

    if (frame.isExitFrameLayout<IonOOLPropertyOpExitFrameLayout>() ||
        frame.isExitFrameLayout<IonOOLSetterOpExitFrameLayout>())
    {
        // A SetterOp frame is a different size, but that's the only relevant
        // difference between the two. The fields that need marking are all in
        // the common base class.
        IonOOLPropertyOpExitFrameLayout* oolgetter =
            frame.isExitFrameLayout<IonOOLPropertyOpExitFrameLayout>()
            ? frame.exitFrame()->as<IonOOLPropertyOpExitFrameLayout>()
            : frame.exitFrame()->as<IonOOLSetterOpExitFrameLayout>();
        TraceRoot(trc, oolgetter->stubCode(), "ion-ool-property-op-code");
        TraceRoot(trc, oolgetter->vp(), "ion-ool-property-op-vp");
        TraceRoot(trc, oolgetter->id(), "ion-ool-property-op-id");
        TraceRoot(trc, oolgetter->obj(), "ion-ool-property-op-obj");
        return;
    }

    if (frame.isExitFrameLayout<IonOOLProxyExitFrameLayout>()) {
        IonOOLProxyExitFrameLayout* oolproxy = frame.exitFrame()->as<IonOOLProxyExitFrameLayout>();
        TraceRoot(trc, oolproxy->stubCode(), "ion-ool-proxy-code");
        TraceRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
        TraceRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
        TraceRoot(trc, oolproxy->proxy(), "ion-ool-proxy-proxy");
        return;
    }

    if (frame.isExitFrameLayout<IonDOMExitFrameLayout>()) {
        IonDOMExitFrameLayout* dom = frame.exitFrame()->as<IonDOMExitFrameLayout>();
        TraceRoot(trc, dom->thisObjAddress(), "ion-dom-args");
        if (dom->isMethodFrame()) {
            IonDOMMethodExitFrameLayout* method =
                reinterpret_cast<IonDOMMethodExitFrameLayout*>(dom);
            // +2 covers the callee and |this| slots preceding the arguments.
            size_t len = method->argc() + 2;
            Value* vp = method->vp();
            TraceRootRange(trc, len, vp, "ion-dom-args");
        } else {
            TraceRoot(trc, dom->vp(), "ion-dom-args");
        }
        return;
    }

    if (frame.isExitFrameLayout<LazyLinkExitFrameLayout>()) {
        // A lazy-link frame wraps a JS frame; trace its callee token and
        // this/arguments in addition to the stub code.
        LazyLinkExitFrameLayout* ll = frame.exitFrame()->as<LazyLinkExitFrameLayout>();
        JitFrameLayout* layout = ll->jsFrame();

        TraceRoot(trc, ll->stubCode(), "lazy-link-code");
        layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));
        MarkThisAndArguments(trc, frame);
        return;
    }

    if (frame.isBareExit()) {
        // Nothing to mark. Fake exit frame pushed for VM functions with
        // nothing to mark on the stack.
        return;
    }

    TraceRoot(trc, footer->addressOfJitCode(), "ion-exit-code");

    const VMFunction* f = footer->function();
    if (f == nullptr)
        return;

    // Mark arguments of the VM wrapper.
    uint8_t* argBase = frame.exitFrame()->argBase();
    for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
        // Trace the argument according to its declared root type.
        switch (f->argRootType(explicitArg)) {
          case VMFunction::RootNone:
            break;
          case VMFunction::RootObject: {
            // Sometimes we can bake in HandleObjects to nullptr.
            JSObject** pobj = reinterpret_cast<JSObject**>(argBase);
            if (*pobj)
                TraceRoot(trc, pobj, "ion-vm-args");
            break;
          }
          case VMFunction::RootString:
          case VMFunction::RootPropertyName:
            TraceRoot(trc, reinterpret_cast<JSString**>(argBase), "ion-vm-args");
            break;
          case VMFunction::RootFunction:
            TraceRoot(trc, reinterpret_cast<JSFunction**>(argBase), "ion-vm-args");
            break;
          case VMFunction::RootValue:
            TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
            break;
          case VMFunction::RootCell:
            TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase), "ion-vm-args");
            break;
        }

        // Advance past the argument's storage to reach the next one.
        switch (f->argProperties(explicitArg)) {
          case VMFunction::WordByValue:
          case VMFunction::WordByRef:
            argBase += sizeof(void*);
            break;
          case VMFunction::DoubleByValue:
          case VMFunction::DoubleByRef:
            argBase += 2 * sizeof(void*);
            break;
        }
    }

    // Trace the wrapper's Handle out-param, if any.
    if (f->outParam == Type_Handle) {
        switch (f->outParamRootType) {
          case VMFunction::RootNone:
            MOZ_CRASH("Handle outparam must have root type");
          case VMFunction::RootObject:
            TraceRoot(trc, footer->outParam<JSObject*>(), "ion-vm-out");
            break;
          case VMFunction::RootString:
          case VMFunction::RootPropertyName:
            TraceRoot(trc, footer->outParam<JSString*>(), "ion-vm-out");
            break;
          case VMFunction::RootFunction:
            TraceRoot(trc, footer->outParam<JSFunction*>(), "ion-vm-out");
            break;
          case VMFunction::RootValue:
            TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
            break;
          case VMFunction::RootCell:
            TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(), "ion-vm-out");
            break;
        }
    }

    MarkJitExitFrameCopiedArguments(trc, f, footer);
}
1516
1517 static void
MarkRectifierFrame(JSTracer * trc,const JitFrameIterator & frame)1518 MarkRectifierFrame(JSTracer* trc, const JitFrameIterator& frame)
1519 {
1520 // Mark thisv.
1521 //
1522 // Baseline JIT code generated as part of the ICCall_Fallback stub may use
1523 // it if we're calling a constructor that returns a primitive value.
1524 RectifierFrameLayout* layout = (RectifierFrameLayout*)frame.fp();
1525 TraceRoot(trc, &layout->argv()[0], "ion-thisv");
1526 }
1527
// Trace every frame of a single JIT activation, dispatching on the frame
// type, plus the activation's rematerialized frames and Ion recovery data.
static void
MarkJitActivation(JSTracer* trc, const JitActivationIterator& activations)
{
    JitActivation* activation = activations->asJit();

#ifdef CHECK_OSIPOINT_REGISTERS
    if (JitOptions.checkOsiPointRegisters) {
        // GC can modify spilled registers, breaking our register checks.
        // To handle this, we disable these checks for the current VM call
        // when a GC happens.
        activation->setCheckRegs(false);
    }
#endif

    activation->markRematerializedFrames(trc);
    activation->markIonRecovery(trc);

    for (JitFrameIterator frames(activations); !frames.done(); ++frames) {
        switch (frames.type()) {
          case JitFrame_Exit:
          case JitFrame_LazyLink:
            MarkJitExitFrame(trc, frames);
            break;
          case JitFrame_BaselineJS:
            frames.baselineFrame()->trace(trc, frames);
            break;
          case JitFrame_IonJS:
            MarkIonJSFrame(trc, frames);
            break;
          case JitFrame_BaselineStub:
          case JitFrame_IonStub:
            MarkJitStubFrame(trc, frames);
            break;
          case JitFrame_Bailout:
            MarkBailoutFrame(trc, frames);
            break;
          // Unwound JS/stub/IC frames should never be observed by this
          // iteration; crash if they are.
          case JitFrame_Unwound_IonJS:
          case JitFrame_Unwound_BaselineJS:
          case JitFrame_Unwound_BaselineStub:
          case JitFrame_Unwound_IonAccessorIC:
            MOZ_CRASH("invalid");
          case JitFrame_Rectifier:
            MarkRectifierFrame(trc, frames);
            break;
          // Unwound rectifier frames hold nothing that needs tracing.
          case JitFrame_Unwound_Rectifier:
            break;
          case JitFrame_IonAccessorIC:
            MarkIonAccessorICFrame(trc, frames);
            break;
          default:
            MOZ_CRASH("unexpected frame type");
        }
    }
}
1582
1583 void
MarkJitActivations(JSRuntime * rt,JSTracer * trc)1584 MarkJitActivations(JSRuntime* rt, JSTracer* trc)
1585 {
1586 for (JitActivationIterator activations(rt); !activations.done(); ++activations)
1587 MarkJitActivation(trc, activations);
1588 }
1589
1590 JSCompartment*
TopmostIonActivationCompartment(JSRuntime * rt)1591 TopmostIonActivationCompartment(JSRuntime* rt)
1592 {
1593 for (JitActivationIterator activations(rt); !activations.done(); ++activations) {
1594 for (JitFrameIterator frames(activations); !frames.done(); ++frames) {
1595 if (frames.type() == JitFrame_IonJS)
1596 return activations.activation()->compartment();
1597 }
1598 }
1599 return nullptr;
1600 }
1601
UpdateJitActivationsForMinorGC(JSRuntime * rt,JSTracer * trc)1602 void UpdateJitActivationsForMinorGC(JSRuntime* rt, JSTracer* trc)
1603 {
1604 MOZ_ASSERT(trc->runtime()->isHeapMinorCollecting());
1605 for (JitActivationIterator activations(rt); !activations.done(); ++activations) {
1606 for (JitFrameIterator frames(activations); !frames.done(); ++frames) {
1607 if (frames.type() == JitFrame_IonJS)
1608 UpdateIonJSFrameForMinorGC(trc, frames);
1609 }
1610 }
1611 }
1612
// Recover the innermost script and pc for the youngest JS frame on the JIT
// stack, consulting (and populating) the per-runtime PcScriptCache keyed by
// return address when possible.
void
GetPcScript(JSContext* cx, JSScript** scriptRes, jsbytecode** pcRes)
{
    JitSpew(JitSpew_IonSnapshots, "Recover PC & Script from the last frame.");

    // Recover the return address so that we can look it up in the
    // PcScriptCache, as script/pc computation is expensive.
    JSRuntime* rt = cx->runtime();
    JitActivationIterator iter(rt);
    JitFrameIterator it(iter);
    uint8_t* retAddr;
    if (it.isExitFrame()) {
        ++it;

        // Skip rectifier frames.
        if (it.isRectifierMaybeUnwound()) {
            ++it;
            MOZ_ASSERT(it.isBaselineStub() || it.isBaselineJS() || it.isIonJS());
        }

        // Skip Baseline or Ion stub frames.
        if (it.isBaselineStubMaybeUnwound()) {
            ++it;
            MOZ_ASSERT(it.isBaselineJS());
        } else if (it.isIonStubMaybeUnwound() || it.isIonAccessorICMaybeUnwound()) {
            ++it;
            MOZ_ASSERT(it.isIonJS());
        }

        MOZ_ASSERT(it.isBaselineJS() || it.isIonJS());

        // Don't use the return address if the BaselineFrame has an override pc.
        // The override pc is cheap to get, so we won't benefit from the cache,
        // and the override pc could change without the return address changing.
        // Moreover, sometimes when an override pc is present during exception
        // handling, the return address is set to nullptr as a sanity check,
        // since we do not return to the frame that threw the exception.
        if (!it.isBaselineJS() || !it.baselineFrame()->hasOverridePc()) {
            retAddr = it.returnAddressToFp();
            MOZ_ASSERT(retAddr);
        } else {
            retAddr = nullptr;
        }
    } else {
        MOZ_ASSERT(it.isBailoutJS());
        retAddr = it.returnAddress();
    }

    // |hash| is only computed and used when |retAddr| is non-null; both
    // cache probes below are guarded accordingly.
    uint32_t hash;
    if (retAddr) {
        hash = PcScriptCache::Hash(retAddr);

        // Lazily initialize the cache. The allocation may safely fail and will not GC.
        if (MOZ_UNLIKELY(rt->ionPcScriptCache == nullptr)) {
            rt->ionPcScriptCache = (PcScriptCache*)js_malloc(sizeof(struct PcScriptCache));
            if (rt->ionPcScriptCache)
                rt->ionPcScriptCache->clear(rt->gc.gcNumber());
        }

        // Cache hit: both outputs were filled in by get().
        if (rt->ionPcScriptCache && rt->ionPcScriptCache->get(rt, hash, retAddr, scriptRes, pcRes))
            return;
    }

    // Lookup failed: undertake expensive process to recover the innermost inlined frame.
    jsbytecode* pc = nullptr;
    if (it.isIonJS() || it.isBailoutJS()) {
        InlineFrameIterator ifi(cx, &it);
        *scriptRes = ifi.script();
        pc = ifi.pc();
    } else {
        MOZ_ASSERT(it.isBaselineJS());
        it.baselineScriptAndPc(scriptRes, &pc);
    }

    if (pcRes)
        *pcRes = pc;

    // Add entry to cache.
    if (retAddr && rt->ionPcScriptCache)
        rt->ionPcScriptCache->add(hash, retAddr, pc, *scriptRes);
}
1694
// Displacement of the return point: the call point plus the size of the
// patchable near-call instruction.
uint32_t
OsiIndex::returnPointDisplacement() const
{
    // In general, pointer arithmetic on code is bad, but in this case,
    // getting the return address from a call instruction, stepping over pools
    // would be wrong.
    return callPointDisplacement_ + Assembler::PatchWrite_NearCallSize();
}
1703
RInstructionResults(JitFrameLayout * fp)1704 RInstructionResults::RInstructionResults(JitFrameLayout* fp)
1705 : results_(nullptr),
1706 fp_(fp),
1707 initialized_(false)
1708 {
1709 }
1710
RInstructionResults(RInstructionResults && src)1711 RInstructionResults::RInstructionResults(RInstructionResults&& src)
1712 : results_(mozilla::Move(src.results_)),
1713 fp_(src.fp_),
1714 initialized_(src.initialized_)
1715 {
1716 src.initialized_ = false;
1717 }
1718
// Move assignment implemented as destroy + placement-new move construction;
// self-moves are forbidden because the destructor would free the source.
RInstructionResults&
RInstructionResults::operator=(RInstructionResults&& rhs)
{
    MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
    this->~RInstructionResults();
    new(this) RInstructionResults(mozilla::Move(rhs));
    return *this;
}
1727
~RInstructionResults()1728 RInstructionResults::~RInstructionResults()
1729 {
1730 // results_ is freed by the UniquePtr.
1731 }
1732
1733 bool
init(JSContext * cx,uint32_t numResults)1734 RInstructionResults::init(JSContext* cx, uint32_t numResults)
1735 {
1736 if (numResults) {
1737 results_ = cx->make_unique<Values>();
1738 if (!results_ || !results_->growBy(numResults))
1739 return false;
1740
1741 Value guard = MagicValue(JS_ION_BAILOUT);
1742 for (size_t i = 0; i < numResults; i++)
1743 (*results_)[i].init(guard);
1744 }
1745
1746 initialized_ = true;
1747 return true;
1748 }
1749
// True once init() has succeeded (or the object received a move from an
// initialized instance).
bool
RInstructionResults::isInitialized() const
{
    return initialized_;
}
1755
#ifdef DEBUG
// Debug-only accessor: number of stored results. Requires init() to have
// allocated results_.
size_t
RInstructionResults::length() const
{
    return results_->length();
}
#endif
1763
// The frame these results belong to; never null.
JitFrameLayout*
RInstructionResults::frame() const
{
    MOZ_ASSERT(fp_);
    return fp_;
}
1770
// Unchecked access to the result at |index|; requires init() to have run.
RelocatableValue&
RInstructionResults::operator [](size_t index)
{
    return (*results_)[index];
}
1776
// Trace every stored result value.
void
RInstructionResults::trace(JSTracer* trc)
{
    // Note: The vector necessarily exists, otherwise this object would not
    // have been stored on the activation from where the trace function is
    // called.
    TraceRange(trc, results_->length(), results_->begin(), "ion-recover-results");
}
1784
1785
SnapshotIterator(const JitFrameIterator & iter,const MachineState * machineState)1786 SnapshotIterator::SnapshotIterator(const JitFrameIterator& iter, const MachineState* machineState)
1787 : snapshot_(iter.ionScript()->snapshots(),
1788 iter.snapshotOffset(),
1789 iter.ionScript()->snapshotsRVATableSize(),
1790 iter.ionScript()->snapshotsListSize()),
1791 recover_(snapshot_,
1792 iter.ionScript()->recovers(),
1793 iter.ionScript()->recoversSize()),
1794 fp_(iter.jsFrame()),
1795 machine_(machineState),
1796 ionScript_(iter.ionScript()),
1797 instructionResults_(nullptr)
1798 {
1799 }
1800
SnapshotIterator()1801 SnapshotIterator::SnapshotIterator()
1802 : snapshot_(nullptr, 0, 0, 0),
1803 recover_(snapshot_, nullptr, 0),
1804 fp_(nullptr),
1805 ionScript_(nullptr),
1806 instructionResults_(nullptr)
1807 {
1808 }
1809
// Number of actual arguments of the outermost (physical) Ion frame.
int32_t
SnapshotIterator::readOuterNumActualArgs() const
{
    return fp_->numActualArgs();
}
1815
// Read one machine word from the frame at the given snapshot stack offset.
uintptr_t
SnapshotIterator::fromStack(int32_t offset) const
{
    return ReadFrameSlot(fp_, offset);
}
1821
// Rebuild a Value from a raw object payload word. Uses ObjectOrNullValue
// because a null payload is legal here (see note below).
static Value
FromObjectPayload(uintptr_t payload)
{
    // Note: Both MIRType_Object and MIRType_ObjectOrNull are encoded in
    // snapshots using JSVAL_TYPE_OBJECT.
    return ObjectOrNullValue(reinterpret_cast<JSObject*>(payload));
}
1829
// Rebuild a string Value from a raw payload word.
static Value
FromStringPayload(uintptr_t payload)
{
    return StringValue(reinterpret_cast<JSString*>(payload));
}
1835
// Rebuild a symbol Value from a raw payload word.
static Value
FromSymbolPayload(uintptr_t payload)
{
    return SymbolValue(reinterpret_cast<JS::Symbol*>(payload));
}
1841
1842 static Value
FromTypedPayload(JSValueType type,uintptr_t payload)1843 FromTypedPayload(JSValueType type, uintptr_t payload)
1844 {
1845 switch (type) {
1846 case JSVAL_TYPE_INT32:
1847 return Int32Value(payload);
1848 case JSVAL_TYPE_BOOLEAN:
1849 return BooleanValue(!!payload);
1850 case JSVAL_TYPE_STRING:
1851 return FromStringPayload(payload);
1852 case JSVAL_TYPE_SYMBOL:
1853 return FromSymbolPayload(payload);
1854 case JSVAL_TYPE_OBJECT:
1855 return FromObjectPayload(payload);
1856 default:
1857 MOZ_CRASH("unexpected type - needs payload");
1858 }
1859 }
1860
// Test whether the Value encoded by |alloc| can be decoded right now with
// the registers, stack slots and recover-instruction results at hand.
// Constants and other self-contained modes are always readable (default
// case).
bool
SnapshotIterator::allocationReadable(const RValueAllocation& alloc, ReadMethod rm)
{
    // If we have to recover stores, and if we are not interested in the
    // default value of the instruction, then we have to check if the recover
    // instruction results are available.
    if (alloc.needSideEffect() && !(rm & RM_AlwaysDefault)) {
        if (!hasInstructionResults())
            return false;
    }

    switch (alloc.mode()) {
      case RValueAllocation::DOUBLE_REG:
        return hasRegister(alloc.fpuReg());

      case RValueAllocation::TYPED_REG:
        return hasRegister(alloc.reg2());

#if defined(JS_NUNBOX32)
      // On 32-bit, a boxed Value is a (tag, payload) pair; both halves must
      // be readable.
      case RValueAllocation::UNTYPED_REG_REG:
        return hasRegister(alloc.reg()) && hasRegister(alloc.reg2());
      case RValueAllocation::UNTYPED_REG_STACK:
        return hasRegister(alloc.reg()) && hasStack(alloc.stackOffset2());
      case RValueAllocation::UNTYPED_STACK_REG:
        return hasStack(alloc.stackOffset()) && hasRegister(alloc.reg2());
      case RValueAllocation::UNTYPED_STACK_STACK:
        return hasStack(alloc.stackOffset()) && hasStack(alloc.stackOffset2());
#elif defined(JS_PUNBOX64)
      // On 64-bit, a boxed Value fits in a single register or stack slot.
      case RValueAllocation::UNTYPED_REG:
        return hasRegister(alloc.reg());
      case RValueAllocation::UNTYPED_STACK:
        return hasStack(alloc.stackOffset());
#endif

      case RValueAllocation::RECOVER_INSTRUCTION:
        return hasInstructionResult(alloc.index());
      case RValueAllocation::RI_WITH_DEFAULT_CST:
        // Readable either via the default constant or via a computed result.
        return rm & RM_AlwaysDefault || hasInstructionResult(alloc.index());

      default:
        return true;
    }
}
1904
// Decode the Value described by |alloc|, pulling payloads from the script's
// constant pool, registers, frame stack slots, or recover-instruction
// results as the allocation mode dictates. The caller is expected to have
// checked allocationReadable() first.
Value
SnapshotIterator::allocationValue(const RValueAllocation& alloc, ReadMethod rm)
{
    switch (alloc.mode()) {
      case RValueAllocation::CONSTANT:
        return ionScript_->getConstant(alloc.index());

      case RValueAllocation::CST_UNDEFINED:
        return UndefinedValue();

      case RValueAllocation::CST_NULL:
        return NullValue();

      case RValueAllocation::DOUBLE_REG:
        return DoubleValue(fromRegister(alloc.fpuReg()));

      case RValueAllocation::ANY_FLOAT_REG:
      {
        // Type-pun the low 32 bits of the double-width register read back
        // into a float.
        union {
            double d;
            float f;
        } pun;
        MOZ_ASSERT(alloc.fpuReg().isSingle());
        pun.d = fromRegister(alloc.fpuReg());
        // The register contains the encoding of a float32. We just read
        // the bits without making any conversion.
        return Float32Value(pun.f);
      }

      case RValueAllocation::ANY_FLOAT_STACK:
        return Float32Value(ReadFrameFloat32Slot(fp_, alloc.stackOffset()));

      case RValueAllocation::TYPED_REG:
        return FromTypedPayload(alloc.knownType(), fromRegister(alloc.reg2()));

      case RValueAllocation::TYPED_STACK:
      {
        // The static type is known, so read the payload with the matching
        // width/interpretation from the stack slot.
        switch (alloc.knownType()) {
          case JSVAL_TYPE_DOUBLE:
            return DoubleValue(ReadFrameDoubleSlot(fp_, alloc.stackOffset2()));
          case JSVAL_TYPE_INT32:
            return Int32Value(ReadFrameInt32Slot(fp_, alloc.stackOffset2()));
          case JSVAL_TYPE_BOOLEAN:
            return BooleanValue(ReadFrameBooleanSlot(fp_, alloc.stackOffset2()));
          case JSVAL_TYPE_STRING:
            return FromStringPayload(fromStack(alloc.stackOffset2()));
          case JSVAL_TYPE_SYMBOL:
            return FromSymbolPayload(fromStack(alloc.stackOffset2()));
          case JSVAL_TYPE_OBJECT:
            return FromObjectPayload(fromStack(alloc.stackOffset2()));
          default:
            MOZ_CRASH("Unexpected type");
        }
      }

#if defined(JS_NUNBOX32)
      // 32-bit: reassemble the boxed Value from its (tag, payload) halves.
      case RValueAllocation::UNTYPED_REG_REG:
      {
        jsval_layout layout;
        layout.s.tag = (JSValueTag) fromRegister(alloc.reg());
        layout.s.payload.word = fromRegister(alloc.reg2());
        return IMPL_TO_JSVAL(layout);
      }

      case RValueAllocation::UNTYPED_REG_STACK:
      {
        jsval_layout layout;
        layout.s.tag = (JSValueTag) fromRegister(alloc.reg());
        layout.s.payload.word = fromStack(alloc.stackOffset2());
        return IMPL_TO_JSVAL(layout);
      }

      case RValueAllocation::UNTYPED_STACK_REG:
      {
        jsval_layout layout;
        layout.s.tag = (JSValueTag) fromStack(alloc.stackOffset());
        layout.s.payload.word = fromRegister(alloc.reg2());
        return IMPL_TO_JSVAL(layout);
      }

      case RValueAllocation::UNTYPED_STACK_STACK:
      {
        jsval_layout layout;
        layout.s.tag = (JSValueTag) fromStack(alloc.stackOffset());
        layout.s.payload.word = fromStack(alloc.stackOffset2());
        return IMPL_TO_JSVAL(layout);
      }
#elif defined(JS_PUNBOX64)
      // 64-bit: the whole boxed Value lives in one word.
      case RValueAllocation::UNTYPED_REG:
      {
        jsval_layout layout;
        layout.asBits = fromRegister(alloc.reg());
        return IMPL_TO_JSVAL(layout);
      }

      case RValueAllocation::UNTYPED_STACK:
      {
        jsval_layout layout;
        layout.asBits = fromStack(alloc.stackOffset());
        return IMPL_TO_JSVAL(layout);
      }
#endif

      case RValueAllocation::RECOVER_INSTRUCTION:
        return fromInstructionResult(alloc.index());

      case RValueAllocation::RI_WITH_DEFAULT_CST:
        // Prefer the computed result when requested and available, otherwise
        // fall back to the default constant.
        if (rm & RM_Normal && hasInstructionResult(alloc.index()))
            return fromInstructionResult(alloc.index());
        MOZ_ASSERT(rm & RM_AlwaysDefault);
        return ionScript_->getConstant(alloc.index2());

      default:
        MOZ_CRASH("huh?");
    }
}
2021
// Address of the storage backing a float allocation: either the saved
// register contents in the MachineState, or the frame stack slot.
const FloatRegisters::RegisterContent*
SnapshotIterator::floatAllocationPointer(const RValueAllocation& alloc) const
{
    switch (alloc.mode()) {
      case RValueAllocation::ANY_FLOAT_REG:
        return machine_->address(alloc.fpuReg());

      case RValueAllocation::ANY_FLOAT_STACK:
        return (FloatRegisters::RegisterContent*) AddressOfFrameSlot(fp_, alloc.stackOffset());

      default:
        MOZ_CRASH("Not a float allocation.");
    }
}
2036
// Read the Value of |a| if possible. When the allocation depends on recover
// instruction results that are not yet computed, try to compute them via
// |fallback|; if recovery is not possible, return the fallback's placeholder
// value instead.
Value
SnapshotIterator::maybeRead(const RValueAllocation& a, MaybeReadFallback& fallback)
{
    if (allocationReadable(a))
        return allocationValue(a);

    if (fallback.canRecoverResults()) {
        // Computing the recover-instruction results must succeed; an OOM
        // here is fatal.
        if (!initInstructionResults(fallback))
            MOZ_CRASH("Unable to recover allocations.");

        if (allocationReadable(a))
            return allocationValue(a);

        MOZ_ASSERT_UNREACHABLE("All allocations should be readable.");
    }

    return fallback.unreadablePlaceholder();
}
2055
// Write the payload of |v| back into the storage described by |alloc|.
// Used after tracing, when the GC may have moved the referenced GC thing
// (see traceAllocation). Only GC-thing-bearing allocations may be written.
void
SnapshotIterator::writeAllocationValuePayload(const RValueAllocation& alloc, Value v)
{
    uintptr_t payload = *v.payloadUIntPtr();
#if defined(JS_PUNBOX64)
    // Do not write back the tag, as this will trigger an assertion when we will
    // reconstruct the JS Value while marking again or when bailing out.
    payload &= JSVAL_PAYLOAD_MASK;
#endif

    switch (alloc.mode()) {
      case RValueAllocation::CONSTANT:
        ionScript_->getConstant(alloc.index()) = v;
        break;

      case RValueAllocation::CST_UNDEFINED:
      case RValueAllocation::CST_NULL:
      case RValueAllocation::DOUBLE_REG:
      case RValueAllocation::ANY_FLOAT_REG:
      case RValueAllocation::ANY_FLOAT_STACK:
        MOZ_CRASH("Not a GC thing: Unexpected write");
        break;

      case RValueAllocation::TYPED_REG:
        machine_->write(alloc.reg2(), payload);
        break;

      case RValueAllocation::TYPED_STACK:
        switch (alloc.knownType()) {
          default:
            MOZ_CRASH("Not a GC thing: Unexpected write");
            break;
          case JSVAL_TYPE_STRING:
          case JSVAL_TYPE_SYMBOL:
          case JSVAL_TYPE_OBJECT:
            WriteFrameSlot(fp_, alloc.stackOffset2(), payload);
            break;
        }
        break;

#if defined(JS_NUNBOX32)
      // 32-bit: only the payload half needs updating; the tag is unchanged.
      case RValueAllocation::UNTYPED_REG_REG:
      case RValueAllocation::UNTYPED_STACK_REG:
        machine_->write(alloc.reg2(), payload);
        break;

      case RValueAllocation::UNTYPED_REG_STACK:
      case RValueAllocation::UNTYPED_STACK_STACK:
        WriteFrameSlot(fp_, alloc.stackOffset2(), payload);
        break;
#elif defined(JS_PUNBOX64)
      // 64-bit: write the full boxed bits back.
      case RValueAllocation::UNTYPED_REG:
        machine_->write(alloc.reg(), v.asRawBits());
        break;

      case RValueAllocation::UNTYPED_STACK:
        WriteFrameSlot(fp_, alloc.stackOffset(), v.asRawBits());
        break;
#endif

      case RValueAllocation::RECOVER_INSTRUCTION:
        MOZ_CRASH("Recover instructions are handled by the JitActivation.");
        break;

      case RValueAllocation::RI_WITH_DEFAULT_CST:
        // Assume that we are always going to be writing on the default value
        // while tracing.
        ionScript_->getConstant(alloc.index2()) = v;
        break;

      default:
        MOZ_CRASH("huh?");
    }
}
2130
// Trace the current allocation's Value. If tracing moved the referenced GC
// thing, write the updated payload back into the allocation's storage.
void
SnapshotIterator::traceAllocation(JSTracer* trc)
{
    RValueAllocation alloc = readAllocation();
    // Use the default value of RI_WITH_DEFAULT_CST allocations; results of
    // recover instructions are traced by the JitActivation instead.
    if (!allocationReadable(alloc, RM_AlwaysDefault))
        return;

    Value v = allocationValue(alloc, RM_AlwaysDefault);
    if (!v.isMarkable())
        return;

    Value copy = v;
    TraceRoot(trc, &v, "ion-typed-reg");
    if (v != copy) {
        // Only the location may change under tracing, never the type.
        MOZ_ASSERT(SameType(v, copy));
        writeAllocationValuePayload(alloc, v);
    }
}
2149
// Current recover instruction, viewed as a resume point. Asserts (inside
// toResumePoint) if the current instruction is not one.
const RResumePoint*
SnapshotIterator::resumePoint() const
{
    return instruction()->toResumePoint();
}
2155
// Number of value allocations (operands) of the current instruction.
uint32_t
SnapshotIterator::numAllocations() const
{
    return instruction()->numOperands();
}
2161
// Bytecode offset recorded by the current resume point.
uint32_t
SnapshotIterator::pcOffset() const
{
    return resumePoint()->pcOffset();
}
2167
2168 void
skipInstruction()2169 SnapshotIterator::skipInstruction()
2170 {
2171 MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
2172 size_t numOperands = instruction()->numOperands();
2173 for (size_t i = 0; i < numOperands; i++)
2174 skip();
2175 nextInstruction();
2176 }
2177
// Make the recover-instruction results for this frame available to this
// iterator. Results are cached on the JitActivation; if they are not there
// yet, optionally invalidate the IonScript (per |fallback.consequence|),
// register an empty result set on the activation, then evaluate all recover
// instructions to fill it. Returns false on failure (e.g. OOM).
bool
SnapshotIterator::initInstructionResults(MaybeReadFallback& fallback)
{
    MOZ_ASSERT(fallback.canRecoverResults());
    JSContext* cx = fallback.maybeCx;

    // If there is only one resume point in the list of instructions, then there
    // is no instruction to recover, and thus no need to register any results.
    if (recover_.numInstructions() == 1)
        return true;

    JitFrameLayout* fp = fallback.frame->jsFrame();
    RInstructionResults* results = fallback.activation->maybeIonFrameRecovery(fp);
    if (!results) {
        AutoCompartment ac(cx, fallback.frame->script()->compartment());

        // We do not have the result yet, which means that an observable stack
        // slot is requested. As we do not want to bailout every time for the
        // same reason, we need to recompile without optimizing away the
        // observable stack slots. The script would later be recompiled to have
        // support for Argument objects.
        if (fallback.consequence == MaybeReadFallback::Fallback_Invalidate &&
            !ionScript_->invalidate(cx, /* resetUses = */ false, "Observe recovered instruction."))
        {
            return false;
        }

        // Register the list of result on the activation. We need to do that
        // before we initialize the list such as if any recover instruction
        // cause a GC, we can ensure that the results are properly traced by the
        // activation.
        RInstructionResults tmp(fallback.frame->jsFrame());
        if (!fallback.activation->registerIonFrameRecovery(mozilla::Move(tmp)))
            return false;

        results = fallback.activation->maybeIonFrameRecovery(fp);

        // Start a new snapshot at the beginning of the JitFrameIterator. This
        // SnapshotIterator is used for evaluating the content of all recover
        // instructions. The result is then saved on the JitActivation.
        MachineState machine = fallback.frame->machineState();
        SnapshotIterator s(*fallback.frame, &machine);
        if (!s.computeInstructionResults(cx, results)) {

            // If the evaluation failed because of OOMs, then we discard the
            // current set of result that we collected so far.
            fallback.activation->removeIonFrameRecovery(fp);
            return false;
        }
    }

    MOZ_ASSERT(results->isInitialized());
    MOZ_ASSERT(results->length() == recover_.numInstructions() - 1);
    instructionResults_ = results;
    return true;
}
2234
// Evaluate every recover instruction of this snapshot (skipping resume
// points) and store the produced Values into |results|. Must be called on a
// freshly positioned iterator (only the first instruction read) with an
// uninitialized |results|. Returns false on failure.
bool
SnapshotIterator::computeInstructionResults(JSContext* cx, RInstructionResults* results) const
{
    MOZ_ASSERT(!results->isInitialized());
    MOZ_ASSERT(recover_.numInstructionsRead() == 1);

    // The last instruction will always be a resume point.
    size_t numResults = recover_.numInstructions() - 1;
    if (!results->isInitialized()) {
        if (!results->init(cx, numResults))
            return false;

        // No need to iterate over the only resume point.
        if (!numResults) {
            MOZ_ASSERT(results->isInitialized());
            return true;
        }

        // Use AutoEnterAnalysis to avoid invoking the object metadata callback,
        // which could try to walk the stack while bailing out.
        AutoEnterAnalysis enter(cx);

        // Fill with the results of recover instructions. Work on a copy of
        // this iterator so |this| stays positioned where the caller left it.
        SnapshotIterator s(*this);
        s.instructionResults_ = results;
        while (s.moreInstructions()) {
            // Skip resume point and only interpret recover instructions.
            if (s.instruction()->isResumePoint()) {
                s.skipInstruction();
                continue;
            }

            if (!s.instruction()->recover(cx, s))
                return false;
            s.nextInstruction();
        }
    }

    MOZ_ASSERT(results->isInitialized());
    return true;
}
2276
// Record the result of the recover instruction currently being evaluated.
// Each slot is written exactly once: it must still hold the JS_ION_BAILOUT
// sentinel installed by RInstructionResults::init.
void
SnapshotIterator::storeInstructionResult(Value v)
{
    uint32_t currIns = recover_.numInstructionsRead() - 1;
    MOZ_ASSERT((*instructionResults_)[currIns].isMagic(JS_ION_BAILOUT));
    (*instructionResults_)[currIns] = v;
}
2284
// Read the previously computed result of the recover instruction at |index|.
// Asserts that the slot has actually been written (no sentinel left).
Value
SnapshotIterator::fromInstructionResult(uint32_t index) const
{
    MOZ_ASSERT(!(*instructionResults_)[index].isMagic(JS_ION_BAILOUT));
    return (*instructionResults_)[index];
}
2291
// Skip forward until the current instruction is a resume point, i.e. a
// frame description, without evaluating any recover instruction.
void
SnapshotIterator::settleOnFrame()
{
    // Check that the current instruction can still be used.
    MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
    while (!instruction()->isResumePoint())
        skipInstruction();
}
2300
// Move to the next (inner) frame: step past the current resume point and
// settle on the following one.
void
SnapshotIterator::nextFrame()
{
    nextInstruction();
    settleOnFrame();
}
2307
2308 Value
maybeReadAllocByIndex(size_t index)2309 SnapshotIterator::maybeReadAllocByIndex(size_t index)
2310 {
2311 while (index--) {
2312 MOZ_ASSERT(moreAllocations());
2313 skip();
2314 }
2315
2316 Value s;
2317 {
2318 // This MaybeReadFallback method cannot GC.
2319 JS::AutoSuppressGCAnalysis nogc;
2320 MaybeReadFallback fallback(UndefinedValue());
2321 s = maybeRead(fallback);
2322 }
2323
2324 while (moreAllocations())
2325 skip();
2326
2327 return s;
2328 }
2329
// Frame pointer of the current scripted frame. For bailing-out frames the
// real frame lives in the activation's bailout data rather than on the jit
// stack.
JitFrameLayout*
JitFrameIterator::jsFrame() const
{
    MOZ_ASSERT(isScripted());
    if (isBailoutJS())
        return (JitFrameLayout*) activation_->bailoutData()->fp();

    return (JitFrameLayout*) fp();
}
2339
// IonScript of the current Ion frame. Checks, in order: bailout data (for
// frames in the middle of a bailout), the invalidation list (for frames of
// invalidated code), then the script's current IonScript.
IonScript*
JitFrameIterator::ionScript() const
{
    MOZ_ASSERT(isIonScripted());
    if (isBailoutJS())
        return activation_->bailoutData()->ionScript();

    IonScript* ionScript = nullptr;
    if (checkInvalidation(&ionScript))
        return ionScript;
    return ionScriptFromCalleeToken();
}
2352
// IonScript reached through the frame's script. Only valid when the frame's
// code has not been invalidated.
IonScript*
JitFrameIterator::ionScriptFromCalleeToken() const
{
    MOZ_ASSERT(isIonJS());
    MOZ_ASSERT(!checkInvalidation());
    return script()->ionScript();
}
2360
// Safepoint index for the frame's return address. The lookup is cached in
// cachedSafepointIndex_ since it may be requested repeatedly.
const SafepointIndex*
JitFrameIterator::safepoint() const
{
    MOZ_ASSERT(isIonJS());
    if (!cachedSafepointIndex_)
        cachedSafepointIndex_ = ionScript()->getSafepointIndex(returnAddressToFp());
    return cachedSafepointIndex_;
}
2369
// Offset of this frame's snapshot within the IonScript's snapshot buffer.
// Bailing-out frames carry it in the bailout data; otherwise it comes from
// the OSI index at the frame's return point.
SnapshotOffset
JitFrameIterator::snapshotOffset() const
{
    MOZ_ASSERT(isIonScripted());
    if (isBailoutJS())
        return activation_->bailoutData()->snapshotOffset();
    return osiIndex()->snapshotOffset();
}
2378
// OSI (on-stack invalidation) index for the frame's return point, found by
// decoding the safepoint at the return address.
const OsiIndex*
JitFrameIterator::osiIndex() const
{
    MOZ_ASSERT(isIonJS());
    SafepointReader reader(ionScript(), safepoint());
    return ionScript()->getOsiIndex(reader.osiReturnPointOffset());
}
2386
// Construct an inline-frame iterator rooted with a JSContext, positioned on
// the outermost inlined frame of |iter| (or empty when |iter| is null).
InlineFrameIterator::InlineFrameIterator(JSContext* cx, const JitFrameIterator* iter)
  : calleeTemplate_(cx),
    calleeRVA_(),
    script_(cx)
{
    resetOn(iter);
}
2394
// Same as above, but rooted with a JSRuntime for callers without a context.
InlineFrameIterator::InlineFrameIterator(JSRuntime* rt, const JitFrameIterator* iter)
  : calleeTemplate_(rt),
    calleeRVA_(),
    script_(rt)
{
    resetOn(iter);
}
2402
// Copy-construct from another inline-frame iterator, settling on the same
// inlined frame. A null |iter| produces an empty iterator.
InlineFrameIterator::InlineFrameIterator(JSContext* cx, const InlineFrameIterator* iter)
  : frame_(iter ? iter->frame_ : nullptr),
    framesRead_(0),
    frameCount_(iter ? iter->frameCount_ : UINT32_MAX),
    calleeTemplate_(cx),
    calleeRVA_(),
    script_(cx)
{
    if (frame_) {
        machine_ = iter->machine_;
        start_ = SnapshotIterator(*frame_, &machine_);

        // findNextFrame will iterate to the next frame and init. everything.
        // Therefore to settle on the same frame, we report one frame less read.
        framesRead_ = iter->framesRead_ - 1;
        findNextFrame();
    }
}
2421
// Re-target this iterator at the physical frame |iter| (or clear it when
// |iter| is null) and settle on the outermost inlined frame. frameCount_ is
// unknown (UINT32_MAX) until the first full walk by findNextFrame.
void
InlineFrameIterator::resetOn(const JitFrameIterator* iter)
{
    frame_ = iter;
    framesRead_ = 0;
    frameCount_ = UINT32_MAX;

    if (iter) {
        machine_ = iter->machineState();
        start_ = SnapshotIterator(*iter, &machine_);
        findNextFrame();
    }
}
2435
// Advance to the next inlined frame (outermost to innermost order). Because
// the snapshot can only be walked forward, this restarts from start_ and
// re-skips the frames already read, recomputing callee, script, pc and
// numActualArgs_ for each inlining level along the way.
void
InlineFrameIterator::findNextFrame()
{
    MOZ_ASSERT(more());

    si_ = start_;

    // Read the initial frame out of the C stack.
    calleeTemplate_ = frame_->maybeCallee();
    calleeRVA_ = RValueAllocation();
    script_ = frame_->script();
    MOZ_ASSERT(script_->hasBaselineScript());

    // Settle on the outermost frame without evaluating any instructions before
    // looking for a pc.
    si_.settleOnFrame();

    pc_ = script_->offsetToPC(si_.pcOffset());
    // Sentinel: detects call sites for which no argc could be deduced below.
    numActualArgs_ = 0xbadbad;

    // This unfortunately is O(n*m), because we must skip over outer frames
    // before reading inner ones.

    // The first time (frameCount_ == UINT32_MAX) we do not know the number of
    // frames that we are going to inspect. So we are iterating until there is
    // no more frames, to settle on the inner most frame and to count the number
    // of frames.
    size_t remaining = (frameCount_ != UINT32_MAX) ? frameNo() - 1 : SIZE_MAX;

    size_t i = 1;
    for (; i <= remaining && si_.moreFrames(); i++) {
        MOZ_ASSERT(IsIonInlinablePC(pc_));

        // Recover the number of actual arguments from the script.
        // Note: deliberately not an else-if chain with the next test; a
        // JSOP_FUNCALL site first gets GET_ARGC here and is then corrected.
        if (JSOp(*pc_) != JSOP_FUNAPPLY)
            numActualArgs_ = GET_ARGC(pc_);
        if (JSOp(*pc_) == JSOP_FUNCALL) {
            // fun.call(thisv, ...args): the callee consumes one slot.
            MOZ_ASSERT(GET_ARGC(pc_) > 0);
            numActualArgs_ = GET_ARGC(pc_) - 1;
        } else if (IsGetPropPC(pc_)) {
            numActualArgs_ = 0;
        } else if (IsSetPropPC(pc_)) {
            numActualArgs_ = 1;
        }

        if (numActualArgs_ == 0xbadbad)
            MOZ_CRASH("Couldn't deduce the number of arguments of an ionmonkey frame");

        // Skip over non-argument slots, as well as |this|.
        bool skipNewTarget = JSOp(*pc_) == JSOP_NEW;
        unsigned skipCount = (si_.numAllocations() - 1) - numActualArgs_ - 1 - skipNewTarget;
        for (unsigned j = 0; j < skipCount; j++)
            si_.skip();

        // This value should correspond to the function which is being inlined.
        // The value must be readable to iterate over the inline frame. Most of
        // the time, these functions are stored as JSFunction constants,
        // register which are holding the JSFunction pointer, or recover
        // instruction with Default value.
        Value funval = si_.readWithDefault(&calleeRVA_);

        // Skip extra value allocations.
        while (si_.moreAllocations())
            si_.skip();

        si_.nextFrame();

        calleeTemplate_ = &funval.toObject().as<JSFunction>();

        // Inlined functions may be clones that still point to the lazy script
        // for the executed script, if they are clones. The actual script
        // exists though, just make sure the function points to it.
        script_ = calleeTemplate_->existingScriptForInlinedFunction();
        MOZ_ASSERT(script_->hasBaselineScript());

        pc_ = script_->offsetToPC(si_.pcOffset());
    }

    // The first time we do not know the number of frames, we only settle on the
    // last frame, and update the number of frames based on the number of
    // iteration that we have done.
    if (frameCount_ == UINT32_MAX) {
        MOZ_ASSERT(!si_.moreFrames());
        frameCount_ = i;
    }

    framesRead_++;
}
2524
// Callee of this inlined function frame. Returns the template function
// unless the callee was encoded as a readable allocation and recovery is
// possible, in which case the precise callee is read from the snapshot.
JSFunction*
InlineFrameIterator::callee(MaybeReadFallback& fallback) const
{
    MOZ_ASSERT(isFunctionFrame());
    if (calleeRVA_.mode() == RValueAllocation::INVALID || !fallback.canRecoverResults())
        return calleeTemplate_;

    // Read through a copy so this iterator's position is unchanged.
    SnapshotIterator s(si_);
    // :TODO: Handle allocation failures from recover instruction.
    Value funval = s.maybeRead(calleeRVA_, fallback);
    return &funval.toObject().as<JSFunction>();
}
2537
// Compute the scope chain object for this inlined frame from the slot value
// read out of the snapshot. When |hasCallObj| is provided, it is set to
// whether the callee needs a CallObject.
JSObject*
InlineFrameIterator::computeScopeChain(Value scopeChainValue, MaybeReadFallback& fallback,
                                       bool* hasCallObj) const
{
    if (scopeChainValue.isObject()) {
        if (hasCallObj) {
            if (fallback.canRecoverResults()) {
                // callee(fallback) may run recover instructions and GC; keep
                // the scope chain rooted across that call.
                RootedObject obj(fallback.maybeCx, &scopeChainValue.toObject());
                *hasCallObj = isFunctionFrame() && callee(fallback)->needsCallObject();
                return obj;
            } else {
                JS::AutoSuppressGCAnalysis nogc; // If we cannot recover then we cannot GC.
                *hasCallObj = isFunctionFrame() && callee(fallback)->needsCallObject();
            }
        }

        return &scopeChainValue.toObject();
    }

    // Note we can hit this case even for functions with a CallObject, in case
    // we are walking the frame during the function prologue, before the scope
    // chain has been initialized.
    if (isFunctionFrame())
        return callee(fallback)->environment();

    // Ion does not handle non-function scripts that have anything other than
    // the global on their scope chain.
    MOZ_ASSERT(!script()->isForEval());
    MOZ_ASSERT(!script()->hasNonSyntacticScope());
    return &script()->global().lexicalScope();
}
2569
// A frame is a function frame iff it has a callee template.
bool
InlineFrameIterator::isFunctionFrame() const
{
    return !!calleeTemplate_;
}
2575
2576 MachineState
FromBailout(RegisterDump::GPRArray & regs,RegisterDump::FPUArray & fpregs)2577 MachineState::FromBailout(RegisterDump::GPRArray& regs, RegisterDump::FPUArray& fpregs)
2578 {
2579 MachineState machine;
2580
2581 for (unsigned i = 0; i < Registers::Total; i++)
2582 machine.setRegisterLocation(Register::FromCode(i), ®s[i].r);
2583 #ifdef JS_CODEGEN_ARM
2584 float* fbase = (float*)&fpregs[0];
2585 for (unsigned i = 0; i < FloatRegisters::TotalDouble; i++)
2586 machine.setRegisterLocation(FloatRegister(i, FloatRegister::Double), &fpregs[i].d);
2587 for (unsigned i = 0; i < FloatRegisters::TotalSingle; i++)
2588 machine.setRegisterLocation(FloatRegister(i, FloatRegister::Single), (double*)&fbase[i]);
2589 #elif defined(JS_CODEGEN_MIPS32)
2590 float* fbase = (float*)&fpregs[0];
2591 for (unsigned i = 0; i < FloatRegisters::TotalDouble; i++) {
2592 machine.setRegisterLocation(FloatRegister::FromIndex(i, FloatRegister::Double),
2593 &fpregs[i].d);
2594 }
2595 for (unsigned i = 0; i < FloatRegisters::TotalSingle; i++) {
2596 machine.setRegisterLocation(FloatRegister::FromIndex(i, FloatRegister::Single),
2597 (double*)&fbase[i]);
2598 }
2599 #elif defined(JS_CODEGEN_MIPS64)
2600 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2601 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double), &fpregs[i]);
2602 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single), &fpregs[i]);
2603 }
2604 #elif defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
2605 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2606 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single), &fpregs[i]);
2607 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double), &fpregs[i]);
2608 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Simd128), &fpregs[i]);
2609 }
2610 #elif defined(JS_CODEGEN_ARM64)
2611 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2612 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single), &fpregs[i]);
2613 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double), &fpregs[i]);
2614 }
2615
2616 #elif defined(JS_CODEGEN_NONE)
2617 MOZ_CRASH();
2618 #else
2619 # error "Unknown architecture!"
2620 #endif
2621 return machine;
2622 }
2623
// Whether this inlined frame is a constructor call. Inlined frames inspect
// the caller's call-site opcode; the outermost frame asks the physical
// frame.
bool
InlineFrameIterator::isConstructing() const
{
    // Skip the current frame and look at the caller's.
    if (more()) {
        InlineFrameIterator parent(GetJSContextFromJitCode(), this);
        ++parent;

        // Inlined Getters and Setters are never constructing.
        if (IsGetPropPC(parent.pc()) || IsSetPropPC(parent.pc()))
            return false;

        // In the case of a JS frame, look up the pc from the snapshot.
        MOZ_ASSERT(IsCallPC(parent.pc()));

        return (JSOp)*parent.pc() == JSOP_NEW;
    }

    return frame_->isConstructing();
}
2644
// Whether the physical frame was entered as a constructor call, as encoded
// in the callee token.
bool
JitFrameIterator::isConstructing() const
{
    return CalleeTokenIsConstructing(calleeToken());
}
2650
// Number of actual arguments passed to this frame. For scripted frames it
// is read from the frame layout; otherwise the frame must be a native exit
// frame carrying an argc.
unsigned
JitFrameIterator::numActualArgs() const
{
    if (isScripted())
        return jsFrame()->numActualArgs();

    MOZ_ASSERT(isExitFrameLayout<NativeExitFrameLayout>());
    return exitFrame()->as<NativeExitFrameLayout>()->argc();
}
2660
// Emit a diagnostic on stderr when an allocation could not be read.
void
SnapshotIterator::warnUnreadableAllocation()
{
    fprintf(stderr, "Warning! Tried to access unreadable value allocation (possible f.arguments).\n");
}
2666
// Functor used by InlineFrameIterator::dump to print a run of actual
// arguments, keeping a running argument index across calls.
struct DumpOp {
    explicit DumpOp(unsigned int i) : i_(i) {}

    // Index of the next argument to print.
    unsigned int i_;
    void operator()(const Value& v) {
        fprintf(stderr, "  actual (arg %d): ", i_);
#ifdef DEBUG
        DumpValue(v);
#else
        fprintf(stderr, "?\n");
#endif
        i_++;
    }
};
2681
// Debug dump of a Baseline frame: callee, source position, current op,
// argument count and all value slots are printed to stderr. Value/object
// contents are only printed in DEBUG builds.
void
JitFrameIterator::dumpBaseline() const
{
    MOZ_ASSERT(isBaselineJS());

    fprintf(stderr, " JS Baseline frame\n");
    if (isFunctionFrame()) {
        fprintf(stderr, "  callee fun: ");
#ifdef DEBUG
        DumpObject(callee());
#else
        fprintf(stderr, "?\n");
#endif
    } else {
        fprintf(stderr, "  global frame, no callee\n");
    }

    fprintf(stderr, "  file %s line %" PRIuSIZE "\n",
            script()->filename(), script()->lineno());

    JSContext* cx = GetJSContextFromJitCode();
    RootedScript script(cx);
    jsbytecode* pc;
    baselineScriptAndPc(script.address(), &pc);

    fprintf(stderr, "  script = %p, pc = %p (offset %u)\n", (void*)script, pc, uint32_t(script->pcToOffset(pc)));
    fprintf(stderr, "  current op: %s\n", CodeName[*pc]);

    fprintf(stderr, "  actual args: %d\n", numActualArgs());

    BaselineFrame* frame = baselineFrame();

    for (unsigned i = 0; i < frame->numValueSlots(); i++) {
        fprintf(stderr, "  slot %u: ", i);
#ifdef DEBUG
        Value* v = frame->valueSlot(i);
        DumpValue(*v);
#else
        fprintf(stderr, "?\n");
#endif
    }
}
2724
// Debug dump of one inlined frame: callee, source position, current op and
// every snapshot slot. For function frames, the first slots are labelled as
// scope chain, |this|, formals, then locals; overflow actuals are printed
// via DumpOp before the locals.
void
InlineFrameIterator::dump() const
{
    MaybeReadFallback fallback(UndefinedValue());

    if (more())
        fprintf(stderr, " JS frame (inlined)\n");
    else
        fprintf(stderr, " JS frame\n");

    bool isFunction = false;
    if (isFunctionFrame()) {
        isFunction = true;
        fprintf(stderr, "  callee fun: ");
#ifdef DEBUG
        DumpObject(callee(fallback));
#else
        fprintf(stderr, "?\n");
#endif
    } else {
        fprintf(stderr, "  global frame, no callee\n");
    }

    fprintf(stderr, "  file %s line %" PRIuSIZE "\n",
            script()->filename(), script()->lineno());

    fprintf(stderr, "  script = %p, pc = %p\n", (void*) script(), pc());
    fprintf(stderr, "  current op: %s\n", CodeName[*pc()]);

    if (!more()) {
        numActualArgs();
    }

    SnapshotIterator si = snapshotIterator();
    fprintf(stderr, "  slots: %u\n", si.numAllocations() - 1);
    for (unsigned i = 0; i < si.numAllocations() - 1; i++) {
        if (isFunction) {
            if (i == 0)
                fprintf(stderr, "  scope chain: ");
            else if (i == 1)
                fprintf(stderr, "  this: ");
            else if (i - 2 < calleeTemplate()->nargs())
                fprintf(stderr, "  formal (arg %d): ", i - 2);
            else {
                // Print overflown actual arguments once, just before the
                // first local slot.
                if (i - 2 == calleeTemplate()->nargs() && numActualArgs() > calleeTemplate()->nargs()) {
                    DumpOp d(calleeTemplate()->nargs());
                    unaliasedForEachActual(GetJSContextFromJitCode(), d, ReadFrame_Overflown, fallback);
                }

                fprintf(stderr, "  slot %d: ", int(i - 2 - calleeTemplate()->nargs()));
            }
        } else
            fprintf(stderr, "  slot %u: ", i);
#ifdef DEBUG
        DumpValue(si.maybeRead(fallback));
#else
        fprintf(stderr, "?\n");
#endif
    }

    fputc('\n', stderr);
}
2787
2788 void
dump() const2789 JitFrameIterator::dump() const
2790 {
2791 switch (type_) {
2792 case JitFrame_Entry:
2793 fprintf(stderr, " Entry frame\n");
2794 fprintf(stderr, " Frame size: %u\n", unsigned(current()->prevFrameLocalSize()));
2795 break;
2796 case JitFrame_BaselineJS:
2797 dumpBaseline();
2798 break;
2799 case JitFrame_BaselineStub:
2800 case JitFrame_Unwound_BaselineStub:
2801 fprintf(stderr, " Baseline stub frame\n");
2802 fprintf(stderr, " Frame size: %u\n", unsigned(current()->prevFrameLocalSize()));
2803 break;
2804 case JitFrame_Bailout:
2805 case JitFrame_IonJS:
2806 {
2807 InlineFrameIterator frames(GetJSContextFromJitCode(), this);
2808 for (;;) {
2809 frames.dump();
2810 if (!frames.more())
2811 break;
2812 ++frames;
2813 }
2814 break;
2815 }
2816 case JitFrame_IonStub:
2817 case JitFrame_Unwound_IonStub:
2818 fprintf(stderr, " Ion stub frame\n");
2819 fprintf(stderr, " Frame size: %u\n", unsigned(current()->prevFrameLocalSize()));
2820 break;
2821 case JitFrame_Rectifier:
2822 case JitFrame_Unwound_Rectifier:
2823 fprintf(stderr, " Rectifier frame\n");
2824 fprintf(stderr, " Frame size: %u\n", unsigned(current()->prevFrameLocalSize()));
2825 break;
2826 case JitFrame_IonAccessorIC:
2827 case JitFrame_Unwound_IonAccessorIC:
2828 fprintf(stderr, " Ion scripted accessor IC\n");
2829 fprintf(stderr, " Frame size: %u\n", unsigned(current()->prevFrameLocalSize()));
2830 break;
2831 case JitFrame_Unwound_IonJS:
2832 case JitFrame_Unwound_BaselineJS:
2833 fprintf(stderr, "Warning! Unwound JS frames are not observable.\n");
2834 break;
2835 case JitFrame_Exit:
2836 fprintf(stderr, " Exit frame\n");
2837 break;
2838 case JitFrame_LazyLink:
2839 fprintf(stderr, " Lazy link frame\n");
2840 break;
2841 };
2842 fputc('\n', stderr);
2843 }
2844
#ifdef DEBUG
// Sanity-check the profiler's native=>bytecode mapping for this frame's
// return address: look the address up in the JitcodeGlobalTable and, for
// Ion frames, walk the inline frames to confirm the mapped scripts match
// the iterator's own view. Returns true when verification passes or is not
// applicable (unsupported frame type, off-thread, sampling suppressed,
// minor GC in progress, or no table entry); returns false only when the
// call-stack lookup itself fails.
bool
JitFrameIterator::verifyReturnAddressUsingNativeToBytecodeMap()
{
    MOZ_ASSERT(returnAddressToFp_ != nullptr);

    // Only handle Ion and Baseline frames for now.
    if (type_ != JitFrame_IonJS && type_ != JitFrame_BaselineJS)
        return true;

    JSRuntime* rt = js::TlsPerThreadData.get()->runtimeIfOnOwnerThread();

    // Don't verify on non-main-thread.
    if (!rt)
        return true;

    // Don't verify if sampling is being suppressed.
    if (!rt->isProfilerSamplingEnabled())
        return true;

    // The table cannot be safely consulted during a minor collection.
    if (rt->isHeapMinorCollecting())
        return true;

    JitRuntime* jitrt = rt->jitRuntime();

    // Look up and print bytecode info for the native address. A miss is not
    // an error: the entry may legitimately be absent.
    JitcodeGlobalEntry entry;
    if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry, rt))
        return true;

    JitSpew(JitSpew_Profiling, "Found nativeToBytecode entry for %p: %p - %p",
            returnAddressToFp_, entry.nativeStartAddr(), entry.nativeEndAddr());

    // Expand the address into its (possibly inlined) bytecode locations.
    JitcodeGlobalEntry::BytecodeLocationVector location;
    uint32_t depth = UINT32_MAX;
    if (!entry.callStackAtAddr(rt, returnAddressToFp_, location, &depth))
        return false;
    MOZ_ASSERT(depth > 0 && depth != UINT32_MAX);
    MOZ_ASSERT(location.length() == depth);

    // depth is uint32_t: use %u, not %d.
    JitSpew(JitSpew_Profiling, "Found bytecode location of depth %u:", depth);
    for (size_t i = 0; i < location.length(); i++) {
        JitSpew(JitSpew_Profiling, "  %s:%" PRIuSIZE " - %" PRIuSIZE,
                location[i].script->filename(), location[i].script->lineno(),
                size_t(location[i].pc - location[i].script->code()));
    }

    if (type_ == JitFrame_IonJS) {
        // Create an InlineFrameIterator here and verify the mapped info
        // against the iterator info.
        InlineFrameIterator inlineFrames(GetJSContextFromJitCode(), this);
        for (size_t idx = 0; idx < location.length(); idx++) {
            // Every mapped location except possibly the last must have a
            // corresponding inline frame still available.
            MOZ_ASSERT_IF(idx < location.length() - 1, inlineFrames.more());

            JitSpew(JitSpew_Profiling,
                    "Match %d: ION %s:%" PRIuSIZE "(%" PRIuSIZE ") vs N2B %s:%" PRIuSIZE "(%" PRIuSIZE ")",
                    (int)idx,
                    inlineFrames.script()->filename(),
                    inlineFrames.script()->lineno(),
                    size_t(inlineFrames.pc() - inlineFrames.script()->code()),
                    location[idx].script->filename(),
                    location[idx].script->lineno(),
                    size_t(location[idx].pc - location[idx].script->code()));

            MOZ_ASSERT(inlineFrames.script() == location[idx].script);

            if (inlineFrames.more())
                ++inlineFrames;
        }
    }

    return true;
}
#endif // DEBUG
2919
// Construct a profiling frame iterator positioned at the most recent JS
// frame of the runtime's current profiling activation, using the sampled
// register |state| as a hint for where execution was interrupted.
//
// Initialization falls through several strategies in order: the sampler pc
// checked against the frame script's own jitcode, then against the
// native=>bytecode table, then the activation's lastProfilingCallSite by
// the same two checks. If nothing matches, iteration either ends
// immediately (no activation, empty activation, no baseline script) or
// starts at the beginning of the last frame's baseline code.
JitProfilingFrameIterator::JitProfilingFrameIterator(
        JSRuntime* rt, const JS::ProfilingFrameIterator::RegisterState& state)
{
    // If no profilingActivation is live, initialize directly to
    // end-of-iteration state.
    if (!rt->profilingActivation()) {
        type_ = JitFrame_Entry;
        fp_ = nullptr;
        returnAddressToFp_ = nullptr;
        return;
    }

    MOZ_ASSERT(rt->profilingActivation()->isJit());

    JitActivation* act = rt->profilingActivation()->asJit();

    // If the top JitActivation has a null lastProfilingFrame, assume that
    // it's a trivially empty activation, and initialize directly
    // to end-of-iteration state.
    if (!act->lastProfilingFrame()) {
        type_ = JitFrame_Entry;
        fp_ = nullptr;
        returnAddressToFp_ = nullptr;
        return;
    }

    // Get the fp from the current profilingActivation
    fp_ = (uint8_t*) act->lastProfilingFrame();
    void* lastCallSite = act->lastProfilingCallSite();

    JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();

    // Profiler sampling must NOT be suppressed if we are here.
    MOZ_ASSERT(rt->isProfilerSamplingEnabled());

    // Try initializing with sampler pc
    if (tryInitWithPC(state.pc))
        return;

    // Try initializing with sampler pc using native=>bytecode table.
    if (tryInitWithTable(table, state.pc, rt, /* forLastCallSite = */ false))
        return;

    // Try initializing with lastProfilingCallSite pc
    if (lastCallSite) {
        if (tryInitWithPC(lastCallSite))
            return;

        // Try initializing with lastProfilingCallSite pc using native=>bytecode table.
        if (tryInitWithTable(table, lastCallSite, rt, /* forLastCallSite = */ true))
            return;
    }

    // In some rare cases (e.g. baseline eval frame), the callee script may
    // not have a baselineScript. Treat this as an empty frame-sequence and
    // move on.
    if (!frameScript()->hasBaselineScript()) {
        type_ = JitFrame_Entry;
        fp_ = nullptr;
        returnAddressToFp_ = nullptr;
        return;
    }

    // If nothing matches, for now just assume we are at the start of the last frame's
    // baseline jit code.
    type_ = JitFrame_BaselineJS;
    returnAddressToFp_ = frameScript()->baselineScript()->method()->raw();
}
2988
2989 template <typename FrameType, typename ReturnType=CommonFrameLayout*>
2990 inline ReturnType
GetPreviousRawFrame(FrameType * frame)2991 GetPreviousRawFrame(FrameType* frame)
2992 {
2993 size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
2994 return ReturnType(((uint8_t*) frame) + prevSize);
2995 }
2996
2997 template <typename ReturnType=CommonFrameLayout*>
2998 inline ReturnType
GetPreviousRawFrameOfExitFrame(ExitFrameLayout * frame)2999 GetPreviousRawFrameOfExitFrame(ExitFrameLayout* frame)
3000 {
3001 // Unwound exit frames are fake exit frames, and have the size of a
3002 // JitFrameLayout instead of ExitFrameLayout. See
3003 // JitFrameIterator::prevFp.
3004 size_t frameSize = IsUnwoundFrame(frame->prevType())
3005 ? JitFrameLayout::Size()
3006 : ExitFrameLayout::Size();
3007 size_t prevSize = frame->prevFrameLocalSize() + frameSize;
3008 return ReturnType(((uint8_t*) frame) + prevSize);
3009 }
3010
// Construct a profiling frame iterator positioned at the JS frame that
// called into the given exit frame, unwinding through baseline stub and
// unwound rectifier frames so that iteration always starts on an Ion or
// Baseline JS frame.
JitProfilingFrameIterator::JitProfilingFrameIterator(void* exitFrame)
{
    ExitFrameLayout* frame = (ExitFrameLayout*) exitFrame;
    FrameType prevType = frame->prevType();

    // Direct caller is Ion code: start iteration there.
    if (prevType == JitFrame_IonJS || prevType == JitFrame_Unwound_IonJS) {
        returnAddressToFp_ = frame->returnAddress();
        fp_ = GetPreviousRawFrameOfExitFrame<uint8_t*>(frame);
        type_ = JitFrame_IonJS;
        return;
    }

    // Direct caller is Baseline code; the stored return address may be a
    // debug-mode OSR trampoline and need fixing up.
    if (prevType == JitFrame_BaselineJS || prevType == JitFrame_Unwound_BaselineJS) {
        returnAddressToFp_ = frame->returnAddress();
        fp_ = GetPreviousRawFrameOfExitFrame<uint8_t*>(frame);
        type_ = JitFrame_BaselineJS;
        fixBaselineDebugModeOSRReturnAddress();
        return;
    }

    // Caller is a baseline stub frame: skip it and report the baseline JS
    // frame that owns the stub, recovering its fp from the stub's saved
    // frame pointer.
    if (prevType == JitFrame_BaselineStub || prevType == JitFrame_Unwound_BaselineStub) {
        BaselineStubFrameLayout* stubFrame =
            GetPreviousRawFrameOfExitFrame<BaselineStubFrameLayout*>(frame);
        MOZ_ASSERT_IF(prevType == JitFrame_BaselineStub,
                      stubFrame->prevType() == JitFrame_BaselineJS);
        MOZ_ASSERT_IF(prevType == JitFrame_Unwound_BaselineStub,
                      stubFrame->prevType() == JitFrame_BaselineJS ||
                      stubFrame->prevType() == JitFrame_IonJS);
        returnAddressToFp_ = stubFrame->returnAddress();
        fp_ = ((uint8_t*) stubFrame->reverseSavedFramePtr())
              + jit::BaselineFrame::FramePointerOffset;
        type_ = JitFrame_BaselineJS;
        return;
    }

    if (prevType == JitFrame_Unwound_Rectifier) {
        // Unwound rectifier exit frames still keep their 'JS' format (with
        // the target function and actual-args included in the frame and not
        // counted in the frame size).
        RectifierFrameLayout* rectFrame =
            GetPreviousRawFrame<JitFrameLayout, RectifierFrameLayout*>((JitFrameLayout*) frame);

        // A rectifier can only have been called from a stub or Ion code.
        MOZ_ASSERT(rectFrame->prevType() == JitFrame_BaselineStub ||
                   rectFrame->prevType() == JitFrame_IonJS);

        if (rectFrame->prevType() == JitFrame_BaselineStub) {
            // Unwind past stub frame.
            BaselineStubFrameLayout* stubFrame =
                GetPreviousRawFrame<RectifierFrameLayout, BaselineStubFrameLayout*>(rectFrame);
            MOZ_ASSERT(stubFrame->prevType() == JitFrame_BaselineJS);
            returnAddressToFp_ = stubFrame->returnAddress();
            fp_ = ((uint8_t*) stubFrame->reverseSavedFramePtr())
                  + jit::BaselineFrame::FramePointerOffset;
            type_ = JitFrame_BaselineJS;
            return;
        }

        // else, prior frame was ion frame.
        returnAddressToFp_ = rectFrame->returnAddress();
        fp_ = GetPreviousRawFrame<RectifierFrameLayout, uint8_t*>(rectFrame);
        type_ = JitFrame_IonJS;
        return;
    }

    MOZ_CRASH("Invalid frame type prior to exit frame.");
}
3077
3078 bool
tryInitWithPC(void * pc)3079 JitProfilingFrameIterator::tryInitWithPC(void* pc)
3080 {
3081 JSScript* callee = frameScript();
3082
3083 // Check for Ion first, since it's more likely for hot code.
3084 if (callee->hasIonScript() && callee->ionScript()->method()->containsNativePC(pc)) {
3085 type_ = JitFrame_IonJS;
3086 returnAddressToFp_ = pc;
3087 return true;
3088 }
3089
3090 // Check for containment in Baseline jitcode second.
3091 if (callee->hasBaselineScript() && callee->baselineScript()->method()->containsNativePC(pc)) {
3092 type_ = JitFrame_BaselineJS;
3093 returnAddressToFp_ = pc;
3094 return true;
3095 }
3096
3097 return false;
3098 }
3099
// Try to initialize this iterator from |pc| by consulting the runtime's
// native=>bytecode (jitcode global) table instead of the frame script's own
// jitcode. |forLastCallSite| is true when |pc| came from the activation's
// lastProfilingCallSite rather than from the sampler; in that case a
// baseline entry must also match the frame's callee script to be accepted.
// Returns false if |pc| is null, unmapped, or maps to a mismatched script.
bool
JitProfilingFrameIterator::tryInitWithTable(JitcodeGlobalTable* table, void* pc, JSRuntime* rt,
                                            bool forLastCallSite)
{
    if (!pc)
        return false;

    JitcodeGlobalEntry entry;
    if (!table->lookup(pc, &entry, rt))
        return false;

    JSScript* callee = frameScript();

    // The table only contains these four entry kinds.
    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());

    // Treat dummy lookups as an empty frame sequence.
    if (entry.isDummy()) {
        type_ = JitFrame_Entry;
        fp_ = nullptr;
        returnAddressToFp_ = nullptr;
        return true;
    }

    if (entry.isIon()) {
        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
        if (entry.ionEntry().getScript(0) != callee)
            return false;

        type_ = JitFrame_IonJS;
        returnAddressToFp_ = pc;
        return true;
    }

    if (entry.isBaseline()) {
        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
        if (forLastCallSite && entry.baselineEntry().script() != callee)
            return false;

        type_ = JitFrame_BaselineJS;
        returnAddressToFp_ = pc;
        return true;
    }

    if (entry.isIonCache()) {
        // An IC stub belongs to the Ion code at its rejoin address; verify
        // that Ion code against the frame's callee.
        JitcodeGlobalEntry ionEntry;
        table->lookupInfallible(entry.ionCacheEntry().rejoinAddr(), &ionEntry, rt);
        MOZ_ASSERT(ionEntry.isIon());

        if (ionEntry.ionEntry().getScript(0) != callee)
            return false;

        type_ = JitFrame_IonJS;
        returnAddressToFp_ = pc;
        return true;
    }

    return false;
}
3158
3159 void
fixBaselineDebugModeOSRReturnAddress()3160 JitProfilingFrameIterator::fixBaselineDebugModeOSRReturnAddress()
3161 {
3162 MOZ_ASSERT(type_ == JitFrame_BaselineJS);
3163 BaselineFrame* bl = (BaselineFrame*)(fp_ - BaselineFrame::FramePointerOffset -
3164 BaselineFrame::Size());
3165 if (BaselineDebugModeOSRInfo* info = bl->getDebugModeOSRInfo())
3166 returnAddressToFp_ = info->resumeAddr;
3167 }
3168
// Advance the profiling iterator to the caller's frame, skipping over any
// intermediate stub, rectifier, or accessor-IC frames so that the iterator
// always rests on a JS (Ion/Baseline) frame or reaches the entry frame.
void
JitProfilingFrameIterator::operator++()
{
    /*
     * fp_ points to a Baseline or Ion frame. The possible call-stacks
     * patterns occurring between this frame and a previous Ion or Baseline
     * frame are as follows:
     *
     * <Baseline-Or-Ion>
     * ^
     * |
     * ^--- Ion
     * |
     * ^--- Baseline Stub <---- Baseline
     * |
     * ^--- Argument Rectifier
     * |    ^
     * |    |
     * |    ^--- Ion
     * |    |
     * |    ^--- Baseline Stub <---- Baseline
     * |
     * ^--- Entry Frame (From C++)
     *      Exit Frame (From previous JitActivation)
     *      ^
     *      |
     *      ^--- Ion
     *      |
     *      ^--- Baseline
     *      |
     *      ^--- Baseline Stub <---- Baseline
     */
    JitFrameLayout* frame = framePtr();
    FrameType prevType = frame->prevType();

    // Caller is Ion code: step straight to it.
    if (prevType == JitFrame_IonJS || prevType == JitFrame_Unwound_IonJS) {
        returnAddressToFp_ = frame->returnAddress();
        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t*>(frame);
        type_ = JitFrame_IonJS;
        return;
    }

    // Caller is Baseline code; the return address may need debug-mode OSR
    // fixup.
    if (prevType == JitFrame_BaselineJS || prevType == JitFrame_Unwound_BaselineJS) {
        returnAddressToFp_ = frame->returnAddress();
        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t*>(frame);
        type_ = JitFrame_BaselineJS;
        fixBaselineDebugModeOSRReturnAddress();
        return;
    }

    // Caller is a baseline stub: skip it and recover the owning baseline
    // frame's fp from the stub's saved frame pointer.
    if (prevType == JitFrame_BaselineStub || prevType == JitFrame_Unwound_BaselineStub) {
        BaselineStubFrameLayout* stubFrame =
            GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout*>(frame);
        MOZ_ASSERT(stubFrame->prevType() == JitFrame_BaselineJS);

        returnAddressToFp_ = stubFrame->returnAddress();
        fp_ = ((uint8_t*) stubFrame->reverseSavedFramePtr())
              + jit::BaselineFrame::FramePointerOffset;
        type_ = JitFrame_BaselineJS;
        return;
    }

    // Caller is an argument rectifier: skip it and then one more level,
    // since the rectifier itself was called from Ion or via a baseline stub.
    if (prevType == JitFrame_Rectifier || prevType == JitFrame_Unwound_Rectifier) {
        RectifierFrameLayout* rectFrame =
            GetPreviousRawFrame<JitFrameLayout, RectifierFrameLayout*>(frame);
        FrameType rectPrevType = rectFrame->prevType();

        if (rectPrevType == JitFrame_IonJS) {
            returnAddressToFp_ = rectFrame->returnAddress();
            fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t*>(rectFrame);
            type_ = JitFrame_IonJS;
            return;
        }

        if (rectPrevType == JitFrame_BaselineStub) {
            BaselineStubFrameLayout* stubFrame =
                GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout*>(rectFrame);
            returnAddressToFp_ = stubFrame->returnAddress();
            fp_ = ((uint8_t*) stubFrame->reverseSavedFramePtr())
                  + jit::BaselineFrame::FramePointerOffset;
            type_ = JitFrame_BaselineJS;
            return;
        }

        MOZ_CRASH("Bad frame type prior to rectifier frame.");
    }

    // Caller is a scripted accessor IC stub, which can only be entered from
    // Ion code: skip it.
    if (prevType == JitFrame_IonAccessorIC || prevType == JitFrame_Unwound_IonAccessorIC) {
        IonAccessorICFrameLayout* accessorFrame =
            GetPreviousRawFrame<JitFrameLayout, IonAccessorICFrameLayout*>(frame);

        MOZ_ASSERT(accessorFrame->prevType() == JitFrame_IonJS);

        returnAddressToFp_ = accessorFrame->returnAddress();
        fp_ = GetPreviousRawFrame<IonAccessorICFrameLayout, uint8_t*>(accessorFrame);
        type_ = JitFrame_IonJS;
        return;
    }

    if (prevType == JitFrame_Entry) {
        // No previous frame, set to null to indicate that JitFrameIterator is done()
        returnAddressToFp_ = nullptr;
        fp_ = nullptr;
        type_ = JitFrame_Entry;
        return;
    }

    MOZ_CRASH("Bad frame type.");
}
3278
3279 JitFrameLayout*
fp() const3280 InvalidationBailoutStack::fp() const
3281 {
3282 return (JitFrameLayout*) (sp() + ionScript_->frameSize());
3283 }
3284
3285 void
checkInvariants() const3286 InvalidationBailoutStack::checkInvariants() const
3287 {
3288 #ifdef DEBUG
3289 JitFrameLayout* frame = fp();
3290 CalleeToken token = frame->calleeToken();
3291 MOZ_ASSERT(token);
3292
3293 uint8_t* rawBase = ionScript()->method()->raw();
3294 uint8_t* rawLimit = rawBase + ionScript()->method()->instructionsSize();
3295 uint8_t* osiPoint = osiPointReturnAddress();
3296 MOZ_ASSERT(rawBase <= osiPoint && osiPoint <= rawLimit);
3297 #endif
3298 }
3299
// Walk every Jit activation on the context's runtime and release-assert the
// stack layout invariants: frames grow downward, rectifier frames are
// exactly as large as the rectified argument set requires, Ion frames and
// baseline-stub callees maintain JitStackAlignment, and each activation
// bottoms out in a properly aligned entry frame.
void
AssertJitStackInvariants(JSContext* cx)
{
    for (JitActivationIterator activations(cx->runtime()); !activations.done(); ++activations) {
        JitFrameIterator frames(activations);
        size_t prevFrameSize = 0;
        size_t frameSize = 0;
        bool isScriptedCallee = false;
        for (; !frames.done(); ++frames) {
            // Frame size is the distance between this frame's fp and its
            // caller's fp; the stack grows down, so callerFp >= calleeFp.
            size_t calleeFp = reinterpret_cast<size_t>(frames.fp());
            size_t callerFp = reinterpret_cast<size_t>(frames.prevFp());
            MOZ_ASSERT(callerFp >= calleeFp);
            prevFrameSize = frameSize;
            frameSize = callerFp - calleeFp;

            if (frames.prevType() == JitFrame_Rectifier) {
                MOZ_RELEASE_ASSERT(frameSize % JitStackAlignment == 0,
                  "The rectifier frame should keep the alignment");

                // The rectifier must allocate exactly enough stack for the
                // rectified formals, |this|, and (when constructing)
                // new.target, plus the frame header — and no more than one
                // alignment unit beyond that.
                size_t expectedFrameSize = 0
#if defined(JS_CODEGEN_X86)
                    + sizeof(void*) /* frame pointer */
#endif
                    + sizeof(Value) * (frames.callee()->nargs() +
                                       1 /* |this| argument */ +
                                       frames.isConstructing() /* new.target */)
                    + sizeof(JitFrameLayout);
                MOZ_RELEASE_ASSERT(frameSize >= expectedFrameSize,
                  "The frame is large enough to hold all arguments");
                MOZ_RELEASE_ASSERT(expectedFrameSize + JitStackAlignment > frameSize,
                  "The frame size is optimal");
            }

            if (frames.isExitFrame()) {
                // For the moment, we do not keep the JitStackAlignment
                // alignment for exit frames.
                frameSize -= ExitFrameLayout::Size();
            }

            if (frames.isIonJS()) {
                // Ideally, we should not have such requirement, but keep the
                // alignment-delta as part of the Safepoint such that we can pad
                // accordingly when making out-of-line calls.  In the mean time,
                // let us have check-points where we can guarantee that
                // everything can properly be aligned before adding complexity.
                MOZ_RELEASE_ASSERT(frames.ionScript()->frameSize() % JitStackAlignment == 0,
                  "Ensure that if the Ion frame is aligned, then the spill base is also aligned");

                if (isScriptedCallee) {
                    MOZ_RELEASE_ASSERT(prevFrameSize % JitStackAlignment == 0,
                      "The ion frame should keep the alignment");
                }
            }

            // The stack is dynamically aligned by baseline stubs before calling
            // any jitted code.
            if (frames.prevType() == JitFrame_BaselineStub && isScriptedCallee) {
                MOZ_RELEASE_ASSERT(calleeFp % JitStackAlignment == 0,
                    "The baseline stub restores the stack alignment");
            }

            // Remember whether the next (caller) iteration's callee was a
            // scripted frame or a rectifier acting on its behalf.
            isScriptedCallee = false
                || frames.isScripted()
                || frames.type() == JitFrame_Rectifier;
        }

        MOZ_RELEASE_ASSERT(frames.type() == JitFrame_Entry,
          "The first frame of a Jit activation should be an entry frame");
        MOZ_RELEASE_ASSERT(reinterpret_cast<size_t>(frames.fp()) % JitStackAlignment == 0,
          "The entry frame should be properly aligned");
    }
}
3372
3373 } // namespace jit
3374 } // namespace js
3375