1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "jit/JitFrames-inl.h"
8
9 #include "mozilla/ScopeExit.h"
10
11 #include <algorithm>
12
13 #include "builtin/ModuleObject.h"
14 #include "gc/Marking.h"
15 #include "jit/BaselineDebugModeOSR.h"
16 #include "jit/BaselineFrame.h"
17 #include "jit/BaselineIC.h"
18 #include "jit/BaselineJIT.h"
19 #include "jit/Ion.h"
20 #include "jit/IonScript.h"
21 #include "jit/JitcodeMap.h"
22 #include "jit/JitRuntime.h"
23 #include "jit/JitSpewer.h"
24 #include "jit/LIR.h"
25 #include "jit/PcScriptCache.h"
26 #include "jit/Recover.h"
27 #include "jit/Safepoints.h"
28 #include "jit/ScriptFromCalleeToken.h"
29 #include "jit/Snapshots.h"
30 #include "jit/VMFunctions.h"
31 #include "js/friend/DumpFunctions.h" // js::DumpObject, js::DumpValue
32 #include "vm/ArgumentsObject.h"
33 #include "vm/GeckoProfiler.h"
34 #include "vm/Interpreter.h"
35 #include "vm/JSContext.h"
36 #include "vm/JSFunction.h"
37 #include "vm/JSObject.h"
38 #include "vm/JSScript.h"
39 #include "vm/TraceLogging.h"
40 #include "wasm/WasmBuiltins.h"
41 #include "wasm/WasmInstance.h"
42
43 #include "debugger/DebugAPI-inl.h"
44 #include "gc/Nursery-inl.h"
45 #include "jit/JSJitFrameIter-inl.h"
46 #include "vm/GeckoProfiler-inl.h"
47 #include "vm/JSScript-inl.h"
48 #include "vm/Probes-inl.h"
49
50 namespace js {
51 namespace jit {
52
// Map a slot index to its byte offset from a JitFrameLayout. Slot distances
// are uniform across architectures (though the absolute position depends on
// the frame header size); slots grow downward, so the offset is the negated
// index.
static inline int32_t OffsetOfFrameSlot(int32_t slot) {
  return -slot;
}
57
AddressOfFrameSlot(JitFrameLayout * fp,int32_t slot)58 static inline uint8_t* AddressOfFrameSlot(JitFrameLayout* fp, int32_t slot) {
59 return (uint8_t*)fp + OffsetOfFrameSlot(slot);
60 }
61
ReadFrameSlot(JitFrameLayout * fp,int32_t slot)62 static inline uintptr_t ReadFrameSlot(JitFrameLayout* fp, int32_t slot) {
63 return *(uintptr_t*)AddressOfFrameSlot(fp, slot);
64 }
65
WriteFrameSlot(JitFrameLayout * fp,int32_t slot,uintptr_t value)66 static inline void WriteFrameSlot(JitFrameLayout* fp, int32_t slot,
67 uintptr_t value) {
68 *(uintptr_t*)AddressOfFrameSlot(fp, slot) = value;
69 }
70
ReadFrameDoubleSlot(JitFrameLayout * fp,int32_t slot)71 static inline double ReadFrameDoubleSlot(JitFrameLayout* fp, int32_t slot) {
72 return *(double*)AddressOfFrameSlot(fp, slot);
73 }
74
ReadFrameFloat32Slot(JitFrameLayout * fp,int32_t slot)75 static inline float ReadFrameFloat32Slot(JitFrameLayout* fp, int32_t slot) {
76 return *(float*)AddressOfFrameSlot(fp, slot);
77 }
78
ReadFrameInt32Slot(JitFrameLayout * fp,int32_t slot)79 static inline int32_t ReadFrameInt32Slot(JitFrameLayout* fp, int32_t slot) {
80 return *(int32_t*)AddressOfFrameSlot(fp, slot);
81 }
82
ReadFrameBooleanSlot(JitFrameLayout * fp,int32_t slot)83 static inline bool ReadFrameBooleanSlot(JitFrameLayout* fp, int32_t slot) {
84 return *(bool*)AddressOfFrameSlot(fp, slot);
85 }
86
NumArgAndLocalSlots(const InlineFrameIterator & frame)87 static uint32_t NumArgAndLocalSlots(const InlineFrameIterator& frame) {
88 JSScript* script = frame.script();
89 return CountArgSlots(script, frame.maybeCalleeTemplate()) + script->nfixed();
90 }
91
// Close a live iterator found on an Ion frame's expression stack while
// unwinding for an exception or an uncatchable error. |tn| must be a ForIn
// or Destructuring try note; the iterator object (and, for destructuring,
// the "done" flag next to it) is recovered from the frame's snapshot.
static void CloseLiveIteratorIon(JSContext* cx,
                                 const InlineFrameIterator& frame,
                                 const TryNote* tn) {
  MOZ_ASSERT(tn->kind() == TryNoteKind::ForIn ||
             tn->kind() == TryNoteKind::Destructuring);

  // ForIn keeps one slot (the iterator) on the stack; destructuring keeps
  // two (iterator and "done"), hence the deeper minimum depth.
  bool isDestructuring = tn->kind() == TryNoteKind::Destructuring;
  MOZ_ASSERT_IF(!isDestructuring, tn->stackDepth > 0);
  MOZ_ASSERT_IF(isDestructuring, tn->stackDepth > 1);

  SnapshotIterator si = frame.snapshotIterator();

  // Skip stack slots until we reach the iterator object on the stack. For
  // the destructuring case, we also need to get the "done" value.
  uint32_t stackSlot = tn->stackDepth;
  uint32_t adjust = isDestructuring ? 2 : 1;
  uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - adjust;

  for (unsigned i = 0; i < skipSlots; i++) {
    si.skip();
  }

  // Recover the iterator value without running any side-effectful fallback
  // (Fallback_DoNothing); it must be recoverable here.
  MaybeReadFallback recover(cx, cx->activation()->asJit(), &frame.frame(),
                            MaybeReadFallback::Fallback_DoNothing);
  Value v = si.maybeRead(recover);
  MOZ_RELEASE_ASSERT(v.isObject());
  RootedObject iterObject(cx, &v.toObject());

  if (isDestructuring) {
    RootedValue doneValue(cx, si.read());
    MOZ_RELEASE_ASSERT(!doneValue.isMagic());
    bool done = ToBoolean(doneValue);
    // Do not call IteratorClose if the destructuring iterator is already
    // done.
    if (done) {
      return;
    }
  }

  // Pick the closing protocol: catchable exceptions run the full close path,
  // uncatchable unwinding only releases the iterator.
  if (cx->isExceptionPending()) {
    if (tn->kind() == TryNoteKind::ForIn) {
      CloseIterator(iterObject);
    } else {
      IteratorCloseForException(cx, iterObject);
    }
  } else {
    UnwindIteratorForUncatchableException(iterObject);
  }
}
141
142 class IonTryNoteFilter {
143 uint32_t depth_;
144
145 public:
IonTryNoteFilter(const InlineFrameIterator & frame)146 explicit IonTryNoteFilter(const InlineFrameIterator& frame) {
147 uint32_t base = NumArgAndLocalSlots(frame);
148 SnapshotIterator si = frame.snapshotIterator();
149 MOZ_ASSERT(si.numAllocations() >= base);
150 depth_ = si.numAllocations() - base;
151 }
152
operator ()(const TryNote * note)153 bool operator()(const TryNote* note) { return note->stackDepth <= depth_; }
154 };
155
// Iterates the try notes of the innermost (possibly inlined) Ion frame's
// script, applying IonTryNoteFilter to drop notes that are not live at the
// frame's current stack depth.
class TryNoteIterIon : public TryNoteIter<IonTryNoteFilter> {
 public:
  TryNoteIterIon(JSContext* cx, const InlineFrameIterator& frame)
      : TryNoteIter(cx, frame.script(), frame.pc(), IonTryNoteFilter(frame)) {}
};
161
ShouldBailoutForDebugger(JSContext * cx,const InlineFrameIterator & frame,bool hitBailoutException)162 static bool ShouldBailoutForDebugger(JSContext* cx,
163 const InlineFrameIterator& frame,
164 bool hitBailoutException) {
165 if (hitBailoutException) {
166 MOZ_ASSERT(!cx->isPropagatingForcedReturn());
167 return false;
168 }
169
170 // Bail out if we're propagating a forced return, even if the realm is no
171 // longer a debuggee.
172 if (cx->isPropagatingForcedReturn()) {
173 return true;
174 }
175
176 if (!cx->realm()->isDebuggee()) {
177 return false;
178 }
179
180 // Bail out if there's a catchable exception and we are the debuggee of a
181 // Debugger with a live onExceptionUnwind hook.
182 if (cx->isExceptionPending() &&
183 DebugAPI::hasExceptionUnwindHook(cx->global())) {
184 return true;
185 }
186
187 // Bail out if a Debugger has observed this frame (e.g., for onPop).
188 JitActivation* act = cx->activation()->asJit();
189 RematerializedFrame* rematFrame =
190 act->lookupRematerializedFrame(frame.frame().fp(), frame.frameNo());
191 return rematFrame && rematFrame->isDebuggee();
192 }
193
// Unwind one (possibly inlined) Ion frame. First give the debugger a chance
// to force a bailout to baseline; otherwise walk the frame's try notes,
// closing live iterators and bailing out to baseline catch handlers.
// |*hitBailoutException| is set when a bailout attempt fails (e.g. OOM) so
// no further bailouts are attempted for this physical frame.
static void HandleExceptionIon(JSContext* cx, const InlineFrameIterator& frame,
                               ResumeFromException* rfe,
                               bool* hitBailoutException) {
  if (ShouldBailoutForDebugger(cx, frame, *hitBailoutException)) {
    // We do the following:
    //
    // 1. Bailout to baseline to reconstruct a baseline frame.
    // 2. Resume immediately into the exception tail afterwards, and
    //    handle the exception again with the top frame now a baseline
    //    frame.
    //
    // An empty exception info denotes that we're propagating an Ion
    // exception due to debug mode, which BailoutIonToBaseline needs to
    // know. This is because we might not be able to fully reconstruct up
    // to the stack depth at the snapshot, as we could've thrown in the
    // middle of a call.
    ExceptionBailoutInfo propagateInfo;
    if (ExceptionHandlerBailout(cx, frame, rfe, propagateInfo)) {
      return;
    }
    *hitBailoutException = true;
  }

  RootedScript script(cx, frame.script());

  for (TryNoteIterIon tni(cx, frame); !tni.done(); ++tni) {
    const TryNote* tn = *tni;
    switch (tn->kind()) {
      case TryNoteKind::ForIn:
      case TryNoteKind::Destructuring:
        CloseLiveIteratorIon(cx, frame, tn);
        break;

      case TryNoteKind::Catch:
        if (cx->isExceptionPending()) {
          // Ion can compile try-catch, but bailing out to catch
          // exceptions is slow. Reset the warm-up counter so that if we
          // catch many exceptions we won't Ion-compile the script.
          script->resetWarmUpCounterToDelayIonCompilation();

          // A previous bailout attempt failed; skip this handler and keep
          // unwinding.
          if (*hitBailoutException) {
            break;
          }

          // Bailout at the start of the catch block.
          jsbytecode* catchPC = script->offsetToPC(tn->start + tn->length);
          ExceptionBailoutInfo excInfo(frame.frameNo(), catchPC,
                                       tn->stackDepth);
          if (ExceptionHandlerBailout(cx, frame, rfe, excInfo)) {
            // Record exception locations to allow scope unwinding in
            // |FinishBailoutToBaseline|
            MOZ_ASSERT(cx->isExceptionPending());
            rfe->bailoutInfo->tryPC =
                UnwindEnvironmentToTryPc(frame.script(), tn);
            rfe->bailoutInfo->faultPC = frame.pc();
            return;
          }

          *hitBailoutException = true;
          MOZ_ASSERT(cx->isExceptionPending());
        }
        break;

      case TryNoteKind::ForOf:
      case TryNoteKind::Loop:
        break;

      // TryNoteKind::ForOfIterclose is handled internally by the try note
      // iterator.
      default:
        MOZ_CRASH("Unexpected try note");
    }
  }
}
268
OnLeaveBaselineFrame(JSContext * cx,const JSJitFrameIter & frame,jsbytecode * pc,ResumeFromException * rfe,bool frameOk)269 static void OnLeaveBaselineFrame(JSContext* cx, const JSJitFrameIter& frame,
270 jsbytecode* pc, ResumeFromException* rfe,
271 bool frameOk) {
272 BaselineFrame* baselineFrame = frame.baselineFrame();
273 if (jit::DebugEpilogue(cx, baselineFrame, pc, frameOk)) {
274 rfe->kind = ResumeFromException::RESUME_FORCED_RETURN;
275 rfe->framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
276 rfe->stackPointer = reinterpret_cast<uint8_t*>(baselineFrame);
277 }
278 }
279
BaselineFrameAndStackPointersFromTryNote(const TryNote * tn,const JSJitFrameIter & frame,uint8_t ** framePointer,uint8_t ** stackPointer)280 static inline void BaselineFrameAndStackPointersFromTryNote(
281 const TryNote* tn, const JSJitFrameIter& frame, uint8_t** framePointer,
282 uint8_t** stackPointer) {
283 JSScript* script = frame.baselineFrame()->script();
284 *framePointer = frame.fp() - BaselineFrame::FramePointerOffset;
285 *stackPointer = *framePointer - BaselineFrame::Size() -
286 (script->nfixed() + tn->stackDepth) * sizeof(Value);
287 }
288
SettleOnTryNote(JSContext * cx,const TryNote * tn,const JSJitFrameIter & frame,EnvironmentIter & ei,ResumeFromException * rfe,jsbytecode ** pc)289 static void SettleOnTryNote(JSContext* cx, const TryNote* tn,
290 const JSJitFrameIter& frame, EnvironmentIter& ei,
291 ResumeFromException* rfe, jsbytecode** pc) {
292 RootedScript script(cx, frame.baselineFrame()->script());
293
294 // Unwind environment chain (pop block objects).
295 if (cx->isExceptionPending()) {
296 UnwindEnvironment(cx, ei, UnwindEnvironmentToTryPc(script, tn));
297 }
298
299 // Compute base pointer and stack pointer.
300 BaselineFrameAndStackPointersFromTryNote(tn, frame, &rfe->framePointer,
301 &rfe->stackPointer);
302
303 // Compute the pc.
304 *pc = script->offsetToPC(tn->start + tn->length);
305 }
306
307 class BaselineTryNoteFilter {
308 const JSJitFrameIter& frame_;
309
310 public:
BaselineTryNoteFilter(const JSJitFrameIter & frame)311 explicit BaselineTryNoteFilter(const JSJitFrameIter& frame) : frame_(frame) {}
operator ()(const TryNote * note)312 bool operator()(const TryNote* note) {
313 BaselineFrame* frame = frame_.baselineFrame();
314
315 uint32_t numValueSlots = frame_.baselineFrameNumValueSlots();
316 MOZ_RELEASE_ASSERT(numValueSlots >= frame->script()->nfixed());
317
318 uint32_t currDepth = numValueSlots - frame->script()->nfixed();
319 return note->stackDepth <= currDepth;
320 }
321 };
322
// Iterates the try notes covering |pc| in a baseline frame's script,
// applying BaselineTryNoteFilter to drop notes deeper than the frame's
// current expression stack.
class TryNoteIterBaseline : public TryNoteIter<BaselineTryNoteFilter> {
 public:
  TryNoteIterBaseline(JSContext* cx, const JSJitFrameIter& frame,
                      jsbytecode* pc)
      : TryNoteIter(cx, frame.script(), pc, BaselineTryNoteFilter(frame)) {}
};
329
330 // Close all live iterators on a BaselineFrame due to exception unwinding. The
331 // pc parameter is updated to where the envs have been unwound to.
CloseLiveIteratorsBaselineForUncatchableException(JSContext * cx,const JSJitFrameIter & frame,jsbytecode * pc)332 static void CloseLiveIteratorsBaselineForUncatchableException(
333 JSContext* cx, const JSJitFrameIter& frame, jsbytecode* pc) {
334 for (TryNoteIterBaseline tni(cx, frame, pc); !tni.done(); ++tni) {
335 const TryNote* tn = *tni;
336 switch (tn->kind()) {
337 case TryNoteKind::ForIn: {
338 uint8_t* framePointer;
339 uint8_t* stackPointer;
340 BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
341 &stackPointer);
342 Value iterValue(*(Value*)stackPointer);
343 RootedObject iterObject(cx, &iterValue.toObject());
344 UnwindIteratorForUncatchableException(iterObject);
345 break;
346 }
347
348 default:
349 break;
350 }
351 }
352 }
353
// Walk the try notes covering |*pc| in a baseline interpreter frame and
// settle on the first note that handles the pending exception, filling in
// |rfe| with the resume state. Returns false when closing a destructuring
// iterator itself threw a new exception, in which case the caller must
// restart exception handling at the updated |*pc|.
static bool ProcessTryNotesBaseline(JSContext* cx, const JSJitFrameIter& frame,
                                    EnvironmentIter& ei,
                                    ResumeFromException* rfe, jsbytecode** pc) {
  MOZ_ASSERT(frame.baselineFrame()->runningInInterpreter(),
             "Caller must ensure frame is an interpreter frame");

  RootedScript script(cx, frame.baselineFrame()->script());

  for (TryNoteIterBaseline tni(cx, frame, *pc); !tni.done(); ++tni) {
    const TryNote* tn = *tni;

    MOZ_ASSERT(cx->isExceptionPending());
    switch (tn->kind()) {
      case TryNoteKind::Catch: {
        // If we're closing a generator, we have to skip catch blocks.
        if (cx->isClosingGenerator()) {
          break;
        }

        SettleOnTryNote(cx, tn, frame, ei, rfe, pc);

        // Ion can compile try-catch, but bailing out to catch
        // exceptions is slow. Reset the warm-up counter so that if we
        // catch many exceptions we won't Ion-compile the script.
        script->resetWarmUpCounterToDelayIonCompilation();

        // Resume at the start of the catch block.
        const BaselineInterpreter& interp =
            cx->runtime()->jitRuntime()->baselineInterpreter();
        frame.baselineFrame()->setInterpreterFields(*pc);
        rfe->kind = ResumeFromException::RESUME_CATCH;
        rfe->target = interp.interpretOpAddr().value;
        return true;
      }

      case TryNoteKind::Finally: {
        SettleOnTryNote(cx, tn, frame, ei, rfe, pc);

        const BaselineInterpreter& interp =
            cx->runtime()->jitRuntime()->baselineInterpreter();
        frame.baselineFrame()->setInterpreterFields(*pc);
        rfe->kind = ResumeFromException::RESUME_FINALLY;
        rfe->target = interp.interpretOpAddr().value;

        // Drop the exception instead of leaking cross compartment data.
        if (!cx->getPendingException(
                MutableHandleValue::fromMarkedLocation(&rfe->exception))) {
          rfe->exception = UndefinedValue();
        }
        cx->clearPendingException();
        return true;
      }

      case TryNoteKind::ForIn: {
        // Close the for-in iterator stored at the note's stack position.
        uint8_t* framePointer;
        uint8_t* stackPointer;
        BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
                                                 &stackPointer);
        Value iterValue(*reinterpret_cast<Value*>(stackPointer));
        JSObject* iterObject = &iterValue.toObject();
        CloseIterator(iterObject);
        break;
      }

      case TryNoteKind::Destructuring: {
        uint8_t* framePointer;
        uint8_t* stackPointer;
        BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer,
                                                 &stackPointer);
        // Note: if this ever changes, also update the
        // TryNoteKind::Destructuring code in WarpBuilder.cpp!
        RootedValue doneValue(cx, *(reinterpret_cast<Value*>(stackPointer)));
        MOZ_RELEASE_ASSERT(!doneValue.isMagic());
        bool done = ToBoolean(doneValue);
        if (!done) {
          // The iterator sits one Value above the "done" flag.
          Value iterValue(*(reinterpret_cast<Value*>(stackPointer) + 1));
          RootedObject iterObject(cx, &iterValue.toObject());
          if (!IteratorCloseForException(cx, iterObject)) {
            // Closing threw: settle here and signal a restart.
            SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
            return false;
          }
        }
        break;
      }

      case TryNoteKind::ForOf:
      case TryNoteKind::Loop:
        break;

      // TryNoteKind::ForOfIterClose is handled internally by the try note
      // iterator.
      default:
        MOZ_CRASH("Invalid try note");
    }
  }
  return true;
}
452
// Handle an exception (or uncatchable unwind) for a baseline frame: force
// the frame into the baseline interpreter, give the debugger's
// onExceptionUnwind a chance to intervene, process try notes, and otherwise
// run the debug epilogue before the frame is popped.
static void HandleExceptionBaseline(JSContext* cx, JSJitFrameIter& frame,
                                    CommonFrameLayout* prevFrame,
                                    ResumeFromException* rfe) {
  MOZ_ASSERT(frame.isBaselineJS());
  MOZ_ASSERT(prevFrame);

  jsbytecode* pc;
  frame.baselineScriptAndPc(nullptr, &pc);

  // Ensure the BaselineFrame is an interpreter frame. This is easy to do and
  // simplifies the code below and interaction with DebugModeOSR.
  //
  // Note that we never return to this frame via the previous frame's return
  // address. We could set the return address to nullptr to ensure it's never
  // used, but the profiler expects a non-null return value for its JitCode map
  // lookup so we have to use an address in the interpreter code instead.
  if (!frame.baselineFrame()->runningInInterpreter()) {
    const BaselineInterpreter& interp =
        cx->runtime()->jitRuntime()->baselineInterpreter();
    uint8_t* retAddr = interp.codeRaw();
    BaselineFrame* baselineFrame = frame.baselineFrame();

    // Suppress profiler sampling while we fix up the frame to ensure the
    // sampler thread doesn't see an inconsistent state.
    AutoSuppressProfilerSampling suppressProfilerSampling(cx);
    baselineFrame->switchFromJitToInterpreterForExceptionHandler(cx, pc);
    prevFrame->setReturnAddress(retAddr);

    // Ensure the current iterator's resumePCInCurrentFrame_ isn't used
    // anywhere.
    frame.setResumePCInCurrentFrame(nullptr);
  }

  bool frameOk = false;
  RootedScript script(cx, frame.baselineFrame()->script());

  // Count this throw for PC-count profiling, if enabled for the script.
  if (script->hasScriptCounts()) {
    PCCounts* counts = script->getThrowCounts(pc);
    // If we failed to allocate, then skip the increment and continue to
    // handle the exception.
    if (counts) {
      counts->numExec()++;
    }
  }

  bool hasTryNotes = !script->trynotes().empty();

// Exception handling restarts here whenever a handler (debugger hook or
// iterator close) changed the pending-exception state.
again:
  if (cx->isExceptionPending()) {
    if (!cx->isClosingGenerator()) {
      if (!DebugAPI::onExceptionUnwind(cx, frame.baselineFrame())) {
        if (!cx->isExceptionPending()) {
          goto again;
        }
      }
      // Ensure that the debugger hasn't returned 'true' while clearing the
      // exception state.
      MOZ_ASSERT(cx->isExceptionPending());
    }

    if (hasTryNotes) {
      EnvironmentIter ei(cx, frame.baselineFrame(), pc);
      if (!ProcessTryNotesBaseline(cx, frame, ei, rfe, &pc)) {
        goto again;
      }
      if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME) {
        // No need to increment the PCCounts number of execution here,
        // as the interpreter increments any PCCounts if present.
        MOZ_ASSERT_IF(script->hasScriptCounts(), script->maybeGetPCCounts(pc));
        return;
      }
    }

    frameOk = HandleClosingGeneratorReturn(cx, frame.baselineFrame(), frameOk);
  } else {
    if (hasTryNotes) {
      CloseLiveIteratorsBaselineForUncatchableException(cx, frame, pc);
    }

    // We may be propagating a forced return from a debugger hook function.
    if (MOZ_UNLIKELY(cx->isPropagatingForcedReturn())) {
      cx->clearPropagatingForcedReturn();
      frameOk = true;
    }
  }

  OnLeaveBaselineFrame(cx, frame, pc, rfe, frameOk);
}
541
GetLastProfilingFrame(ResumeFromException * rfe)542 static void* GetLastProfilingFrame(ResumeFromException* rfe) {
543 switch (rfe->kind) {
544 case ResumeFromException::RESUME_ENTRY_FRAME:
545 case ResumeFromException::RESUME_WASM:
546 return nullptr;
547
548 // The following all return into baseline frames.
549 case ResumeFromException::RESUME_CATCH:
550 case ResumeFromException::RESUME_FINALLY:
551 case ResumeFromException::RESUME_FORCED_RETURN:
552 return rfe->framePointer + BaselineFrame::FramePointerOffset;
553
554 // When resuming into a bailed-out ion frame, use the bailout info to
555 // find the frame we are resuming into.
556 case ResumeFromException::RESUME_BAILOUT:
557 return rfe->bailoutInfo->incomingStack;
558 }
559
560 MOZ_CRASH("Invalid ResumeFromException type!");
561 return nullptr;
562 }
563
// Delegate exception handling for wasm frames to the wasm runtime; on
// return the iterator has been advanced to completion by wasm::HandleThrow.
void HandleExceptionWasm(JSContext* cx, wasm::WasmFrameIter* iter,
                         ResumeFromException* rfe) {
  MOZ_ASSERT(cx->activation()->asJit()->hasWasmExitFP());
  wasm::HandleThrow(cx, *iter, rfe);
  MOZ_ASSERT(iter->done());
}
570
// Exception-handler entry point invoked from JIT code. Walks the JS/wasm
// frames of the innermost activation, giving each a chance to handle the
// pending exception, and fills |rfe| with the state the exception tail
// needs to resume execution (or to return to the entry frame).
void HandleException(ResumeFromException* rfe) {
  JSContext* cx = TlsContext.get();
  TraceLoggerThread* logger = TraceLoggerForCurrentThread(cx);

#ifdef DEBUG
  cx->runtime()->jitRuntime()->clearDisallowArbitraryCode();
#endif

  // On every exit path, record the last JS frame for the profiler so its
  // view of the stack stays consistent with the resumed state.
  auto resetProfilerFrame = mozilla::MakeScopeExit([=] {
    if (!cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(
            cx->runtime())) {
      return;
    }

    MOZ_ASSERT(cx->jitActivation == cx->profilingActivation());

    void* lastProfilingFrame = GetLastProfilingFrame(rfe);
    cx->jitActivation->setLastProfilingFrame(lastProfilingFrame);
  });

  rfe->kind = ResumeFromException::RESUME_ENTRY_FRAME;

  JitSpew(JitSpew_IonInvalidate, "handling exception");

  JitActivation* activation = cx->activation()->asJit();

#ifdef CHECK_OSIPOINT_REGISTERS
  if (JitOptions.checkOsiPointRegisters) {
    activation->setCheckRegs(false);
  }
#endif

  JitFrameIter iter(cx->activation()->asJit(),
                    /* mustUnwindActivation = */ true);
  CommonFrameLayout* prevJitFrame = nullptr;
  while (!iter.done()) {
    if (iter.isWasm()) {
      prevJitFrame = nullptr;
      HandleExceptionWasm(cx, &iter.asWasm(), rfe);
      // If a wasm try-catch handler is found, we can immediately jump to it
      // and quit iterating through the stack.
      if (rfe->kind == ResumeFromException::RESUME_WASM_CATCH) {
        return;
      }
      if (!iter.done()) {
        ++iter;
      }
      continue;
    }

    JSJitFrameIter& frame = iter.asJSJit();

    // JIT code can enter same-compartment realms, so reset cx->realm to
    // this frame's realm.
    if (frame.isScripted()) {
      cx->setRealmForJitExceptionHandler(iter.realm());
    }

    if (frame.isIonJS()) {
      // Search each inlined frame for live iterator objects, and close
      // them.
      InlineFrameIterator frames(cx, &frame);

      // Invalidation state will be the same for all inlined scripts in the
      // frame.
      IonScript* ionScript = nullptr;
      bool invalidated = frame.checkInvalidation(&ionScript);

#ifdef JS_TRACE_LOGGING
      if (logger && cx->realm()->isDebuggee() && logger->enabled()) {
        logger->disable(/* force = */ true,
                        "Forcefully disabled tracelogger, due to "
                        "throwing an exception with an active Debugger "
                        "in IonMonkey.");
      }
#endif

      // If we hit OOM or overrecursion while bailing out, we don't
      // attempt to bail out a second time for this Ion frame. Just unwind
      // and continue at the next frame.
      bool hitBailoutException = false;
      for (;;) {
        HandleExceptionIon(cx, frames, rfe, &hitBailoutException);

        if (rfe->kind == ResumeFromException::RESUME_BAILOUT) {
          if (invalidated) {
            ionScript->decrementInvalidationCount(
                cx->runtime()->defaultFreeOp());
          }
          return;
        }

        MOZ_ASSERT(rfe->kind == ResumeFromException::RESUME_ENTRY_FRAME);

        // When profiling, each frame popped needs a notification that
        // the function has exited, so invoke the probe that a function
        // is exiting.

        JSScript* script = frames.script();
        probes::ExitScript(cx, script, script->function(),
                           /* popProfilerFrame = */ false);
        if (!frames.more()) {
          TraceLogStopEvent(logger, TraceLogger_IonMonkey);
          TraceLogStopEvent(logger, TraceLogger_Scripts);
          break;
        }
        ++frames;
      }

      // Remove left-over state which might have been needed for bailout.
      activation->removeIonFrameRecovery(frame.jsFrame());
      activation->removeRematerializedFrame(frame.fp());

      // If invalidated, decrement the number of frames remaining on the
      // stack for the given IonScript.
      if (invalidated) {
        ionScript->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
      }

    } else if (frame.isBaselineJS()) {
      HandleExceptionBaseline(cx, frame, prevJitFrame, rfe);

      // A handler was found in this frame; resume there.
      if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME &&
          rfe->kind != ResumeFromException::RESUME_FORCED_RETURN) {
        return;
      }

      TraceLogStopEvent(logger, TraceLogger_Baseline);
      TraceLogStopEvent(logger, TraceLogger_Scripts);

      // Unwind profiler pseudo-stack
      JSScript* script = frame.script();
      probes::ExitScript(cx, script, script->function(),
                         /* popProfilerFrame = */ false);

      if (rfe->kind == ResumeFromException::RESUME_FORCED_RETURN) {
        return;
      }
    }

    prevJitFrame = frame.current();
    ++iter;
  }

  // Wasm sets its own value of SP in HandleExceptionWasm.
  if (iter.isJSJit()) {
    rfe->stackPointer = iter.asJSJit().fp();
  }
}
720
// Turns a JitFrameLayout into an ExitFrameLayout. Note that it has to be a
// bare exit frame so it's ignored by TraceJitExitFrame. Idempotent: calling
// it again for the same frame is a no-op.
void EnsureBareExitFrame(JitActivation* act, JitFrameLayout* frame) {
  ExitFrameLayout* exitFrame = reinterpret_cast<ExitFrameLayout*>(frame);

  if (act->jsExitFP() == (uint8_t*)frame) {
    // If we already called this function for the current frame, do
    // nothing.
    MOZ_ASSERT(exitFrame->isBareExit());
    return;
  }

#ifdef DEBUG
  // Sanity-check that |frame| really is the top scripted frame of the
  // activation and that the footer fits below the current exit FP.
  JSJitFrameIter iter(act);
  while (!iter.isScripted()) {
    ++iter;
  }
  MOZ_ASSERT(iter.current() == frame, "|frame| must be the top JS frame");

  MOZ_ASSERT(!!act->jsExitFP());
  MOZ_ASSERT((uint8_t*)exitFrame->footer() >= act->jsExitFP(),
             "Must have space for ExitFooterFrame before jsExitFP");
#endif

  act->setJSExitFP((uint8_t*)frame);
  exitFrame->footer()->setBareExitFrame();
  MOZ_ASSERT(exitFrame->isBareExit());
}
749
MaybeForwardedScriptFromCalleeToken(CalleeToken token)750 JSScript* MaybeForwardedScriptFromCalleeToken(CalleeToken token) {
751 switch (GetCalleeTokenTag(token)) {
752 case CalleeToken_Script:
753 return MaybeForwarded(CalleeTokenToScript(token));
754 case CalleeToken_Function:
755 case CalleeToken_FunctionConstructing: {
756 JSFunction* fun = MaybeForwarded(CalleeTokenToFunction(token));
757 return MaybeForwarded(fun)->nonLazyScript();
758 }
759 }
760 MOZ_CRASH("invalid callee token tag");
761 }
762
TraceCalleeToken(JSTracer * trc,CalleeToken token)763 CalleeToken TraceCalleeToken(JSTracer* trc, CalleeToken token) {
764 switch (CalleeTokenTag tag = GetCalleeTokenTag(token)) {
765 case CalleeToken_Function:
766 case CalleeToken_FunctionConstructing: {
767 JSFunction* fun = CalleeTokenToFunction(token);
768 TraceRoot(trc, &fun, "jit-callee");
769 return CalleeToToken(fun, tag == CalleeToken_FunctionConstructing);
770 }
771 case CalleeToken_Script: {
772 JSScript* script = CalleeTokenToScript(token);
773 TraceRoot(trc, &script, "jit-script");
774 return CalleeToToken(script);
775 }
776 default:
777 MOZ_CRASH("unknown callee token type");
778 }
779 }
780
slotRef(SafepointSlotEntry where)781 uintptr_t* JitFrameLayout::slotRef(SafepointSlotEntry where) {
782 if (where.stack) {
783 return (uintptr_t*)((uint8_t*)this - where.slot);
784 }
785 return (uintptr_t*)((uint8_t*)argv() + where.slot);
786 }
787
788 #ifdef JS_NUNBOX32
ReadAllocation(const JSJitFrameIter & frame,const LAllocation * a)789 static inline uintptr_t ReadAllocation(const JSJitFrameIter& frame,
790 const LAllocation* a) {
791 if (a->isGeneralReg()) {
792 Register reg = a->toGeneralReg()->reg();
793 return frame.machineState().read(reg);
794 }
795 return *frame.jsFrame()->slotRef(SafepointSlotEntry(a));
796 }
797 #endif
798
// Trace the |this| value, any extra actual arguments, and the new.target
// slot of a JIT frame. See the comment below for when formals must be traced
// here rather than by the safepoint/snapshot.
static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
                                  JitFrameLayout* layout) {
  // Trace |this| and any extra actual arguments for an Ion frame. Tracing
  // of formal arguments is taken care of by the frame's safepoint/snapshot,
  // except when the script might have lazy arguments or rest, in which case
  // we trace them as well. We also have to trace formals if we have a
  // LazyLink frame or an InterpreterStub frame or a special JSJit to wasm
  // frame (since wasm doesn't use snapshots).

  if (!CalleeTokenIsFunction(layout->calleeToken())) {
    return;
  }

  size_t nargs = layout->numActualArgs();
  // nformals counts arguments already covered by the safepoint/snapshot;
  // it stays 0 when formals must be traced here (see comment above).
  size_t nformals = 0;

  JSFunction* fun = CalleeTokenToFunction(layout->calleeToken());
  if (frame.type() != FrameType::JSJitToWasm &&
      !frame.isExitFrameLayout<CalledFromJitExitFrameLayout>() &&
      !fun->nonLazyScript()->mayReadFrameArgsDirectly()) {
    nformals = fun->nargs();
  }

  // new.target is stored after whichever of actuals/formals is larger.
  size_t newTargetOffset = std::max(nargs, fun->nargs());

  Value* argv = layout->argv();

  // Trace |this|.
  TraceRoot(trc, argv, "ion-thisv");

  // Trace actual arguments beyond the formals. Note + 1 for thisv.
  for (size_t i = nformals + 1; i < nargs + 1; i++) {
    TraceRoot(trc, &argv[i], "ion-argv");
  }

  // Always trace the new.target from the frame. It's not in the snapshots.
  // +1 to pass |this|
  if (CalleeTokenIsConstructing(layout->calleeToken())) {
    TraceRoot(trc, &argv[1 + newTargetOffset], "ion-newTarget");
  }
}
840
841 #ifdef JS_NUNBOX32
WriteAllocation(const JSJitFrameIter & frame,const LAllocation * a,uintptr_t value)842 static inline void WriteAllocation(const JSJitFrameIter& frame,
843 const LAllocation* a, uintptr_t value) {
844 if (a->isGeneralReg()) {
845 Register reg = a->toGeneralReg()->reg();
846 frame.machineState().write(reg, value);
847 } else {
848 *frame.jsFrame()->slotRef(SafepointSlotEntry(a)) = value;
849 }
850 }
851 #endif
852
// Trace all GC things reachable from an Ion JS frame: the callee token,
// |this|/arguments/new.target, the safepoint's GC and value slots, and any
// spilled registers recorded by the safepoint.
static void TraceIonJSFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));

  IonScript* ionScript = nullptr;
  if (frame.checkInvalidation(&ionScript)) {
    // This frame has been invalidated, meaning that its IonScript is no
    // longer reachable through the callee token (JSFunction/JSScript->ion
    // is now nullptr or recompiled). Manually trace it here.
    ionScript->trace(trc);
  } else {
    ionScript = frame.ionScriptFromCalleeToken();
  }

  TraceThisAndArguments(trc, frame, frame.jsFrame());

  // Find the safepoint for the frame's current resume pc.
  const SafepointIndex* si =
      ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());

  SafepointReader safepoint(ionScript, si);

  // Scan through slots which contain pointers (or on punboxing systems,
  // actual values).
  SafepointSlotEntry entry;

  while (safepoint.getGcSlot(&entry)) {
    uintptr_t* ref = layout->slotRef(entry);
    TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(ref),
                            "ion-gc-slot");
  }

  // Walk spilled registers (backwards from the spill base) and trace those
  // the safepoint marks as holding GC pointers or boxed values.
  uintptr_t* spill = frame.spillBase();
  LiveGeneralRegisterSet gcRegs = safepoint.gcSpills();
  LiveGeneralRegisterSet valueRegs = safepoint.valueSpills();
  for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
       iter.more(); ++iter) {
    --spill;
    if (gcRegs.has(*iter)) {
      TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill),
                              "ion-gc-spill");
    } else if (valueRegs.has(*iter)) {
      TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
    }
  }

#ifdef JS_PUNBOX64
  while (safepoint.getValueSlot(&entry)) {
    Value* v = (Value*)layout->slotRef(entry);
    TraceRoot(trc, v, "ion-gc-slot");
  }
#else
  // On nunbox systems a Value's tag and payload may live in separate
  // allocations; reassemble, trace, and write back a moved payload.
  LAllocation type, payload;
  while (safepoint.getNunboxSlot(&type, &payload)) {
    JSValueTag tag = JSValueTag(ReadAllocation(frame, &type));
    uintptr_t rawPayload = ReadAllocation(frame, &payload);

    Value v = Value::fromTagAndPayload(tag, rawPayload);
    TraceRoot(trc, &v, "ion-torn-value");

    if (v != Value::fromTagAndPayload(tag, rawPayload)) {
      // GC moved the value, replace the stored payload.
      rawPayload = v.toNunboxPayload();
      WriteAllocation(frame, &payload, rawPayload);
    }
  }
#endif
}
921
// Trace a frame that is bailing out from Ion to Baseline. There is no
// safepoint available in this state, so a SnapshotIterator is used to visit
// every readable allocation that would be needed to rebuild the frame.
static void TraceBailoutFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));

  // We have to trace the list of actual arguments, as only formal arguments
  // are represented in the Snapshot.
  TraceThisAndArguments(trc, frame, frame.jsFrame());

  // Under a bailout we do not have a Safepoint with which to iterate over
  // only the GC things. Thus we use a SnapshotIterator to trace all the
  // locations which would be used to reconstruct the Baseline frame.
  //
  // Note that at the time where this function is called, we have not yet
  // started to reconstruct baseline frames.

  // The vector of recover instructions is already traced as part of the
  // JitActivation.
  SnapshotIterator snapIter(frame,
                            frame.activation()->bailoutData()->machineState());

  // For each instruction, we read the allocations without evaluating the
  // recover instruction, nor reconstructing the frame. We are only looking at
  // tracing readable allocations.
  while (true) {
    while (snapIter.moreAllocations()) {
      snapIter.traceAllocation(trc);
    }

    if (!snapIter.moreInstructions()) {
      break;
    }
    snapIter.nextInstruction();
  }
}
957
// After a minor GC, fix up any slots/elements pointers in this Ion frame
// that referred to buffers allocated in the nursery, both in spilled
// registers and in safepoint-recorded stack slots.
static void UpdateIonJSFrameForMinorGC(JSRuntime* rt,
                                       const JSJitFrameIter& frame) {
  // Minor GCs may move slots/elements allocated in the nursery. Update
  // any slots/elements pointers stored in this frame.

  JitFrameLayout* layout = (JitFrameLayout*)frame.fp();

  IonScript* ionScript = nullptr;
  if (frame.checkInvalidation(&ionScript)) {
    // This frame has been invalidated, meaning that its IonScript is no
    // longer reachable through the callee token (JSFunction/JSScript->ion
    // is now nullptr or recompiled). checkInvalidation returned it for us.
  } else {
    ionScript = frame.ionScriptFromCalleeToken();
  }

  Nursery& nursery = rt->gc.nursery();

  const SafepointIndex* si =
      ionScript->getSafepointIndex(frame.resumePCinCurrentFrame());
  SafepointReader safepoint(ionScript, si);

  // Forward any slots/elements pointers held in spilled registers.
  LiveGeneralRegisterSet slotsRegs = safepoint.slotsOrElementsSpills();
  uintptr_t* spill = frame.spillBase();
  for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills());
       iter.more(); ++iter) {
    --spill;
    if (slotsRegs.has(*iter)) {
      nursery.forwardBufferPointer(spill);
    }
  }

  // Skip to the right place in the safepoint: the reader's streams must be
  // consumed in order, so drain the entries we don't care about here.
  SafepointSlotEntry entry;
  while (safepoint.getGcSlot(&entry)) {
  }

#ifdef JS_PUNBOX64
  while (safepoint.getValueSlot(&entry)) {
  }
#else
  LAllocation type, payload;
  while (safepoint.getNunboxSlot(&type, &payload)) {
  }
#endif

  // Finally, forward the slots/elements pointers stored in stack slots.
  while (safepoint.getSlotsOrElementsSlot(&entry)) {
    nursery.forwardBufferPointer(layout->slotRef(entry));
  }
}
1008
static void TraceBaselineStubFrame(JSTracer* trc, const JSJitFrameIter& frame) {
  // Trace the ICStub pointer stored in the stub frame. This is necessary
  // so that we don't destroy the stub code after unlinking the stub.

  MOZ_ASSERT(frame.type() == FrameType::BaselineStub);
  JitStubFrameLayout* layout = (JitStubFrameLayout*)frame.fp();

  // The stub pointer may be null for frames that don't store one.
  if (ICStub* stub = layout->maybeStubPtr()) {
    if (stub->isFallback()) {
      // Fallback stubs use runtime-wide trampoline code we don't need to trace.
      MOZ_ASSERT(stub->usesTrampolineCode());
    } else {
      // Only CacheIR stubs that can make GC calls may be on the stack during
      // a GC, so tracing is required (and valid) only for those.
      MOZ_ASSERT(stub->toCacheIRStub()->makesGCCalls());
      stub->toCacheIRStub()->trace(trc);
    }
  }
}
1026
TraceIonICCallFrame(JSTracer * trc,const JSJitFrameIter & frame)1027 static void TraceIonICCallFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1028 MOZ_ASSERT(frame.type() == FrameType::IonICCall);
1029 IonICCallFrameLayout* layout = (IonICCallFrameLayout*)frame.fp();
1030 TraceRoot(trc, layout->stubCode(), "ion-ic-call-code");
1031 }
1032
1033 #if defined(JS_CODEGEN_ARM64) || defined(JS_CODEGEN_MIPS32)
alignDoubleSpill(uint8_t * pointer)1034 uint8_t* alignDoubleSpill(uint8_t* pointer) {
1035 uintptr_t address = reinterpret_cast<uintptr_t>(pointer);
1036 address &= ~(uintptr_t(ABIStackAlignment) - 1);
1037 return reinterpret_cast<uint8_t*>(address);
1038 }
1039 #endif
1040
1041 #ifdef JS_CODEGEN_MIPS32
// MIPS32 copies double-sized (by-ref) VM wrapper arguments to an
// ABI-aligned area below the exit footer; trace any Values living there.
static void TraceJitExitFrameCopiedArguments(JSTracer* trc,
                                             const VMFunctionData* f,
                                             ExitFooterFrame* footer) {
  uint8_t* doubleArgs = footer->alignedForABI();
  // Skip over the out-param slot if the function has a Handle out-param.
  if (f->outParam == Type_Handle) {
    doubleArgs -= sizeof(Value);
  }
  // Step back over the copied double-by-ref argument area.
  doubleArgs -= f->doubleByRefArgs() * sizeof(double);

  for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
    if (f->argProperties(explicitArg) == VMFunctionData::DoubleByRef) {
      // Arguments with double size can only have RootValue type.
      if (f->argRootType(explicitArg) == VMFunctionData::RootValue) {
        TraceRoot(trc, reinterpret_cast<Value*>(doubleArgs), "ion-vm-args");
      } else {
        MOZ_ASSERT(f->argRootType(explicitArg) == VMFunctionData::RootNone);
      }
      doubleArgs += sizeof(double);
    }
  }
}
1063 #else
// On platforms other than MIPS32, VM wrapper arguments are not copied below
// the exit footer, so there is nothing extra to trace here.
static void TraceJitExitFrameCopiedArguments(JSTracer* trc,
                                             const VMFunctionData* f,
                                             ExitFooterFrame* footer) {
  // This is a no-op on other platforms.
}
1069 #endif
1070
TraceJitExitFrame(JSTracer * trc,const JSJitFrameIter & frame)1071 static void TraceJitExitFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1072 ExitFooterFrame* footer = frame.exitFrame()->footer();
1073
1074 // This corresponds to the case where we have build a fake exit frame which
1075 // handles the case of a native function call. We need to trace the argument
1076 // vector of the function call, and also new.target if it was a constructing
1077 // call.
1078 if (frame.isExitFrameLayout<NativeExitFrameLayout>()) {
1079 NativeExitFrameLayout* native =
1080 frame.exitFrame()->as<NativeExitFrameLayout>();
1081 size_t len = native->argc() + 2;
1082 Value* vp = native->vp();
1083 TraceRootRange(trc, len, vp, "ion-native-args");
1084 if (frame.isExitFrameLayout<ConstructNativeExitFrameLayout>()) {
1085 TraceRoot(trc, vp + len, "ion-native-new-target");
1086 }
1087 return;
1088 }
1089
1090 if (frame.isExitFrameLayout<IonOOLNativeExitFrameLayout>()) {
1091 IonOOLNativeExitFrameLayout* oolnative =
1092 frame.exitFrame()->as<IonOOLNativeExitFrameLayout>();
1093 TraceRoot(trc, oolnative->stubCode(), "ion-ool-native-code");
1094 TraceRoot(trc, oolnative->vp(), "iol-ool-native-vp");
1095 size_t len = oolnative->argc() + 1;
1096 TraceRootRange(trc, len, oolnative->thisp(), "ion-ool-native-thisargs");
1097 return;
1098 }
1099
1100 if (frame.isExitFrameLayout<IonOOLProxyExitFrameLayout>()) {
1101 IonOOLProxyExitFrameLayout* oolproxy =
1102 frame.exitFrame()->as<IonOOLProxyExitFrameLayout>();
1103 TraceRoot(trc, oolproxy->stubCode(), "ion-ool-proxy-code");
1104 TraceRoot(trc, oolproxy->vp(), "ion-ool-proxy-vp");
1105 TraceRoot(trc, oolproxy->id(), "ion-ool-proxy-id");
1106 TraceRoot(trc, oolproxy->proxy(), "ion-ool-proxy-proxy");
1107 return;
1108 }
1109
1110 if (frame.isExitFrameLayout<IonDOMExitFrameLayout>()) {
1111 IonDOMExitFrameLayout* dom = frame.exitFrame()->as<IonDOMExitFrameLayout>();
1112 TraceRoot(trc, dom->thisObjAddress(), "ion-dom-args");
1113 if (dom->isMethodFrame()) {
1114 IonDOMMethodExitFrameLayout* method =
1115 reinterpret_cast<IonDOMMethodExitFrameLayout*>(dom);
1116 size_t len = method->argc() + 2;
1117 Value* vp = method->vp();
1118 TraceRootRange(trc, len, vp, "ion-dom-args");
1119 } else {
1120 TraceRoot(trc, dom->vp(), "ion-dom-args");
1121 }
1122 return;
1123 }
1124
1125 if (frame.isExitFrameLayout<CalledFromJitExitFrameLayout>()) {
1126 auto* layout = frame.exitFrame()->as<CalledFromJitExitFrameLayout>();
1127 JitFrameLayout* jsLayout = layout->jsFrame();
1128 jsLayout->replaceCalleeToken(
1129 TraceCalleeToken(trc, jsLayout->calleeToken()));
1130 TraceThisAndArguments(trc, frame, jsLayout);
1131 return;
1132 }
1133
1134 if (frame.isExitFrameLayout<DirectWasmJitCallFrameLayout>()) {
1135 // Nothing needs to be traced here at the moment -- the arguments to the
1136 // callee are traced by the callee, and the inlined caller does not push
1137 // anything else.
1138 return;
1139 }
1140
1141 if (frame.isBareExit()) {
1142 // Nothing to trace. Fake exit frame pushed for VM functions with
1143 // nothing to trace on the stack.
1144 return;
1145 }
1146
1147 MOZ_ASSERT(frame.exitFrame()->isWrapperExit());
1148
1149 const VMFunctionData* f = footer->function();
1150 MOZ_ASSERT(f);
1151
1152 // Trace arguments of the VM wrapper.
1153 uint8_t* argBase = frame.exitFrame()->argBase();
1154 for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) {
1155 switch (f->argRootType(explicitArg)) {
1156 case VMFunctionData::RootNone:
1157 break;
1158 case VMFunctionData::RootObject: {
1159 // Sometimes we can bake in HandleObjects to nullptr.
1160 JSObject** pobj = reinterpret_cast<JSObject**>(argBase);
1161 if (*pobj) {
1162 TraceRoot(trc, pobj, "ion-vm-args");
1163 }
1164 break;
1165 }
1166 case VMFunctionData::RootString:
1167 TraceRoot(trc, reinterpret_cast<JSString**>(argBase), "ion-vm-args");
1168 break;
1169 case VMFunctionData::RootFunction:
1170 TraceRoot(trc, reinterpret_cast<JSFunction**>(argBase), "ion-vm-args");
1171 break;
1172 case VMFunctionData::RootValue:
1173 TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
1174 break;
1175 case VMFunctionData::RootId:
1176 TraceRoot(trc, reinterpret_cast<jsid*>(argBase), "ion-vm-args");
1177 break;
1178 case VMFunctionData::RootCell:
1179 TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase),
1180 "ion-vm-args");
1181 break;
1182 case VMFunctionData::RootBigInt:
1183 TraceRoot(trc, reinterpret_cast<JS::BigInt**>(argBase), "ion-vm-args");
1184 break;
1185 }
1186
1187 switch (f->argProperties(explicitArg)) {
1188 case VMFunctionData::WordByValue:
1189 case VMFunctionData::WordByRef:
1190 argBase += sizeof(void*);
1191 break;
1192 case VMFunctionData::DoubleByValue:
1193 case VMFunctionData::DoubleByRef:
1194 argBase += 2 * sizeof(void*);
1195 break;
1196 }
1197 }
1198
1199 if (f->outParam == Type_Handle) {
1200 switch (f->outParamRootType) {
1201 case VMFunctionData::RootNone:
1202 MOZ_CRASH("Handle outparam must have root type");
1203 case VMFunctionData::RootObject:
1204 TraceRoot(trc, footer->outParam<JSObject*>(), "ion-vm-out");
1205 break;
1206 case VMFunctionData::RootString:
1207 TraceRoot(trc, footer->outParam<JSString*>(), "ion-vm-out");
1208 break;
1209 case VMFunctionData::RootFunction:
1210 TraceRoot(trc, footer->outParam<JSFunction*>(), "ion-vm-out");
1211 break;
1212 case VMFunctionData::RootValue:
1213 TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
1214 break;
1215 case VMFunctionData::RootId:
1216 TraceRoot(trc, footer->outParam<jsid>(), "ion-vm-outvp");
1217 break;
1218 case VMFunctionData::RootCell:
1219 TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(),
1220 "ion-vm-out");
1221 break;
1222 case VMFunctionData::RootBigInt:
1223 TraceRoot(trc, footer->outParam<JS::BigInt*>(), "ion-vm-out");
1224 break;
1225 }
1226 }
1227
1228 TraceJitExitFrameCopiedArguments(trc, f, footer);
1229 }
1230
TraceRectifierFrame(JSTracer * trc,const JSJitFrameIter & frame)1231 static void TraceRectifierFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1232 // Trace thisv.
1233 //
1234 // Baseline JIT code generated as part of the ICCall_Fallback stub may use
1235 // it if we're calling a constructor that returns a primitive value.
1236 RectifierFrameLayout* layout = (RectifierFrameLayout*)frame.fp();
1237 TraceRoot(trc, &layout->argv()[0], "ion-thisv");
1238 }
1239
TraceJSJitToWasmFrame(JSTracer * trc,const JSJitFrameIter & frame)1240 static void TraceJSJitToWasmFrame(JSTracer* trc, const JSJitFrameIter& frame) {
1241 // This is doing a subset of TraceIonJSFrame, since the callee doesn't
1242 // have a script.
1243 JitFrameLayout* layout = (JitFrameLayout*)frame.fp();
1244 layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
1245 TraceThisAndArguments(trc, frame, layout);
1246 }
1247
// Trace one JitActivation: its rematerialized frames, Ion recovery data, and
// every frame (JS JIT or wasm) it holds, dispatching on the frame type.
static void TraceJitActivation(JSTracer* trc, JitActivation* activation) {
#ifdef CHECK_OSIPOINT_REGISTERS
  if (JitOptions.checkOsiPointRegisters) {
    // GC can modify spilled registers, breaking our register checks.
    // To handle this, we disable these checks for the current VM call
    // when a GC happens.
    activation->setCheckRegs(false);
  }
#endif

  activation->traceRematerializedFrames(trc);
  activation->traceIonRecovery(trc);

  // This is used for sanity checking continuity of the sequence of wasm stack
  // maps as we unwind. It has no functional purpose.
  uintptr_t highestByteVisitedInPrevWasmFrame = 0;

  for (JitFrameIter frames(activation); !frames.done(); ++frames) {
    if (frames.isJSJit()) {
      const JSJitFrameIter& jitFrame = frames.asJSJit();
      switch (jitFrame.type()) {
        case FrameType::Exit:
          TraceJitExitFrame(trc, jitFrame);
          break;
        case FrameType::BaselineJS:
          jitFrame.baselineFrame()->trace(trc, jitFrame);
          break;
        case FrameType::IonJS:
          TraceIonJSFrame(trc, jitFrame);
          break;
        case FrameType::BaselineStub:
          TraceBaselineStubFrame(trc, jitFrame);
          break;
        case FrameType::Bailout:
          TraceBailoutFrame(trc, jitFrame);
          break;
        case FrameType::Rectifier:
          TraceRectifierFrame(trc, jitFrame);
          break;
        case FrameType::IonICCall:
          TraceIonICCallFrame(trc, jitFrame);
          break;
        case FrameType::WasmToJSJit:
          // Ignore: this is a special marker used to let the
          // JitFrameIter know the frame above is a wasm frame, handled
          // in the next iteration.
          break;
        case FrameType::JSJitToWasm:
          TraceJSJitToWasmFrame(trc, jitFrame);
          break;
        default:
          MOZ_CRASH("unexpected frame type");
      }
      // Crossing a JS JIT frame resets the wasm stack-map continuity check.
      highestByteVisitedInPrevWasmFrame = 0; /* "unknown" */
    } else {
      MOZ_ASSERT(frames.isWasm());
      uint8_t* nextPC = frames.resumePCinCurrentFrame();
      MOZ_ASSERT(nextPC != 0);
      wasm::WasmFrameIter& wasmFrameIter = frames.asWasm();
      wasm::Instance* instance = wasmFrameIter.instance();
      instance->trace(trc);
      highestByteVisitedInPrevWasmFrame = instance->traceFrame(
          trc, wasmFrameIter, nextPC, highestByteVisitedInPrevWasmFrame);
    }
  }
}
1314
TraceJitActivations(JSContext * cx,JSTracer * trc)1315 void TraceJitActivations(JSContext* cx, JSTracer* trc) {
1316 for (JitActivationIterator activations(cx); !activations.done();
1317 ++activations) {
1318 TraceJitActivation(trc, activations->asJit());
1319 }
1320 }
1321
// After a minor GC, walk all JIT activations and fix up nursery-allocated
// slots/elements pointers stored in Ion frames. Only IonJS frames can hold
// such pointers (see UpdateIonJSFrameForMinorGC).
void UpdateJitActivationsForMinorGC(JSRuntime* rt) {
  MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
  JSContext* cx = rt->mainContextFromOwnThread();
  for (JitActivationIterator activations(cx); !activations.done();
       ++activations) {
    for (OnlyJSJitFrameIter iter(activations); !iter.done(); ++iter) {
      if (iter.frame().type() == FrameType::IonJS) {
        UpdateIonJSFrameForMinorGC(rt, iter.frame());
      }
    }
  }
}
1334
// Return the JSScript of the topmost scripted JIT frame on |cx|'s current
// (JIT) activation. The top frame must be an exit frame, optionally followed
// by a baseline stub frame, then the scripted frame.
JSScript* GetTopJitJSScript(JSContext* cx) {
  JSJitFrameIter frame(cx->activation()->asJit());
  MOZ_ASSERT(frame.type() == FrameType::Exit);
  ++frame;

  // Skip an intervening baseline stub frame, if present.
  if (frame.isBaselineStub()) {
    ++frame;
    MOZ_ASSERT(frame.isBaselineJS());
  }

  MOZ_ASSERT(frame.isScripted());
  return frame.script();
}
1348
// Recover the script and bytecode pc of the innermost scripted JIT frame.
// Uses a per-context return-address cache (PcScriptCache) to avoid the
// expensive script/pc computation when possible.
void GetPcScript(JSContext* cx, JSScript** scriptRes, jsbytecode** pcRes) {
  JitSpew(JitSpew_IonSnapshots, "Recover PC & Script from the last frame.");

  // Recover the return address so that we can look it up in the
  // PcScriptCache, as script/pc computation is expensive.
  JitActivationIterator actIter(cx);
  OnlyJSJitFrameIter it(actIter);
  uint8_t* retAddr;
  if (it.frame().isExitFrame()) {
    ++it;

    // Skip rectifier frames.
    if (it.frame().isRectifier()) {
      ++it;
      MOZ_ASSERT(it.frame().isBaselineStub() || it.frame().isBaselineJS() ||
                 it.frame().isIonJS());
    }

    // Skip Baseline/Ion stub and IC call frames.
    if (it.frame().isBaselineStub()) {
      ++it;
      MOZ_ASSERT(it.frame().isBaselineJS());
    } else if (it.frame().isIonICCall()) {
      ++it;
      MOZ_ASSERT(it.frame().isIonJS());
    }

    MOZ_ASSERT(it.frame().isBaselineJS() || it.frame().isIonJS());

    // Don't use the return address and the cache if the BaselineFrame is
    // running in the Baseline Interpreter. In this case the bytecode pc is
    // cheap to get, so we won't benefit from the cache, and the return address
    // does not map to a single bytecode pc.
    if (it.frame().isBaselineJS() &&
        it.frame().baselineFrame()->runningInInterpreter()) {
      it.frame().baselineScriptAndPc(scriptRes, pcRes);
      return;
    }

    retAddr = it.frame().resumePCinCurrentFrame();
  } else {
    MOZ_ASSERT(it.frame().isBailoutJS());
    retAddr = it.frame().returnAddress();
  }

  MOZ_ASSERT(retAddr);

  uint32_t hash = PcScriptCache::Hash(retAddr);

  // Lazily initialize the cache. The allocation may safely fail and will not
  // GC.
  if (MOZ_UNLIKELY(cx->ionPcScriptCache == nullptr)) {
    cx->ionPcScriptCache =
        MakeUnique<PcScriptCache>(cx->runtime()->gc.gcNumber());
  }

  // Fast path: the return address is in the cache (ref() may still be null
  // if the allocation above failed).
  if (cx->ionPcScriptCache.ref() &&
      cx->ionPcScriptCache->get(cx->runtime(), hash, retAddr, scriptRes,
                                pcRes)) {
    return;
  }

  // Lookup failed: undertake expensive process to determine script and pc.
  if (it.frame().isIonJS() || it.frame().isBailoutJS()) {
    InlineFrameIterator ifi(cx, &it.frame());
    *scriptRes = ifi.script();
    *pcRes = ifi.pc();
  } else {
    MOZ_ASSERT(it.frame().isBaselineJS());
    it.frame().baselineScriptAndPc(scriptRes, pcRes);
  }

  // Add entry to cache.
  if (cx->ionPcScriptCache.ref()) {
    cx->ionPcScriptCache->add(hash, retAddr, *pcRes, *scriptRes);
  }
}
1426
// Construct an empty, uninitialized result vector tied to Ion frame |fp|;
// init() must be called before the results can be used.
RInstructionResults::RInstructionResults(JitFrameLayout* fp)
    : results_(nullptr), fp_(fp), initialized_(false) {}
1429
// Move constructor: steal |src|'s result vector. |src| is marked
// de-initialized so it will not be used (or traced) afterwards.
RInstructionResults::RInstructionResults(RInstructionResults&& src)
    : results_(std::move(src.results_)),
      fp_(src.fp_),
      initialized_(src.initialized_) {
  src.initialized_ = false;
}
1436
// Move assignment, implemented as destroy + placement-new move-construction
// so it stays in sync with the move constructor.
RInstructionResults& RInstructionResults::operator=(RInstructionResults&& rhs) {
  MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
  this->~RInstructionResults();
  new (this) RInstructionResults(std::move(rhs));
  return *this;
}
1443
RInstructionResults::~RInstructionResults() {
  // results_ is freed by the UniquePtr.
}
1447
// Allocate storage for |numResults| recover-instruction results, each
// pre-filled with the JS_ION_BAILOUT magic value as a guard against reading
// a result before it has been recovered. Returns false on OOM.
bool RInstructionResults::init(JSContext* cx, uint32_t numResults) {
  if (numResults) {
    results_ = cx->make_unique<Values>();
    if (!results_ || !results_->growBy(numResults)) {
      return false;
    }

    Value guard = MagicValue(JS_ION_BAILOUT);
    for (size_t i = 0; i < numResults; i++) {
      (*results_)[i].init(guard);
    }
  }

  initialized_ = true;
  return true;
}
1464
// True once init() succeeded (or after being the target of a move).
bool RInstructionResults::isInitialized() const { return initialized_; }
1466
// Number of result slots; only valid after a successful init().
size_t RInstructionResults::length() const { return results_->length(); }
1468
// The Ion frame these results belong to; always non-null.
JitFrameLayout* RInstructionResults::frame() const {
  MOZ_ASSERT(fp_);
  return fp_;
}
1473
// Mutable access to the result at |index|; |index| must be < length().
HeapPtr<Value>& RInstructionResults::operator[](size_t index) {
  return (*results_)[index];
}
1477
// Trace all stored result Values.
void RInstructionResults::trace(JSTracer* trc) {
  // Note: The vector necessarily exists, otherwise this object would not have
  // been stored on the activation from where the trace function is called.
  TraceRange(trc, results_->length(), results_->begin(), "ion-recover-results");
}
1483
// Build a SnapshotIterator for |iter|'s frame, decoding the snapshot and
// recover buffers out of the frame's IonScript. |machineState| supplies the
// register values used when reading register allocations.
SnapshotIterator::SnapshotIterator(const JSJitFrameIter& iter,
                                   const MachineState* machineState)
    : snapshot_(iter.ionScript()->snapshots(), iter.snapshotOffset(),
                iter.ionScript()->snapshotsRVATableSize(),
                iter.ionScript()->snapshotsListSize()),
      recover_(snapshot_, iter.ionScript()->recovers(),
               iter.ionScript()->recoversSize()),
      fp_(iter.jsFrame()),
      machine_(machineState),
      ionScript_(iter.ionScript()),
      instructionResults_(nullptr) {}
1495
// Default-construct an empty iterator with all data sources null; it must be
// assigned from a real iterator before use.
SnapshotIterator::SnapshotIterator()
    : snapshot_(nullptr, 0, 0, 0),
      recover_(snapshot_, nullptr, 0),
      fp_(nullptr),
      machine_(nullptr),
      ionScript_(nullptr),
      instructionResults_(nullptr) {}
1503
// Read a raw machine word from the frame's stack at |offset|.
uintptr_t SnapshotIterator::fromStack(int32_t offset) const {
  return ReadFrameSlot(fp_, offset);
}
1507
// Box a raw JSObject pointer as an ObjectValue; the payload must be non-null.
static Value FromObjectPayload(uintptr_t payload) {
  MOZ_ASSERT(payload != 0);
  return ObjectValue(*reinterpret_cast<JSObject*>(payload));
}
1512
// Box a raw JSString pointer as a StringValue.
static Value FromStringPayload(uintptr_t payload) {
  return StringValue(reinterpret_cast<JSString*>(payload));
}
1516
// Box a raw JS::Symbol pointer as a SymbolValue.
static Value FromSymbolPayload(uintptr_t payload) {
  return SymbolValue(reinterpret_cast<JS::Symbol*>(payload));
}
1520
// Box a raw JS::BigInt pointer as a BigIntValue.
static Value FromBigIntPayload(uintptr_t payload) {
  return BigIntValue(reinterpret_cast<JS::BigInt*>(payload));
}
1524
// Box a raw payload word as a Value of the statically-known |type|.
// Doubles are not handled here; callers read them via register/stack double
// slots instead (see allocationValue).
static Value FromTypedPayload(JSValueType type, uintptr_t payload) {
  switch (type) {
    case JSVAL_TYPE_INT32:
      return Int32Value(payload);
    case JSVAL_TYPE_BOOLEAN:
      return BooleanValue(!!payload);
    case JSVAL_TYPE_STRING:
      return FromStringPayload(payload);
    case JSVAL_TYPE_SYMBOL:
      return FromSymbolPayload(payload);
    case JSVAL_TYPE_BIGINT:
      return FromBigIntPayload(payload);
    case JSVAL_TYPE_OBJECT:
      return FromObjectPayload(payload);
    default:
      MOZ_CRASH("unexpected type - needs payload");
  }
}
1543
// Return whether |alloc| can currently be read: all of its register/stack
// locations must be available, and any recover-instruction result it depends
// on must have been computed (unless |rm| allows the default value).
bool SnapshotIterator::allocationReadable(const RValueAllocation& alloc,
                                          ReadMethod rm) {
  // If we have to recover stores, and if we are not interested in the
  // default value of the instruction, then we have to check if the recover
  // instruction results are available.
  if (alloc.needSideEffect() && !(rm & RM_AlwaysDefault)) {
    if (!hasInstructionResults()) {
      return false;
    }
  }

  switch (alloc.mode()) {
    case RValueAllocation::DOUBLE_REG:
      return hasRegister(alloc.fpuReg());

    case RValueAllocation::TYPED_REG:
      return hasRegister(alloc.reg2());

#if defined(JS_NUNBOX32)
    // On nunbox systems both halves (tag and payload) must be readable.
    case RValueAllocation::UNTYPED_REG_REG:
      return hasRegister(alloc.reg()) && hasRegister(alloc.reg2());
    case RValueAllocation::UNTYPED_REG_STACK:
      return hasRegister(alloc.reg()) && hasStack(alloc.stackOffset2());
    case RValueAllocation::UNTYPED_STACK_REG:
      return hasStack(alloc.stackOffset()) && hasRegister(alloc.reg2());
    case RValueAllocation::UNTYPED_STACK_STACK:
      return hasStack(alloc.stackOffset()) && hasStack(alloc.stackOffset2());
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG:
      return hasRegister(alloc.reg());
    case RValueAllocation::UNTYPED_STACK:
      return hasStack(alloc.stackOffset());
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      return hasInstructionResult(alloc.index());
    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Readable if the default constant is acceptable, or if the recover
      // instruction's result has been computed.
      return rm & RM_AlwaysDefault || hasInstructionResult(alloc.index());

    default:
      // Constants and similar allocations are always readable.
      return true;
  }
}
1587
// Materialize the Value described by |alloc|, reading from the IonScript
// constant pool, registers, stack slots, or recover-instruction results as
// the allocation mode dictates. The allocation must be readable (see
// allocationReadable).
Value SnapshotIterator::allocationValue(const RValueAllocation& alloc,
                                        ReadMethod rm) {
  switch (alloc.mode()) {
    case RValueAllocation::CONSTANT:
      return ionScript_->getConstant(alloc.index());

    case RValueAllocation::CST_UNDEFINED:
      return UndefinedValue();

    case RValueAllocation::CST_NULL:
      return NullValue();

    case RValueAllocation::DOUBLE_REG:
      return DoubleValue(fromRegister(alloc.fpuReg()));

    case RValueAllocation::ANY_FLOAT_REG: {
      union {
        double d;
        float f;
      } pun;
      MOZ_ASSERT(alloc.fpuReg().isSingle());
      pun.d = fromRegister(alloc.fpuReg());
      // The register contains the encoding of a float32. We just read
      // the bits without making any conversion.
      return Float32Value(pun.f);
    }

    case RValueAllocation::ANY_FLOAT_STACK:
      return Float32Value(ReadFrameFloat32Slot(fp_, alloc.stackOffset()));

    case RValueAllocation::TYPED_REG:
      return FromTypedPayload(alloc.knownType(), fromRegister(alloc.reg2()));

    case RValueAllocation::TYPED_STACK: {
      // The payload lives on the stack; how to read it depends on the
      // statically-known type.
      switch (alloc.knownType()) {
        case JSVAL_TYPE_DOUBLE:
          return DoubleValue(ReadFrameDoubleSlot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_INT32:
          return Int32Value(ReadFrameInt32Slot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_BOOLEAN:
          return BooleanValue(ReadFrameBooleanSlot(fp_, alloc.stackOffset2()));
        case JSVAL_TYPE_STRING:
          return FromStringPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_SYMBOL:
          return FromSymbolPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_BIGINT:
          return FromBigIntPayload(fromStack(alloc.stackOffset2()));
        case JSVAL_TYPE_OBJECT:
          return FromObjectPayload(fromStack(alloc.stackOffset2()));
        default:
          MOZ_CRASH("Unexpected type");
      }
    }

#if defined(JS_NUNBOX32)
    // On nunbox systems the tag and payload are stored separately; each may
    // independently be in a register or on the stack.
    case RValueAllocation::UNTYPED_REG_REG: {
      return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
                                      fromRegister(alloc.reg2()));
    }

    case RValueAllocation::UNTYPED_REG_STACK: {
      return Value::fromTagAndPayload(JSValueTag(fromRegister(alloc.reg())),
                                      fromStack(alloc.stackOffset2()));
    }

    case RValueAllocation::UNTYPED_STACK_REG: {
      return Value::fromTagAndPayload(
          JSValueTag(fromStack(alloc.stackOffset())),
          fromRegister(alloc.reg2()));
    }

    case RValueAllocation::UNTYPED_STACK_STACK: {
      return Value::fromTagAndPayload(
          JSValueTag(fromStack(alloc.stackOffset())),
          fromStack(alloc.stackOffset2()));
    }
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG: {
      return Value::fromRawBits(fromRegister(alloc.reg()));
    }

    case RValueAllocation::UNTYPED_STACK: {
      return Value::fromRawBits(fromStack(alloc.stackOffset()));
    }
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      return fromInstructionResult(alloc.index());

    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Prefer the recover-instruction result when it exists and |rm| allows
      // normal reads; otherwise fall back to the default constant.
      if (rm & RM_Normal && hasInstructionResult(alloc.index())) {
        return fromInstructionResult(alloc.index());
      }
      MOZ_ASSERT(rm & RM_AlwaysDefault);
      return ionScript_->getConstant(alloc.index2());

    default:
      MOZ_CRASH("huh?");
  }
}
1688
// Return the address of the storage backing a float allocation: either the
// machine-state slot of the FPU register or the frame's stack slot. Only
// valid for ANY_FLOAT_REG / ANY_FLOAT_STACK allocations.
const FloatRegisters::RegisterContent* SnapshotIterator::floatAllocationPointer(
    const RValueAllocation& alloc) const {
  switch (alloc.mode()) {
    case RValueAllocation::ANY_FLOAT_REG:
      return machine_->address(alloc.fpuReg());

    case RValueAllocation::ANY_FLOAT_STACK:
      return (FloatRegisters::RegisterContent*)AddressOfFrameSlot(
          fp_, alloc.stackOffset());

    default:
      MOZ_CRASH("Not a float allocation.");
  }
}
1703
// Read allocation |a| if possible. If it is unreadable and |fallback| allows
// recovering results, evaluate the recover instructions (crashing on OOM,
// since callers cannot report errors) and retry; otherwise return the
// fallback's placeholder value.
Value SnapshotIterator::maybeRead(const RValueAllocation& a,
                                  MaybeReadFallback& fallback) {
  if (allocationReadable(a)) {
    return allocationValue(a);
  }

  if (fallback.canRecoverResults()) {
    // Code paths which are calling maybeRead are not always capable of
    // returning an error code, as these code paths used to be infallible.
    AutoEnterOOMUnsafeRegion oomUnsafe;
    if (!initInstructionResults(fallback)) {
      oomUnsafe.crash("js::jit::SnapshotIterator::maybeRead");
    }

    // With results recovered, the allocation must now be readable.
    if (allocationReadable(a)) {
      return allocationValue(a);
    }

    MOZ_ASSERT_UNREACHABLE("All allocations should be readable.");
  }

  return fallback.unreadablePlaceholder();
}
1727
// Write the GC-thing payload of |v| back into the storage described by
// |alloc|, so that a moving GC's relocation of the pointee is reflected in
// the frame/registers. |v| must hold a GC thing.
void SnapshotIterator::writeAllocationValuePayload(
    const RValueAllocation& alloc, const Value& v) {
  MOZ_ASSERT(v.isGCThing());

  switch (alloc.mode()) {
    case RValueAllocation::CONSTANT:
      // Update the IonScript's constant pool entry in place.
      ionScript_->getConstant(alloc.index()) = v;
      break;

    case RValueAllocation::CST_UNDEFINED:
    case RValueAllocation::CST_NULL:
    case RValueAllocation::DOUBLE_REG:
    case RValueAllocation::ANY_FLOAT_REG:
    case RValueAllocation::ANY_FLOAT_STACK:
      MOZ_CRASH("Not a GC thing: Unexpected write");
      break;

    case RValueAllocation::TYPED_REG:
      machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
      break;

    case RValueAllocation::TYPED_STACK:
      switch (alloc.knownType()) {
        default:
          MOZ_CRASH("Not a GC thing: Unexpected write");
          break;
        case JSVAL_TYPE_STRING:
        case JSVAL_TYPE_SYMBOL:
        case JSVAL_TYPE_BIGINT:
        case JSVAL_TYPE_OBJECT:
          WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
          break;
      }
      break;

#if defined(JS_NUNBOX32)
    // On nunbox systems only the payload half needs rewriting; the tag is
    // unchanged by a GC move.
    case RValueAllocation::UNTYPED_REG_REG:
    case RValueAllocation::UNTYPED_STACK_REG:
      machine_->write(alloc.reg2(), uintptr_t(v.toGCThing()));
      break;

    case RValueAllocation::UNTYPED_REG_STACK:
    case RValueAllocation::UNTYPED_STACK_STACK:
      WriteFrameSlot(fp_, alloc.stackOffset2(), uintptr_t(v.toGCThing()));
      break;
#elif defined(JS_PUNBOX64)
    case RValueAllocation::UNTYPED_REG:
      machine_->write(alloc.reg(), v.asRawBits());
      break;

    case RValueAllocation::UNTYPED_STACK:
      WriteFrameSlot(fp_, alloc.stackOffset(), v.asRawBits());
      break;
#endif

    case RValueAllocation::RECOVER_INSTRUCTION:
      MOZ_CRASH("Recover instructions are handled by the JitActivation.");
      break;

    case RValueAllocation::RI_WITH_DEFAULT_CST:
      // Assume that we are always going to be writing on the default value
      // while tracing.
      ionScript_->getConstant(alloc.index2()) = v;
      break;

    default:
      MOZ_CRASH("huh?");
  }
}
1797
traceAllocation(JSTracer * trc)1798 void SnapshotIterator::traceAllocation(JSTracer* trc) {
1799 RValueAllocation alloc = readAllocation();
1800 if (!allocationReadable(alloc, RM_AlwaysDefault)) {
1801 return;
1802 }
1803
1804 Value v = allocationValue(alloc, RM_AlwaysDefault);
1805 if (!v.isGCThing()) {
1806 return;
1807 }
1808
1809 Value copy = v;
1810 TraceRoot(trc, &v, "ion-typed-reg");
1811 if (v != copy) {
1812 MOZ_ASSERT(SameType(v, copy));
1813 writeAllocationValuePayload(alloc, v);
1814 }
1815 }
1816
resumePoint() const1817 const RResumePoint* SnapshotIterator::resumePoint() const {
1818 return instruction()->toResumePoint();
1819 }
1820
numAllocations() const1821 uint32_t SnapshotIterator::numAllocations() const {
1822 return instruction()->numOperands();
1823 }
1824
pcOffset() const1825 uint32_t SnapshotIterator::pcOffset() const {
1826 return resumePoint()->pcOffset();
1827 }
1828
skipInstruction()1829 void SnapshotIterator::skipInstruction() {
1830 MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
1831 size_t numOperands = instruction()->numOperands();
1832 for (size_t i = 0; i < numOperands; i++) {
1833 skip();
1834 }
1835 nextInstruction();
1836 }
1837
// Make the results of this frame's recover instructions available through
// instructionResults_. If no results are registered on the activation yet,
// evaluate all recover instructions (optionally invalidating the IonScript
// first, depending on fallback.consequence) and register the results on the
// JitActivation. Returns false on OOM.
bool SnapshotIterator::initInstructionResults(MaybeReadFallback& fallback) {
  MOZ_ASSERT(fallback.canRecoverResults());
  JSContext* cx = fallback.maybeCx;

  // If there is only one resume point in the list of instructions, then there
  // is no instruction to recover, and thus no need to register any results.
  if (recover_.numInstructions() == 1) {
    return true;
  }

  JitFrameLayout* fp = fallback.frame->jsFrame();
  RInstructionResults* results = fallback.activation->maybeIonFrameRecovery(fp);
  if (!results) {
    AutoRealm ar(cx, fallback.frame->script());

    // We do not have the result yet, which means that an observable stack
    // slot is requested. As we do not want to bailout every time for the
    // same reason, we need to recompile without optimizing away the
    // observable stack slots. The script would later be recompiled to have
    // support for Argument objects.
    if (fallback.consequence == MaybeReadFallback::Fallback_Invalidate) {
      ionScript_->invalidate(cx, fallback.frame->script(),
                             /* resetUses = */ false,
                             "Observe recovered instruction.");
    }

    // Register the list of result on the activation. We need to do that
    // before we initialize the list such as if any recover instruction
    // cause a GC, we can ensure that the results are properly traced by the
    // activation.
    RInstructionResults tmp(fallback.frame->jsFrame());
    if (!fallback.activation->registerIonFrameRecovery(std::move(tmp))) {
      return false;
    }

    results = fallback.activation->maybeIonFrameRecovery(fp);

    // Start a new snapshot at the beginning of the JSJitFrameIter. This
    // SnapshotIterator is used for evaluating the content of all recover
    // instructions. The result is then saved on the JitActivation.
    MachineState machine = fallback.frame->machineState();
    SnapshotIterator s(*fallback.frame, &machine);
    if (!s.computeInstructionResults(cx, results)) {
      // If the evaluation failed because of OOMs, then we discard the
      // current set of result that we collected so far.
      fallback.activation->removeIonFrameRecovery(fp);
      return false;
    }
  }

  MOZ_ASSERT(results->isInitialized());
  // One result slot per recover instruction; the trailing resume point has
  // no result.
  MOZ_RELEASE_ASSERT(results->length() == recover_.numInstructions() - 1);
  instructionResults_ = results;
  return true;
}
1893
// Evaluate every recover instruction of the snapshot (all instructions except
// the trailing resume point) and store their results into |results|. This
// iterator is left untouched; the evaluation runs on a copy. Returns false on
// OOM or if a recover instruction fails.
bool SnapshotIterator::computeInstructionResults(
    JSContext* cx, RInstructionResults* results) const {
  MOZ_ASSERT(!results->isInitialized());
  // The caller must not have consumed any instruction yet.
  MOZ_ASSERT(recover_.numInstructionsRead() == 1);

  // The last instruction will always be a resume point.
  size_t numResults = recover_.numInstructions() - 1;
  if (!results->isInitialized()) {
    if (!results->init(cx, numResults)) {
      return false;
    }

    // No need to iterate over the only resume point.
    if (!numResults) {
      MOZ_ASSERT(results->isInitialized());
      return true;
    }

    // Avoid invoking the object metadata callback, which could try to walk the
    // stack while bailing out.
    gc::AutoSuppressGC suppressGC(cx);
    js::AutoSuppressAllocationMetadataBuilder suppressMetadata(cx);

    // Fill with the results of recover instructions.
    SnapshotIterator s(*this);
    s.instructionResults_ = results;
    while (s.moreInstructions()) {
      // Skip resume point and only interpret recover instructions.
      if (s.instruction()->isResumePoint()) {
        s.skipInstruction();
        continue;
      }

      if (!s.instruction()->recover(cx, s)) {
        return false;
      }
      s.nextInstruction();
    }
  }

  MOZ_ASSERT(results->isInitialized());
  return true;
}
1937
storeInstructionResult(const Value & v)1938 void SnapshotIterator::storeInstructionResult(const Value& v) {
1939 uint32_t currIns = recover_.numInstructionsRead() - 1;
1940 MOZ_ASSERT((*instructionResults_)[currIns].isMagic(JS_ION_BAILOUT));
1941 (*instructionResults_)[currIns] = v;
1942 }
1943
fromInstructionResult(uint32_t index) const1944 Value SnapshotIterator::fromInstructionResult(uint32_t index) const {
1945 MOZ_ASSERT(!(*instructionResults_)[index].isMagic(JS_ION_BAILOUT));
1946 return (*instructionResults_)[index];
1947 }
1948
// Advance past recover instructions until the iterator sits on a resume
// point, i.e. on the next frame.
void SnapshotIterator::settleOnFrame() {
  // Check that the current instruction can still be use.
  MOZ_ASSERT(snapshot_.numAllocationsRead() == 0);
  while (!instruction()->isResumePoint()) {
    skipInstruction();
  }
}
1956
// Move past the current frame's resume point and settle on the next frame.
void SnapshotIterator::nextFrame() {
  nextInstruction();
  settleOnFrame();
}
1961
maybeReadAllocByIndex(size_t index)1962 Value SnapshotIterator::maybeReadAllocByIndex(size_t index) {
1963 while (index--) {
1964 MOZ_ASSERT(moreAllocations());
1965 skip();
1966 }
1967
1968 Value s;
1969 {
1970 // This MaybeReadFallback method cannot GC.
1971 JS::AutoSuppressGCAnalysis nogc;
1972 MaybeReadFallback fallback(UndefinedValue());
1973 s = maybeRead(fallback);
1974 }
1975
1976 while (moreAllocations()) {
1977 skip();
1978 }
1979
1980 return s;
1981 }
1982
// Construct an iterator over the inline frames compressed in the snapshot of
// the given physical Jit frame, settled on the outermost inline frame.
InlineFrameIterator::InlineFrameIterator(JSContext* cx,
                                         const JSJitFrameIter* iter)
    : calleeTemplate_(cx),
      calleeRVA_(),
      script_(cx),
      pc_(nullptr),
      numActualArgs_(0) {
  resetOn(iter);
}
1992
// Copy-like constructor: re-create an iterator settled on the same inline
// frame as |iter|, with GC pointers freshly rooted in |cx|. A null |iter|
// produces a detached iterator.
InlineFrameIterator::InlineFrameIterator(JSContext* cx,
                                         const InlineFrameIterator* iter)
    : frame_(iter ? iter->frame_ : nullptr),
      framesRead_(0),
      frameCount_(iter ? iter->frameCount_ : UINT32_MAX),
      calleeTemplate_(cx),
      calleeRVA_(),
      script_(cx),
      pc_(nullptr),
      numActualArgs_(0) {
  if (frame_) {
    machine_ = iter->machine_;
    start_ = SnapshotIterator(*frame_, &machine_);

    // findNextFrame will iterate to the next frame and init. everything.
    // Therefore, to settle on the same frame, we report one frame less as
    // read.
    framesRead_ = iter->framesRead_ - 1;
    findNextFrame();
  }
}
2013
// Re-target the iterator onto the given physical frame (or detach it when
// |iter| is null) and settle on the outermost inline frame.
void InlineFrameIterator::resetOn(const JSJitFrameIter* iter) {
  frame_ = iter;
  framesRead_ = 0;
  frameCount_ = UINT32_MAX;  // Unknown until the snapshot is fully walked.

  if (iter) {
    machine_ = iter->machineState();
    start_ = SnapshotIterator(*iter, &machine_);
    findNextFrame();
  }
}
2025
// Walk the snapshot from the outermost frame down to the inline frame
// identified by framesRead_, updating calleeTemplate_, calleeRVA_, script_,
// pc_ and numActualArgs_ along the way. On the first walk (frameCount_ ==
// UINT32_MAX) this settles on the innermost frame and records the total
// number of inline frames.
void InlineFrameIterator::findNextFrame() {
  MOZ_ASSERT(more());

  si_ = start_;

  // Read the initial frame out of the C stack.
  calleeTemplate_ = frame_->maybeCallee();
  calleeRVA_ = RValueAllocation();
  script_ = frame_->script();
  MOZ_ASSERT(script_->hasBaselineScript());

  // Settle on the outermost frame without evaluating any instructions before
  // looking for a pc.
  si_.settleOnFrame();

  pc_ = script_->offsetToPC(si_.pcOffset());
  numActualArgs_ = 0xbadbad;  // Sentinel: must be overwritten below.

  // This unfortunately is O(n*m), because we must skip over outer frames
  // before reading inner ones.

  // The first time (frameCount_ == UINT32_MAX) we do not know the number of
  // frames that we are going to inspect. So we are iterating until there is
  // no more frames, to settle on the inner most frame and to count the number
  // of frames.
  size_t remaining = (frameCount_ != UINT32_MAX) ? frameNo() - 1 : SIZE_MAX;

  size_t i = 1;
  for (; i <= remaining && si_.moreFrames(); i++) {
    MOZ_ASSERT(IsIonInlinableOp(JSOp(*pc_)));

    // Recover the number of actual arguments from the script.
    if (JSOp(*pc_) != JSOp::FunApply) {
      numActualArgs_ = GET_ARGC(pc_);
    }
    if (JSOp(*pc_) == JSOp::FunCall) {
      // fun.call passes one argument (the |this| for the callee) that is not
      // an actual argument of the inlined function.
      if (numActualArgs_ > 0) {
        numActualArgs_--;
      }
    } else if (IsGetPropPC(pc_) || IsGetElemPC(pc_)) {
      numActualArgs_ = 0;
    } else if (IsSetPropPC(pc_)) {
      numActualArgs_ = 1;
    }

    if (numActualArgs_ == 0xbadbad) {
      MOZ_CRASH(
          "Couldn't deduce the number of arguments of an ionmonkey frame");
    }

    // Skip over non-argument slots, as well as |this|.
    bool skipNewTarget = IsConstructPC(pc_);
    unsigned skipCount =
        (si_.numAllocations() - 1) - numActualArgs_ - 1 - skipNewTarget;
    for (unsigned j = 0; j < skipCount; j++) {
      si_.skip();
    }

    // This value should correspond to the function which is being inlined.
    // The value must be readable to iterate over the inline frame. Most of
    // the time, these functions are stored as JSFunction constants,
    // register which are holding the JSFunction pointer, or recover
    // instruction with Default value.
    Value funval = si_.readWithDefault(&calleeRVA_);

    // Skip extra value allocations.
    while (si_.moreAllocations()) {
      si_.skip();
    }

    si_.nextFrame();

    calleeTemplate_ = &funval.toObject().as<JSFunction>();
    script_ = calleeTemplate_->nonLazyScript();
    MOZ_ASSERT(script_->hasBaselineScript());

    pc_ = script_->offsetToPC(si_.pcOffset());
  }

  // The first time we do not know the number of frames, we only settle on the
  // last frame, and update the number of frames based on the number of
  // iteration that we have done.
  if (frameCount_ == UINT32_MAX) {
    MOZ_ASSERT(!si_.moreFrames());
    frameCount_ = i;
  }

  framesRead_++;
}
2115
// Return the callee of this inline frame. When the callee was recorded as a
// recoverable allocation (calleeRVA_) and the fallback permits recovery, the
// exact function is read out of the snapshot; otherwise the template function
// captured while walking the snapshot is returned.
JSFunction* InlineFrameIterator::callee(MaybeReadFallback& fallback) const {
  MOZ_ASSERT(isFunctionFrame());
  if (calleeRVA_.mode() == RValueAllocation::INVALID ||
      !fallback.canRecoverResults()) {
    return calleeTemplate_;
  }

  // Read through a copy so this iterator's position is not disturbed.
  SnapshotIterator s(si_);
  // :TODO: Handle allocation failures from recover instruction.
  Value funval = s.maybeRead(calleeRVA_, fallback);
  return &funval.toObject().as<JSFunction>();
}
2128
// Compute the environment chain object of this frame from the value held in
// its env-chain slot. If |hasInitialEnv| is non-null, it is set to true when
// this is a function frame whose callee needs function environment objects.
JSObject* InlineFrameIterator::computeEnvironmentChain(
    const Value& envChainValue, MaybeReadFallback& fallback,
    bool* hasInitialEnv) const {
  if (envChainValue.isObject()) {
    if (hasInitialEnv) {
      if (fallback.canRecoverResults()) {
        // callee() may run recover instructions which can GC; root the
        // environment object across that call.
        RootedObject obj(fallback.maybeCx, &envChainValue.toObject());
        *hasInitialEnv = isFunctionFrame() &&
                         callee(fallback)->needsFunctionEnvironmentObjects();
        return obj;
      }
      JS::AutoSuppressGCAnalysis
          nogc;  // If we cannot recover then we cannot GC.
      *hasInitialEnv = isFunctionFrame() &&
                       callee(fallback)->needsFunctionEnvironmentObjects();
    }

    return &envChainValue.toObject();
  }

  // Note we can hit this case even for functions with a CallObject, in case
  // we are walking the frame during the function prologue, before the env
  // chain has been initialized.
  if (isFunctionFrame()) {
    return callee(fallback)->environment();
  }

  if (isModuleFrame()) {
    return script()->module()->environment();
  }

  // Ion does not handle non-function scripts that have anything other than
  // the global on their env chain.
  MOZ_ASSERT(!script()->isForEval());
  MOZ_ASSERT(!script()->hasNonSyntacticScope());
  return &script()->global().lexicalEnvironment();
}
2166
isFunctionFrame() const2167 bool InlineFrameIterator::isFunctionFrame() const { return !!calleeTemplate_; }
2168
isModuleFrame() const2169 bool InlineFrameIterator::isModuleFrame() const { return script()->module(); }
2170
FromBailout(RegisterDump::GPRArray & regs,RegisterDump::FPUArray & fpregs)2171 MachineState MachineState::FromBailout(RegisterDump::GPRArray& regs,
2172 RegisterDump::FPUArray& fpregs) {
2173 MachineState machine;
2174
2175 for (unsigned i = 0; i < Registers::Total; i++) {
2176 machine.setRegisterLocation(Register::FromCode(i), ®s[i].r);
2177 }
2178 #ifdef JS_CODEGEN_ARM
2179 float* fbase = (float*)&fpregs[0];
2180 for (unsigned i = 0; i < FloatRegisters::TotalDouble; i++) {
2181 machine.setRegisterLocation(FloatRegister(i, FloatRegister::Double),
2182 &fpregs[i].d);
2183 }
2184 for (unsigned i = 0; i < FloatRegisters::TotalSingle; i++) {
2185 machine.setRegisterLocation(FloatRegister(i, FloatRegister::Single),
2186 (double*)&fbase[i]);
2187 # ifdef ENABLE_WASM_SIMD
2188 # error "More care needed here"
2189 # endif
2190 }
2191 #elif defined(JS_CODEGEN_MIPS32)
2192 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2193 machine.setRegisterLocation(
2194 FloatRegister::FromIndex(i, FloatRegister::Double), &fpregs[i]);
2195 machine.setRegisterLocation(
2196 FloatRegister::FromIndex(i, FloatRegister::Single), &fpregs[i]);
2197 # ifdef ENABLE_WASM_SIMD
2198 # error "More care needed here"
2199 # endif
2200 }
2201 #elif defined(JS_CODEGEN_MIPS64)
2202 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2203 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double),
2204 &fpregs[i]);
2205 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single),
2206 &fpregs[i]);
2207 # ifdef ENABLE_WASM_SIMD
2208 # error "More care needed here"
2209 # endif
2210 }
2211 #elif defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
2212 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2213 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Single),
2214 &fpregs[i]);
2215 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Double),
2216 &fpregs[i]);
2217 machine.setRegisterLocation(FloatRegister(i, FloatRegisters::Simd128),
2218 &fpregs[i]);
2219 }
2220 #elif defined(JS_CODEGEN_ARM64)
2221 for (unsigned i = 0; i < FloatRegisters::TotalPhys; i++) {
2222 machine.setRegisterLocation(
2223 FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Single),
2224 &fpregs[i]);
2225 machine.setRegisterLocation(
2226 FloatRegister(FloatRegisters::Encoding(i), FloatRegisters::Double),
2227 &fpregs[i]);
2228 // No SIMD support in bailouts, SIMD is internal to wasm
2229 }
2230
2231 #elif defined(JS_CODEGEN_NONE)
2232 MOZ_CRASH();
2233 #else
2234 # error "Unknown architecture!"
2235 #endif
2236 return machine;
2237 }
2238
// Whether this frame is being invoked as a constructor. For an inlined frame
// this is deduced from the caller's bytecode op; for the outermost frame it
// is read from the physical frame.
bool InlineFrameIterator::isConstructing() const {
  // Skip the current frame and look at the caller's.
  if (more()) {
    InlineFrameIterator parent(TlsContext.get(), this);
    ++parent;

    // Inlined Getters and Setters are never constructing.
    JSOp parentOp = JSOp(*parent.pc());
    if (IsIonInlinableGetterOrSetterOp(parentOp)) {
      return false;
    }

    // In the case of a JS frame, look up the pc from the snapshot.
    MOZ_ASSERT(IsInvokeOp(parentOp) && !IsSpreadOp(parentOp));

    return IsConstructOp(parentOp);
  }

  return frame_->isConstructing();
}
2259
// Emit a diagnostic when an unreadable allocation is observed, typically when
// f.arguments exposes state that was optimized away.
void SnapshotIterator::warnUnreadableAllocation() {
  fprintf(stderr,
          "Warning! Tried to access unreadable value allocation (possible "
          "f.arguments).\n");
}
2265
// Functor used by InlineFrameIterator::dump to print the overflown actual
// arguments of a frame, one per call.
struct DumpOp {
  explicit DumpOp(unsigned int i) : i_(i) {}

  // Index of the next actual argument to print.
  unsigned int i_;
  void operator()(const Value& v) {
    fprintf(stderr, " actual (arg %u): ", i_);
#if defined(DEBUG) || defined(JS_JITSPEW)
    DumpValue(v);
#else
    fprintf(stderr, "?\n");
#endif
    i_++;
  }
};
2280
// Print a human-readable description of this inline frame (callee, script,
// pc, and every snapshot slot) to stderr. Unreadable slots are shown as
// |undefined| via the fallback.
void InlineFrameIterator::dump() const {
  MaybeReadFallback fallback(UndefinedValue());

  if (more()) {
    fprintf(stderr, " JS frame (inlined)\n");
  } else {
    fprintf(stderr, " JS frame\n");
  }

  bool isFunction = false;
  if (isFunctionFrame()) {
    isFunction = true;
    fprintf(stderr, "  callee fun: ");
#if defined(DEBUG) || defined(JS_JITSPEW)
    DumpObject(callee(fallback));
#else
    fprintf(stderr, "?\n");
#endif
  } else {
    fprintf(stderr, "  global frame, no callee\n");
  }

  fprintf(stderr, "  file %s line %u\n", script()->filename(),
          script()->lineno());

  fprintf(stderr, "  script = %p, pc = %p\n", (void*)script(), pc());
  fprintf(stderr, "  current op: %s\n", CodeName(JSOp(*pc())));

  if (!more()) {
    numActualArgs();
  }

  SnapshotIterator si = snapshotIterator();
  fprintf(stderr, "  slots: %u\n", si.numAllocations() - 1);
  // Slot layout for function frames: [env chain, this, formals..., locals].
  for (unsigned i = 0; i < si.numAllocations() - 1; i++) {
    if (isFunction) {
      if (i == 0) {
        fprintf(stderr, "  env chain: ");
      } else if (i == 1) {
        fprintf(stderr, "  this: ");
      } else if (i - 2 < calleeTemplate()->nargs()) {
        fprintf(stderr, "  formal (arg %u): ", i - 2);
      } else {
        // Before the first local, print any overflown actual arguments.
        if (i - 2 == calleeTemplate()->nargs() &&
            numActualArgs() > calleeTemplate()->nargs()) {
          DumpOp d(calleeTemplate()->nargs());
          unaliasedForEachActual(TlsContext.get(), d, ReadFrame_Overflown,
                                 fallback);
        }

        fprintf(stderr, "  slot %d: ", int(i - 2 - calleeTemplate()->nargs()));
      }
    } else
      fprintf(stderr, "  slot %u: ", i);
#if defined(DEBUG) || defined(JS_JITSPEW)
    DumpValue(si.maybeRead(fallback));
#else
    fprintf(stderr, "?\n");
#endif
  }

  fputc('\n', stderr);
}
2344
fp() const2345 JitFrameLayout* InvalidationBailoutStack::fp() const {
2346 return (JitFrameLayout*)(sp() + ionScript_->frameSize());
2347 }
2348
// Debug-only sanity checks: the invalidated frame must carry a callee token,
// and the OSI point return address must lie within the IonScript's code.
void InvalidationBailoutStack::checkInvariants() const {
#ifdef DEBUG
  JitFrameLayout* frame = fp();
  CalleeToken token = frame->calleeToken();
  MOZ_ASSERT(token);

  uint8_t* rawBase = ionScript()->method()->raw();
  uint8_t* rawLimit = rawBase + ionScript()->method()->instructionsSize();
  uint8_t* osiPoint = osiPointReturnAddress();
  MOZ_ASSERT(rawBase <= osiPoint && osiPoint <= rawLimit);
#endif
}
2361
// Walk every Jit activation of |cx| and release-assert the structural
// invariants of its stack: frame-size alignment around rectifier, Ion and
// baseline-stub frames, exit-frame sizing, and that each activation starts
// with a properly aligned entry frame. Wasm activations are only walked to
// exercise the iterator.
void AssertJitStackInvariants(JSContext* cx) {
  for (JitActivationIterator activations(cx); !activations.done();
       ++activations) {
    JitFrameIter iter(activations->asJit());
    if (iter.isJSJit()) {
      JSJitFrameIter& frames = iter.asJSJit();
      size_t prevFrameSize = 0;
      size_t frameSize = 0;
      bool isScriptedCallee = false;
      for (; !frames.done(); ++frames) {
        size_t calleeFp = reinterpret_cast<size_t>(frames.fp());
        size_t callerFp = reinterpret_cast<size_t>(frames.prevFp());
        MOZ_ASSERT(callerFp >= calleeFp);
        prevFrameSize = frameSize;
        frameSize = callerFp - calleeFp;

        if (frames.isScripted() && frames.prevType() == FrameType::Rectifier) {
          MOZ_RELEASE_ASSERT(frameSize % JitStackAlignment == 0,
                             "The rectifier frame should keep the alignment");

          // A rectifier frame must be exactly large enough for the formals,
          // |this|, and (when constructing) new.target, rounded up to the
          // stack alignment.
          size_t expectedFrameSize =
              0
#if defined(JS_CODEGEN_X86)
              + sizeof(void*) /* frame pointer */
#endif
              + sizeof(Value) *
                    (frames.callee()->nargs() + 1 /* |this| argument */ +
                     frames.isConstructing() /* new.target */) +
              sizeof(JitFrameLayout);
          MOZ_RELEASE_ASSERT(frameSize >= expectedFrameSize,
                             "The frame is large enough to hold all arguments");
          MOZ_RELEASE_ASSERT(expectedFrameSize + JitStackAlignment > frameSize,
                             "The frame size is optimal");
        }

        if (frames.isExitFrame()) {
          // For the moment, we do not keep the JitStackAlignment
          // alignment for exit frames.
          frameSize -= ExitFrameLayout::Size();
        }

        if (frames.isIonJS()) {
          // Ideally, we should not have such requirement, but keep the
          // alignment-delta as part of the Safepoint such that we can pad
          // accordingly when making out-of-line calls. In the mean time,
          // let us have check-points where we can garantee that
          // everything can properly be aligned before adding complexity.
          MOZ_RELEASE_ASSERT(
              frames.ionScript()->frameSize() % JitStackAlignment == 0,
              "Ensure that if the Ion frame is aligned, then the spill base is "
              "also aligned");

          if (isScriptedCallee) {
            MOZ_RELEASE_ASSERT(prevFrameSize % JitStackAlignment == 0,
                               "The ion frame should keep the alignment");
          }
        }

        // The stack is dynamically aligned by baseline stubs before calling
        // any jitted code.
        if (frames.prevType() == FrameType::BaselineStub && isScriptedCallee) {
          MOZ_RELEASE_ASSERT(calleeFp % JitStackAlignment == 0,
                             "The baseline stub restores the stack alignment");
        }

        isScriptedCallee =
            frames.isScripted() || frames.type() == FrameType::Rectifier;
      }

      MOZ_RELEASE_ASSERT(
          JSJitFrameIter::isEntry(frames.type()),
          "The first frame of a Jit activation should be an entry frame");
      MOZ_RELEASE_ASSERT(
          reinterpret_cast<size_t>(frames.fp()) % JitStackAlignment == 0,
          "The entry frame should be properly aligned");
    } else {
      MOZ_ASSERT(iter.isWasm());
      // Simply walk the wasm frames to exercise the iterator.
      wasm::WasmFrameIter& frames = iter.asWasm();
      while (!frames.done()) {
        ++frames;
      }
    }
  }
}
2446
2447 } // namespace jit
2448 } // namespace js
2449