/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineJIT.h"

#include "mozilla/BinarySearch.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/MemoryReporting.h"

#include "jit/BaselineCompiler.h"
#include "jit/BaselineIC.h"
#include "jit/CompileInfo.h"
#include "jit/JitCommon.h"
#include "jit/JitSpewer.h"
#include "vm/Debugger.h"
#include "vm/Interpreter.h"
#include "vm/TraceLogging.h"
#include "wasm/WasmInstance.h"

#include "jsobjinlines.h"
#include "jsopcodeinlines.h"
#include "jsscriptinlines.h"

#include "jit/JitFrames-inl.h"
#include "jit/MacroAssembler-inl.h"
#include "vm/Stack-inl.h"

using mozilla::BinarySearchIf;
using mozilla::DebugOnly;

using namespace js;
using namespace js::jit;

/* static */ PCMappingSlotInfo::SlotLocation
PCMappingSlotInfo::ToSlotLocation(const StackValue* stackVal)
{
    if (stackVal->kind() == StackValue::Register) {
        if (stackVal->reg() == R0)
            return SlotInR0;
        MOZ_ASSERT(stackVal->reg() == R1);
        return SlotInR1;
    }
    MOZ_ASSERT(stackVal->kind() != StackValue::Stack);
    return SlotIgnore;
}

void
ICStubSpace::freeAllAfterMinorGC(JSRuntime* rt)
{
    rt->gc.freeAllLifoBlocksAfterMinorGC(&allocator_);
}

BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                               uint32_t profilerEnterToggleOffset,
                               uint32_t profilerExitToggleOffset,
                               uint32_t postDebugPrologueOffset)
  : method_(nullptr),
    templateEnv_(nullptr),
    fallbackStubSpace_(),
    dependentWasmImports_(nullptr),
    prologueOffset_(prologueOffset),
    epilogueOffset_(epilogueOffset),
    profilerEnterToggleOffset_(profilerEnterToggleOffset),
    profilerExitToggleOffset_(profilerExitToggleOffset),
#ifdef JS_TRACE_LOGGING
# ifdef DEBUG
    traceLoggerScriptsEnabled_(false),
    traceLoggerEngineEnabled_(false),
# endif
    traceLoggerScriptEvent_(),
#endif
    postDebugPrologueOffset_(postDebugPrologueOffset),
    flags_(0),
    inlinedBytecodeLength_(0),
    maxInliningDepth_(UINT8_MAX),
    pendingBuilder_(nullptr)
{ }

static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;

static bool
CheckFrame(InterpreterFrame* fp)
{
    if (fp->isDebuggerEvalFrame()) {
        // Debugger eval-in-frame. These are likely short-running scripts, so
        // don't bother compiling them for now.
        JitSpew(JitSpew_BaselineAbort, "debugger frame");
        return false;
    }

    if (fp->isFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) {
        // Fall back to the interpreter to avoid running out of stack space.
        JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs());
        return false;
    }

    return true;
}

static JitExecStatus
EnterBaseline(JSContext* cx, EnterJitData& data)
{
    if (data.osrFrame) {
        // Check for potential stack overflow before OSR-ing: simulate the
        // stack pointer we would have after reserving the BaselineFrame and
        // the copied stack values, and run the recursion check against it.
        uint8_t spDummy;
        uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value));
        uint8_t* checkSp = (&spDummy) - extra;
        JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return JitExec_Aborted);
    } else {
        JS_CHECK_RECURSION(cx, return JitExec_Aborted);
    }

#ifdef DEBUG
    // Assert we don't GC before entering JIT code. A GC could discard JIT code
    // or move the function stored in the CalleeToken (it won't be traced at
    // this point). We use Maybe<> here so we can call reset() to call the
    // AutoAssertNoGC destructor before we enter JIT code.
    mozilla::Maybe<JS::AutoAssertNoGC> nogc;
    nogc.emplace(cx);
#endif

    MOZ_ASSERT(jit::IsBaselineEnabled(cx));
    MOZ_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame));

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline();

    bool constructingLegacyGen =
        data.constructing && CalleeTokenToFunction(data.calleeToken)->isLegacyGenerator();

    // Caller must construct |this| before invoking the JIT function. Legacy
    // generators can be called with 'new' but when we resume them, the
    // this-slot and arguments are |undefined| (they are stored in the
    // CallObject).
    MOZ_ASSERT_IF(data.constructing && !constructingLegacyGen,
                  data.maxArgv[0].isObject() || data.maxArgv[0].isMagic(JS_UNINITIALIZED_LEXICAL));

    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        ActivationEntryMonitor entryMonitor(cx, data.calleeToken);
        JitActivation activation(cx);

        if (data.osrFrame)
            data.osrFrame->setRunningInJit();

#ifdef DEBUG
        nogc.reset();
#endif
        // Single transition point from Interpreter to Baseline.
        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame,
                            data.calleeToken, data.envChain.get(), data.osrNumStackValues,
                            data.result.address());

        if (data.osrFrame)
            data.osrFrame->clearRunningInJit();
    }

    MOZ_ASSERT(!cx->runtime()->jitRuntime()->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return, except for derived
    // class constructors, which are forced to do it themselves.
    if (!data.result.isMagic() &&
        data.constructing &&
        data.result.isPrimitive() &&
        !constructingLegacyGen)
    {
        MOZ_ASSERT(data.maxArgv[0].isObject());
        data.result = data.maxArgv[0];
    }

    // Release temporary buffer used for OSR into Ion.
    cx->runtime()->getJitRuntime(cx)->freeOsrTempData();

    MOZ_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? JitExec_Error : JitExec_Ok;
}

JitExecStatus
jit::EnterBaselineMethod(JSContext* cx, RunState& state)
{
    BaselineScript* baseline = state.script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->method()->raw();

    Rooted<GCVector<Value>> vals(cx, GCVector<Value>(cx));
    if (!SetEnterJitData(cx, data, state, &vals))
        return JitExec_Error;

    JitExecStatus status = EnterBaseline(cx, data);
    if (status != JitExec_Ok)
        return status;

    state.setReturnValue(data.result);
    return JitExec_Ok;
}

JitExecStatus
jit::EnterBaselineAtBranch(JSContext* cx, InterpreterFrame* fp, jsbytecode* pc)
{
    MOZ_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);

    BaselineScript* baseline = fp->script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->nativeCodeForPC(fp->script(), pc);

    // Skip the debug breakpoint/trap handler; the interpreter already handled
    // it for the current op.
    if (fp->isDebuggee()) {
        MOZ_RELEASE_ASSERT(baseline->hasDebugInstrumentation());
        data.jitcode += MacroAssembler::ToggledCallSize(data.jitcode);
    }

    data.osrFrame = fp;
    data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth();

    AutoValueVector vals(cx);
    RootedValue thisv(cx);

    if (fp->isFunctionFrame()) {
        data.constructing = fp->isConstructing();
        data.numActualArgs = fp->numActualArgs();
        data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this|
        data.maxArgv = fp->argv() - 1; // -1 = include |this|
        data.envChain = nullptr;
        data.calleeToken = CalleeToToken(&fp->callee(), data.constructing);
    } else {
        thisv.setUndefined();
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 1;
        data.maxArgv = thisv.address();
        data.envChain = fp->environmentChain();

        data.calleeToken = CalleeToToken(fp->script());

        if (fp->isEvalFrame()) {
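            // Eval frames expect |this| and new.target to be passed in the
            // argument vector, so seed |vals| with both.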
            if (!vals.reserve(2))
                return JitExec_Aborted;

            vals.infallibleAppend(thisv);

            if (fp->script()->isDirectEvalInFunction())
                vals.infallibleAppend(fp->newTarget());
            else
                vals.infallibleAppend(NullValue());

            data.maxArgc = 2;
            data.maxArgv = vals.begin();
        }
    }

    TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
    TraceLogStopEvent(logger, TraceLogger_Interpreter);
    TraceLogStartEvent(logger, TraceLogger_Baseline);

    JitExecStatus status = EnterBaseline(cx, data);
    if (status != JitExec_Ok)
        return status;

    fp->setReturnValue(data.result);
    return JitExec_Ok;
}

MethodStatus
jit::BaselineCompile(JSContext* cx, JSScript* script, bool forceDebugInstrumentation)
{
    MOZ_ASSERT(!script->hasBaselineScript());
    MOZ_ASSERT(script->canBaselineCompile());
    MOZ_ASSERT(IsBaselineEnabled(cx));

    script->ensureNonLazyCanonicalFunction(cx);

    LifoAlloc alloc(TempAllocator::PreferredLifoChunkSize);
    TempAllocator* temp = alloc.new_<TempAllocator>(&alloc);
    if (!temp) {
        ReportOutOfMemory(cx);
        return Method_Error;
    }

    JitContext jctx(cx, temp);

    BaselineCompiler compiler(cx, *temp, script);
    if (!compiler.init()) {
        ReportOutOfMemory(cx);
        return Method_Error;
    }

    if (forceDebugInstrumentation)
        compiler.setCompileDebugInstrumentation();

    MethodStatus status = compiler.compile();

    MOZ_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript());
    MOZ_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript());

    if (status == Method_CantCompile)
        script->setBaselineScript(cx->runtime(), BASELINE_DISABLED_SCRIPT);

    return status;
}

static MethodStatus
CanEnterBaselineJIT(JSContext* cx, HandleScript script, InterpreterFrame* osrFrame)
{
    MOZ_ASSERT(jit::IsBaselineEnabled(cx));

    // Skip if the script has been disabled.
    if (!script->canBaselineCompile())
        return Method_Skipped;

    if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH)
        return Method_CantCompile;

    if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS)
        return Method_CantCompile;

    if (script->hasBaselineScript())
        return Method_Compiled;

    // Check this before calling ensureJitCompartmentExists, so we're less
    // likely to report OOM in JSRuntime::createJitRuntime.
    if (!CanLikelyAllocateMoreExecutableMemory())
        return Method_Skipped;

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return Method_Error;

    // Check script warm-up counter.
    if (script->incWarmUpCounter() <= JitOptions.baselineWarmUpThreshold)
        return Method_Skipped;

    // Frames can be marked as debuggee frames independently of their
    // underlying scripts being debuggee scripts, e.g., when performing
    // Debugger.Frame.prototype.eval.
    return BaselineCompile(cx, script, osrFrame && osrFrame->isDebuggee());
}

MethodStatus
jit::CanEnterBaselineAtBranch(JSContext* cx, InterpreterFrame* fp, bool newType)
{
    if (!CheckFrame(fp))
        return Method_CantCompile;

    // This check is needed in the following corner case. Consider a function h,
    //
    //   function h(x) {
    //      h(false);
    //      if (!x)
    //        return;
    //      for (var i = 0; i < N; i++)
    //         /* do stuff */
    //   }
    //
    // Suppose h is not yet compiled in baseline and is executing in the
    // interpreter. Let this interpreter frame be f_older. The debugger marks
    // f_older as isDebuggee. At the point of the recursive call h(false), h is
    // compiled in baseline without debug instrumentation, pushing a baseline
    // frame f_newer. The debugger never flags f_newer as isDebuggee, and never
    // recompiles h. When the recursive call returns and execution proceeds to
    // the loop, the interpreter attempts to OSR into baseline. Since h is
    // already compiled in baseline, execution jumps directly into baseline
    // code. This is incorrect as h's baseline script does not have debug
    // instrumentation.
    if (fp->isDebuggee() && !Debugger::ensureExecutionObservabilityOfOsrFrame(cx, fp))
        return Method_Error;

    RootedScript script(cx, fp->script());
    return CanEnterBaselineJIT(cx, script, fp);
}

MethodStatus
jit::CanEnterBaselineMethod(JSContext* cx, RunState& state)
{
    if (state.isInvoke()) {
        InvokeState& invoke = *state.asInvoke();

        if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) {
            JitSpew(JitSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length());
            return Method_CantCompile;
        }

        if (!state.maybeCreateThisForConstructor(cx)) {
            if (cx->isThrowingOutOfMemory()) {
                cx->recoverFromOutOfMemory();
                return Method_Skipped;
            }
            return Method_Error;
        }
    } else {
        if (state.asExecute()->isDebuggerEval()) {
            JitSpew(JitSpew_BaselineAbort, "debugger frame");
            return Method_CantCompile;
        }
    }

    RootedScript script(cx, state.script());
    return CanEnterBaselineJIT(cx, script, /* osrFrame = */ nullptr);
}

BaselineScript*
BaselineScript::New(JSScript* jsscript,
                    uint32_t prologueOffset, uint32_t epilogueOffset,
                    uint32_t profilerEnterToggleOffset,
                    uint32_t profilerExitToggleOffset,
                    uint32_t postDebugPrologueOffset,
                    size_t icEntries,
                    size_t pcMappingIndexEntries, size_t pcMappingSize,
                    size_t bytecodeTypeMapEntries,
                    size_t yieldEntries,
                    size_t traceLoggerToggleOffsetEntries)
{
    static const unsigned DataAlignment = sizeof(uintptr_t);
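    // The BaselineScript is followed in the same allocation by its
    // variable-length data, each region padded to pointer alignment:
    //
    //   BaselineScript | IC entries | PC mapping index | PC mapping data |
    //   bytecode type map | yield entries | tracelogger toggle offsets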

    size_t icEntriesSize = icEntries * sizeof(BaselineICEntry);
    size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
    size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);
    size_t yieldEntriesSize = yieldEntries * sizeof(uintptr_t);
    size_t tlEntriesSize = traceLoggerToggleOffsetEntries * sizeof(uint32_t);

    size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
    size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
    size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
    size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);
    size_t paddedYieldEntriesSize = AlignBytes(yieldEntriesSize, DataAlignment);
    size_t paddedTLEntriesSize = AlignBytes(tlEntriesSize, DataAlignment);

    size_t allocBytes = paddedICEntriesSize +
                        paddedPCMappingIndexEntriesSize +
                        paddedPCMappingSize +
                        paddedBytecodeTypesMapSize +
                        paddedYieldEntriesSize +
                        paddedTLEntriesSize;

    BaselineScript* script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
    if (!script)
        return nullptr;
    new (script) BaselineScript(prologueOffset, epilogueOffset,
                                profilerEnterToggleOffset, profilerExitToggleOffset,
                                postDebugPrologueOffset);

    size_t offsetCursor = sizeof(BaselineScript);
    MOZ_ASSERT(offsetCursor == AlignBytes(sizeof(BaselineScript), DataAlignment));

    script->icEntriesOffset_ = offsetCursor;
    script->icEntries_ = icEntries;
    offsetCursor += paddedICEntriesSize;

    script->pcMappingIndexOffset_ = offsetCursor;
    script->pcMappingIndexEntries_ = pcMappingIndexEntries;
    offsetCursor += paddedPCMappingIndexEntriesSize;

    script->pcMappingOffset_ = offsetCursor;
    script->pcMappingSize_ = pcMappingSize;
    offsetCursor += paddedPCMappingSize;

    script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0;
    offsetCursor += paddedBytecodeTypesMapSize;

    script->yieldEntriesOffset_ = yieldEntries ? offsetCursor : 0;
    offsetCursor += paddedYieldEntriesSize;

    script->traceLoggerToggleOffsetsOffset_ = tlEntriesSize ? offsetCursor : 0;
    script->numTraceLoggerToggleOffsets_ = traceLoggerToggleOffsetEntries;
    offsetCursor += paddedTLEntriesSize;

    MOZ_ASSERT(offsetCursor == sizeof(BaselineScript) + allocBytes);
    return script;
}

void
BaselineScript::trace(JSTracer* trc)
{
    TraceEdge(trc, &method_, "baseline-method");
    TraceNullableEdge(trc, &templateEnv_, "baseline-template-environment");

    // Mark all IC stub codes hanging off the IC stub entries.
    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& ent = icEntry(i);
        ent.trace(trc);
    }
}

/* static */
void
BaselineScript::writeBarrierPre(Zone* zone, BaselineScript* script)
{
    if (zone->needsIncrementalBarrier())
        script->trace(zone->barrierTracer());
}

void
BaselineScript::Trace(JSTracer* trc, BaselineScript* script)
{
    script->trace(trc);
}

void
BaselineScript::Destroy(FreeOp* fop, BaselineScript* script)
{
    MOZ_ASSERT(!script->hasPendingIonBuilder());

    script->unlinkDependentWasmImports(fop);

    /*
     * When the script contains pointers to nursery things, the store buffer can
     * contain entries that point into the fallback stub space. Since we can
     * destroy scripts outside the context of a GC, this situation could result
     * in us trying to mark invalid store buffer entries.
     *
     * Defer freeing any allocated blocks until after the next minor GC.
     */
    script->fallbackStubSpace_.freeAllAfterMinorGC(fop->runtime());

    fop->delete_(script);
}

void
JS::DeletePolicy<js::jit::BaselineScript>::operator()(const js::jit::BaselineScript* script)
{
    BaselineScript::Destroy(rt_->defaultFreeOp(), const_cast<BaselineScript*>(script));
}

void
BaselineScript::clearDependentWasmImports()
{
    // Remove any links from wasm::Instances that contain optimized import calls into
    // this BaselineScript.
    if (dependentWasmImports_) {
        for (DependentWasmImport& dep : *dependentWasmImports_)
            dep.instance->deoptimizeImportExit(dep.importIndex);
        dependentWasmImports_->clear();
    }
}

void
BaselineScript::unlinkDependentWasmImports(FreeOp* fop)
{
    // Remove any links from wasm::Instances that contain optimized import calls into
    // this BaselineScript, then release the list itself.
    clearDependentWasmImports();
    if (dependentWasmImports_) {
        fop->delete_(dependentWasmImports_);
        dependentWasmImports_ = nullptr;
    }
}

bool
BaselineScript::addDependentWasmImport(JSContext* cx, wasm::Instance& instance, uint32_t idx)
{
    if (!dependentWasmImports_) {
        dependentWasmImports_ = cx->new_<Vector<DependentWasmImport>>(cx);
        if (!dependentWasmImports_)
            return false;
    }
    return dependentWasmImports_->emplaceBack(instance, idx);
}

void
BaselineScript::removeDependentWasmImport(wasm::Instance& instance, uint32_t idx)
{
    if (!dependentWasmImports_)
        return;

    for (DependentWasmImport& dep : *dependentWasmImports_) {
        if (dep.instance == &instance && dep.importIndex == idx) {
            dependentWasmImports_->erase(&dep);
            break;
        }
    }
}

BaselineICEntry&
BaselineScript::icEntry(size_t index)
{
    MOZ_ASSERT(index < numICEntries());
    return icEntryList()[index];
}

PCMappingIndexEntry&
BaselineScript::pcMappingIndexEntry(size_t index)
{
    MOZ_ASSERT(index < numPCMappingIndexEntries());
    return pcMappingIndexEntryList()[index];
}

CompactBufferReader
BaselineScript::pcMappingReader(size_t indexEntry)
{
    PCMappingIndexEntry& entry = pcMappingIndexEntry(indexEntry);

    uint8_t* dataStart = pcMappingData() + entry.bufferOffset;
    uint8_t* dataEnd = (indexEntry == numPCMappingIndexEntries() - 1)
        ? pcMappingData() + pcMappingSize_
        : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset;

    return CompactBufferReader(dataStart, dataEnd);
}

struct ICEntries
{
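    // Adapter so mozilla::BinarySearchIf can index the IC entries, which live
    // in the BaselineScript's trailing data, like a random-access container.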
    BaselineScript* const baseline_;

    explicit ICEntries(BaselineScript* baseline) : baseline_(baseline) {}

    BaselineICEntry& operator[](size_t index) const {
        return baseline_->icEntry(index);
    }
};

BaselineICEntry&
BaselineScript::icEntryFromReturnOffset(CodeOffset returnOffset)
{
    size_t loc;
#ifdef DEBUG
    bool found =
#endif
        BinarySearchIf(ICEntries(this), 0, numICEntries(),
                       [&returnOffset](BaselineICEntry& entry) {
                           size_t roffset = returnOffset.offset();
                           size_t entryRoffset = entry.returnOffset().offset();
                           if (roffset < entryRoffset)
                               return -1;
                           if (entryRoffset < roffset)
                               return 1;
                           return 0;
                       },
                       &loc);

    MOZ_ASSERT(found);
    MOZ_ASSERT(loc < numICEntries());
    MOZ_ASSERT(icEntry(loc).returnOffset().offset() == returnOffset.offset());
    return icEntry(loc);
}

static inline size_t
ComputeBinarySearchMid(BaselineScript* baseline, uint32_t pcOffset)
{
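    // Multiple IC entries can share a pc offset; BinarySearchIf may return
    // the index of any one of them, so callers scan outward from |loc| for
    // the specific entry kind they need.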
    size_t loc;
    BinarySearchIf(ICEntries(baseline), 0, baseline->numICEntries(),
                   [pcOffset](BaselineICEntry& entry) {
                       uint32_t entryOffset = entry.pcOffset();
                       if (pcOffset < entryOffset)
                           return -1;
                       if (entryOffset < pcOffset)
                           return 1;
                       return 0;
                   },
                   &loc);
    return loc;
}

uint8_t*
BaselineScript::returnAddressForIC(const BaselineICEntry& ent)
{
    return method()->raw() + ent.returnOffset().offset();
}

BaselineICEntry&
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
{
    // Multiple IC entries can have the same PC offset, but this method only looks for
    // those which have isForOp() set.
    size_t mid = ComputeBinarySearchMid(this, pcOffset);

    // Found an IC entry with a matching PC offset.  Search backward, and then
    // forward from this IC entry, looking for one with the same PC offset which
    // has isForOp() set.
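    // Note that |i| is unsigned: when the backward scan walks past entry 0 it
    // wraps around, and the |i < numICEntries()| condition ends the loop.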
    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    MOZ_CRASH("Invalid PC offset for IC entry.");
}

BaselineICEntry&
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, BaselineICEntry* prevLookedUpEntry)
{
    // Do a linear forward search from the last queried PC offset, or fall back
    // to a binary search if the last offset is too far away.
    if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
        (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
    {
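        // Entries are sorted by pc offset, so when the requested offset is at
        // most 10 past the previous lookup, a short forward walk finds it.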
        BaselineICEntry* firstEntry = &icEntry(0);
        BaselineICEntry* lastEntry = &icEntry(numICEntries() - 1);
        BaselineICEntry* curEntry = prevLookedUpEntry;
        while (curEntry >= firstEntry && curEntry <= lastEntry) {
            if (curEntry->pcOffset() == pcOffset && curEntry->isForOp())
                break;
            curEntry++;
        }
        MOZ_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp());
        return *curEntry;
    }

    return icEntryFromPCOffset(pcOffset);
}

BaselineICEntry&
BaselineScript::callVMEntryFromPCOffset(uint32_t pcOffset)
{
    // Like icEntryFromPCOffset, but only looks for the fake ICEntries
    // inserted by VM calls.
    size_t mid = ComputeBinarySearchMid(this, pcOffset);

    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).kind() == ICEntry::Kind_CallVM)
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).kind() == ICEntry::Kind_CallVM)
            return icEntry(i);
    }
    MOZ_CRASH("Invalid PC offset for callVM entry.");
}

BaselineICEntry&
BaselineScript::stackCheckICEntry(bool earlyCheck)
{
    // The stack check will always be at offset 0, so just do a linear search
    // from the beginning. This is only needed for debug mode OSR, when
    // patching a frame that has invoked a Debugger hook via the interrupt
    // handler from within the stack check, which is part of the prologue.
    ICEntry::Kind kind = earlyCheck ? ICEntry::Kind_EarlyStackCheck : ICEntry::Kind_StackCheck;
    for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
        if (icEntry(i).kind() == kind)
            return icEntry(i);
    }
    MOZ_CRASH("No stack check ICEntry found.");
}

BaselineICEntry&
BaselineScript::warmupCountICEntry()
{
    // The warmup counter IC entry is part of the prologue and has pc offset 0,
    // so just do a linear search from the beginning.
    for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
        if (icEntry(i).kind() == ICEntry::Kind_WarmupCounter)
            return icEntry(i);
    }
    MOZ_CRASH("No warmup count ICEntry found.");
}

BaselineICEntry&
BaselineScript::icEntryFromReturnAddress(uint8_t* returnAddr)
{
    MOZ_ASSERT(returnAddr > method_->raw());
    MOZ_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
    CodeOffset offset(returnAddr - method_->raw());
    return icEntryFromReturnOffset(offset);
}

void
BaselineScript::copyYieldEntries(JSScript* script, Vector<uint32_t>& yieldOffsets)
{
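    // Translate each yield point's bytecode offset into the native address
    // Baseline resumes at for that yield.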
    uint8_t** entries = yieldEntryList();

    for (size_t i = 0; i < yieldOffsets.length(); i++) {
        uint32_t offset = yieldOffsets[i];
        entries[i] = nativeCodeForPC(script, script->offsetToPC(offset));
    }
}

void
BaselineScript::copyICEntries(JSScript* script, const BaselineICEntry* entries, MacroAssembler& masm)
{
    // Fix up the return offset in the IC entries and copy them in.
    // Also write out the IC entry ptrs in any fallback stubs that were added.
    for (uint32_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& realEntry = icEntry(i);
        realEntry = entries[i];

        if (!realEntry.hasStub()) {
            // VM call without any stubs.
            continue;
        }

        // If the attached stub is a fallback stub, then fix it up with
        // a pointer to the (now available) realEntry.
        if (realEntry.firstStub()->isFallback())
            realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry);

        if (realEntry.firstStub()->isTypeMonitor_Fallback()) {
            ICTypeMonitor_Fallback* stub = realEntry.firstStub()->toTypeMonitor_Fallback();
            stub->fixupICEntry(&realEntry);
        }

        if (realEntry.firstStub()->isTableSwitch()) {
            ICTableSwitch* stub = realEntry.firstStub()->toTableSwitch();
            stub->fixupJumpTable(script, this);
        }
    }
}

void
BaselineScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
{
    fallbackStubSpace_.adoptFrom(stubSpace);
}

void
BaselineScript::copyPCMappingEntries(const CompactBufferWriter& entries)
{
    MOZ_ASSERT(entries.length() > 0);
    MOZ_ASSERT(entries.length() == pcMappingSize_);

    memcpy(pcMappingData(), entries.buffer(), entries.length());
}

void
BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry* entries)
{
    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++)
        pcMappingIndexEntry(i) = entries[i];
}

uint8_t*
BaselineScript::nativeCodeForPC(JSScript* script, jsbytecode* pc, PCMappingSlotInfo* slotInfo)
{
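    // The PC mapping is two-level: index entries give coarse
    // (pcOffset, nativeOffset) anchors, and the compact buffer each one
    // points into encodes one byte per op (plus an optional native-offset
    // delta) from that anchor onward.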
    MOZ_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this);

    uint32_t pcOffset = script->pcToOffset(pc);

    // Look for the first PCMappingIndexEntry with pc > the pc we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).pcOffset > pcOffset)
            break;
    }

    // The previous entry contains the current pc.
    MOZ_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry& entry = pcMappingIndexEntry(i);
    MOZ_ASSERT(pcOffset >= entry.pcOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
    uint32_t nativeOffset = entry.nativeOffset;

    MOZ_ASSERT(script->containsPC(curPC));
    MOZ_ASSERT(curPC <= pc);

    while (reader.more()) {
        // Each entry is a single byte: the low seven bits hold the
        // PCMappingSlotInfo, and the high bit indicates that the native
        // offset changed relative to the previous pc, with the non-zero
        // delta encoded next as a compact unsigned value.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            nativeOffset += reader.readUnsigned();

        if (curPC == pc) {
            if (slotInfo)
                *slotInfo = PCMappingSlotInfo(b & ~0x80);
            return method_->raw() + nativeOffset;
        }

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_CRASH("No native code for this pc");
}

jsbytecode*
BaselineScript::approximatePcForNativeAddress(JSScript* script, uint8_t* nativeAddress)
{
    MOZ_ASSERT(script->baselineScript() == this);
    MOZ_ASSERT(nativeAddress >= method_->raw());
    MOZ_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize());

    uint32_t nativeOffset = nativeAddress - method_->raw();
    MOZ_ASSERT(nativeOffset < method_->instructionsSize());

    // Look for the first PCMappingIndexEntry with native offset > the native offset we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).nativeOffset > nativeOffset)
            break;
    }

    // Go back an entry to search forward from.
    MOZ_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry& entry = pcMappingIndexEntry(i);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
    uint32_t curNativeOffset = entry.nativeOffset;

    MOZ_ASSERT(script->containsPC(curPC));

    // The native code address can occur before the start of ops.
    // Associate those with bytecode offset 0.
    if (curNativeOffset > nativeOffset)
        return script->code();

    jsbytecode* lastPC = curPC;
    while (true) {
        // If the high bit is set, a non-zero native-offset delta relative to
        // the previous pc is encoded next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            curNativeOffset += reader.readUnsigned();

        // Return the last PC that matched nativeOffset. Some bytecode ops
        // generate no native code (e.g., constant-pushing ops like JSOP_INT8),
        // and so their entries share the same nativeOffset as the next op
        // that does generate code.
        if (curNativeOffset > nativeOffset)
            return lastPC;

        // The native address may lie in-between the last delta-entry in
        // a pcMappingIndexEntry, and the next pcMappingIndexEntry.
        if (!reader.more())
            return curPC;

        lastPC = curPC;
        curPC += GetBytecodeLength(curPC);
    }
}

void
BaselineScript::toggleDebugTraps(JSScript* script, jsbytecode* pc)
{
    MOZ_ASSERT(script->baselineScript() == this);

    // Only scripts compiled for debug mode have toggled calls.
    if (!hasDebugInstrumentation())
        return;

    SrcNoteLineScanner scanner(script->notes(), script->lineno());

    AutoWritableJitCode awjc(method());

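    // Walk the pc mapping and toggle the debug-trap call at each op (or only
    // at |pc| when one is given): a trap is enabled when the op starts a line
    // with step mode active, or has a breakpoint set.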
    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
        PCMappingIndexEntry& entry = pcMappingIndexEntry(i);

        CompactBufferReader reader(pcMappingReader(i));
        jsbytecode* curPC = script->offsetToPC(entry.pcOffset);
        uint32_t nativeOffset = entry.nativeOffset;

        MOZ_ASSERT(script->containsPC(curPC));

        while (reader.more()) {
            uint8_t b = reader.readByte();
            if (b & 0x80)
                nativeOffset += reader.readUnsigned();

            scanner.advanceTo(script->pcToOffset(curPC));

            if (!pc || pc == curPC) {
                bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) ||
                    script->hasBreakpointsAt(curPC);

                // Patch the trap.
                CodeLocationLabel label(method(), CodeOffset(nativeOffset));
                Assembler::ToggleCall(label, enabled);
            }

            curPC += GetBytecodeLength(curPC);
        }
    }
}

#ifdef JS_TRACE_LOGGING
void
BaselineScript::initTraceLogger(JSRuntime* runtime, JSScript* script,
                                const Vector<CodeOffset>& offsets)
{
#ifdef DEBUG
    traceLoggerScriptsEnabled_ = TraceLogTextIdEnabled(TraceLogger_Scripts);
    traceLoggerEngineEnabled_ = TraceLogTextIdEnabled(TraceLogger_Engine);
#endif

    TraceLoggerThread* logger = TraceLoggerForMainThread(runtime);

    MOZ_ASSERT(offsets.length() == numTraceLoggerToggleOffsets_);
    for (size_t i = 0; i < offsets.length(); i++)
        traceLoggerToggleOffsets()[i] = offsets[i].offset();

    if (TraceLogTextIdEnabled(TraceLogger_Engine) || TraceLogTextIdEnabled(TraceLogger_Scripts)) {
        traceLoggerScriptEvent_ = TraceLoggerEvent(logger, TraceLogger_Scripts, script);
        for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
            CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
            Assembler::ToggleToCmp(label);
        }
    }
}

void
BaselineScript::toggleTraceLoggerScripts(JSRuntime* runtime, JSScript* script, bool enable)
{
    DebugOnly<bool> engineEnabled = TraceLogTextIdEnabled(TraceLogger_Engine);
    MOZ_ASSERT(enable == !traceLoggerScriptsEnabled_);
    MOZ_ASSERT(engineEnabled == traceLoggerEngineEnabled_);

    // Patch the logged script event to be correct: when logging, use the
    // script-specific textId; otherwise the global Scripts textId is used.
    TraceLoggerThread* logger = TraceLoggerForMainThread(runtime);
    if (enable && !traceLoggerScriptEvent_.hasPayload())
        traceLoggerScriptEvent_ = TraceLoggerEvent(logger, TraceLogger_Scripts, script);

    AutoWritableJitCode awjc(method());

    // Enable/Disable the traceLogger.
    for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
        CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
        if (enable)
            Assembler::ToggleToCmp(label);
        else
            Assembler::ToggleToJmp(label);
    }

#ifdef DEBUG
    traceLoggerScriptsEnabled_ = enable;
#endif
}

void
BaselineScript::toggleTraceLoggerEngine(bool enable)
{
    DebugOnly<bool> scriptsEnabled = TraceLogTextIdEnabled(TraceLogger_Scripts);
    MOZ_ASSERT(enable == !traceLoggerEngineEnabled_);
    MOZ_ASSERT(scriptsEnabled == traceLoggerScriptsEnabled_);

    AutoWritableJitCode awjc(method());

    // Enable/Disable the traceLogger prologue and epilogue.
    for (size_t i = 0; i < numTraceLoggerToggleOffsets_; i++) {
        CodeLocationLabel label(method_, CodeOffset(traceLoggerToggleOffsets()[i]));
        if (enable)
            Assembler::ToggleToCmp(label);
        else
            Assembler::ToggleToJmp(label);
    }

#ifdef DEBUG
    traceLoggerEngineEnabled_ = enable;
#endif
}
#endif

void
BaselineScript::toggleProfilerInstrumentation(bool enable)
{
    if (enable == isProfilerInstrumentationOn())
        return;

    JitSpew(JitSpew_BaselineIC, "  toggling profiling %s for BaselineScript %p",
            enable ? "on" : "off", this);

    // Toggle the guard jumps: patching them to a cmp (a no-op here) lets
    // execution fall through into the instrumentation, while a jmp skips it.
    CodeLocationLabel enterToggleLocation(method_, CodeOffset(profilerEnterToggleOffset_));
    CodeLocationLabel exitToggleLocation(method_, CodeOffset(profilerExitToggleOffset_));
    if (enable) {
        Assembler::ToggleToCmp(enterToggleLocation);
        Assembler::ToggleToCmp(exitToggleLocation);
        flags_ |= uint32_t(PROFILER_INSTRUMENTATION_ON);
    } else {
        Assembler::ToggleToJmp(enterToggleLocation);
        Assembler::ToggleToJmp(exitToggleLocation);
        flags_ &= ~uint32_t(PROFILER_INSTRUMENTATION_ON);
    }
}

void
BaselineScript::purgeOptimizedStubs(Zone* zone)
{
    JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");

    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub* lastStub = entry.firstStub();
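        // Walk to the last stub in the chain; chains for ops normally end in
        // a fallback stub, which is what can unlink its optimized stubs.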
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            ICStub* stub = entry.firstStub();
            ICStub* prev = nullptr;

            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }

                prev = stub;
                stub = stub->next();
            }

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so they are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback* lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
        } else {
            MOZ_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numICEntries(); i++) {
        BaselineICEntry& entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub* stub = entry.firstStub();
        while (stub->next()) {
            MOZ_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}

void
jit::FinishDiscardBaselineScript(FreeOp* fop, JSScript* script)
{
    if (!script->hasBaselineScript())
        return;

    if (script->baselineScript()->active()) {
        // Script is live on the stack. Keep the BaselineScript, but destroy
        // stubs allocated in the optimized stub space.
        script->baselineScript()->purgeOptimizedStubs(script->zone());

        // Reset the |active| flag so that we don't need a separate script
        // iteration to unmark it.
        script->baselineScript()->resetActive();

        // The baseline caches have been wiped out, so the script will need to
        // warm back up before it can be inlined during Ion compilation.
        script->baselineScript()->clearIonCompiledOrInlined();
        return;
    }

    BaselineScript* baseline = script->baselineScript();
    script->setBaselineScript(nullptr, nullptr);
    BaselineScript::Destroy(fop, baseline);
}

void
jit::AddSizeOfBaselineData(JSScript* script, mozilla::MallocSizeOf mallocSizeOf, size_t* data,
                           size_t* fallbackStubs)
{
    if (script->hasBaselineScript())
        script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
}

void
jit::ToggleBaselineProfiling(JSRuntime* runtime, bool enable)
{
    JitRuntime* jrt = runtime->jitRuntime();
    if (!jrt)
        return;

    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
        for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
            if (!script->hasBaselineScript())
                continue;
            AutoWritableJitCode awjc(script->baselineScript()->method());
            script->baselineScript()->toggleProfilerInstrumentation(enable);
        }
    }
}

#ifdef JS_TRACE_LOGGING
void
jit::ToggleBaselineTraceLoggerScripts(JSRuntime* runtime, bool enable)
{
    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
        for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
            if (!script->hasBaselineScript())
                continue;
            script->baselineScript()->toggleTraceLoggerScripts(runtime, script, enable);
        }
    }
}

void
jit::ToggleBaselineTraceLoggerEngine(JSRuntime* runtime, bool enable)
{
    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
        for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
            if (!script->hasBaselineScript())
                continue;
            script->baselineScript()->toggleTraceLoggerEngine(enable);
        }
    }
}
#endif

static void
MarkActiveBaselineScripts(JSRuntime* rt, const JitActivationIterator& activation)
{
    for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) {
        switch (iter.type()) {
          case JitFrame_BaselineJS:
            iter.script()->baselineScript()->setActive();
            break;
          case JitFrame_Exit:
            if (iter.exitFrame()->is<LazyLinkExitFrameLayout>()) {
                LazyLinkExitFrameLayout* ll = iter.exitFrame()->as<LazyLinkExitFrameLayout>();
                ScriptFromCalleeToken(ll->jsFrame()->calleeToken())->baselineScript()->setActive();
            }
            break;
          case JitFrame_Bailout:
          case JitFrame_IonJS: {
            // Keep the baseline script around, since bailouts from the ion
            // jitcode might need to re-enter into the baseline jitcode.
            iter.script()->baselineScript()->setActive();
            for (InlineFrameIterator inlineIter(rt, &iter); inlineIter.more(); ++inlineIter)
                inlineIter.script()->baselineScript()->setActive();
            break;
          }
          default:;
        }
    }
}

void
jit::MarkActiveBaselineScripts(Zone* zone)
{
    JSRuntime* rt = zone->runtimeFromMainThread();
    for (JitActivationIterator iter(rt); !iter.done(); ++iter) {
        if (iter->compartment()->zone() == zone)
            MarkActiveBaselineScripts(rt, iter);
    }
}