1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "jit/JitScript-inl.h"
8
9 #include "mozilla/BinarySearch.h"
10 #include "mozilla/CheckedInt.h"
11
12 #include <utility>
13
14 #include "jit/BaselineIC.h"
15 #include "jit/BaselineJIT.h"
16 #include "jit/BytecodeAnalysis.h"
17 #include "jit/IonScript.h"
18 #include "jit/JitFrames.h"
19 #include "jit/JitSpewer.h"
20 #include "jit/ScriptFromCalleeToken.h"
21 #include "jit/TrialInlining.h"
22 #include "vm/BytecodeUtil.h"
23 #include "vm/FrameIter.h" // js::OnlyJSJitFrameIter
24 #include "vm/JSScript.h"
25
26 #include "gc/FreeOp-inl.h"
27 #include "jit/JSJitFrameIter-inl.h"
28 #include "vm/JSScript-inl.h"
29
30 using namespace js;
31 using namespace js::jit;
32
33 using mozilla::CheckedInt;
34
JitScript::JitScript(JSScript* script, Offset fallbackStubsOffset,
                     Offset endOffset, const char* profileString)
    : profileString_(profileString),
      endOffset_(endOffset),
      // The ICScript stores offsets relative to its own start, so translate
      // the JitScript-relative offsets by subtracting offsetOfICScript().
      icScript_(script->getWarmUpCount(),
                fallbackStubsOffset - offsetOfICScript(),
                endOffset - offsetOfICScript(),
                /*depth=*/0) {
  // Ensure the baselineScript_ and ionScript_ fields match the BaselineDisabled
  // and IonDisabled script flags.
  if (!script->canBaselineCompile()) {
    setBaselineScriptImpl(script, BaselineDisabledScriptPtr);
  }
  if (!script->canIonCompile()) {
    setIonScriptImpl(script, IonDisabledScriptPtr);
  }
}
52
#ifdef DEBUG
// Debug-only destructor: assert that all owned data was torn down before the
// JitScript itself is destroyed.
JitScript::~JitScript() {
  // The contents of the stub space are removed and freed separately after the
  // next minor GC. See prepareForDestruction.
  MOZ_ASSERT(jitScriptStubSpace_.isEmpty());

  // BaselineScript and IonScript must have been destroyed at this point.
  MOZ_ASSERT(!hasBaselineScript());
  MOZ_ASSERT(!hasIonScript());
}
#else
JitScript::~JitScript() = default;
#endif
66
// Allocate and install a JitScript for this script. The JitScript is a single
// heap allocation containing the JitScript header followed by two trailing
// arrays: the ICEntries and the ICFallbackStubs. Returns false on OOM.
bool JSScript::createJitScript(JSContext* cx) {
  MOZ_ASSERT(!hasJitScript());
  cx->check(this);

  // Scripts with a JitScript can run in the Baseline Interpreter. Make sure
  // we don't create a JitScript for scripts we shouldn't Baseline interpret.
  MOZ_ASSERT_IF(IsBaselineInterpreterEnabled(),
                CanBaselineInterpretScript(this));

  // Store the profile string in the JitScript if the profiler is enabled.
  const char* profileString = nullptr;
  if (cx->runtime()->geckoProfiler().enabled()) {
    profileString = cx->runtime()->geckoProfiler().profileString(cx, this);
    if (!profileString) {
      return false;
    }
  }

  static_assert(sizeof(JitScript) % sizeof(uintptr_t) == 0,
                "Trailing arrays must be aligned properly");
  static_assert(sizeof(ICEntry) % sizeof(uintptr_t) == 0,
                "Trailing arrays must be aligned properly");

  static_assert(
      sizeof(JitScript) == offsetof(JitScript, icScript_) + sizeof(ICScript),
      "icScript_ must be the last field");

  // Calculate allocation size. CheckedInt guards against overflow for
  // pathologically large scripts.
  CheckedInt<uint32_t> allocSize = sizeof(JitScript);
  allocSize += CheckedInt<uint32_t>(numICEntries()) * sizeof(ICEntry);
  allocSize += CheckedInt<uint32_t>(numICEntries()) * sizeof(ICFallbackStub);
  if (!allocSize.isValid()) {
    ReportAllocationOverflow(cx);
    return false;
  }

  void* raw = cx->pod_malloc<uint8_t>(allocSize.value());
  MOZ_ASSERT(uintptr_t(raw) % alignof(JitScript) == 0);
  if (!raw) {
    return false;
  }

  // The fallback stubs array starts immediately after the ICEntry array.
  size_t fallbackStubsOffset =
      sizeof(JitScript) + numICEntries() * sizeof(ICEntry);

  // Placement-new into the raw buffer; the UniquePtr owns it until the
  // warm-up data takes over below.
  UniquePtr<JitScript> jitScript(new (raw) JitScript(
      this, fallbackStubsOffset, allocSize.value(), profileString));

  // Sanity check the length computation.
  MOZ_ASSERT(jitScript->numICEntries() == numICEntries());

  jitScript->icScript()->initICEntries(cx, this);

  warmUpData_.initJitScript(jitScript.release());
  AddCellMemory(this, allocSize.value(), MemoryUse::JitScript);

  // We have a JitScript so we can set the script's jitCodeRaw pointer to the
  // Baseline Interpreter code.
  updateJitCodeRaw(cx->runtime());

  return true;
}
129
maybeReleaseJitScript(JSFreeOp * fop)130 void JSScript::maybeReleaseJitScript(JSFreeOp* fop) {
131 MOZ_ASSERT(hasJitScript());
132
133 if (zone()->jitZone()->keepJitScripts() || jitScript()->hasBaselineScript() ||
134 jitScript()->active()) {
135 return;
136 }
137
138 releaseJitScript(fop);
139 }
140
// Unconditionally destroy the JitScript. Callers must have destroyed the
// BaselineScript and IonScript first (asserted below).
void JSScript::releaseJitScript(JSFreeOp* fop) {
  MOZ_ASSERT(hasJitScript());
  MOZ_ASSERT(!hasBaselineScript());
  MOZ_ASSERT(!hasIonScript());

  // Update memory accounting before the allocation goes away.
  fop->removeCellMemory(this, jitScript()->allocBytes(), MemoryUse::JitScript);

  JitScript::Destroy(zone(), jitScript());
  warmUpData_.clearJitScript();
  // Without a JitScript the script can no longer point at Baseline
  // Interpreter code.
  updateJitCodeRaw(fop->runtime());
}
152
releaseJitScriptOnFinalize(JSFreeOp * fop)153 void JSScript::releaseJitScriptOnFinalize(JSFreeOp* fop) {
154 MOZ_ASSERT(hasJitScript());
155
156 if (hasIonScript()) {
157 IonScript* ion = jitScript()->clearIonScript(fop, this);
158 jit::IonScript::Destroy(fop, ion);
159 }
160
161 if (hasBaselineScript()) {
162 BaselineScript* baseline = jitScript()->clearBaselineScript(fop, this);
163 jit::BaselineScript::Destroy(fop, baseline);
164 }
165
166 releaseJitScript(fop);
167 }
168
// Trace the GC edge held by the cached Ion data (the template environment
// object, which may be null).
void JitScript::CachedIonData::trace(JSTracer* trc) {
  TraceNullableEdge(trc, &templateEnv, "jitscript-iondata-template-env");
}
172
// Trace all GC things reachable from this JitScript: the embedded ICScript,
// optional Baseline/Ion scripts, cached Ion data, and the inlining root.
void JitScript::trace(JSTracer* trc) {
  icScript_.trace(trc);

  if (hasBaselineScript()) {
    baselineScript()->trace(trc);
  }

  if (hasIonScript()) {
    ionScript()->trace(trc);
  }

  if (hasCachedIonData()) {
    cachedIonData().trace(trc);
  }

  if (hasInliningRoot()) {
    inliningRoot()->trace(trc);
  }
}
192
trace(JSTracer * trc)193 void ICScript::trace(JSTracer* trc) {
194 // Mark all IC stub codes hanging off the IC stub entries.
195 for (size_t i = 0; i < numICEntries(); i++) {
196 ICEntry& ent = icEntry(i);
197 ent.trace(trc);
198 }
199 }
200
// Record |child| as the ICScript inlined at |pcOffset|. Ownership of the
// child is transferred to the inlining root. Returns false on OOM, leaving
// this ICScript unchanged.
bool ICScript::addInlinedChild(JSContext* cx, UniquePtr<ICScript> child,
                               uint32_t pcOffset) {
  MOZ_ASSERT(!hasInlinedChild(pcOffset));

  // Lazily allocate the call-site vector on first use.
  if (!inlinedChildren_) {
    inlinedChildren_ = cx->make_unique<Vector<CallSite>>(cx);
    if (!inlinedChildren_) {
      return false;
    }
  }

  // First reserve space in inlinedChildren_ to ensure that if the ICScript is
  // added to the inlining root, it can also be added to inlinedChildren_.
  CallSite callsite(child.get(), pcOffset);
  if (!inlinedChildren_->reserve(inlinedChildren_->length() + 1)) {
    return false;
  }
  // The root takes ownership of the child; the raw pointer stored in
  // |callsite| stays valid because the root keeps the child alive.
  if (!inliningRoot()->addInlinedScript(std::move(child))) {
    return false;
  }
  // Cannot fail thanks to the reserve() above.
  inlinedChildren_->infallibleAppend(callsite);
  return true;
}
224
findInlinedChild(uint32_t pcOffset)225 ICScript* ICScript::findInlinedChild(uint32_t pcOffset) {
226 for (auto& callsite : *inlinedChildren_) {
227 if (callsite.pcOffset_ == pcOffset) {
228 return callsite.callee_;
229 }
230 }
231 MOZ_CRASH("Inlined child expected at pcOffset");
232 }
233
// Drop the call-site record for the child inlined at |pcOffset|. The child
// ICScript itself remains owned by the inlining root.
void ICScript::removeInlinedChild(uint32_t pcOffset) {
  MOZ_ASSERT(inliningRoot());
  inlinedChildren_->eraseIf([pcOffset](const CallSite& callsite) -> bool {
    return callsite.pcOffset_ == pcOffset;
  });
}
240
hasInlinedChild(uint32_t pcOffset)241 bool ICScript::hasInlinedChild(uint32_t pcOffset) {
242 if (!inlinedChildren_) {
243 return false;
244 }
245 for (auto& callsite : *inlinedChildren_) {
246 if (callsite.pcOffset_ == pcOffset) {
247 return true;
248 }
249 }
250 return false;
251 }
252
// Reset the warm-up counter for this script and, if present, for all scripts
// under the inlining root.
void JitScript::resetWarmUpCount(uint32_t count) {
  icScript_.resetWarmUpCount(count);
  if (hasInliningRoot()) {
    inliningRoot()->resetWarmUpCounts(count);
  }
}
259
ensureProfileString(JSContext * cx,JSScript * script)260 void JitScript::ensureProfileString(JSContext* cx, JSScript* script) {
261 MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled());
262
263 if (profileString_) {
264 return;
265 }
266
267 AutoEnterOOMUnsafeRegion oomUnsafe;
268 profileString_ = cx->runtime()->geckoProfiler().profileString(cx, script);
269 if (!profileString_) {
270 oomUnsafe.crash("Failed to allocate profile string");
271 }
272 }
273
/* static */
// Destroy a JitScript: defer freeing of nursery-visible stub memory (see
// prepareForDestruction) and then delete the JitScript allocation.
void JitScript::Destroy(Zone* zone, JitScript* script) {
  script->prepareForDestruction(zone);

  js_delete(script);
}
280
// Pre-destruction teardown: schedule stub-space memory for deferred freeing
// and clear the Baseline/Ion pointers with write barriers.
void JitScript::prepareForDestruction(Zone* zone) {
  // When the script contains pointers to nursery things, the store buffer can
  // contain entries that point into the fallback stub space. Since we can
  // destroy scripts outside the context of a GC, this situation could result
  // in us trying to mark invalid store buffer entries.
  //
  // Defer freeing any allocated blocks until after the next minor GC.
  jitScriptStubSpace_.freeAllAfterMinorGC(zone);

  // Trigger write barriers.
  baselineScript_.set(zone, nullptr);
  ionScript_.set(zone, nullptr);
}
294
// Adapter presenting an ICScript's fallback stubs as an indexable sequence,
// suitable for mozilla::BinarySearchIf below.
struct FallbackStubs {
  ICScript* const icScript_;

  explicit FallbackStubs(ICScript* icScript) : icScript_(icScript) {}

  // Number of fallback stubs (one per IC entry).
  size_t numEntries() const { return icScript_->numICEntries(); }
  // Index into the fallback stub array.
  ICFallbackStub* operator[](size_t index) const {
    return icScript_->fallbackStub(index);
  }
};
305
ComputeBinarySearchMid(FallbackStubs stubs,uint32_t pcOffset,size_t * loc)306 static bool ComputeBinarySearchMid(FallbackStubs stubs, uint32_t pcOffset,
307 size_t* loc) {
308 return mozilla::BinarySearchIf(
309 stubs, 0, stubs.numEntries(),
310 [pcOffset](const ICFallbackStub* stub) {
311 if (pcOffset < stub->pcOffset()) {
312 return -1;
313 }
314 if (stub->pcOffset() < pcOffset) {
315 return 1;
316 }
317 return 0;
318 },
319 loc);
320 }
321
// Return the ICEntry whose fallback stub is at exactly |pcOffset|. The entry
// must exist (asserted).
ICEntry& ICScript::icEntryFromPCOffset(uint32_t pcOffset) {
  size_t mid;
  MOZ_ALWAYS_TRUE(ComputeBinarySearchMid(FallbackStubs(this), pcOffset, &mid));

  MOZ_ASSERT(mid < numICEntries());

  ICEntry& entry = icEntry(mid);
  MOZ_ASSERT(fallbackStubForICEntry(&entry)->pcOffset() == pcOffset);
  return entry;
}
332
// Return the first ICEntry at or after |pcOffset|, or nullptr if there is
// none. Used when resuming in the Baseline Interpreter.
ICEntry* ICScript::interpreterICEntryFromPCOffset(uint32_t pcOffset) {
  // We have to return the entry to store in BaselineFrame::interpreterICEntry
  // when resuming in the Baseline Interpreter at pcOffset. The bytecode op at
  // pcOffset does not necessarily have an ICEntry, so we want to return the
  // first ICEntry for which the following is true:
  //
  //    entry.pcOffset() >= pcOffset
  //
  // Fortunately, ComputeBinarySearchMid returns exactly this entry.

  size_t mid;
  // Return value deliberately ignored: an inexact match still sets |mid| to
  // the first entry with pcOffset >= pcOffset.
  ComputeBinarySearchMid(FallbackStubs(this), pcOffset, &mid);

  if (mid < numICEntries()) {
    ICEntry& entry = icEntry(mid);
    MOZ_ASSERT(fallbackStubForICEntry(&entry)->pcOffset() >= pcOffset);
    return &entry;
  }

  // Resuming at a pc after the last ICEntry. Just return nullptr:
  // BaselineFrame::interpreterICEntry will never be used in this case.
  return nullptr;
}
356
// Discard optimized (CacheIR) IC stubs for |script| and any scripts inlined
// into it, unless the script is about to be finalized anyway.
void JitScript::purgeOptimizedStubs(JSScript* script) {
  MOZ_ASSERT(script->jitScript() == this);

  Zone* zone = script->zone();
  if (IsAboutToBeFinalizedUnbarriered(script)) {
    // We're sweeping and the script is dead. Don't purge optimized stubs
    // because (1) accessing CacheIRStubInfo pointers in ICStubs is invalid
    // because we may have swept them already when we started (incremental)
    // sweeping and (2) it's unnecessary because this script will be finalized
    // soon anyway.
    return;
  }

  JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");

  icScript()->purgeOptimizedStubs(zone);
  if (hasInliningRoot()) {
    inliningRoot()->purgeOptimizedStubs(zone);
  }
#ifdef DEBUG
  // Reset the recompilation-guard state used by hash() / debug checks.
  failedICHash_.reset();
  hasPurgedStubs_ = true;
#endif
}
381
// Unlink every CacheIR stub that was allocated in the optimized stub space,
// leaving only the fallback stub and stubs allocated in the fallback space on
// each IC chain.
void ICScript::purgeOptimizedStubs(Zone* zone) {
  for (size_t i = 0; i < numICEntries(); i++) {
    ICEntry& entry = icEntry(i);
    // Walk to the end of the chain: the last stub is always the fallback.
    ICStub* lastStub = entry.firstStub();
    while (!lastStub->isFallback()) {
      lastStub = lastStub->toCacheIRStub()->next();
    }

    // Unlink all stubs allocated in the optimized space.
    ICStub* stub = entry.firstStub();
    ICCacheIRStub* prev = nullptr;

    while (stub != lastStub) {
      if (!stub->toCacheIRStub()->allocatedInFallbackSpace()) {
        // Unlink this stub; |prev| stays put so the next iteration links
        // correctly past the removed stub.
        lastStub->toFallbackStub()->unlinkStub(zone, &entry, prev,
                                               stub->toCacheIRStub());
        stub = stub->toCacheIRStub()->next();
        continue;
      }

      prev = stub->toCacheIRStub();
      stub = stub->toCacheIRStub()->next();
    }
  }

#ifdef DEBUG
  // All remaining stubs must be allocated in the fallback space.
  for (size_t i = 0; i < numICEntries(); i++) {
    ICEntry& entry = icEntry(i);
    ICStub* stub = entry.firstStub();
    while (!stub->isFallback()) {
      MOZ_ASSERT(stub->toCacheIRStub()->allocatedInFallbackSpace());
      stub = stub->toCacheIRStub()->next();
    }
  }
#endif
}
419
// Simple aggregate constructor for data cached across Ion compilations.
JitScript::CachedIonData::CachedIonData(EnvironmentObject* templateEnv,
                                        IonBytecodeInfo bytecodeInfo)
    : templateEnv(templateEnv), bytecodeInfo(bytecodeInfo) {}
423
// Lazily create the CachedIonData (template environment chain + bytecode
// analysis) used by Ion compilation. Returns false on OOM.
bool JitScript::ensureHasCachedIonData(JSContext* cx, HandleScript script) {
  MOZ_ASSERT(script->jitScript() == this);

  if (hasCachedIonData()) {
    return true;
  }

  Rooted<EnvironmentObject*> templateEnv(cx);
  if (script->function()) {
    RootedFunction fun(cx, script->function());

    // Build the template environment chain outermost-first: the named lambda
    // environment (if any) encloses the call object (if any).
    if (fun->needsNamedLambdaEnvironment()) {
      templateEnv =
          NamedLambdaObject::createTemplateObject(cx, fun, gc::TenuredHeap);
      if (!templateEnv) {
        return false;
      }
    }

    if (fun->needsCallObject()) {
      templateEnv = CallObject::createTemplateObject(cx, script, templateEnv,
                                                     gc::TenuredHeap);
      if (!templateEnv) {
        return false;
      }
    }
  }

  IonBytecodeInfo bytecodeInfo = AnalyzeBytecodeForIon(cx, script);

  UniquePtr<CachedIonData> data =
      cx->make_unique<CachedIonData>(templateEnv, bytecodeInfo);
  if (!data) {
    return false;
  }

  cachedIonData_ = std::move(data);
  return true;
}
463
setBaselineScriptImpl(JSScript * script,BaselineScript * baselineScript)464 void JitScript::setBaselineScriptImpl(JSScript* script,
465 BaselineScript* baselineScript) {
466 JSRuntime* rt = script->runtimeFromMainThread();
467 setBaselineScriptImpl(rt->defaultFreeOp(), script, baselineScript);
468 }
469
// Install (or clear/replace) the BaselineScript pointer, keeping memory
// accounting, warm-up state, and the script's jitCodeRaw in sync.
void JitScript::setBaselineScriptImpl(JSFreeOp* fop, JSScript* script,
                                      BaselineScript* baselineScript) {
  if (hasBaselineScript()) {
    // Remove accounting for the old BaselineScript before overwriting it.
    fop->removeCellMemory(script, baselineScript_->allocBytes(),
                          MemoryUse::BaselineScript);
    baselineScript_.set(script->zone(), nullptr);
  }

  // Baseline code can only change while there is no (real) Ion code.
  MOZ_ASSERT(ionScript_ == nullptr || ionScript_ == IonDisabledScriptPtr);

  baselineScript_.set(script->zone(), baselineScript);
  if (hasBaselineScript()) {
    AddCellMemory(script, baselineScript_->allocBytes(),
                  MemoryUse::BaselineScript);
  }

  script->resetWarmUpResetCounter();
  script->updateJitCodeRaw(fop->runtime());
}
489
setIonScriptImpl(JSScript * script,IonScript * ionScript)490 void JitScript::setIonScriptImpl(JSScript* script, IonScript* ionScript) {
491 JSRuntime* rt = script->runtimeFromMainThread();
492 setIonScriptImpl(rt->defaultFreeOp(), script, ionScript);
493 }
494
// Install (or clear/replace) the IonScript pointer, keeping memory accounting
// and the script's jitCodeRaw in sync.
void JitScript::setIonScriptImpl(JSFreeOp* fop, JSScript* script,
                                 IonScript* ionScript) {
  // A pending off-thread Ion compilation must not be racing with this update
  // (the IonDisabled sentinel is exempt).
  MOZ_ASSERT_IF(ionScript != IonDisabledScriptPtr,
                !baselineScript()->hasPendingIonCompileTask());

  JS::Zone* zone = script->zone();
  if (hasIonScript()) {
    // Remove accounting for the old IonScript before overwriting it.
    fop->removeCellMemory(script, ionScript_->allocBytes(),
                          MemoryUse::IonScript);
    ionScript_.set(zone, nullptr);
  }

  ionScript_.set(zone, ionScript);
  // Ion code requires Baseline code to exist (bailouts re-enter Baseline).
  MOZ_ASSERT_IF(hasIonScript(), hasBaselineScript());
  if (hasIonScript()) {
    AddCellMemory(script, ionScript_->allocBytes(), MemoryUse::IonScript);
  }

  script->updateJitCodeRaw(fop->runtime());
}
515
516 #ifdef JS_STRUCTURED_SPEW
HasEnteredCounters(ICEntry & entry)517 static bool HasEnteredCounters(ICEntry& entry) {
518 ICStub* stub = entry.firstStub();
519 if (stub && !stub->isFallback()) {
520 return true;
521 }
522 return false;
523 }
524
// Emit structured-spew statistics (entered counts per stub) for every IC
// entry of |script| that has at least one CacheIR stub attached.
void jit::JitSpewBaselineICStats(JSScript* script, const char* dumpReason) {
  MOZ_ASSERT(script->hasJitScript());
  JSContext* cx = TlsContext.get();
  AutoStructuredSpewer spew(cx, SpewChannel::BaselineICStats, script);
  if (!spew) {
    // Spewing disabled for this channel/script.
    return;
  }

  JitScript* jitScript = script->jitScript();
  spew->property("reason", dumpReason);
  spew->beginListProperty("entries");
  for (size_t i = 0; i < jitScript->numICEntries(); i++) {
    ICEntry& entry = jitScript->icEntry(i);
    ICFallbackStub* fallback = jitScript->fallbackStub(i);
    if (!HasEnteredCounters(entry)) {
      continue;
    }

    uint32_t pcOffset = fallback->pcOffset();
    jsbytecode* pc = script->offsetToPC(pcOffset);

    unsigned column;
    unsigned int line = PCToLineNumber(script, pc, &column);

    spew->beginObject();
    spew->property("op", CodeName(JSOp(*pc)));
    spew->property("pc", pcOffset);
    spew->property("line", line);
    spew->property("column", column);

    // Entered counts for each CacheIR stub in chain order.
    spew->beginListProperty("counts");
    ICStub* stub = entry.firstStub();
    while (stub && !stub->isFallback()) {
      uint32_t count = stub->enteredCount();
      spew->value(count);
      stub = stub->toCacheIRStub()->next();
    }
    spew->endList();
    spew->property("fallback_count", fallback->enteredCount());
    spew->endObject();
  }
  spew->endList();
}
568 #endif
569
// Walk one JIT activation and mark the JitScript of every script with a frame
// on the stack as active, so it is not released while still running.
static void MarkActiveJitScripts(JSContext* cx,
                                 const JitActivationIterator& activation) {
  for (OnlyJSJitFrameIter iter(activation); !iter.done(); ++iter) {
    const JSJitFrameIter& frame = iter.frame();
    switch (frame.type()) {
      case FrameType::BaselineJS:
        frame.script()->jitScript()->setActive();
        break;
      case FrameType::Exit:
        // A lazy-link exit frame references a script whose Ion code is being
        // linked; keep its JitScript alive too.
        if (frame.exitFrame()->is<LazyLinkExitFrameLayout>()) {
          LazyLinkExitFrameLayout* ll =
              frame.exitFrame()->as<LazyLinkExitFrameLayout>();
          JSScript* script =
              ScriptFromCalleeToken(ll->jsFrame()->calleeToken());
          script->jitScript()->setActive();
        }
        break;
      case FrameType::Bailout:
      case FrameType::IonJS: {
        // Keep the JitScript and BaselineScript around, since bailouts from
        // the ion jitcode need to re-enter into the Baseline code.
        frame.script()->jitScript()->setActive();
        // Also mark every script inlined into this Ion frame.
        for (InlineFrameIterator inlineIter(cx, &frame); inlineIter.more();
             ++inlineIter) {
          inlineIter.script()->jitScript()->setActive();
        }
        break;
      }
      default:;
    }
  }
}
602
MarkActiveJitScripts(Zone * zone)603 void jit::MarkActiveJitScripts(Zone* zone) {
604 if (zone->isAtomsZone()) {
605 return;
606 }
607 JSContext* cx = TlsContext.get();
608 for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
609 if (iter->compartment()->zone() == zone) {
610 MarkActiveJitScripts(cx, iter);
611 }
612 }
613 }
614
// Return the InliningRoot for this JitScript, creating it on first use.
// Returns nullptr (with a pending OOM report) on allocation failure.
InliningRoot* JitScript::getOrCreateInliningRoot(JSContext* cx,
                                                JSScript* script) {
  if (!inliningRoot_) {
    inliningRoot_ = js::MakeUnique<InliningRoot>(cx, script);
    if (!inliningRoot_) {
      ReportOutOfMemory(cx);
      return nullptr;
    }
    // Keep the embedded ICScript's back-pointer in sync.
    icScript_.inliningRoot_ = inliningRoot_.get();
  }
  return inliningRoot_.get();
}
627
// Create a nursery allocation site for |script|, allocated in the stub space.
// Falls back to the zone's shared "unknown" site when the nursery cannot
// track more sites; returns nullptr only on OOM.
gc::AllocSite* JitScript::createAllocSite(JSScript* script) {
  MOZ_ASSERT(script->jitScript() == this);

  Nursery& nursery = script->runtimeFromMainThread()->gc.nursery();
  if (!nursery.canCreateAllocSite()) {
    // Don't block attaching an optimized stub, but don't process allocations
    // for this site.
    return script->zone()->unknownAllocSite();
  }

  // Reserve vector space first so the append below cannot fail after the
  // site has been allocated.
  if (!allocSites_.reserve(allocSites_.length() + 1)) {
    return nullptr;
  }

  ICStubSpace* stubSpace = jitScriptStubSpace();
  auto* site =
      static_cast<gc::AllocSite*>(stubSpace->alloc(sizeof(gc::AllocSite)));
  if (!site) {
    return nullptr;
  }

  new (site) gc::AllocSite(script->zone(), script);

  allocSites_.infallibleAppend(site);

  nursery.noteAllocSiteCreated();

  return site;
}
657
resetAllocSites(bool resetNurserySites,bool resetPretenuredSites)658 bool JitScript::resetAllocSites(bool resetNurserySites,
659 bool resetPretenuredSites) {
660 MOZ_ASSERT(resetNurserySites || resetPretenuredSites);
661
662 bool anyReset = false;
663
664 for (gc::AllocSite* site : allocSites_) {
665 if ((resetNurserySites && site->initialHeap() == gc::DefaultHeap) ||
666 (resetPretenuredSites && site->initialHeap() == gc::TenuredHeap)) {
667 if (site->maybeResetState()) {
668 anyReset = true;
669 }
670 }
671 }
672
673 return anyReset;
674 }
675
jitScriptStubSpace()676 JitScriptICStubSpace* ICScript::jitScriptStubSpace() {
677 if (isInlined()) {
678 return inliningRoot_->jitScriptStubSpace();
679 }
680 return outerJitScript()->jitScriptStubSpace();
681 }
682
// Recover the owning JitScript from a non-inlined ICScript. Valid because a
// non-inlined ICScript is embedded in its JitScript at a fixed offset.
JitScript* ICScript::outerJitScript() {
  MOZ_ASSERT(!isInlined());
  uint8_t* ptr = reinterpret_cast<uint8_t*>(this);
  return reinterpret_cast<JitScript*>(ptr - JitScript::offsetOfICScript());
}
688
689 #ifdef DEBUG
690 // This hash is used to verify that we do not recompile after a
691 // TranspiledCacheIR invalidation with the exact same ICs.
692 //
693 // It should change iff an ICEntry in this ICScript (or an ICScript
694 // inlined into this ICScript) is modified such that we will make a
695 // different decision in WarpScriptOracle::maybeInlineIC. This means:
696 //
697 // 1. The hash will change if we attach a new stub.
698 // 2. The hash will change if the entered count of any CacheIR stub
699 // other than the first changes from 0.
700 // 3. The hash will change if the entered count of the fallback stub
701 // changes from 0.
702 //
// Debug-only hash over IC state; see the comment above for when it changes.
HashNumber ICScript::hash() {
  HashNumber h = 0;
  for (size_t i = 0; i < numICEntries(); i++) {
    ICStub* stub = icEntry(i).firstStub();

    // Hash the address of the first stub.
    h = mozilla::AddToHash(h, stub);

    // Hash whether subsequent stubs have entry count 0.
    if (!stub->isFallback()) {
      stub = stub->toCacheIRStub()->next();
      while (!stub->isFallback()) {
        h = mozilla::AddToHash(h, stub->enteredCount() == 0);
        stub = stub->toCacheIRStub()->next();
      }
    }

    // Hash whether the fallback has entry count 0.
    MOZ_ASSERT(stub->isFallback());
    h = mozilla::AddToHash(h, stub->enteredCount() == 0);
  }

  // Fold in the hashes of all inlined children recursively.
  if (inlinedChildren_) {
    for (auto& callsite : *inlinedChildren_) {
      h = mozilla::AddToHash(h, callsite.callee_->hash());
    }
  }
  return h;
}
732 #endif
733