1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=8 sts=4 et sw=4 tw=99:
3  * This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "jit/SharedIC.h"
8 
9 #include "mozilla/Casting.h"
10 #include "mozilla/IntegerPrintfMacros.h"
11 #include "mozilla/Sprintf.h"
12 
13 #include "jslibmath.h"
14 #include "jstypes.h"
15 
16 #include "gc/Policy.h"
17 #include "jit/BaselineCacheIRCompiler.h"
18 #include "jit/BaselineDebugModeOSR.h"
19 #include "jit/BaselineIC.h"
20 #include "jit/JitSpewer.h"
21 #include "jit/Linker.h"
22 #include "jit/SharedICHelpers.h"
23 #ifdef JS_ION_PERF
24 #include "jit/PerfSpewer.h"
25 #endif
26 #include "jit/VMFunctions.h"
27 #include "vm/Interpreter.h"
28 #include "vm/StringType.h"
29 
30 #include "jit/MacroAssembler-inl.h"
31 #include "jit/SharedICHelpers-inl.h"
32 #include "vm/Interpreter-inl.h"
33 
34 using mozilla::BitwiseCast;
35 
36 namespace js {
37 namespace jit {
38 
39 #ifdef JS_JITSPEW
FallbackICSpew(JSContext * cx,ICFallbackStub * stub,const char * fmt,...)40 void FallbackICSpew(JSContext* cx, ICFallbackStub* stub, const char* fmt, ...) {
41   if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
42     RootedScript script(cx, GetTopJitJSScript(cx));
43     jsbytecode* pc = stub->icEntry()->pc(script);
44 
45     char fmtbuf[100];
46     va_list args;
47     va_start(args, fmt);
48     (void)VsprintfLiteral(fmtbuf, fmt, args);
49     va_end(args);
50 
51     JitSpew(JitSpew_BaselineICFallback,
52             "Fallback hit for (%s:%zu) (pc=%zu,line=%d,uses=%d,stubs=%zu): %s",
53             script->filename(), script->lineno(), script->pcToOffset(pc),
54             PCToLineNumber(script, pc), script->getWarmUpCount(),
55             stub->numOptimizedStubs(), fmtbuf);
56   }
57 }
58 
TypeFallbackICSpew(JSContext * cx,ICTypeMonitor_Fallback * stub,const char * fmt,...)59 void TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub,
60                         const char* fmt, ...) {
61   if (JitSpewEnabled(JitSpew_BaselineICFallback)) {
62     RootedScript script(cx, GetTopJitJSScript(cx));
63     jsbytecode* pc = stub->icEntry()->pc(script);
64 
65     char fmtbuf[100];
66     va_list args;
67     va_start(args, fmt);
68     (void)VsprintfLiteral(fmtbuf, fmt, args);
69     va_end(args);
70 
71     JitSpew(JitSpew_BaselineICFallback,
72             "Type monitor fallback hit for (%s:%zu) "
73             "(pc=%zu,line=%d,uses=%d,stubs=%d): %s",
74             script->filename(), script->lineno(), script->pcToOffset(pc),
75             PCToLineNumber(script, pc), script->getWarmUpCount(),
76             (int)stub->numOptimizedMonitorStubs(), fmtbuf);
77   }
78 }
79 #endif  // JS_JITSPEW
80 
fallbackStub() const81 ICFallbackStub* ICEntry::fallbackStub() const {
82   return firstStub()->getChainFallback();
83 }
84 
// Trace the script this Ion IC entry belongs to (a manually barriered
// edge), then the stub chain shared with the base class.
void IonICEntry::trace(JSTracer* trc) {
  TraceManuallyBarrieredEdge(trc, &script_, "IonICEntry::script_");
  traceEntry(trc);
}
89 
// Baseline entries hold no extra GC edges beyond the stub chain itself.
void BaselineICEntry::trace(JSTracer* trc) { traceEntry(trc); }
91 
traceEntry(JSTracer * trc)92 void ICEntry::traceEntry(JSTracer* trc) {
93   if (!hasStub()) return;
94   for (ICStub* stub = firstStub(); stub; stub = stub->next()) stub->trace(trc);
95 }
96 
operator ++()97 ICStubConstIterator& ICStubConstIterator::operator++() {
98   MOZ_ASSERT(currentStub_ != nullptr);
99   currentStub_ = currentStub_->next();
100   return *this;
101 }
102 
// Construct an iterator over the stub chain owned by |fallbackStub|'s IC
// entry. With |end| set, the iterator starts positioned on the fallback
// stub itself (the chain terminator) instead of on the first stub.
ICStubIterator::ICStubIterator(ICFallbackStub* fallbackStub, bool end)
    : icEntry_(fallbackStub->icEntry()),
      fallbackStub_(fallbackStub),
      previousStub_(nullptr),
      currentStub_(end ? fallbackStub : icEntry_->firstStub()),
      unlinked_(false) {}
109 
operator ++()110 ICStubIterator& ICStubIterator::operator++() {
111   MOZ_ASSERT(currentStub_->next() != nullptr);
112   if (!unlinked_) previousStub_ = currentStub_;
113   currentStub_ = currentStub_->next();
114   unlinked_ = false;
115   return *this;
116 }
117 
// Unlink the stub at the current iterator position from the chain. The
// fallback stub itself can never be unlinked. The iterator stays valid:
// operator++ will continue from the predecessor.
void ICStubIterator::unlink(JSContext* cx) {
  MOZ_ASSERT(currentStub_->next() != nullptr);
  MOZ_ASSERT(currentStub_ != fallbackStub_);
  MOZ_ASSERT(!unlinked_);

  fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);

  // Mark the current iterator position as unlinked, so operator++ works
  // properly.
  unlinked_ = true;
}
129 
// Static classification for non-CacheIR stub kinds: does code of this kind
// push a stub frame / make non-tail calls that can trigger GC?
/* static */ bool ICStub::NonCacheIRStubMakesGCCalls(Kind kind) {
  MOZ_ASSERT(IsValidKind(kind));
  MOZ_ASSERT(!IsCacheIRKind(kind));

  switch (kind) {
    // All call stubs can invoke arbitrary script/native code.
    case Call_Fallback:
    case Call_Scripted:
    case Call_AnyScripted:
    case Call_Native:
    case Call_ClassHook:
    case Call_ScriptedApplyArray:
    case Call_ScriptedApplyArguments:
    case Call_ScriptedFunCall:
    case Call_ConstStringSplit:
    case WarmUpCounter_Fallback:
    case RetSub_Fallback:
    // These two fallback stubs don't actually make non-tail calls,
    // but the fallback code for the bailout path needs to pop the stub frame
    // pushed during the bailout.
    case GetProp_Fallback:
    case SetProp_Fallback:
      return true;
    default:
      return false;
  }
}
156 
makesGCCalls() const157 bool ICStub::makesGCCalls() const {
158   switch (kind()) {
159     case CacheIR_Regular:
160       return toCacheIR_Regular()->stubInfo()->makesGCCalls();
161     case CacheIR_Monitored:
162       return toCacheIR_Monitored()->stubInfo()->makesGCCalls();
163     case CacheIR_Updated:
164       return toCacheIR_Updated()->stubInfo()->makesGCCalls();
165     default:
166       return NonCacheIRStubMakesGCCalls(kind());
167   }
168 }
169 
traceCode(JSTracer * trc,const char * name)170 void ICStub::traceCode(JSTracer* trc, const char* name) {
171   JitCode* stubJitCode = jitCode();
172   TraceManuallyBarrieredEdge(trc, &stubJitCode, name);
173 }
174 
// Repoint this stub at freshly compiled code.
void ICStub::updateCode(JitCode* code) {
  // Write barrier on the old code.
  JitCode::writeBarrierPre(jitCode());
  stubCode_ = code->raw();
}
180 
// Trace all GC edges held by this stub: its jitcode, any attached monitor
// or update stub chains, and per-kind object/string/group fields.
// (Note: this is an instance method, not static — it reads this stub's
// kind and kind-specific fields.)
void ICStub::trace(JSTracer* trc) {
  traceCode(trc, "shared-stub-jitcode");

  // If the stub is a monitored fallback stub, then trace the monitor ICs
  // hanging off of that stub.  We don't need to worry about the regular
  // monitored stubs, because the regular monitored stubs will always have a
  // monitored fallback stub that references the same stub chain.
  if (isMonitoredFallback()) {
    ICTypeMonitor_Fallback* lastMonStub =
        toMonitoredFallbackStub()->maybeFallbackMonitorStub();
    if (lastMonStub) {
      for (ICStubConstIterator iter(lastMonStub->firstMonitorStub());
           !iter.atEnd(); iter++) {
        // The monitor chain must terminate at the fallback monitor stub.
        MOZ_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
        iter->trace(trc);
      }
    }
  }

  if (isUpdated()) {
    for (ICStubConstIterator iter(toUpdatedStub()->firstUpdateStub());
         !iter.atEnd(); iter++) {
      // The update chain must terminate at a TypeUpdate fallback stub.
      MOZ_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
      iter->trace(trc);
    }
  }

  // Trace kind-specific GC pointers stored directly in the stub.
  switch (kind()) {
    case ICStub::Call_Scripted: {
      ICCall_Scripted* callStub = toCall_Scripted();
      TraceEdge(trc, &callStub->callee(), "baseline-callscripted-callee");
      TraceNullableEdge(trc, &callStub->templateObject(),
                        "baseline-callscripted-template");
      break;
    }
    case ICStub::Call_Native: {
      ICCall_Native* callStub = toCall_Native();
      TraceEdge(trc, &callStub->callee(), "baseline-callnative-callee");
      TraceNullableEdge(trc, &callStub->templateObject(),
                        "baseline-callnative-template");
      break;
    }
    case ICStub::Call_ClassHook: {
      ICCall_ClassHook* callStub = toCall_ClassHook();
      TraceNullableEdge(trc, &callStub->templateObject(),
                        "baseline-callclasshook-template");
      break;
    }
    case ICStub::Call_ConstStringSplit: {
      ICCall_ConstStringSplit* callStub = toCall_ConstStringSplit();
      TraceEdge(trc, &callStub->templateObject(),
                "baseline-callstringsplit-template");
      TraceEdge(trc, &callStub->expectedSep(), "baseline-callstringsplit-sep");
      TraceEdge(trc, &callStub->expectedStr(), "baseline-callstringsplit-str");
      break;
    }
    case ICStub::TypeMonitor_SingleObject: {
      ICTypeMonitor_SingleObject* monitorStub = toTypeMonitor_SingleObject();
      TraceEdge(trc, &monitorStub->object(), "baseline-monitor-singleton");
      break;
    }
    case ICStub::TypeMonitor_ObjectGroup: {
      ICTypeMonitor_ObjectGroup* monitorStub = toTypeMonitor_ObjectGroup();
      TraceEdge(trc, &monitorStub->group(), "baseline-monitor-group");
      break;
    }
    case ICStub::TypeUpdate_SingleObject: {
      ICTypeUpdate_SingleObject* updateStub = toTypeUpdate_SingleObject();
      TraceEdge(trc, &updateStub->object(), "baseline-update-singleton");
      break;
    }
    case ICStub::TypeUpdate_ObjectGroup: {
      ICTypeUpdate_ObjectGroup* updateStub = toTypeUpdate_ObjectGroup();
      TraceEdge(trc, &updateStub->group(), "baseline-update-group");
      break;
    }
    case ICStub::NewArray_Fallback: {
      ICNewArray_Fallback* stub = toNewArray_Fallback();
      TraceNullableEdge(trc, &stub->templateObject(),
                        "baseline-newarray-template");
      TraceEdge(trc, &stub->templateGroup(),
                "baseline-newarray-template-group");
      break;
    }
    case ICStub::NewObject_Fallback: {
      ICNewObject_Fallback* stub = toNewObject_Fallback();
      TraceNullableEdge(trc, &stub->templateObject(),
                        "baseline-newobject-template");
      break;
    }
    case ICStub::Rest_Fallback: {
      ICRest_Fallback* stub = toRest_Fallback();
      TraceEdge(trc, &stub->templateObject(), "baseline-rest-template");
      break;
    }
    // CacheIR stubs describe their own GC edges in the stub info.
    case ICStub::CacheIR_Regular:
      TraceCacheIRStub(trc, this, toCacheIR_Regular()->stubInfo());
      break;
    case ICStub::CacheIR_Monitored:
      TraceCacheIRStub(trc, this, toCacheIR_Monitored()->stubInfo());
      break;
    case ICStub::CacheIR_Updated: {
      ICCacheIR_Updated* stub = toCacheIR_Updated();
      TraceNullableEdge(trc, &stub->updateStubGroup(),
                        "baseline-update-stub-group");
      TraceEdge(trc, &stub->updateStubId(), "baseline-update-stub-id");
      TraceCacheIRStub(trc, this, stub->stubInfo());
      break;
    }
    default:
      // Remaining kinds hold no additional GC pointers.
      break;
  }
}
294 
// Remove |stub| from this fallback stub's chain. |prev| is its predecessor
// in the chain, or null if |stub| is the entry's first stub. Fixes up the
// chain pointers, notifies the incremental GC of the edges being removed,
// and (in debug builds) poisons the dead stub's code pointer.
void ICFallbackStub::unlinkStub(Zone* zone, ICStub* prev, ICStub* stub) {
  MOZ_ASSERT(stub->next());

  // If stub is the last optimized stub, update lastStubPtrAddr.
  if (stub->next() == this) {
    MOZ_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
    if (prev)
      lastStubPtrAddr_ = prev->addressOfNext();
    else
      lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
    *lastStubPtrAddr_ = this;
  } else {
    if (prev) {
      MOZ_ASSERT(prev->next() == stub);
      prev->setNext(stub->next());
    } else {
      MOZ_ASSERT(icEntry()->firstStub() == stub);
      icEntry()->setFirstStub(stub->next());
    }
  }

  state_.trackUnlinkedStub();

  if (zone->needsIncrementalBarrier()) {
    // We are removing edges from ICStub to gcthings. Perform one final trace
    // of the stub for incremental GC, as it must know about those edges.
    stub->trace(zone->barrierTracer());
  }

  if (stub->makesGCCalls() && stub->isMonitored()) {
    // This stub can make calls so we can return to it if it's on the stack.
    // We just have to reset its firstMonitorStub_ field to avoid a stale
    // pointer when purgeOptimizedStubs destroys all optimized monitor
    // stubs (unlinked stubs won't be updated).
    ICTypeMonitor_Fallback* monitorFallback =
        toMonitoredFallbackStub()->maybeFallbackMonitorStub();
    MOZ_ASSERT(monitorFallback);
    stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
  }

#ifdef DEBUG
  // Poison stub code to ensure we don't call this stub again. However, if
  // this stub can make calls, a pointer to it may be stored in a stub frame
  // on the stack, so we can't touch the stubCode_ or GC will crash when
  // tracing this pointer.
  if (!stub->makesGCCalls()) stub->stubCode_ = (uint8_t*)0xbad;
#endif
}
343 
unlinkStubsWithKind(JSContext * cx,ICStub::Kind kind)344 void ICFallbackStub::unlinkStubsWithKind(JSContext* cx, ICStub::Kind kind) {
345   for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
346     if (iter->kind() == kind) iter.unlink(cx);
347   }
348 }
349 
discardStubs(JSContext * cx)350 void ICFallbackStub::discardStubs(JSContext* cx) {
351   for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++)
352     iter.unlink(cx);
353 }
354 
// Discard all optimized monitor stubs, leaving this fallback monitor stub
// as the only entry in the monitor chain, and repoint every stub that
// cached a firstMonitorStub_ at the old chain head.
void ICTypeMonitor_Fallback::resetMonitorStubChain(Zone* zone) {
  if (zone->needsIncrementalBarrier()) {
    // We are removing edges from monitored stubs to gcthings (JitCode).
    // Perform one final trace of all monitor stubs for incremental GC,
    // as it must know about those edges.
    for (ICStub* s = firstMonitorStub_; !s->isTypeMonitor_Fallback();
         s = s->next())
      s->trace(zone->barrierTracer());
  }

  firstMonitorStub_ = this;
  numOptimizedMonitorStubs_ = 0;

  if (hasFallbackStub_) {
    lastMonitorStubPtrAddr_ = nullptr;

    // Reset firstMonitorStub_ field of all monitored stubs.
    for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
         !iter.atEnd(); iter++) {
      if (!iter->isMonitored()) continue;
      iter->toMonitoredStub()->resetFirstMonitorStub(this);
    }
  } else {
    // No main fallback stub: this IC entry's stub chain is the monitor
    // chain itself, so make the entry point back at this stub.
    icEntry_->setFirstStub(this);
    lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
  }
}
382 
// Walk the update chain, discarding every optimized update stub until only
// the TypeUpdate fallback stub remains as the chain head.
void ICUpdatedStub::resetUpdateStubChain(Zone* zone) {
  while (!firstUpdateStub_->isTypeUpdate_Fallback()) {
    if (zone->needsIncrementalBarrier()) {
      // We are removing edges from update stubs to gcthings (JitCode).
      // Perform one final trace of all update stubs for incremental GC,
      // as it must know about those edges.
      firstUpdateStub_->trace(zone->barrierTracer());
    }
    firstUpdateStub_ = firstUpdateStub_->next();
  }

  numOptimizedStubs_ = 0;
}
396 
// Construct a monitored stub whose result flows into the given monitor
// chain, starting at |firstMonitorStub|.
ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode* stubCode,
                                 ICStub* firstMonitorStub)
    : ICStub(kind, ICStub::Monitored, stubCode),
      firstMonitorStub_(firstMonitorStub) {
  // In order to silence Coverity - null pointer dereference checker
  MOZ_ASSERT(firstMonitorStub_);
  // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
  // double check that _its_ firstMonitorStub is the same as this one.
  MOZ_ASSERT_IF(
      firstMonitorStub_->isTypeMonitor_Fallback(),
      firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
          firstMonitorStub_);
}
410 
initMonitoringChain(JSContext * cx,JSScript * script)411 bool ICMonitoredFallbackStub::initMonitoringChain(JSContext* cx,
412                                                   JSScript* script) {
413   MOZ_ASSERT(fallbackMonitorStub_ == nullptr);
414 
415   ICTypeMonitor_Fallback::Compiler compiler(cx, this);
416   ICStubSpace* space = script->baselineScript()->fallbackStubSpace();
417   ICTypeMonitor_Fallback* stub = compiler.getStub(space);
418   if (!stub) return false;
419   fallbackMonitorStub_ = stub;
420   return true;
421 }
422 
addMonitorStubForValue(JSContext * cx,BaselineFrame * frame,StackTypeSet * types,HandleValue val)423 bool ICMonitoredFallbackStub::addMonitorStubForValue(JSContext* cx,
424                                                      BaselineFrame* frame,
425                                                      StackTypeSet* types,
426                                                      HandleValue val) {
427   ICTypeMonitor_Fallback* typeMonitorFallback =
428       getFallbackMonitorStub(cx, frame->script());
429   if (!typeMonitorFallback) return false;
430   return typeMonitorFallback->addMonitorStubForValue(cx, frame, types, val);
431 }
432 
initUpdatingChain(JSContext * cx,ICStubSpace * space)433 bool ICUpdatedStub::initUpdatingChain(JSContext* cx, ICStubSpace* space) {
434   MOZ_ASSERT(firstUpdateStub_ == nullptr);
435 
436   ICTypeUpdate_Fallback::Compiler compiler(cx);
437   ICTypeUpdate_Fallback* stub = compiler.getStub(space);
438   if (!stub) return false;
439 
440   firstUpdateStub_ = stub;
441   return true;
442 }
443 
// Return (possibly cached) JitCode for this compiler's stub. Stub code is
// shared per-compartment, keyed by getKey(), so identical stub shapes are
// compiled only once.
JitCode* ICStubCompiler::getStubCode() {
  JitCompartment* comp = cx->compartment()->jitCompartment();

  // Check for existing cached stubcode.
  uint32_t stubKey = getKey();
  JitCode* stubCode = comp->getStubCode(stubKey);
  if (stubCode) return stubCode;

  // Compile new stubcode.
  JitContext jctx(cx, nullptr);
  MacroAssembler masm;
#ifndef JS_USE_LINK_REGISTER
  // The first value contains the return address,
  // which we pull into ICTailCallReg for tail calls.
  masm.adjustFrame(sizeof(intptr_t));
#endif
#ifdef JS_CODEGEN_ARM
  masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

  if (!generateStubCode(masm)) return nullptr;
  Linker linker(masm);
  AutoFlushICache afc("getStubCode");
  Rooted<JitCode*> newStubCode(cx, linker.newCode(cx, CodeKind::Baseline));
  if (!newStubCode) return nullptr;

  // Cache newly compiled stubcode.
  if (!comp->putStubCode(cx, stubKey, newStubCode)) return nullptr;

  // After generating code, run postGenerateStubCode().  We must not fail
  // after this point.
  postGenerateStubCode(masm, newStubCode);

  // Sanity-check that stub-frame usage matched the static classification.
  MOZ_ASSERT(entersStubFrame_ == ICStub::NonCacheIRStubMakesGCCalls(kind));
  MOZ_ASSERT(!inStubFrame_);

#ifdef JS_ION_PERF
  writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

  return newStubCode;
}
486 
tailCallVM(const VMFunction & fun,MacroAssembler & masm)487 bool ICStubCompiler::tailCallVM(const VMFunction& fun, MacroAssembler& masm) {
488   TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
489   MOZ_ASSERT(fun.expectTailCall == TailCall);
490   uint32_t argSize = fun.explicitStackSlots() * sizeof(void*);
491   if (engine_ == Engine::Baseline) {
492     EmitBaselineTailCallVM(code, masm, argSize);
493   } else {
494     uint32_t stackSize = argSize + fun.extraValuesToPop * sizeof(Value);
495     EmitIonTailCallVM(code, masm, stackSize);
496   }
497   return true;
498 }
499 
callVM(const VMFunction & fun,MacroAssembler & masm)500 bool ICStubCompiler::callVM(const VMFunction& fun, MacroAssembler& masm) {
501   MOZ_ASSERT(inStubFrame_);
502 
503   TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
504   MOZ_ASSERT(fun.expectTailCall == NonTailCall);
505   MOZ_ASSERT(engine_ == Engine::Baseline);
506 
507   EmitBaselineCallVM(code, masm);
508   return true;
509 }
510 
// Emit code to push a stub frame so the stub can make non-tail VM calls.
// Must be balanced by a later leaveStubFrame() before the stub returns.
void ICStubCompiler::enterStubFrame(MacroAssembler& masm, Register scratch) {
  MOZ_ASSERT(engine_ == Engine::Baseline);
  EmitBaselineEnterStubFrame(masm, scratch);
#ifdef DEBUG
  // Remember the frame depth so leaveStubFrame can restore and check it.
  framePushedAtEnterStubFrame_ = masm.framePushed();
#endif

  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;
#endif
}
525 
// Mark the compiler as already being inside a stub frame that was pushed
// by previously generated code, without emitting any instructions.
void ICStubCompiler::assumeStubFrame() {
  MOZ_ASSERT(!inStubFrame_);
  inStubFrame_ = true;

#ifdef DEBUG
  entersStubFrame_ = true;

  // |framePushed| isn't tracked precisely in ICStubs, so simply assume it to
  // be STUB_FRAME_SIZE so that assertions don't fail in leaveStubFrame.
  framePushedAtEnterStubFrame_ = STUB_FRAME_SIZE;
#endif
}
538 
// Emit code to tear down the stub frame pushed by enterStubFrame.
// |calledIntoIon| must reflect whether the stub called into Ion code,
// which leaves an extra word on the stack.
void ICStubCompiler::leaveStubFrame(MacroAssembler& masm, bool calledIntoIon) {
  MOZ_ASSERT(entersStubFrame_ && inStubFrame_);
  inStubFrame_ = false;

  MOZ_ASSERT(engine_ == Engine::Baseline);
#ifdef DEBUG
  masm.setFramePushed(framePushedAtEnterStubFrame_);
  if (calledIntoIon)
    masm.adjustFrame(sizeof(intptr_t));  // Calls into ion have this extra.
#endif
  EmitBaselineLeaveStubFrame(masm, calledIntoIon);
}
551 
pushStubPayload(MacroAssembler & masm,Register scratch)552 void ICStubCompiler::pushStubPayload(MacroAssembler& masm, Register scratch) {
553   if (engine_ == Engine::IonSharedIC) {
554     masm.push(Imm32(0));
555     return;
556   }
557 
558   if (inStubFrame_) {
559     masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
560     masm.pushBaselineFramePtr(scratch, scratch);
561   } else {
562     masm.pushBaselineFramePtr(BaselineFrameReg, scratch);
563   }
564 }
565 
// Like pushStubPayload, but also accounts for the pushed word in the
// tracked frame size.
void ICStubCompiler::PushStubPayload(MacroAssembler& masm, Register scratch) {
  pushStubPayload(masm, scratch);
  masm.adjustFrame(sizeof(intptr_t));
}
570 
// Gather script/frame context for a shared stub invocation. |payload| is
// the BaselineFrame pointer for baseline callers, or null for Ion callers
// (in which case |icEntry| is an IonICEntry carrying the inner script).
SharedStubInfo::SharedStubInfo(JSContext* cx, void* payload, ICEntry* icEntry)
    : maybeFrame_(nullptr),
      outerScript_(cx),
      innerScript_(cx),
      icEntry_(icEntry) {
  if (payload) {
    maybeFrame_ = (BaselineFrame*)payload;
    // For baseline there is no inlining: outer and inner script coincide.
    outerScript_ = maybeFrame_->script();
    innerScript_ = maybeFrame_->script();
  } else {
    IonICEntry* entry = (IonICEntry*)icEntry;
    innerScript_ = entry->script();
    // outerScript_ is initialized lazily.
  }
}
586 
// Return the outermost script for this stub call, computing it lazily for
// Ion callers by walking from the exit frame up to the Ion JS frame.
HandleScript SharedStubInfo::outerScript(JSContext* cx) {
  if (!outerScript_) {
    js::jit::JitActivationIterator actIter(cx);
    JSJitFrameIter it(actIter->asJit());
    MOZ_ASSERT(it.isExitFrame());
    ++it;
    MOZ_ASSERT(it.isIonJS());
    outerScript_ = it.script();
    MOZ_ASSERT(!it.ionScript()->invalidated());
  }
  return outerScript_;
}
599 
600 //
601 // BinaryArith_Fallback
602 //
603 
// Fallback path for binary arithmetic ICs. Performs the operation in the
// VM, then tries to attach an optimized stub specialized to the observed
// operand types (string concat, boolean/int32, double, int32, mixed
// bit-ops). Returns false only on exception/OOM.
static bool DoBinaryArithFallback(JSContext* cx, void* payload,
                                  ICBinaryArith_Fallback* stub_,
                                  HandleValue lhs, HandleValue rhs,
                                  MutableHandleValue ret) {
  SharedStubInfo info(cx, payload, stub_->icEntry());
  ICStubCompiler::Engine engine = info.engine();

  // This fallback stub may trigger debug mode toggling.
  DebugModeOSRVolatileStub<ICBinaryArith_Fallback*> stub(
      engine, info.maybeFrame(), stub_);

  jsbytecode* pc = info.pc();
  JSOp op = JSOp(*pc);
  FallbackICSpew(
      cx, stub, "BinaryArith(%s,%d,%d)", CodeName[op],
      int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
      int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

  // Don't pass lhs/rhs directly, we need the original values when
  // generating stubs.
  RootedValue lhsCopy(cx, lhs);
  RootedValue rhsCopy(cx, rhs);

  // Perform the arith operation.
  switch (op) {
    case JSOP_ADD:
      // Do an add.
      if (!AddValues(cx, &lhsCopy, &rhsCopy, ret)) return false;
      break;
    case JSOP_SUB:
      if (!SubValues(cx, &lhsCopy, &rhsCopy, ret)) return false;
      break;
    case JSOP_MUL:
      if (!MulValues(cx, &lhsCopy, &rhsCopy, ret)) return false;
      break;
    case JSOP_DIV:
      if (!DivValues(cx, &lhsCopy, &rhsCopy, ret)) return false;
      break;
    case JSOP_MOD:
      if (!ModValues(cx, &lhsCopy, &rhsCopy, ret)) return false;
      break;
    case JSOP_POW:
      if (!math_pow_handle(cx, lhsCopy, rhsCopy, ret)) return false;
      break;
    case JSOP_BITOR: {
      int32_t result;
      if (!BitOr(cx, lhs, rhs, &result)) return false;
      ret.setInt32(result);
      break;
    }
    case JSOP_BITXOR: {
      int32_t result;
      if (!BitXor(cx, lhs, rhs, &result)) return false;
      ret.setInt32(result);
      break;
    }
    case JSOP_BITAND: {
      int32_t result;
      if (!BitAnd(cx, lhs, rhs, &result)) return false;
      ret.setInt32(result);
      break;
    }
    case JSOP_LSH: {
      int32_t result;
      if (!BitLsh(cx, lhs, rhs, &result)) return false;
      ret.setInt32(result);
      break;
    }
    case JSOP_RSH: {
      int32_t result;
      if (!BitRsh(cx, lhs, rhs, &result)) return false;
      ret.setInt32(result);
      break;
    }
    case JSOP_URSH: {
      if (!UrshOperation(cx, lhs, rhs, ret)) return false;
      break;
    }
    default:
      MOZ_CRASH("Unhandled baseline arith op");
  }

  // Check if debug mode toggling made the stub invalid.
  if (stub.invalid()) return true;

  if (ret.isDouble()) stub->setSawDoubleResult();

  // Check to see if a new stub should be generated.
  if (stub->numOptimizedStubs() >=
      ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
    stub->noteUnoptimizableOperands();
    return true;
  }

  // Handle string concat.
  if (op == JSOP_ADD) {
    if (lhs.isString() && rhs.isString()) {
      JitSpew(JitSpew_BaselineIC, "  Generating %s(String, String) stub",
              CodeName[op]);
      MOZ_ASSERT(ret.isString());
      ICBinaryArith_StringConcat::Compiler compiler(cx, engine);
      ICStub* strcatStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!strcatStub) return false;
      stub->addNewStub(strcatStub);
      return true;
    }

    // String + Object (or Object + String) also concatenates after
    // conversion; attach the string/object variant.
    if ((lhs.isString() && rhs.isObject()) ||
        (lhs.isObject() && rhs.isString())) {
      JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
              lhs.isString() ? "String" : "Object",
              lhs.isString() ? "Object" : "String");
      MOZ_ASSERT(ret.isString());
      ICBinaryArith_StringObjectConcat::Compiler compiler(cx, engine,
                                                          lhs.isString());
      ICStub* strcatStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!strcatStub) return false;
      stub->addNewStub(strcatStub);
      return true;
    }
  }

  // Boolean combined with Boolean/Int32, for the ops that stay in int32.
  if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
       (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
      (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR ||
       op == JSOP_BITAND || op == JSOP_BITXOR)) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
            lhs.isBoolean() ? "Boolean" : "Int32",
            rhs.isBoolean() ? "Boolean" : "Int32");
    ICBinaryArith_BooleanWithInt32::Compiler compiler(
        cx, op, engine, lhs.isBoolean(), rhs.isBoolean());
    ICStub* arithStub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!arithStub) return false;
    stub->addNewStub(arithStub);
    return true;
  }

  // Handle only int32 or double.
  if (!lhs.isNumber() || !rhs.isNumber()) {
    stub->noteUnoptimizableOperands();
    return true;
  }

  MOZ_ASSERT(ret.isNumber());

  if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
    if (!cx->runtime()->jitSupportsFloatingPoint) return true;

    switch (op) {
      case JSOP_ADD:
      case JSOP_SUB:
      case JSOP_MUL:
      case JSOP_DIV:
      case JSOP_MOD: {
        // Unlink int32 stubs, it's faster to always use the double stub.
        stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
        JitSpew(JitSpew_BaselineIC, "  Generating %s(Double, Double) stub",
                CodeName[op]);

        ICBinaryArith_Double::Compiler compiler(cx, op, engine);
        ICStub* doubleStub =
            compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!doubleStub) return false;
        stub->addNewStub(doubleStub);
        return true;
      }
      default:
        break;
    }
  }

  if (lhs.isInt32() && rhs.isInt32() && op != JSOP_POW) {
    // If an int32 op overflowed into a double, re-attach an int32 stub
    // that permits double results instead of keeping the strict one.
    bool allowDouble = ret.isDouble();
    if (allowDouble) stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
    JitSpew(JitSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub",
            CodeName[op], allowDouble ? " => Double" : "");
    ICBinaryArith_Int32::Compiler compilerInt32(cx, op, engine, allowDouble);
    ICStub* int32Stub =
        compilerInt32.getStub(compilerInt32.getStubSpace(info.outerScript(cx)));
    if (!int32Stub) return false;
    stub->addNewStub(int32Stub);
    return true;
  }

  // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
  if (((lhs.isDouble() && rhs.isInt32()) ||
       (lhs.isInt32() && rhs.isDouble())) &&
      ret.isInt32()) {
    switch (op) {
      case JSOP_BITOR:
      case JSOP_BITXOR:
      case JSOP_BITAND: {
        JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub",
                CodeName[op], lhs.isDouble() ? "Double" : "Int32",
                lhs.isDouble() ? "Int32" : "Double");
        ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, engine,
                                                         lhs.isDouble());
        ICStub* optStub =
            compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
        if (!optStub) return false;
        stub->addNewStub(optStub);
        return true;
      }
      default:
        break;
    }
  }

  // No specialization matched the operand types.
  stub->noteUnoptimizableOperands();
  return true;
}
818 
// VM function wrapper for DoBinaryArithFallback; tail-called from the IC,
// popping the two operand Values that were synced on the stack.
typedef bool (*DoBinaryArithFallbackFn)(JSContext*, void*,
                                        ICBinaryArith_Fallback*, HandleValue,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoBinaryArithFallbackInfo =
    FunctionInfo<DoBinaryArithFallbackFn>(
        DoBinaryArithFallback, "DoBinaryArithFallback", TailCall, PopValues(2));
825 
// Emit the binary-arith fallback path: tail-call DoBinaryArithFallback with
// both operand Values, the stub pointer, and the stub frame payload.
bool ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  // The VM call's result comes back in R0.
  MOZ_ASSERT(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  // These two Values are the ones PopValues(2) removes on return.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  return tailCallVM(DoBinaryArithFallbackInfo, masm);
}
844 
DoConcatStrings(JSContext * cx,HandleString lhs,HandleString rhs,MutableHandleValue res)845 static bool DoConcatStrings(JSContext* cx, HandleString lhs, HandleString rhs,
846                             MutableHandleValue res) {
847   JSString* result = ConcatStrings<CanGC>(cx, lhs, rhs);
848   if (!result) return false;
849 
850   res.setString(result);
851   return true;
852 }
853 
// Tail-call glue for DoConcatStrings (no extra Values to pop).
typedef bool (*DoConcatStringsFn)(JSContext*, HandleString, HandleString,
                                  MutableHandleValue);
static const VMFunction DoConcatStringsInfo = FunctionInfo<DoConcatStringsFn>(
    DoConcatStrings, "DoConcatStrings", TailCall);
858 
// Optimized stub for string + string: guard both operands are strings, then
// tail-call DoConcatStrings with the unboxed string pointers.
bool ICBinaryArith_StringConcat::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  masm.branchTestString(Assembler::NotEqual, R0, &failure);
  masm.branchTestString(Assembler::NotEqual, R1, &failure);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  masm.unboxString(R0, R0.scratchReg());
  masm.unboxString(R1, R1.scratchReg());

  // Push arguments: rhs then lhs.
  masm.push(R1.scratchReg());
  masm.push(R0.scratchReg());
  if (!tailCallVM(DoConcatStringsInfo, masm)) return false;

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
880 
ConvertObjectToStringForConcat(JSContext * cx,HandleValue obj)881 static JSString* ConvertObjectToStringForConcat(JSContext* cx,
882                                                 HandleValue obj) {
883   MOZ_ASSERT(obj.isObject());
884   RootedValue rootedObj(cx, obj);
885   if (!ToPrimitive(cx, &rootedObj)) return nullptr;
886   return ToString<CanGC>(cx, rootedObj);
887 }
888 
DoConcatStringObject(JSContext * cx,bool lhsIsString,HandleValue lhs,HandleValue rhs,MutableHandleValue res)889 static bool DoConcatStringObject(JSContext* cx, bool lhsIsString,
890                                  HandleValue lhs, HandleValue rhs,
891                                  MutableHandleValue res) {
892   JSString* lstr = nullptr;
893   JSString* rstr = nullptr;
894   if (lhsIsString) {
895     // Convert rhs first.
896     MOZ_ASSERT(lhs.isString() && rhs.isObject());
897     rstr = ConvertObjectToStringForConcat(cx, rhs);
898     if (!rstr) return false;
899 
900     // lhs is already string.
901     lstr = lhs.toString();
902   } else {
903     MOZ_ASSERT(rhs.isString() && lhs.isObject());
904     // Convert lhs first.
905     lstr = ConvertObjectToStringForConcat(cx, lhs);
906     if (!lstr) return false;
907 
908     // rhs is already string.
909     rstr = rhs.toString();
910   }
911 
912   JSString* str = ConcatStrings<NoGC>(cx, lstr, rstr);
913   if (!str) {
914     RootedString nlstr(cx, lstr), nrstr(cx, rstr);
915     str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
916     if (!str) return false;
917   }
918 
919   // Technically, we need to call TypeScript::MonitorString for this PC, however
920   // it was called when this stub was attached so it's OK.
921 
922   res.setString(str);
923   return true;
924 }
925 
// VM-call glue for DoConcatStringObject. PopValues(2) discards the two Values
// the stub pushes to sync the stack for the decompiler.
typedef bool (*DoConcatStringObjectFn)(JSContext*, bool lhsIsString,
                                       HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoConcatStringObjectInfo =
    FunctionInfo<DoConcatStringObjectFn>(
        DoConcatStringObject, "DoConcatStringObject", TailCall, PopValues(2));
932 
// Optimized stub for string + object: guard one operand is a string and the
// other an object (which side is which is baked into the stub via
// lhsIsString_), then tail-call DoConcatStringObject.
bool ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  if (lhsIsString_) {
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
  } else {
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);
  }

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Sync for the decompiler. These two Values match PopValues(2) above.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(Imm32(lhsIsString_));
  if (!tailCallVM(DoConcatStringObjectInfo, masm)) return false;

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
962 
// Optimized stub for double arithmetic: load both operands as doubles,
// perform the operation into FloatReg0, and box the result. JSOP_MOD is
// delegated to NumberMod via an ABI call.
bool ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler& masm) {
  Label failure;
  // Load the operands as doubles, branching to |failure| on guard failure.
  masm.ensureDouble(R0, FloatReg0, &failure);
  masm.ensureDouble(R1, FloatReg1, &failure);

  switch (op) {
    case JSOP_ADD:
      masm.addDouble(FloatReg1, FloatReg0);
      break;
    case JSOP_SUB:
      masm.subDouble(FloatReg1, FloatReg0);
      break;
    case JSOP_MUL:
      masm.mulDouble(FloatReg1, FloatReg0);
      break;
    case JSOP_DIV:
      masm.divDouble(FloatReg1, FloatReg0);
      break;
    case JSOP_MOD:
      masm.setupUnalignedABICall(R0.scratchReg());
      masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
      masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
      masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NumberMod), MoveOp::DOUBLE);
      // NumberMod's result must land in FloatReg0 for the common box below.
      MOZ_ASSERT(ReturnDoubleReg == FloatReg0);
      break;
    default:
      MOZ_CRASH("Unexpected op");
  }

  masm.boxDouble(FloatReg0, R0, FloatReg0);
  EmitReturnFromIC(masm);

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
1000 
// Optimized stub for Boolean <op> Int32 (either side may be the boolean).
// Booleans are widened to int32 payloads and the op is performed on int32s.
// ADD/SUB detect overflow, undo the clobber of lhsReg, and fall through to
// the failure path so the next stub sees the original operands.
bool ICBinaryArith_BooleanWithInt32::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label failure;
  if (lhsIsBool_)
    masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
  else
    masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

  if (rhsIsBool_)
    masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
  else
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

  Register lhsReg = lhsIsBool_ ? masm.extractBoolean(R0, ExtractTemp0)
                               : masm.extractInt32(R0, ExtractTemp0);
  Register rhsReg = rhsIsBool_ ? masm.extractBoolean(R1, ExtractTemp1)
                               : masm.extractInt32(R1, ExtractTemp1);

  MOZ_ASSERT(op_ == JSOP_ADD || op_ == JSOP_SUB || op_ == JSOP_BITOR ||
             op_ == JSOP_BITXOR || op_ == JSOP_BITAND);

  switch (op_) {
    case JSOP_ADD: {
      Label fixOverflow;

      masm.branchAdd32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
      masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
      EmitReturnFromIC(masm);

      // Overflowed: undo the add so lhsReg holds its original value again.
      masm.bind(&fixOverflow);
      masm.sub32(rhsReg, lhsReg);
      // Proceed to failure below.
      break;
    }
    case JSOP_SUB: {
      Label fixOverflow;

      masm.branchSub32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
      masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
      EmitReturnFromIC(masm);

      // Overflowed: undo the sub so lhsReg holds its original value again.
      masm.bind(&fixOverflow);
      masm.add32(rhsReg, lhsReg);
      // Proceed to failure below.
      break;
    }
    case JSOP_BITOR: {
      masm.or32(rhsReg, lhsReg);
      masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
      EmitReturnFromIC(masm);
      break;
    }
    case JSOP_BITXOR: {
      masm.xor32(rhsReg, lhsReg);
      masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
      EmitReturnFromIC(masm);
      break;
    }
    case JSOP_BITAND: {
      masm.and32(rhsReg, lhsReg);
      masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
      EmitReturnFromIC(masm);
      break;
    }
    default:
      MOZ_CRASH("Unhandled op for BinaryArith_BooleanWithInt32.");
  }

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
1074 
// Optimized stub for bitwise ops mixing a double and an int32 operand where
// the result is int32: truncate the double (inline, or via JS::ToInt32 ABI
// call for the hard cases), then apply the bitwise op on int32s.
bool ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(
    MacroAssembler& masm) {
  MOZ_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR);

  Label failure;
  Register intReg;
  Register scratchReg;
  if (lhsIsDouble_) {
    masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    intReg = masm.extractInt32(R1, ExtractTemp0);
    masm.unboxDouble(R0, FloatReg0);
    scratchReg = R0.scratchReg();
  } else {
    masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
    masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
    intReg = masm.extractInt32(R0, ExtractTemp0);
    masm.unboxDouble(R1, FloatReg0);
    scratchReg = R1.scratchReg();
  }

  // Truncate the double to an int32.
  {
    Label doneTruncate;
    Label truncateABICall;
    masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratchReg,
                                            &truncateABICall);
    masm.jump(&doneTruncate);

    // Slow path: the inline truncation failed; call JS::ToInt32. intReg is
    // preserved across the call via push/pop.
    masm.bind(&truncateABICall);
    masm.push(intReg);
    masm.setupUnalignedABICall(scratchReg);
    masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
    masm.callWithABI(
        mozilla::BitwiseCast<void*, int32_t (*)(double)>(JS::ToInt32),
        MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckOther);
    masm.storeCallInt32Result(scratchReg);
    masm.pop(intReg);

    masm.bind(&doneTruncate);
  }

  Register intReg2 = scratchReg;
  // All handled ops commute, so no need to worry about ordering.
  switch (op) {
    case JSOP_BITOR:
      masm.or32(intReg, intReg2);
      break;
    case JSOP_BITXOR:
      masm.xor32(intReg, intReg2);
      break;
    case JSOP_BITAND:
      masm.and32(intReg, intReg2);
      break;
    default:
      MOZ_CRASH("Unhandled op for BinaryArith_DoubleWithInt32.");
  }
  masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0);
  EmitReturnFromIC(masm);

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
1140 
1141 //
1142 // UnaryArith_Fallback
1143 //
1144 
DoUnaryArithFallback(JSContext * cx,void * payload,ICUnaryArith_Fallback * stub_,HandleValue val,MutableHandleValue res)1145 static bool DoUnaryArithFallback(JSContext* cx, void* payload,
1146                                  ICUnaryArith_Fallback* stub_, HandleValue val,
1147                                  MutableHandleValue res) {
1148   SharedStubInfo info(cx, payload, stub_->icEntry());
1149   ICStubCompiler::Engine engine = info.engine();
1150 
1151   // This fallback stub may trigger debug mode toggling.
1152   DebugModeOSRVolatileStub<ICUnaryArith_Fallback*> stub(
1153       engine, info.maybeFrame(), stub_);
1154 
1155   jsbytecode* pc = info.pc();
1156   JSOp op = JSOp(*pc);
1157   FallbackICSpew(cx, stub, "UnaryArith(%s)", CodeName[op]);
1158 
1159   switch (op) {
1160     case JSOP_BITNOT: {
1161       int32_t result;
1162       if (!BitNot(cx, val, &result)) return false;
1163       res.setInt32(result);
1164       break;
1165     }
1166     case JSOP_NEG:
1167       if (!NegOperation(cx, val, res)) return false;
1168       break;
1169     default:
1170       MOZ_CRASH("Unexpected op");
1171   }
1172 
1173   // Check if debug mode toggling made the stub invalid.
1174   if (stub.invalid()) return true;
1175 
1176   if (res.isDouble()) stub->setSawDoubleResult();
1177 
1178   if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
1179     // TODO: Discard/replace stubs.
1180     return true;
1181   }
1182 
1183   if (val.isInt32() && res.isInt32()) {
1184     JitSpew(JitSpew_BaselineIC, "  Generating %s(Int32 => Int32) stub",
1185             CodeName[op]);
1186     ICUnaryArith_Int32::Compiler compiler(cx, op, engine);
1187     ICStub* int32Stub =
1188         compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
1189     if (!int32Stub) return false;
1190     stub->addNewStub(int32Stub);
1191     return true;
1192   }
1193 
1194   if (val.isNumber() && res.isNumber() &&
1195       cx->runtime()->jitSupportsFloatingPoint) {
1196     JitSpew(JitSpew_BaselineIC, "  Generating %s(Number => Number) stub",
1197             CodeName[op]);
1198 
1199     // Unlink int32 stubs, the double stub handles both cases and TI specializes
1200     // for both.
1201     stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);
1202 
1203     ICUnaryArith_Double::Compiler compiler(cx, op, engine);
1204     ICStub* doubleStub =
1205         compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
1206     if (!doubleStub) return false;
1207     stub->addNewStub(doubleStub);
1208     return true;
1209   }
1210 
1211   return true;
1212 }
1213 
// VM-call glue for DoUnaryArithFallback. PopValues(1) discards the Value the
// fallback stub pushes to sync the stack for the expression decompiler.
typedef bool (*DoUnaryArithFallbackFn)(JSContext*, void*,
                                       ICUnaryArith_Fallback*, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoUnaryArithFallbackInfo =
    FunctionInfo<DoUnaryArithFallbackFn>(
        DoUnaryArithFallback, "DoUnaryArithFallback", TailCall, PopValues(1));
1220 
// Emit the unary-arith fallback path: tail-call DoUnaryArithFallback with the
// operand Value, the stub pointer, and the stub frame payload.
bool ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  // The VM call's result comes back in R0.
  MOZ_ASSERT(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  // This Value is the one PopValues(1) removes on return.
  masm.pushValue(R0);

  // Push arguments.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());

  return tailCallVM(DoUnaryArithFallbackInfo, masm);
}
1237 
// Optimized stub for unary ops on doubles. JSOP_NEG negates in place and
// returns a double; JSOP_BITNOT truncates the double to an int32 (inline, or
// via a JS::ToInt32 ABI call), applies not32, and returns an int32.
bool ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler& masm) {
  Label failure;
  masm.ensureDouble(R0, FloatReg0, &failure);

  MOZ_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT);

  if (op == JSOP_NEG) {
    masm.negateDouble(FloatReg0);
    masm.boxDouble(FloatReg0, R0, FloatReg0);
  } else {
    // Truncate the double to an int32.
    Register scratchReg = R1.scratchReg();

    Label doneTruncate;
    Label truncateABICall;
    masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratchReg,
                                            &truncateABICall);
    masm.jump(&doneTruncate);

    // Slow path: the inline truncation failed; call JS::ToInt32.
    masm.bind(&truncateABICall);
    masm.setupUnalignedABICall(scratchReg);
    masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
    masm.callWithABI(BitwiseCast<void*, int32_t (*)(double)>(JS::ToInt32),
                     MoveOp::GENERAL, CheckUnsafeCallWithABI::DontCheckOther);
    masm.storeCallInt32Result(scratchReg);

    masm.bind(&doneTruncate);
    masm.not32(scratchReg);
    masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
  }

  EmitReturnFromIC(masm);

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
1276 
1277 //
1278 // Compare_Fallback
1279 //
1280 
// Fallback path for the compare ops: perform the comparison in the VM, then
// try to attach an optimized stub. In Baseline, a CacheIR stub is attempted
// first; otherwise (or if CacheIR declines) a cascade of type-specialized
// shared stubs is tried, ordered from most to least common operand pairs.
static bool DoCompareFallback(JSContext* cx, void* payload,
                              ICCompare_Fallback* stub_, HandleValue lhs,
                              HandleValue rhs, MutableHandleValue ret) {
  SharedStubInfo info(cx, payload, stub_->icEntry());
  ICStubCompiler::Engine engine = info.engine();

  // This fallback stub may trigger debug mode toggling.
  DebugModeOSRVolatileStub<ICCompare_Fallback*> stub(engine, info.maybeFrame(),
                                                     stub_);

  jsbytecode* pc = info.pc();
  JSOp op = JSOp(*pc);

  FallbackICSpew(cx, stub, "Compare(%s)", CodeName[op]);

  // Case operations in a CONDSWITCH are performing strict equality.
  if (op == JSOP_CASE) op = JSOP_STRICTEQ;

  // Don't pass lhs/rhs directly, we need the original values when
  // generating stubs.
  RootedValue lhsCopy(cx, lhs);
  RootedValue rhsCopy(cx, rhs);

  // Perform the compare operation.
  bool out;
  switch (op) {
    case JSOP_LT:
      if (!LessThan(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_LE:
      if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_GT:
      if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_GE:
      if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_EQ:
      if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_NE:
      if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_STRICTEQ:
      if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    case JSOP_STRICTNE:
      if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out)) return false;
      break;
    default:
      MOZ_ASSERT_UNREACHABLE("Unhandled baseline compare op");
      return false;
  }

  ret.setBoolean(out);

  // Check if debug mode toggling made the stub invalid.
  if (stub.invalid()) return true;

  // Check to see if a new stub should be generated.
  if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
    // TODO: Discard all stubs in this IC and replace with inert megamorphic
    // stub. But for now we just bail.
    return true;
  }

  // In Baseline, prefer a CacheIR-generated stub. Note: if the generator
  // runs at all we return early, even when attaching failed.
  if (engine == ICStubEngine::Baseline) {
    RootedScript script(cx, info.outerScript(cx));
    CompareIRGenerator gen(cx, script, pc, stub->state().mode(), op, lhs, rhs);
    bool attached = false;
    if (gen.tryAttachStub()) {
      ICStub* newStub = AttachBaselineCacheIRStub(
          cx, gen.writerRef(), gen.cacheKind(),
          BaselineCacheIRStubKind::Regular, engine, script, stub, &attached);
      if (newStub) JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
      return true;
    }
  }

  // Try to generate new stubs.
  if (lhs.isInt32() && rhs.isInt32()) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(Int32, Int32) stub",
            CodeName[op]);
    ICCompare_Int32::Compiler compiler(cx, op, engine);
    ICStub* int32Stub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!int32Stub) return false;

    stub->addNewStub(int32Stub);
    return true;
  }

  // Without FP support none of the number-based stubs below can be used.
  if (!cx->runtime()->jitSupportsFloatingPoint &&
      (lhs.isNumber() || rhs.isNumber()))
    return true;

  if (lhs.isNumber() && rhs.isNumber()) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(Number, Number) stub",
            CodeName[op]);

    // Unlink int32 stubs, it's faster to always use the double stub.
    stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);

    ICCompare_Double::Compiler compiler(cx, op, engine);
    ICStub* doubleStub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!doubleStub) return false;

    stub->addNewStub(doubleStub);
    return true;
  }

  if ((lhs.isNumber() && rhs.isUndefined()) ||
      (lhs.isUndefined() && rhs.isNumber())) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
            rhs.isUndefined() ? "Number" : "Undefined",
            rhs.isUndefined() ? "Undefined" : "Number");
    ICCompare_NumberWithUndefined::Compiler compiler(cx, op, engine,
                                                     lhs.isUndefined());
    ICStub* doubleStub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!doubleStub) return false;

    stub->addNewStub(doubleStub);
    return true;
  }

  if (lhs.isBoolean() && rhs.isBoolean()) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(Boolean, Boolean) stub",
            CodeName[op]);
    ICCompare_Boolean::Compiler compiler(cx, op, engine);
    ICStub* booleanStub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!booleanStub) return false;

    stub->addNewStub(booleanStub);
    return true;
  }

  if ((lhs.isBoolean() && rhs.isInt32()) ||
      (lhs.isInt32() && rhs.isBoolean())) {
    JitSpew(JitSpew_BaselineIC, "  Generating %s(%s, %s) stub", CodeName[op],
            rhs.isInt32() ? "Boolean" : "Int32",
            rhs.isInt32() ? "Int32" : "Boolean");
    ICCompare_Int32WithBoolean::Compiler compiler(cx, op, engine,
                                                  lhs.isInt32());
    ICStub* optStub =
        compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
    if (!optStub) return false;

    stub->addNewStub(optStub);
    return true;
  }

  // The remaining stubs only make sense for equality comparisons.
  if (IsEqualityOp(op)) {
    if (lhs.isString() && rhs.isString() &&
        !stub->hasStub(ICStub::Compare_String)) {
      JitSpew(JitSpew_BaselineIC, "  Generating %s(String, String) stub",
              CodeName[op]);
      ICCompare_String::Compiler compiler(cx, op, engine);
      ICStub* stringStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!stringStub) return false;

      stub->addNewStub(stringStub);
      return true;
    }

    if (lhs.isSymbol() && rhs.isSymbol() &&
        !stub->hasStub(ICStub::Compare_Symbol)) {
      JitSpew(JitSpew_BaselineIC, "  Generating %s(Symbol, Symbol) stub",
              CodeName[op]);
      ICCompare_Symbol::Compiler compiler(cx, op, engine);
      ICStub* symbolStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!symbolStub) return false;

      stub->addNewStub(symbolStub);
      return true;
    }

    if (lhs.isObject() && rhs.isObject()) {
      MOZ_ASSERT(!stub->hasStub(ICStub::Compare_Object));
      JitSpew(JitSpew_BaselineIC, "  Generating %s(Object, Object) stub",
              CodeName[op]);
      ICCompare_Object::Compiler compiler(cx, op, engine);
      ICStub* objectStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!objectStub) return false;

      stub->addNewStub(objectStub);
      return true;
    }

    if (lhs.isNullOrUndefined() || rhs.isNullOrUndefined()) {
      JitSpew(JitSpew_BaselineIC,
              "  Generating %s(Null/Undef or X, Null/Undef or X) stub",
              CodeName[op]);
      // NOTE(review): despite the name, this is true when lhs is null OR
      // undefined — the stub compiler only needs to know which side holds
      // the null/undefined operand.
      bool lhsIsUndefined = lhs.isNullOrUndefined();
      bool compareWithNull = lhs.isNull() || rhs.isNull();
      ICCompare_ObjectWithUndefined::Compiler compiler(
          cx, op, engine, lhsIsUndefined, compareWithNull);
      ICStub* objectStub =
          compiler.getStub(compiler.getStubSpace(info.outerScript(cx)));
      if (!objectStub) return false;

      stub->addNewStub(objectStub);
      return true;
    }
  }

  // No stub matched the operand types; remember that for later decisions.
  stub->noteUnoptimizableAccess();

  return true;
}
1497 
// VM-call glue for DoCompareFallback. PopValues(2) discards the two Values
// the fallback stub pushes to sync the stack for the expression decompiler.
typedef bool (*DoCompareFallbackFn)(JSContext*, void*, ICCompare_Fallback*,
                                    HandleValue, HandleValue,
                                    MutableHandleValue);
static const VMFunction DoCompareFallbackInfo =
    FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, "DoCompareFallback",
                                      TailCall, PopValues(2));
1504 
// Emit the compare fallback path: tail-call DoCompareFallback with both
// operand Values, the stub pointer, and the stub frame payload.
bool ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  // The VM call's result comes back in R0.
  MOZ_ASSERT(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Ensure stack is fully synced for the expression decompiler.
  // These two Values are the ones PopValues(2) removes on return.
  masm.pushValue(R0);
  masm.pushValue(R1);

  // Push arguments.
  masm.pushValue(R1);
  masm.pushValue(R0);
  masm.push(ICStubReg);
  pushStubPayload(masm, R0.scratchReg());
  return tailCallVM(DoCompareFallbackInfo, masm);
}
1522 
1523 //
1524 // Compare_String
1525 //
1526 
generateStubCode(MacroAssembler & masm)1527 bool ICCompare_String::Compiler::generateStubCode(MacroAssembler& masm) {
1528   Label failure;
1529   masm.branchTestString(Assembler::NotEqual, R0, &failure);
1530   masm.branchTestString(Assembler::NotEqual, R1, &failure);
1531 
1532   MOZ_ASSERT(IsEqualityOp(op));
1533 
1534   Register left = masm.extractString(R0, ExtractTemp0);
1535   Register right = masm.extractString(R1, ExtractTemp1);
1536 
1537   AllocatableGeneralRegisterSet regs(availableGeneralRegs(2));
1538   Register scratchReg = regs.takeAny();
1539 
1540   masm.compareStrings(op, left, right, scratchReg, &failure);
1541   masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg, R0);
1542   EmitReturnFromIC(masm);
1543 
1544   masm.bind(&failure);
1545   EmitStubGuardFailure(masm);
1546   return true;
1547 }
1548 
1549 //
1550 // Compare_Symbol
1551 //
1552 
generateStubCode(MacroAssembler & masm)1553 bool ICCompare_Symbol::Compiler::generateStubCode(MacroAssembler& masm) {
1554   Label failure;
1555   masm.branchTestSymbol(Assembler::NotEqual, R0, &failure);
1556   masm.branchTestSymbol(Assembler::NotEqual, R1, &failure);
1557 
1558   MOZ_ASSERT(IsEqualityOp(op));
1559 
1560   Register left = masm.extractSymbol(R0, ExtractTemp0);
1561   Register right = masm.extractSymbol(R1, ExtractTemp1);
1562 
1563   Label ifTrue;
1564   masm.branchPtr(JSOpToCondition(op, /* signed = */ true), left, right,
1565                  &ifTrue);
1566 
1567   masm.moveValue(BooleanValue(false), R0);
1568   EmitReturnFromIC(masm);
1569 
1570   masm.bind(&ifTrue);
1571   masm.moveValue(BooleanValue(true), R0);
1572   EmitReturnFromIC(masm);
1573 
1574   // Failure case - jump to next stub
1575   masm.bind(&failure);
1576   EmitStubGuardFailure(masm);
1577   return true;
1578 }
1579 
1580 //
1581 // Compare_Boolean
1582 //
1583 
generateStubCode(MacroAssembler & masm)1584 bool ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler& masm) {
1585   Label failure;
1586   masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
1587   masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
1588 
1589   Register left = masm.extractInt32(R0, ExtractTemp0);
1590   Register right = masm.extractInt32(R1, ExtractTemp1);
1591 
1592   // Compare payload regs of R0 and R1.
1593   Assembler::Condition cond = JSOpToCondition(op, /* signed = */ true);
1594   masm.cmp32Set(cond, left, right, left);
1595 
1596   // Box the result and return
1597   masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
1598   EmitReturnFromIC(masm);
1599 
1600   // Failure case - jump to next stub
1601   masm.bind(&failure);
1602   EmitStubGuardFailure(masm);
1603   return true;
1604 }
1605 
1606 //
1607 // Compare_NumberWithUndefined
1608 //
1609 
generateStubCode(MacroAssembler & masm)1610 bool ICCompare_NumberWithUndefined::Compiler::generateStubCode(
1611     MacroAssembler& masm) {
1612   ValueOperand numberOperand, undefinedOperand;
1613   if (lhsIsUndefined) {
1614     numberOperand = R1;
1615     undefinedOperand = R0;
1616   } else {
1617     numberOperand = R0;
1618     undefinedOperand = R1;
1619   }
1620 
1621   Label failure;
1622   masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
1623   masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);
1624 
1625   // Comparing a number with undefined will always be true for NE/STRICTNE,
1626   // and always be false for other compare ops.
1627   masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);
1628 
1629   EmitReturnFromIC(masm);
1630 
1631   // Failure case - jump to next stub
1632   masm.bind(&failure);
1633   EmitStubGuardFailure(masm);
1634   return true;
1635 }
1636 
1637 //
1638 // Compare_Object
1639 //
1640 
generateStubCode(MacroAssembler & masm)1641 bool ICCompare_Object::Compiler::generateStubCode(MacroAssembler& masm) {
1642   Label failure;
1643   masm.branchTestObject(Assembler::NotEqual, R0, &failure);
1644   masm.branchTestObject(Assembler::NotEqual, R1, &failure);
1645 
1646   MOZ_ASSERT(IsEqualityOp(op));
1647 
1648   Register left = masm.extractObject(R0, ExtractTemp0);
1649   Register right = masm.extractObject(R1, ExtractTemp1);
1650 
1651   Label ifTrue;
1652   masm.branchPtr(JSOpToCondition(op, /* signed = */ true), left, right,
1653                  &ifTrue);
1654 
1655   masm.moveValue(BooleanValue(false), R0);
1656   EmitReturnFromIC(masm);
1657 
1658   masm.bind(&ifTrue);
1659   masm.moveValue(BooleanValue(true), R0);
1660   EmitReturnFromIC(masm);
1661 
1662   // Failure case - jump to next stub
1663   masm.bind(&failure);
1664   EmitStubGuardFailure(masm);
1665   return true;
1666 }
1667 
1668 //
1669 // Compare_ObjectWithUndefined
1670 //
1671 
// Optimized stub for comparing an object against undefined (or null, when
// |compareWithNull| is set) with ==, !=, ===, or !==. Also handles the
// null/undefined vs null/undefined cases once the constant-side guard has
// passed.
bool ICCompare_ObjectWithUndefined::Compiler::generateStubCode(
    MacroAssembler& masm) {
  MOZ_ASSERT(IsEqualityOp(op));

  // Normalize operand order: |undefinedOperand| is the side this stub was
  // specialized to treat as the null/undefined constant, |objectOperand|
  // is the (possibly-)object side.
  ValueOperand objectOperand, undefinedOperand;
  if (lhsIsUndefined) {
    objectOperand = R1;
    undefinedOperand = R0;
  } else {
    objectOperand = R0;
    undefinedOperand = R1;
  }

  // Guard that the constant side really is null/undefined; otherwise fall
  // through to the next stub.
  Label failure;
  if (compareWithNull)
    masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
  else
    masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

  Label notObject;
  masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);

  if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
    // obj !== undefined for all objects.
    masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
    EmitReturnFromIC(masm);
  } else {
    // obj != undefined only where !obj->getClass()->emulatesUndefined()
    Register obj = masm.extractObject(objectOperand, ExtractTemp0);

    // We need a scratch register.
    masm.push(obj);
    Label slow, emulatesUndefined;
    masm.branchIfObjectEmulatesUndefined(obj, obj, &slow, &emulatesUndefined);

    // Ordinary object: loosely equal to undefined/null only under !=.
    masm.pop(obj);
    masm.moveValue(BooleanValue(op == JSOP_NE), R0);
    EmitReturnFromIC(masm);

    // Object that emulates undefined: loose equality with undefined/null
    // holds, so the answer is true exactly for ==.
    masm.bind(&emulatesUndefined);
    masm.pop(obj);
    masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
    EmitReturnFromIC(masm);

    // Slow path: can't decide in jitcode; restore the pushed register so
    // the stack is balanced, then defer to the next stub.
    masm.bind(&slow);
    masm.pop(obj);
    masm.jump(&failure);
  }

  masm.bind(&notObject);

  // Also support null == null or undefined == undefined comparisons.
  Label differentTypes;
  if (compareWithNull)
    masm.branchTestNull(Assembler::NotEqual, objectOperand, &differentTypes);
  else
    masm.branchTestUndefined(Assembler::NotEqual, objectOperand,
                             &differentTypes);

  // Both sides are the same null/undefined value: equal under == and ===.
  masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
  EmitReturnFromIC(masm);

  masm.bind(&differentTypes);
  // Also support null == undefined or undefined == null.
  Label neverEqual;
  if (compareWithNull)
    masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &neverEqual);
  else
    masm.branchTestNull(Assembler::NotEqual, objectOperand, &neverEqual);

  // Mixed null/undefined: loosely equal (==) but strictly unequal (!==).
  masm.moveValue(BooleanValue(op == JSOP_EQ || op == JSOP_STRICTNE), R0);
  EmitReturnFromIC(masm);

  // null/undefined can only be equal to null/undefined or emulatesUndefined.
  masm.bind(&neverEqual);
  masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);
  EmitReturnFromIC(masm);

  // Failure case - jump to next stub
  masm.bind(&failure);
  EmitStubGuardFailure(masm);
  return true;
}
1755 
1756 //
1757 // Compare_Int32WithBoolean
1758 //
1759 
generateStubCode(MacroAssembler & masm)1760 bool ICCompare_Int32WithBoolean::Compiler::generateStubCode(
1761     MacroAssembler& masm) {
1762   Label failure;
1763   ValueOperand int32Val;
1764   ValueOperand boolVal;
1765   if (lhsIsInt32_) {
1766     int32Val = R0;
1767     boolVal = R1;
1768   } else {
1769     boolVal = R0;
1770     int32Val = R1;
1771   }
1772   masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
1773   masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);
1774 
1775   if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
1776     // Ints and booleans are never strictly equal, always strictly not equal.
1777     masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
1778     EmitReturnFromIC(masm);
1779   } else {
1780     Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
1781     Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);
1782 
1783     // Compare payload regs of R0 and R1.
1784     Assembler::Condition cond = JSOpToCondition(op_, /* signed = */ true);
1785     masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
1786                   (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());
1787 
1788     // Box the result and return
1789     masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
1790     EmitReturnFromIC(masm);
1791   }
1792 
1793   // Failure case - jump to next stub
1794   masm.bind(&failure);
1795   EmitStubGuardFailure(masm);
1796   return true;
1797 }
1798 
1799 //
1800 // GetProp_Fallback
1801 //
1802 
StripPreliminaryObjectStubs(JSContext * cx,ICFallbackStub * stub)1803 void StripPreliminaryObjectStubs(JSContext* cx, ICFallbackStub* stub) {
1804   // Before the new script properties analysis has been performed on a type,
1805   // all instances of that type have the maximum number of fixed slots.
1806   // Afterwards, the objects (even the preliminary ones) might be changed
1807   // to reduce the number of fixed slots they have. If we generate stubs for
1808   // both the old and new number of fixed slots, the stub will look
1809   // polymorphic to IonBuilder when it is actually monomorphic. To avoid
1810   // this, strip out any stubs for preliminary objects before attaching a new
1811   // stub which isn't on a preliminary object.
1812 
1813   for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
1814     if (iter->isCacheIR_Regular() &&
1815         iter->toCacheIR_Regular()->hasPreliminaryObject())
1816       iter.unlink(cx);
1817     else if (iter->isCacheIR_Monitored() &&
1818              iter->toCacheIR_Monitored()->hasPreliminaryObject())
1819       iter.unlink(cx);
1820     else if (iter->isCacheIR_Updated() &&
1821              iter->toCacheIR_Updated()->hasPreliminaryObject())
1822       iter.unlink(cx);
1823   }
1824 }
1825 
ComputeGetPropResult(JSContext * cx,BaselineFrame * frame,JSOp op,HandlePropertyName name,MutableHandleValue val,MutableHandleValue res)1826 static bool ComputeGetPropResult(JSContext* cx, BaselineFrame* frame, JSOp op,
1827                                  HandlePropertyName name,
1828                                  MutableHandleValue val,
1829                                  MutableHandleValue res) {
1830   // Handle arguments.length and arguments.callee on optimized arguments, as
1831   // it is not an object.
1832   if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && IsOptimizedArguments(frame, val)) {
1833     if (op == JSOP_LENGTH) {
1834       res.setInt32(frame->numActualArgs());
1835     } else {
1836       MOZ_ASSERT(name == cx->names().callee);
1837       MOZ_ASSERT(frame->script()->hasMappedArgsObj());
1838       res.setObject(*frame->callee());
1839     }
1840   } else {
1841     if (op == JSOP_GETBOUNDNAME) {
1842       RootedObject env(cx, &val.toObject());
1843       RootedId id(cx, NameToId(name));
1844       if (!GetNameBoundInEnvironment(cx, env, id, res)) return false;
1845     } else {
1846       MOZ_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP ||
1847                  op == JSOP_LENGTH);
1848       if (!GetProperty(cx, val, name, res)) return false;
1849     }
1850   }
1851 
1852   return true;
1853 }
1854 
// Fallback handler for JSOP_GETPROP/CALLPROP/LENGTH/GETBOUNDNAME ICs: try to
// attach an optimized CacheIR stub, then perform the actual get and monitor
// the result type. Returns false on exception/OOM.
static bool DoGetPropFallback(JSContext* cx, BaselineFrame* frame,
                              ICGetProp_Fallback* stub_, MutableHandleValue val,
                              MutableHandleValue res) {
  // This fallback stub may trigger debug mode toggling.
  DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = stub_->icEntry()->pc(script);
  JSOp op = JSOp(*pc);
  FallbackICSpew(cx, stub, "GetProp(%s)", CodeName[op]);

  MOZ_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH ||
             op == JSOP_GETBOUNDNAME);

  RootedPropertyName name(cx, script->getName(pc));

  // There are some reasons we can fail to attach a stub that are temporary.
  // We want to avoid calling noteUnoptimizableAccess() if the reason we
  // failed to attach a stub is one of those temporary reasons, since we might
  // end up attaching a stub for the exact same access later.
  bool isTemporarilyUnoptimizable = false;

  // Possibly switch the IC state (e.g. to megamorphic), discarding stubs.
  if (stub->state().maybeTransition()) stub->discardStubs(cx);

  bool attached = false;
  if (stub->state().canAttachStub()) {
    RootedValue idVal(cx, StringValue(name));
    GetPropIRGenerator gen(cx, script, pc, CacheKind::GetProp,
                           stub->state().mode(), &isTemporarilyUnoptimizable,
                           val, idVal, val, GetPropertyResultFlags::All);
    if (gen.tryAttachStub()) {
      ICStub* newStub = AttachBaselineCacheIRStub(
          cx, gen.writerRef(), gen.cacheKind(),
          BaselineCacheIRStubKind::Monitored, ICStubEngine::Baseline, script,
          stub, &attached);
      if (newStub) {
        JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
        // Track (or strip) preliminary-object stubs so the IC doesn't look
        // spuriously polymorphic to IonBuilder later.
        if (gen.shouldNotePreliminaryObjectStub())
          newStub->toCacheIR_Monitored()->notePreliminaryObject();
        else if (gen.shouldUnlinkPreliminaryObjectStubs())
          StripPreliminaryObjectStubs(cx, stub);
      }
    }
    if (!attached && !isTemporarilyUnoptimizable)
      stub->state().trackNotAttached();
  }

  // Perform the get itself (this can run arbitrary JS, e.g. getters).
  if (!ComputeGetPropResult(cx, frame, op, name, val, res)) return false;

  StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
  TypeScript::Monitor(cx, script, pc, types, res);

  // Check if debug mode toggling made the stub invalid.
  if (stub.invalid()) return true;

  // Add a type monitor stub for the resulting value.
  if (!stub->addMonitorStubForValue(cx, frame, types, res)) return false;

  if (attached) return true;

  // Nothing was attached and the failure wasn't temporary: record the
  // access as unoptimizable.
  MOZ_ASSERT(!attached);
  if (!isTemporarilyUnoptimizable) stub->noteUnoptimizableAccess();

  return true;
}
1920 
// Fallback handler for JSOP_GETPROP_SUPER ICs. Like DoGetPropFallback, but
// |val| is the [[HomeObject]]'s prototype (the lookup start) and |receiver|
// is the |this| value used for getter invocation.
static bool DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame,
                                   ICGetProp_Fallback* stub_,
                                   HandleValue receiver, MutableHandleValue val,
                                   MutableHandleValue res) {
  // This fallback stub may trigger debug mode toggling.
  DebugModeOSRVolatileStub<ICGetProp_Fallback*> stub(frame, stub_);

  RootedScript script(cx, frame->script());
  jsbytecode* pc = stub_->icEntry()->pc(script);
  FallbackICSpew(cx, stub, "GetPropSuper(%s)", CodeName[JSOp(*pc)]);

  MOZ_ASSERT(JSOp(*pc) == JSOP_GETPROP_SUPER);

  RootedPropertyName name(cx, script->getName(pc));

  // There are some reasons we can fail to attach a stub that are temporary.
  // We want to avoid calling noteUnoptimizableAccess() if the reason we
  // failed to attach a stub is one of those temporary reasons, since we might
  // end up attaching a stub for the exact same access later.
  bool isTemporarilyUnoptimizable = false;

  // Possibly switch the IC state (e.g. to megamorphic), discarding stubs.
  if (stub->state().maybeTransition()) stub->discardStubs(cx);

  bool attached = false;
  if (stub->state().canAttachStub()) {
    RootedValue idVal(cx, StringValue(name));
    GetPropIRGenerator gen(cx, script, pc, CacheKind::GetPropSuper,
                           stub->state().mode(), &isTemporarilyUnoptimizable,
                           val, idVal, receiver, GetPropertyResultFlags::All);
    if (gen.tryAttachStub()) {
      ICStub* newStub = AttachBaselineCacheIRStub(
          cx, gen.writerRef(), gen.cacheKind(),
          BaselineCacheIRStubKind::Monitored, ICStubEngine::Baseline, script,
          stub, &attached);
      if (newStub) {
        JitSpew(JitSpew_BaselineIC, "  Attached CacheIR stub");
        // Track (or strip) preliminary-object stubs so the IC doesn't look
        // spuriously polymorphic to IonBuilder later.
        if (gen.shouldNotePreliminaryObjectStub())
          newStub->toCacheIR_Monitored()->notePreliminaryObject();
        else if (gen.shouldUnlinkPreliminaryObjectStubs())
          StripPreliminaryObjectStubs(cx, stub);
      }
    }
    if (!attached && !isTemporarilyUnoptimizable)
      stub->state().trackNotAttached();
  }

  // |val| is [[HomeObject]].[[Prototype]] which must be Object
  RootedObject valObj(cx, &val.toObject());
  if (!GetProperty(cx, valObj, receiver, name, res)) return false;

  StackTypeSet* types = TypeScript::BytecodeTypes(script, pc);
  TypeScript::Monitor(cx, script, pc, types, res);

  // Check if debug mode toggling made the stub invalid.
  if (stub.invalid()) return true;

  // Add a type monitor stub for the resulting value.
  if (!stub->addMonitorStubForValue(cx, frame, types, res)) return false;

  if (attached) return true;

  // Nothing was attached and the failure wasn't temporary: record the
  // access as unoptimizable.
  MOZ_ASSERT(!attached);
  if (!isTemporarilyUnoptimizable) stub->noteUnoptimizableAccess();

  return true;
}
1987 
typedef bool (*DoGetPropFallbackFn)(JSContext*, BaselineFrame*,
                                    ICGetProp_Fallback*, MutableHandleValue,
                                    MutableHandleValue);
// PopValues(1): the non-super stub pushes an extra copy of R0 to keep the
// stack synced for the expression decompiler; the tail call pops it again.
static const VMFunction DoGetPropFallbackInfo =
    FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, "DoGetPropFallback",
                                      TailCall, PopValues(1));

typedef bool (*DoGetPropSuperFallbackFn)(JSContext*, BaselineFrame*,
                                         ICGetProp_Fallback*, HandleValue,
                                         MutableHandleValue,
                                         MutableHandleValue);
// The super variant passes both pushed values (receiver and home-object
// prototype) as arguments, so no extra pop is needed.
static const VMFunction DoGetPropSuperFallbackInfo =
    FunctionInfo<DoGetPropSuperFallbackFn>(DoGetPropSuperFallback,
                                           "DoGetPropSuperFallback", TailCall);
2002 
// Emit the fallback stub: tail-call into the C++ handler, and lay down the
// bailout resume point used when Ion frames are rewritten to Baseline.
bool ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  MOZ_ASSERT(R0 == JSReturnOperand);

  EmitRestoreTailCallReg(masm);

  // Super property getters use a |this| that differs from base object
  if (hasReceiver_) {
    // Push arguments.
    masm.pushValue(R0);
    masm.pushValue(R1);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoGetPropSuperFallbackInfo, masm)) return false;
  } else {
    // Ensure stack is fully synced for the expression decompiler.
    // (This extra push is what DoGetPropFallbackInfo's PopValues(1) undoes.)
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(ICStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoGetPropFallbackInfo, masm)) return false;
  }

  // This is the resume point used when bailout rewrites call stack to undo
  // Ion inlined frames. The return address pushed onto reconstructed stack
  // will point here.
  assumeStubFrame();
  bailoutReturnOffset_.bind(masm.currentOffset());

  leaveStubFrame(masm, true);

  // When we get here, ICStubReg contains the ICGetProp_Fallback stub,
  // which we can't use to enter the TypeMonitor IC, because it's a
  // MonitoredFallbackStub instead of a MonitoredStub. So, we cheat. Note that
  // we must have a non-null fallbackMonitorStub here because InitFromBailout
  // delazifies.
  masm.loadPtr(Address(ICStubReg,
                       ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
               ICStubReg);
  EmitEnterTypeMonitorIC(masm,
                         ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

  return true;
}
2050 
postGenerateStubCode(MacroAssembler & masm,Handle<JitCode * > code)2051 void ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm,
2052                                                         Handle<JitCode*> code) {
2053   if (engine_ == Engine::Baseline) {
2054     BailoutReturnStub kind = hasReceiver_ ? BailoutReturnStub::GetPropSuper
2055                                           : BailoutReturnStub::GetProp;
2056     void* address = code->raw() + bailoutReturnOffset_.offset();
2057     cx->compartment()->jitCompartment()->initBailoutReturnAddr(address,
2058                                                                getKey(), kind);
2059   }
2060 }
2061 
LoadTypedThingData(MacroAssembler & masm,TypedThingLayout layout,Register obj,Register result)2062 void LoadTypedThingData(MacroAssembler& masm, TypedThingLayout layout,
2063                         Register obj, Register result) {
2064   switch (layout) {
2065     case Layout_TypedArray:
2066       masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), result);
2067       break;
2068     case Layout_OutlineTypedObject:
2069       masm.loadPtr(Address(obj, OutlineTypedObject::offsetOfData()), result);
2070       break;
2071     case Layout_InlineTypedObject:
2072       masm.computeEffectiveAddress(
2073           Address(obj, InlineTypedObject::offsetOfDataStart()), result);
2074       break;
2075     default:
2076       MOZ_CRASH();
2077   }
2078 }
2079 
noteAccessedGetter(uint32_t pcOffset)2080 void BaselineScript::noteAccessedGetter(uint32_t pcOffset) {
2081   ICEntry& entry = icEntryFromPCOffset(pcOffset);
2082   ICFallbackStub* stub = entry.fallbackStub();
2083 
2084   if (stub->isGetProp_Fallback())
2085     stub->toGetProp_Fallback()->noteAccessedGetter();
2086 }
2087 
2088 // TypeMonitor_Fallback
2089 //
2090 
// Ensure the optimized monitor-stub chain can handle |val|: depending on the
// value and the observed |types|, attach an AnyValue, PrimitiveSet,
// SingleObject or ObjectGroup stub (or extend an existing PrimitiveSet).
// Returns false only on OOM.
bool ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext* cx,
                                                    BaselineFrame* frame,
                                                    StackTypeSet* types,
                                                    HandleValue val) {
  MOZ_ASSERT(types);

  // Don't attach too many SingleObject/ObjectGroup stubs. If the value is a
  // primitive or if we will attach an any-object stub, we can handle this
  // with a single PrimitiveSet or AnyValue stub so we always optimize.
  if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS && val.isObject() &&
      !types->unknownObject()) {
    return true;
  }

  // Remember whether the chain was empty before we add anything, so we can
  // fix up main stubs' firstMonitorStub pointers at the end.
  bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
  MOZ_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

  if (types->unknown()) {
    // The TypeSet got marked as unknown so attach a stub that always
    // succeeds.

    // Check for existing TypeMonitor_AnyValue stubs.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_AnyValue()) return true;
    }

    // Discard existing stubs.
    resetMonitorStubChain(cx->zone());
    wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);

    ICTypeMonitor_AnyValue::Compiler compiler(cx);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeMonitor stub %p for any value",
            stub);
    addOptimizedMonitorStub(stub);

  } else if (val.isPrimitive() || types->unknownObject()) {
    if (val.isMagic(JS_UNINITIALIZED_LEXICAL)) return true;
    MOZ_ASSERT(!val.isMagic());
    JSValueType type =
        val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

    // Check for existing TypeMonitor stub.
    ICTypeMonitor_PrimitiveSet* existingStub = nullptr;
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_PrimitiveSet()) {
        existingStub = iter->toTypeMonitor_PrimitiveSet();
        if (existingStub->containsType(type)) return true;
      }
    }

    if (val.isObject()) {
      // Check for existing SingleObject/ObjectGroup stubs and discard
      // stubs if we find one. Ideally we would discard just these stubs,
      // but unlinking individual type monitor stubs is somewhat
      // complicated.
      MOZ_ASSERT(types->unknownObject());
      bool hasObjectStubs = false;
      for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd();
           iter++) {
        if (iter->isTypeMonitor_SingleObject() ||
            iter->isTypeMonitor_ObjectGroup()) {
          hasObjectStubs = true;
          break;
        }
      }
      if (hasObjectStubs) {
        resetMonitorStubChain(cx->zone());
        wasDetachedMonitorChain = (lastMonitorStubPtrAddr_ == nullptr);
        existingStub = nullptr;
      }
    }

    // Extend the existing PrimitiveSet stub with the new type, or create a
    // fresh one if none exists.
    ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
    ICStub* stub =
        existingStub ? compiler.updateStub()
                     : compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC,
            "  %s TypeMonitor stub %p for primitive type %d",
            existingStub ? "Modified existing" : "Created new", stub, type);

    if (!existingStub) {
      MOZ_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
      addOptimizedMonitorStub(stub);
    }

  } else if (val.toObject().isSingleton()) {
    RootedObject obj(cx, &val.toObject());

    // Check for existing TypeMonitor stub.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_SingleObject() &&
          iter->toTypeMonitor_SingleObject()->object() == obj) {
        return true;
      }
    }

    ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC, "  Added TypeMonitor stub %p for singleton %p",
            stub, obj.get());

    addOptimizedMonitorStub(stub);

  } else {
    // Non-singleton object: monitor by ObjectGroup.
    RootedObjectGroup group(cx, val.toObject().group());

    // Check for existing TypeMonitor stub.
    for (ICStubConstIterator iter(firstMonitorStub()); !iter.atEnd(); iter++) {
      if (iter->isTypeMonitor_ObjectGroup() &&
          iter->toTypeMonitor_ObjectGroup()->group() == group) {
        return true;
      }
    }

    ICTypeMonitor_ObjectGroup::Compiler compiler(cx, group);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(frame->script()));
    if (!stub) {
      ReportOutOfMemory(cx);
      return false;
    }

    JitSpew(JitSpew_BaselineIC,
            "  Added TypeMonitor stub %p for ObjectGroup %p", stub,
            group.get());

    addOptimizedMonitorStub(stub);
  }

  bool firstMonitorStubAdded =
      wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

  if (firstMonitorStubAdded) {
    // Was an empty monitor chain before, but a new stub was added.  This is the
    // only time that any main stubs' firstMonitorStub fields need to be updated
    // to refer to the newly added monitor stub.
    ICStub* firstStub = mainFallbackStub_->icEntry()->firstStub();
    for (ICStubConstIterator iter(firstStub); !iter.atEnd(); iter++) {
      // Non-monitored stubs are used if the result has always the same type,
      // e.g. a StringLength stub will always return int32.
      if (!iter->isMonitored()) continue;

      // Since we just added the first optimized monitoring stub, any
      // existing main stub's |firstMonitorStub| MUST be pointing to the
      // fallback monitor stub (i.e. this stub).
      MOZ_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
      iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
    }
  }

  return true;
}
2258 
// Fallback handler for type-monitor ICs: record |value|'s type in the
// appropriate TypeScript set (argument, |this|, or bytecode result) and
// attach an optimized monitor stub for it. |res| is always set to |value|.
static bool DoTypeMonitorFallback(JSContext* cx, BaselineFrame* frame,
                                  ICTypeMonitor_Fallback* stub,
                                  HandleValue value, MutableHandleValue res) {
  JSScript* script = frame->script();
  jsbytecode* pc = stub->icEntry()->pc(script);
  TypeFallbackICSpew(cx, stub, "TypeMonitor");

  // Copy input value to res.
  res.set(value);

  if (MOZ_UNLIKELY(value.isMagic())) {
    // It's possible that we arrived here from bailing out of Ion, and that
    // Ion proved that the value is dead and optimized out. In such cases,
    // do nothing. However, it's also possible that we have an uninitialized
    // this, in which case we should not look for other magic values.

    if (value.whyMagic() == JS_OPTIMIZED_OUT) {
      MOZ_ASSERT(!stub->monitorsThis());
      return true;
    }

    // In derived class constructors (including nested arrows/eval), the
    // |this| argument or GETALIASEDVAR can return the magic TDZ value.
    MOZ_ASSERT(value.isMagic(JS_UNINITIALIZED_LEXICAL));
    MOZ_ASSERT(frame->isFunctionFrame() || frame->isEvalFrame());
    MOZ_ASSERT(stub->monitorsThis() || *GetNextPc(pc) == JSOP_CHECKTHIS ||
               *GetNextPc(pc) == JSOP_CHECKTHISREINIT ||
               *GetNextPc(pc) == JSOP_CHECKRETURN);
    // For the TDZ magic value, record UnknownType rather than the magic
    // value itself.
    if (stub->monitorsThis())
      TypeScript::SetThis(cx, script, TypeSet::UnknownType());
    else
      TypeScript::Monitor(cx, script, pc, TypeSet::UnknownType());
    return true;
  }

  // Note: ideally we would merge this if-else statement with the one below,
  // but that triggers an MSVC 2015 compiler bug. See bug 1363054.
  StackTypeSet* types;
  uint32_t argument;
  if (stub->monitorsArgument(&argument))
    types = TypeScript::ArgTypes(script, argument);
  else if (stub->monitorsThis())
    types = TypeScript::ThisTypes(script);
  else
    types = TypeScript::BytecodeTypes(script, pc);

  if (stub->monitorsArgument(&argument)) {
    MOZ_ASSERT(pc == script->code());
    TypeScript::SetArgument(cx, script, argument, value);
  } else if (stub->monitorsThis()) {
    MOZ_ASSERT(pc == script->code());
    TypeScript::SetThis(cx, script, value);
  } else {
    TypeScript::Monitor(cx, script, pc, types, value);
  }

  // The Monitor/Set calls above can invalidate the stub (e.g. via GC or
  // debug-mode changes); don't touch its chain in that case.
  if (MOZ_UNLIKELY(stub->invalid())) return true;

  return stub->addMonitorStubForValue(cx, frame, types, value);
}
2319 
typedef bool (*DoTypeMonitorFallbackFn)(JSContext*, BaselineFrame*,
                                        ICTypeMonitor_Fallback*, HandleValue,
                                        MutableHandleValue);
// VMFunction wrapper allowing the fallback stub to tail-call the C++
// handler above.
static const VMFunction DoTypeMonitorFallbackInfo =
    FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback,
                                          "DoTypeMonitorFallback", TailCall);
2326 
// The fallback monitor stub simply tail-calls into DoTypeMonitorFallback.
bool ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  MOZ_ASSERT(R0 == JSReturnOperand);

  // Restore the tail call register.
  EmitRestoreTailCallReg(masm);

  // Push arguments: monitored value, stub pointer, baseline frame pointer.
  masm.pushValue(R0);
  masm.push(ICStubReg);
  masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

  return tailCallVM(DoTypeMonitorFallbackInfo, masm);
}
2339 
// Emit a guard chain testing R0 against every type in this stub's flag set;
// any match succeeds, otherwise fall through to the next stub.
bool ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(
    MacroAssembler& masm) {
  Label success;
  // When the set contains double, int32 values are covered by the Number
  // test below, so only emit a separate int32 test when double is absent.
  if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) &&
      !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
    masm.branchTestInt32(Assembler::Equal, R0, &success);

  // Number matches both int32 and double values.
  if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
    masm.branchTestNumber(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
    masm.branchTestUndefined(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
    masm.branchTestBoolean(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
    masm.branchTestString(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_SYMBOL))
    masm.branchTestSymbol(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
    masm.branchTestObject(Assembler::Equal, R0, &success);

  if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
    masm.branchTestNull(Assembler::Equal, R0, &success);

  // No type in the set matched: defer to the next stub.
  EmitStubGuardFailure(masm);

  masm.bind(&success);
  EmitReturnFromIC(masm);
  return true;
}
2374 
static void MaybeWorkAroundAmdBug(MacroAssembler& masm) {
// Attempt to work around an AMD bug (see bug 1034706 and bug 1281759), by
// inserting 32-bytes of NOPs.
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
  if (CPUInfo::NeedAmdBugWorkaround()) {
    // 9 + 9 + 9 + 5 = 32 bytes of NOP padding.
    masm.nop(9);
    masm.nop(9);
    masm.nop(9);
    masm.nop(5);
  }
#endif
}
2387 
generateStubCode(MacroAssembler & masm)2388 bool ICTypeMonitor_SingleObject::Compiler::generateStubCode(
2389     MacroAssembler& masm) {
2390   Label failure;
2391   masm.branchTestObject(Assembler::NotEqual, R0, &failure);
2392   MaybeWorkAroundAmdBug(masm);
2393 
2394   // Guard on the object's identity.
2395   Register obj = masm.extractObject(R0, ExtractTemp0);
2396   Address expectedObject(ICStubReg,
2397                          ICTypeMonitor_SingleObject::offsetOfObject());
2398   masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);
2399   MaybeWorkAroundAmdBug(masm);
2400 
2401   EmitReturnFromIC(masm);
2402   MaybeWorkAroundAmdBug(masm);
2403 
2404   masm.bind(&failure);
2405   EmitStubGuardFailure(masm);
2406   return true;
2407 }
2408 
generateStubCode(MacroAssembler & masm)2409 bool ICTypeMonitor_ObjectGroup::Compiler::generateStubCode(
2410     MacroAssembler& masm) {
2411   Label failure;
2412   masm.branchTestObject(Assembler::NotEqual, R0, &failure);
2413   MaybeWorkAroundAmdBug(masm);
2414 
2415   // Guard on the object's ObjectGroup. No Spectre mitigations are needed
2416   // here: we're just recording type information for Ion compilation and
2417   // it's safe to speculatively return.
2418   Register obj = masm.extractObject(R0, ExtractTemp0);
2419   Address expectedGroup(ICStubReg, ICTypeMonitor_ObjectGroup::offsetOfGroup());
2420   masm.branchTestObjGroupNoSpectreMitigations(
2421       Assembler::NotEqual, obj, expectedGroup, R1.scratchReg(), &failure);
2422   MaybeWorkAroundAmdBug(masm);
2423 
2424   EmitReturnFromIC(masm);
2425   MaybeWorkAroundAmdBug(masm);
2426 
2427   masm.bind(&failure);
2428   EmitStubGuardFailure(masm);
2429   return true;
2430 }
2431 
// An any-value monitor stub has no guards: it unconditionally succeeds.
bool ICTypeMonitor_AnyValue::Compiler::generateStubCode(MacroAssembler& masm) {
  EmitReturnFromIC(masm);
  return true;
}
2436 
// Attach (or extend) a TypeUpdate stub so that future writes of |val| to the
// property |id| of |obj| (with group |group|) can be handled without hitting
// the fallback path. The kind of stub chosen depends on what TypeInference
// already knows about the property:
//   - unknown types            -> a single AnyValue stub (replaces all others)
//   - primitive / unknownObject-> a (possibly shared) PrimitiveSet stub
//   - singleton object value   -> a SingleObject stub
//   - otherwise                -> an ObjectGroup stub
// Returns false only on OOM; "no stub attached" is still a success.
bool ICUpdatedStub::addUpdateStubForValue(JSContext* cx,
                                          HandleScript outerScript,
                                          HandleObject obj,
                                          HandleObjectGroup group, HandleId id,
                                          HandleValue val) {
  EnsureTrackPropertyTypes(cx, obj, id);

  // Make sure that undefined values are explicitly included in the property
  // types for an object if generating a stub to write an undefined value.
  if (val.isUndefined() && CanHaveEmptyPropertyTypesForOwnProperty(obj)) {
    MOZ_ASSERT(obj->group() == group);
    AddTypePropertyId(cx, obj, id, val);
  }

  // Query TypeInference for what it knows about this property.
  bool unknown = false, unknownObject = false;
  if (group->unknownProperties()) {
    unknown = unknownObject = true;
  } else {
    if (HeapTypeSet* types = group->maybeGetProperty(id)) {
      unknown = types->unknown();
      unknownObject = types->unknownObject();
    } else {
      // We don't record null/undefined types for certain TypedObject
      // properties. In these cases |types| is allowed to be nullptr
      // without implying unknown types. See DoTypeUpdateFallback.
      MOZ_ASSERT(obj->is<TypedObject>());
      MOZ_ASSERT(val.isNullOrUndefined());
    }
  }
  MOZ_ASSERT_IF(unknown, unknownObject);

  // Don't attach too many SingleObject/ObjectGroup stubs unless we can
  // replace them with a single PrimitiveSet or AnyValue stub.
  if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS && val.isObject() &&
      !unknownObject) {
    return true;
  }

  if (unknown) {
    // Attach a stub that always succeeds. We should not have a
    // TypeUpdate_AnyValue stub yet.
    MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_AnyValue));

    // Discard existing stubs: AnyValue subsumes everything.
    resetUpdateStubChain(cx->zone());

    ICTypeUpdate_AnyValue::Compiler compiler(cx);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) return false;

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for any value",
            stub);
    addOptimizedUpdateStub(stub);

  } else if (val.isPrimitive() || unknownObject) {
    // Note: doubles are canonicalized to JSVAL_TYPE_DOUBLE here so that a
    // single PrimitiveSet flag covers both int and double representations.
    JSValueType type =
        val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

    // Check for existing TypeUpdate stub.
    ICTypeUpdate_PrimitiveSet* existingStub = nullptr;
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      if (iter->isTypeUpdate_PrimitiveSet()) {
        existingStub = iter->toTypeUpdate_PrimitiveSet();
        // If it already contained this type we would not be here.
        MOZ_ASSERT(!existingStub->containsType(type));
      }
    }

    if (val.isObject()) {
      // Discard existing ObjectGroup/SingleObject stubs: unknownObject
      // makes them redundant. Keep the PrimitiveSet stub, if any.
      resetUpdateStubChain(cx->zone());
      if (existingStub) addOptimizedUpdateStub(existingStub);
    }

    // Either extend the existing PrimitiveSet stub in place or create one.
    ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
    ICStub* stub = existingStub
                       ? compiler.updateStub()
                       : compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) return false;
    if (!existingStub) {
      MOZ_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
      addOptimizedUpdateStub(stub);
    }

    JitSpew(JitSpew_BaselineIC, "  %s TypeUpdate stub %p for primitive type %d",
            existingStub ? "Modified existing" : "Created new", stub, type);

  } else if (val.toObject().isSingleton()) {
    RootedObject obj(cx, &val.toObject());

#ifdef DEBUG
    // We should not have a stub for this object.
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      MOZ_ASSERT_IF(iter->isTypeUpdate_SingleObject(),
                    iter->toTypeUpdate_SingleObject()->object() != obj);
    }
#endif

    ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) return false;

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for singleton %p",
            stub, obj.get());

    addOptimizedUpdateStub(stub);

  } else {
    RootedObjectGroup group(cx, val.toObject().group());

#ifdef DEBUG
    // We should not have a stub for this group.
    for (ICStubConstIterator iter(firstUpdateStub_); !iter.atEnd(); iter++) {
      MOZ_ASSERT_IF(iter->isTypeUpdate_ObjectGroup(),
                    iter->toTypeUpdate_ObjectGroup()->group() != group);
    }
#endif

    ICTypeUpdate_ObjectGroup::Compiler compiler(cx, group);
    ICStub* stub = compiler.getStub(compiler.getStubSpace(outerScript));
    if (!stub) return false;

    JitSpew(JitSpew_BaselineIC, "  Added TypeUpdate stub %p for ObjectGroup %p",
            stub, group.get());

    addOptimizedUpdateStub(stub);
  }

  return true;
}
2566 
2567 //
2568 // NewArray_Fallback
2569 //
2570 
DoNewArray(JSContext * cx,void * payload,ICNewArray_Fallback * stub,uint32_t length,MutableHandleValue res)2571 static bool DoNewArray(JSContext* cx, void* payload, ICNewArray_Fallback* stub,
2572                        uint32_t length, MutableHandleValue res) {
2573   SharedStubInfo info(cx, payload, stub->icEntry());
2574 
2575   FallbackICSpew(cx, stub, "NewArray");
2576 
2577   RootedObject obj(cx);
2578   if (stub->templateObject()) {
2579     RootedObject templateObject(cx, stub->templateObject());
2580     obj = NewArrayOperationWithTemplate(cx, templateObject);
2581     if (!obj) return false;
2582   } else {
2583     HandleScript script = info.script();
2584     jsbytecode* pc = info.pc();
2585     obj = NewArrayOperation(cx, script, pc, length);
2586     if (!obj) return false;
2587 
2588     if (obj && !obj->isSingleton() &&
2589         !obj->group()->maybePreliminaryObjects()) {
2590       JSObject* templateObject =
2591           NewArrayOperation(cx, script, pc, length, TenuredObject);
2592       if (!templateObject) return false;
2593       stub->setTemplateObject(templateObject);
2594     }
2595   }
2596 
2597   res.setObject(*obj);
2598   return true;
2599 }
2600 
// VMFunction wrapper so the generated stub code can tail-call DoNewArray.
typedef bool (*DoNewArrayFn)(JSContext*, void*, ICNewArray_Fallback*, uint32_t,
                             MutableHandleValue);
static const VMFunction DoNewArrayInfo =
    FunctionInfo<DoNewArrayFn>(DoNewArray, "DoNewArray", TailCall);
2605 
// Emit the NewArray fallback stub: marshal (payload, stub, length) onto the
// stack — pushed in reverse of the C++ argument order — and tail-call into
// DoNewArray.
bool ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  EmitRestoreTailCallReg(masm);

  masm.push(R0.scratchReg());  // length
  masm.push(ICStubReg);        // stub.
  pushStubPayload(masm, R0.scratchReg());

  return tailCallVM(DoNewArrayInfo, masm);
}
2615 
2616 //
2617 // NewObject_Fallback
2618 //
2619 
2620 // Unlike typical baseline IC stubs, the code for NewObject_WithTemplate is
2621 // specialized for the template object being allocated.
// Unlike typical baseline IC stubs, the code for NewObject_WithTemplate is
// specialized for the template object being allocated.
// Returns freshly linked JitCode that inline-allocates a clone of
// |templateObject| into R0, or nullptr on OOM. The stub bails to the next
// stub in the chain if the group is pretenured or an object-metadata builder
// is active (both make the fast inline allocation invalid).
static JitCode* GenerateNewObjectWithTemplateCode(JSContext* cx,
                                                  JSObject* templateObject) {
  JitContext jctx(cx, nullptr);
  MacroAssembler masm;
#ifdef JS_CODEGEN_ARM
  masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

  Label failure;
  Register objReg = R0.scratchReg();
  Register tempReg = R1.scratchReg();
  // Bail if allocations for this group must be tenured.
  masm.branchIfPretenuredGroup(templateObject->group(), tempReg, &failure);
  // Bail if a metadata builder is installed; it must observe allocations.
  masm.branchPtr(Assembler::NotEqual,
                 AbsoluteAddress(cx->compartment()->addressOfMetadataBuilder()),
                 ImmWord(0), &failure);
  masm.createGCObject(objReg, tempReg, templateObject, gc::DefaultHeap,
                      &failure);
  masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);

  EmitReturnFromIC(masm);
  masm.bind(&failure);
  EmitStubGuardFailure(masm);

  Linker linker(masm);
  AutoFlushICache afc("GenerateNewObjectWithTemplateCode");
  return linker.newCode(cx, CodeKind::Baseline);
}
2649 
// Fallback VM entry for object-allocation ops (JSOP_NEWOBJECT et al.).
// Allocates the object and, once the group analysis has settled, caches a
// tenured template object on the stub and — when the layout allows inline
// allocation — attaches a specialized NewObject_WithTemplate stub.
// Returns false on OOM/exception; on success |res| holds the new object.
static bool DoNewObject(JSContext* cx, void* payload,
                        ICNewObject_Fallback* stub, MutableHandleValue res) {
  SharedStubInfo info(cx, payload, stub->icEntry());

  FallbackICSpew(cx, stub, "NewObject");

  RootedObject obj(cx);

  RootedObject templateObject(cx, stub->templateObject());
  if (templateObject) {
    MOZ_ASSERT(!templateObject->group()->maybePreliminaryObjects());
    obj = NewObjectOperationWithTemplate(cx, templateObject);
  } else {
    HandleScript script = info.script();
    jsbytecode* pc = info.pc();
    obj = NewObjectOperation(cx, script, pc);

    // Note: unlike DoNewArray, |obj| may be null here; failure is
    // reported after the template-caching block below.
    if (obj && !obj->isSingleton() &&
        !obj->group()->maybePreliminaryObjects()) {
      JSObject* templateObject =
          NewObjectOperation(cx, script, pc, TenuredObject);
      if (!templateObject) return false;

      // Only attach the specialized stub while the fallback stub is still
      // valid, and only for layouts the inline path can allocate (no
      // dynamic slots).
      if (!stub->invalid() &&
          (templateObject->is<UnboxedPlainObject>() ||
           !templateObject->as<PlainObject>().hasDynamicSlots())) {
        JitCode* code = GenerateNewObjectWithTemplateCode(cx, templateObject);
        if (!code) return false;

        ICStubSpace* space =
            ICStubCompiler::StubSpaceForStub(/* makesGCCalls = */ false, script,
                                             ICStubCompiler::Engine::Baseline);
        ICStub* templateStub =
            ICStub::New<ICNewObject_WithTemplate>(cx, space, code);
        if (!templateStub) return false;

        stub->addNewStub(templateStub);
      }

      stub->setTemplateObject(templateObject);
    }
  }

  // Deferred null check covers both allocation paths above.
  if (!obj) return false;

  res.setObject(*obj);
  return true;
}
2698 
// VMFunction wrapper so the generated stub code can tail-call DoNewObject.
typedef bool (*DoNewObjectFn)(JSContext*, void*, ICNewObject_Fallback*,
                              MutableHandleValue);
static const VMFunction DoNewObjectInfo =
    FunctionInfo<DoNewObjectFn>(DoNewObject, "DoNewObject", TailCall);
2703 
// Emit the NewObject fallback stub: marshal (payload, stub) — pushed in
// reverse of the C++ argument order — and tail-call into DoNewObject.
bool ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler& masm) {
  EmitRestoreTailCallReg(masm);

  masm.push(ICStubReg);  // stub.
  pushStubPayload(masm, R0.scratchReg());

  return tailCallVM(DoNewObjectInfo, masm);
}
2712 
2713 }  // namespace jit
2714 }  // namespace js
2715