1 // Copyright 2013, ARM Limited
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 //   * Redistributions of source code must retain the above copyright notice,
8 //     this list of conditions and the following disclaimer.
9 //   * Redistributions in binary form must reproduce the above copyright notice,
10 //     this list of conditions and the following disclaimer in the documentation
11 //     and/or other materials provided with the distribution.
12 //   * Neither the name of ARM Limited nor the names of its contributors may be
13 //     used to endorse or promote products derived from this software without
14 //     specific prior written permission.
15 //
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 
27 #include "mozilla/DebugOnly.h"
28 
29 #include "jit/arm64/vixl/Debugger-vixl.h"
30 #include "jit/arm64/vixl/Simulator-vixl.h"
31 #include "jit/IonTypes.h"
32 #include "threading/LockGuard.h"
33 #include "vm/Runtime.h"
34 
35 namespace vixl {
36 
37 
38 using mozilla::DebugOnly;
39 using js::jit::ABIFunctionType;
40 
41 Simulator::Simulator(Decoder* decoder, FILE* stream)
42   : stream_(nullptr)
43   , print_disasm_(nullptr)
44   , instrumentation_(nullptr)
45   , stack_(nullptr)
46   , stack_limit_(nullptr)
47   , decoder_(nullptr)
48   , oom_(false)
49   , lock_(js::mutexid::Arm64SimulatorLock)
50 {
51     this->init(decoder, stream);
52 }
53 
54 
55 Simulator::~Simulator() {
56   js_free(stack_);
57   stack_ = nullptr;
58 
59   // The decoder may outlive the simulator.
60   if (print_disasm_) {
61     decoder_->RemoveVisitor(print_disasm_);
62     js_delete(print_disasm_);
63     print_disasm_ = nullptr;
64   }
65 
66   if (instrumentation_) {
67     decoder_->RemoveVisitor(instrumentation_);
68     js_delete(instrumentation_);
69     instrumentation_ = nullptr;
70   }
71 }
72 
73 
74 void Simulator::ResetState() {
75   // Reset the system registers.
76   nzcv_ = SimSystemRegister::DefaultValueFor(NZCV);
77   fpcr_ = SimSystemRegister::DefaultValueFor(FPCR);
78 
79   // Reset registers to 0.
80   pc_ = nullptr;
81   pc_modified_ = false;
82   for (unsigned i = 0; i < kNumberOfRegisters; i++) {
83     set_xreg(i, 0xbadbeef);
84   }
85   // Set FP registers to a value that is a NaN in both 32-bit and 64-bit FP.
86   uint64_t nan_bits = UINT64_C(0x7ff0dead7f8beef1);
87   VIXL_ASSERT(IsSignallingNaN(rawbits_to_double(nan_bits & kDRegMask)));
88   VIXL_ASSERT(IsSignallingNaN(rawbits_to_float(nan_bits & kSRegMask)));
89   for (unsigned i = 0; i < kNumberOfFPRegisters; i++) {
90     set_dreg_bits(i, nan_bits);
91   }
92   // Returning to address 0 exits the Simulator.
93   set_lr(kEndOfSimAddress);
94   set_resume_pc(nullptr);
95 }
96 
97 
// One-time setup, called from the constructor. On allocation failure this
// sets oom_ and returns early; Create() checks oom() and destroys the
// half-initialized simulator, so every early-out here must leave the
// object destructible (the constructor pre-nulls all pointer members).
void Simulator::init(Decoder* decoder, FILE* stream) {
  // Ensure that shift operations act as the simulator expects.
  VIXL_ASSERT((static_cast<int32_t>(-1) >> 1) == -1);
  VIXL_ASSERT((static_cast<uint32_t>(-1) >> 1) == 0x7FFFFFFF);

  instruction_stats_ = false;

  // Set up the decoder; register ourselves to receive decoded instructions.
  decoder_ = decoder;
  decoder_->AppendVisitor(this);

  stream_ = stream;
  print_disasm_ = js_new<PrintDisassembler>(stream_);
  if (!print_disasm_) {
    oom_ = true;
    return;
  }
  set_coloured_trace(false);
  trace_parameters_ = LOG_NONE;

  ResetState();

  // Allocate and set up the simulator stack.
  stack_ = (byte*)js_malloc(stack_size_);
  if (!stack_) {
    oom_ = true;
    return;
  }
  // Overrunning stack_limit_ downward trips the over-recursion checks.
  stack_limit_ = stack_ + stack_protection_size_;
  // Configure the starting stack pointer.
  //  - Find the top of the stack.
  byte * tos = stack_ + stack_size_;
  //  - There's a protection region at both ends of the stack.
  tos -= stack_protection_size_;
  //  - The stack pointer must be 16-byte aligned.
  tos = AlignDown(tos, 16);
  set_sp(tos);

  // Set the sample period to 10, as the VIXL examples and tests are short.
  instrumentation_ = js_new<Instrument>("vixl_stats.csv", 10);
  if (!instrumentation_) {
    oom_ = true;
    return;
  }

  // Print a warning about exclusive-access instructions, but only the first
  // time they are encountered. This warning can be silenced using
  // SilenceExclusiveAccessWarning().
  print_exclusive_access_warning_ = true;

  redirection_ = nullptr;
}
150 
151 
152 Simulator* Simulator::Current() {
153   return js::TlsPerThreadData.get()->simulator();
154 }
155 
156 
157 Simulator* Simulator::Create(JSContext* cx) {
158   Decoder *decoder = js_new<vixl::Decoder>();
159   if (!decoder)
160     return nullptr;
161 
162   // FIXME: This just leaks the Decoder object for now, which is probably OK.
163   // FIXME: We should free it at some point.
164   // FIXME: Note that it can't be stored in the SimulatorRuntime due to lifetime conflicts.
165   Simulator *sim;
166   if (getenv("USE_DEBUGGER") != nullptr)
167     sim = js_new<Debugger>(decoder, stdout);
168   else
169     sim = js_new<Simulator>(decoder, stdout);
170 
171   // Check if Simulator:init ran out of memory.
172   if (sim && sim->oom()) {
173     js_delete(sim);
174     return nullptr;
175   }
176 
177   return sim;
178 }
179 
180 
181 void Simulator::Destroy(Simulator* sim) {
182   js_delete(sim);
183 }
184 
185 
// Decode and execute the single instruction at pc_, then advance pc_
// (unless the instruction itself modified it). If an interrupt requested
// a resume address, divert execution there afterwards.
void Simulator::ExecuteInstruction() {
  // The program counter should always be aligned.
  VIXL_ASSERT(IsWordAligned(pc_));
  decoder_->Decode(pc_);
  // Snapshot resume_pc_ before increment_pc(): the instruction we just
  // executed may have set it (via set_resume_pc).
  const Instruction* rpc = resume_pc_;
  increment_pc();

  if (MOZ_UNLIKELY(rpc)) {
    // Record where the interrupted code would have continued, then
    // redirect execution to the requested handler.
    JSRuntime::innermostWasmActivation()->setResumePC((void*)pc());
    set_pc(rpc);
    // Just calling set_pc turns the pc_modified_ flag on, which means it doesn't
    // auto-step after executing the next instruction.  Force that to off so it
    // will auto-step after executing the first instruction of the handler.
    pc_modified_ = false;
    resume_pc_ = nullptr;
  }
}
203 
204 
205 uintptr_t Simulator::stackLimit() const {
206   return reinterpret_cast<uintptr_t>(stack_limit_);
207 }
208 
209 
210 uintptr_t* Simulator::addressOfStackLimit() {
211   return (uintptr_t*)&stack_limit_;
212 }
213 
214 
215 bool Simulator::overRecursed(uintptr_t newsp) const {
216   if (newsp)
217     newsp = xreg(31, Reg31IsStackPointer);
218   return newsp <= stackLimit();
219 }
220 
221 
222 bool Simulator::overRecursedWithExtra(uint32_t extra) const {
223   uintptr_t newsp = xreg(31, Reg31IsStackPointer) - extra;
224   return newsp <= stackLimit();
225 }
226 
227 
228 void Simulator::set_resume_pc(void* new_resume_pc) {
229   resume_pc_ = AddressUntag(reinterpret_cast<Instruction*>(new_resume_pc));
230 }
231 
232 
// Marshal the varargs into the AArch64 argument registers x0-x7, simulate
// a call to |entry|, and return the value left in x0. The varargs are
// interpreted purely by |argument_count|, because only a handful of
// entry-point signatures exist (see comment below).
int64_t Simulator::call(uint8_t* entry, int argument_count, ...) {
  va_list parameters;
  va_start(parameters, argument_count);

  // First eight arguments passed in registers.
  VIXL_ASSERT(argument_count <= 8);
  // This code should use the type of the called function
  // (with templates, like the callVM machinery), but since the
  // number of called functions is minuscule, their types have been
  // divined from the number of arguments.
  if (argument_count == 8) {
      // EnterJitData::jitcode.
      set_xreg(0, va_arg(parameters, int64_t));
      // EnterJitData::maxArgc.
      set_xreg(1, va_arg(parameters, unsigned));
      // EnterJitData::maxArgv.
      set_xreg(2, va_arg(parameters, int64_t));
      // EnterJitData::osrFrame.
      set_xreg(3, va_arg(parameters, int64_t));
      // EnterJitData::calleeToken.
      set_xreg(4, va_arg(parameters, int64_t));
      // EnterJitData::scopeChain.
      set_xreg(5, va_arg(parameters, int64_t));
      // EnterJitData::osrNumStackValues.
      set_xreg(6, va_arg(parameters, unsigned));
      // Address of EnterJitData::result.
      set_xreg(7, va_arg(parameters, int64_t));
  } else if (argument_count == 2) {
      // EntryArg* args
      set_xreg(0, va_arg(parameters, int64_t));
      // uint8_t* GlobalData
      set_xreg(1, va_arg(parameters, int64_t));
  } else if (argument_count == 1) { // irregexp
      // InputOutputData& data
      set_xreg(0, va_arg(parameters, int64_t));
  } else {
      MOZ_CRASH("Unknown number of arguments");
  }

  va_end(parameters);

  // Call must transition back to native code on exit: lr must still hold
  // the end-of-simulation sentinel installed by ResetState().
  VIXL_ASSERT(xreg(30) == int64_t(kEndOfSimAddress));

  // Execute the simulation.
  DebugOnly<int64_t> entryStack = xreg(31, Reg31IsStackPointer);
  RunFrom((Instruction*)entry);
  DebugOnly<int64_t> exitStack = xreg(31, Reg31IsStackPointer);
  // The callee must restore the stack pointer on return.
  VIXL_ASSERT(entryStack == exitStack);

  // The return value is whatever the simulated code left in x0.
  int64_t result = xreg(0);
  if (getenv("USE_DEBUGGER"))
      printf("LEAVE\n");
  return result;
}
288 
289 
// Protects the icache and redirection properties of the simulator.
// RAII guard: acquires sim->lock_ on construction and releases it when
// the guard goes out of scope (behavior inherited from js::LockGuard).
class AutoLockSimulatorCache : public js::LockGuard<js::Mutex>
{
  friend class Simulator;
  using Base = js::LockGuard<js::Mutex>;

 public:
  explicit AutoLockSimulatorCache(Simulator* sim)
    : Base(sim->lock_)
  {
  }
};
302 
303 
// When the generated code calls a VM function (masm.callWithABI) we need to
// call that function instead of trying to execute it with the simulator
// (because it's host-native code instead of AArch64 code). We do that by
// redirecting the VM call to a svc (Supervisor Call) instruction that is
// handled by the simulator. We write the original destination of the jump
// just at a known offset from the svc instruction so the simulator knows
// what to call.
// A node in the simulator's singly-linked list mapping a host-native VM
// function to an embedded SVC trampoline instruction that the simulator
// can intercept (see VisitCallRedirection).
class Redirection
{
  friend class Simulator;

  // Private: instances are created only via Get(), which holds the
  // simulator lock. Construction links this node at the head of the
  // simulator's redirection list and encodes the trampoline SVC
  // instruction into svcInstruction_.
  Redirection(void* nativeFunction, ABIFunctionType type, Simulator* sim)
    : nativeFunction_(nativeFunction),
    type_(type),
    next_(nullptr)
  {
    next_ = sim->redirection();
    // TODO: Flush ICache?
    sim->setRedirection(this);

    Instruction* instr = (Instruction*)(&svcInstruction_);
    vixl::Assembler::svc(instr, kCallRtRedirected);
  }

 public:
  // The address generated code jumps to; executing the SVC stored there
  // traps into the simulator.
  void* addressOfSvcInstruction() { return &svcInstruction_; }
  void* nativeFunction() const { return nativeFunction_; }
  ABIFunctionType type() const { return type_; }

  // Find the existing Redirection for |nativeFunction|, or create one.
  // Takes the simulator lock; never returns null (crashes on OOM).
  static Redirection* Get(void* nativeFunction, ABIFunctionType type) {
    Simulator* sim = Simulator::Current();
    AutoLockSimulatorCache alsr(sim);

    // TODO: Store srt_ in the simulator for this assertion.
    // VIXL_ASSERT_IF(pt->simulator(), pt->simulator()->srt_ == srt);

    // Linear search: the set of redirected functions is small.
    Redirection* current = sim->redirection();
    for (; current != nullptr; current = current->next_) {
      if (current->nativeFunction_ == nativeFunction) {
        // A given native function must always use the same signature.
        VIXL_ASSERT(current->type() == type);
        return current;
      }
    }

    // Not found: allocate and link a new entry (still under the lock).
    js::AutoEnterOOMUnsafeRegion oomUnsafe;
    Redirection* redir = (Redirection*)js_malloc(sizeof(Redirection));
    if (!redir)
        oomUnsafe.crash("Simulator redirection");
    new(redir) Redirection(nativeFunction, type, sim);
    return redir;
  }

  // Recover the owning Redirection from a pointer to its embedded SVC
  // instruction — the inverse of addressOfSvcInstruction().
  static const Redirection* FromSvcInstruction(const Instruction* svcInstruction) {
    const uint8_t* addrOfSvc = reinterpret_cast<const uint8_t*>(svcInstruction);
    const uint8_t* addrOfRedirection = addrOfSvc - offsetof(Redirection, svcInstruction_);
    return reinterpret_cast<const Redirection*>(addrOfRedirection);
  }

 private:
  void* nativeFunction_;     // Host function this trampoline stands in for.
  uint32_t svcInstruction_;  // Encoded AArch64 `svc kCallRtRedirected`.
  ABIFunctionType type_;     // Signature used to dispatch the host call.
  Redirection* next_;        // Next node in the simulator's list.
};
367 
368 
369 void Simulator::setRedirection(Redirection* redirection) {
370   redirection_ = redirection;
371 }
372 
373 
374 Redirection* Simulator::redirection() const {
375   return redirection_;
376 }
377 
378 
379 void* Simulator::RedirectNativeFunction(void* nativeFunction, ABIFunctionType type) {
380   Redirection* redirection = Redirection::Get(nativeFunction, type);
381   return redirection->addressOfSvcInstruction();
382 }
383 
384 
// Dispatch exception-generating instructions (BRK / HLT / SVC). The JIT
// hijacks HLT and SVC immediates as pseudo-instructions: HLT carries
// debugging opcodes, SVC carries redirection and stack-check opcodes.
void Simulator::VisitException(const Instruction* instr) {
  switch (instr->Mask(ExceptionMask)) {
    case BRK: {
      // Forward the BRK immediate to the host breakpoint handler.
      int lowbit  = ImmException_offset;
      int highbit = ImmException_offset + ImmException_width - 1;
      HostBreakpoint(instr->Bits(highbit, lowbit));
      break;
    }
    case HLT:
      switch (instr->ImmException()) {
        case kUnreachableOpcode:
          DoUnreachable(instr);
          return;
        case kTraceOpcode:
          DoTrace(instr);
          return;
        case kLogOpcode:
          DoLog(instr);
          return;
        case kPrintfOpcode:
          DoPrintf(instr);
          return;
        default:
          // Unrecognized HLT immediate: drop into the host debugger.
          HostBreakpoint();
          return;
      }
    case SVC:
      // The SVC instruction is hijacked by the JIT as a pseudo-instruction
      // causing the Simulator to execute host-native code for callWithABI.
      switch (instr->ImmException()) {
        case kCallRtRedirected:
          VisitCallRedirection(instr);
          return;
        case kMarkStackPointer:
          // Record the current sp for a later kCheckStackPointer match.
          spStack_.append(xreg(31, Reg31IsStackPointer));
          return;
        case kCheckStackPointer: {
          // Verify sp matches the most recent kMarkStackPointer snapshot.
          int64_t current = xreg(31, Reg31IsStackPointer);
          int64_t expected = spStack_.popCopy();
          VIXL_ASSERT(current == expected);
          return;
        }
        default:
          VIXL_UNIMPLEMENTED();
      }
      break;
    default:
      VIXL_UNIMPLEMENTED();
  }
}
435 
436 
437 void Simulator::setGPR32Result(int32_t result) {
438     set_wreg(0, result);
439 }
440 
441 
442 void Simulator::setGPR64Result(int64_t result) {
443     set_xreg(0, result);
444 }
445 
446 
447 void Simulator::setFP32Result(float result) {
448     set_sreg(0, result);
449 }
450 
451 
452 void Simulator::setFP64Result(double result) {
453     set_dreg(0, result);
454 }
455 
456 
// Host-function signatures used to dispatch redirected VM calls in
// VisitCallRedirection(). Per AAPCS64, integer/pointer arguments arrive
// in x0-x7 and floating-point arguments in d0-d3 / s0.
using Prototype_General0 = int64_t (*)();
using Prototype_General1 = int64_t (*)(int64_t arg0);
using Prototype_General2 = int64_t (*)(int64_t arg0, int64_t arg1);
using Prototype_General3 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2);
using Prototype_General4 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2, int64_t arg3);
using Prototype_General5 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2, int64_t arg3,
                                       int64_t arg4);
using Prototype_General6 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2, int64_t arg3,
                                       int64_t arg4, int64_t arg5);
using Prototype_General7 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2, int64_t arg3,
                                       int64_t arg4, int64_t arg5, int64_t arg6);
using Prototype_General8 = int64_t (*)(int64_t arg0, int64_t arg1, int64_t arg2, int64_t arg3,
                                       int64_t arg4, int64_t arg5, int64_t arg6, int64_t arg7);

// Mixed integer/double signatures.
using Prototype_Int_Double = int64_t (*)(double arg0);
using Prototype_Int_IntDouble = int64_t (*)(int32_t arg0, double arg1);
using Prototype_Int_DoubleIntInt = int64_t (*)(double arg0, uint64_t arg1, uint64_t arg2);
using Prototype_Int_IntDoubleIntInt = int64_t (*)(uint64_t arg0, double arg1,
                                                  uint64_t arg2, uint64_t arg3);

using Prototype_Float32_Float32 = float (*)(float arg0);

// Double-returning signatures.
using Prototype_Double_None = double (*)();
using Prototype_Double_Double = double (*)(double arg0);
using Prototype_Double_Int = double (*)(int32_t arg0);
using Prototype_Double_DoubleInt = double (*)(double arg0, int64_t arg1);
using Prototype_Double_IntDouble = double (*)(int64_t arg0, double arg1);
using Prototype_Double_DoubleDouble = double (*)(double arg0, double arg1);
using Prototype_Double_DoubleDoubleDouble = double (*)(double arg0, double arg1, double arg2);
using Prototype_Double_DoubleDoubleDoubleDouble = double (*)(double arg0, double arg1,
                                                             double arg2, double arg3);
488 
489 
490 // Simulator support for callWithABI().
// Simulator support for callWithABI(). Executes the host-native function
// behind a redirected SVC: reads the argument registers, dispatches on
// the recorded ABIFunctionType, writes the result back into the ABI
// return register, and simulates a return to the saved lr.
void
Simulator::VisitCallRedirection(const Instruction* instr)
{
  VIXL_ASSERT(instr->Mask(ExceptionMask) == SVC);
  VIXL_ASSERT(instr->ImmException() == kCallRtRedirected);

  // Recover the Redirection record embedding this SVC instruction.
  const Redirection* redir = Redirection::FromSvcInstruction(instr);
  uintptr_t nativeFn = reinterpret_cast<uintptr_t>(redir->nativeFunction());

  // Stack must be aligned prior to the call.
  // FIXME: It's actually our job to perform the alignment...
  //VIXL_ASSERT((xreg(31, Reg31IsStackPointer) & (StackAlignment - 1)) == 0);

  // Used to assert that callee-saved registers are preserved.
  DebugOnly<int64_t> x19 = xreg(19);
  DebugOnly<int64_t> x20 = xreg(20);
  DebugOnly<int64_t> x21 = xreg(21);
  DebugOnly<int64_t> x22 = xreg(22);
  DebugOnly<int64_t> x23 = xreg(23);
  DebugOnly<int64_t> x24 = xreg(24);
  DebugOnly<int64_t> x25 = xreg(25);
  DebugOnly<int64_t> x26 = xreg(26);
  DebugOnly<int64_t> x27 = xreg(27);
  DebugOnly<int64_t> x28 = xreg(28);
  DebugOnly<int64_t> x29 = xreg(29);
  DebugOnly<int64_t> savedSP = xreg(31, Reg31IsStackPointer);

  // Remember LR for returning from the "call".
  int64_t savedLR = xreg(30);

  // Allow recursive Simulator calls: returning from the call must stop
  // the simulation and transition back to native Simulator code.
  set_xreg(30, int64_t(kEndOfSimAddress));

  // Store argument register values in local variables for ease of use below.
  int64_t x0 = xreg(0);
  int64_t x1 = xreg(1);
  int64_t x2 = xreg(2);
  int64_t x3 = xreg(3);
  int64_t x4 = xreg(4);
  int64_t x5 = xreg(5);
  int64_t x6 = xreg(6);
  int64_t x7 = xreg(7);
  double d0 = dreg(0);
  double d1 = dreg(1);
  double d2 = dreg(2);
  double d3 = dreg(3);
  float s0 = sreg(0);

  // Dispatch the call and set the return value.
  switch (redir->type()) {
    // Cases with int64_t return type.
    case js::jit::Args_General0: {
      int64_t ret = reinterpret_cast<Prototype_General0>(nativeFn)();
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General1: {
      int64_t ret = reinterpret_cast<Prototype_General1>(nativeFn)(x0);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General2: {
      int64_t ret = reinterpret_cast<Prototype_General2>(nativeFn)(x0, x1);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General3: {
      int64_t ret = reinterpret_cast<Prototype_General3>(nativeFn)(x0, x1, x2);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General4: {
      int64_t ret = reinterpret_cast<Prototype_General4>(nativeFn)(x0, x1, x2, x3);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General5: {
      int64_t ret = reinterpret_cast<Prototype_General5>(nativeFn)(x0, x1, x2, x3, x4);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General6: {
      int64_t ret = reinterpret_cast<Prototype_General6>(nativeFn)(x0, x1, x2, x3, x4, x5);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General7: {
      int64_t ret = reinterpret_cast<Prototype_General7>(nativeFn)(x0, x1, x2, x3, x4, x5, x6);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_General8: {
      int64_t ret = reinterpret_cast<Prototype_General8>(nativeFn)(x0, x1, x2, x3, x4, x5, x6, x7);
      setGPR64Result(ret);
      break;
    }

    // Cases with GPR return type. This can be int32 or int64, but int64 is a safer assumption.
    case js::jit::Args_Int_Double: {
      int64_t ret = reinterpret_cast<Prototype_Int_Double>(nativeFn)(d0);
      setGPR64Result(ret);
      break;
    }
    case js::jit::Args_Int_IntDouble: {
      int64_t ret = reinterpret_cast<Prototype_Int_IntDouble>(nativeFn)(x0, d0);
      setGPR64Result(ret);
      break;
    }

    case js::jit::Args_Int_IntDoubleIntInt: {
      int64_t ret = reinterpret_cast<Prototype_Int_IntDoubleIntInt>(nativeFn)(x0, d0, x1, x2);
      setGPR64Result(ret);
      break;
    }

    case js::jit::Args_Int_DoubleIntInt: {
      int64_t ret = reinterpret_cast<Prototype_Int_DoubleIntInt>(nativeFn)(d0, x0, x1);
      setGPR64Result(ret);
      break;
    }

    // Cases with float return type.
    case js::jit::Args_Float32_Float32: {
      float ret = reinterpret_cast<Prototype_Float32_Float32>(nativeFn)(s0);
      setFP32Result(ret);
      break;
    }

    // Cases with double return type.
    case js::jit::Args_Double_None: {
      double ret = reinterpret_cast<Prototype_Double_None>(nativeFn)();
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_Double: {
      double ret = reinterpret_cast<Prototype_Double_Double>(nativeFn)(d0);
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_Int: {
      double ret = reinterpret_cast<Prototype_Double_Int>(nativeFn)(x0);
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_DoubleInt: {
      double ret = reinterpret_cast<Prototype_Double_DoubleInt>(nativeFn)(d0, x0);
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_DoubleDouble: {
      double ret = reinterpret_cast<Prototype_Double_DoubleDouble>(nativeFn)(d0, d1);
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_DoubleDoubleDouble: {
      double ret = reinterpret_cast<Prototype_Double_DoubleDoubleDouble>(nativeFn)(d0, d1, d2);
      setFP64Result(ret);
      break;
    }
    case js::jit::Args_Double_DoubleDoubleDoubleDouble: {
      double ret = reinterpret_cast<Prototype_Double_DoubleDoubleDoubleDouble>(nativeFn)(d0, d1, d2, d3);
      setFP64Result(ret);
      break;
    }

    case js::jit::Args_Double_IntDouble: {
      double ret = reinterpret_cast<Prototype_Double_IntDouble>(nativeFn)(x0, d0);
      setFP64Result(ret);
      break;
    }

    default:
      MOZ_CRASH("Unknown function type.");
  }

  // TODO: Nuke the volatile registers.

  // Assert that callee-saved registers are unchanged.
  VIXL_ASSERT(xreg(19) == x19);
  VIXL_ASSERT(xreg(20) == x20);
  VIXL_ASSERT(xreg(21) == x21);
  VIXL_ASSERT(xreg(22) == x22);
  VIXL_ASSERT(xreg(23) == x23);
  VIXL_ASSERT(xreg(24) == x24);
  VIXL_ASSERT(xreg(25) == x25);
  VIXL_ASSERT(xreg(26) == x26);
  VIXL_ASSERT(xreg(27) == x27);
  VIXL_ASSERT(xreg(28) == x28);
  VIXL_ASSERT(xreg(29) == x29);

  // Assert that the stack is unchanged.
  VIXL_ASSERT(savedSP == xreg(31, Reg31IsStackPointer));

  // Simulate a return.
  set_lr(savedLR);
  set_pc((Instruction*)savedLR);
  if (getenv("USE_DEBUGGER"))
    printf("SVCRET\n");
}
691 
692 
693 }  // namespace vixl
694 
695 
696 vixl::Simulator* js::PerThreadData::simulator() const {
697   return runtime_->simulator();
698 }
699 
700 
701 vixl::Simulator* JSRuntime::simulator() const {
702   return simulator_;
703 }
704 
705 
706 uintptr_t* JSRuntime::addressOfSimulatorStackLimit() {
707   return simulator_->addressOfStackLimit();
708 }
709