/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmInstance.h"

#include "jit/BaselineJIT.h"
#include "jit/JitCommon.h"
#include "wasm/WasmModule.h"

#include "jsobjinlines.h"

#include "vm/ArrayBufferObject-inl.h"

using namespace js;
using namespace js::jit;
using namespace js::wasm;
using mozilla::BinarySearch;
using mozilla::BitwiseCast;
using mozilla::IsNaN;
using mozilla::Swap;

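// A SigIdSet assigns a process-wide id to each distinct function signature,
// using the address of a canonical, heap-allocated copy of the signature as
// the id. Ids are reference-counted so that instances sharing a signature
// share its id; the ImmediateBit assertion below keeps pointer-based ids
// disjoint from immediate-encoded ids (see SigIdDesc).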
class SigIdSet
{
    typedef HashMap<const Sig*, uint32_t, SigHashPolicy, SystemAllocPolicy> Map;
    Map map_;

  public:
    ~SigIdSet() {
        MOZ_ASSERT_IF(!JSRuntime::hasLiveRuntimes(), !map_.initialized() || map_.empty());
    }

    bool ensureInitialized(JSContext* cx) {
        if (!map_.initialized() && !map_.init()) {
            ReportOutOfMemory(cx);
            return false;
        }

        return true;
    }

    bool allocateSigId(JSContext* cx, const Sig& sig, const void** sigId) {
        Map::AddPtr p = map_.lookupForAdd(sig);
        if (p) {
            MOZ_ASSERT(p->value() > 0);
            p->value()++;
            *sigId = p->key();
            return true;
        }

        UniquePtr<Sig> clone = MakeUnique<Sig>();
        if (!clone || !clone->clone(sig) || !map_.add(p, clone.get(), 1)) {
            ReportOutOfMemory(cx);
            return false;
        }

        *sigId = clone.release();
        MOZ_ASSERT(!(uintptr_t(*sigId) & SigIdDesc::ImmediateBit));
        return true;
    }

    void deallocateSigId(const Sig& sig, const void* sigId) {
        Map::Ptr p = map_.lookup(sig);
        MOZ_RELEASE_ASSERT(p && p->key() == sigId && p->value() > 0);

        p->value()--;
        if (!p->value()) {
            js_delete(p->key());
            map_.remove(p);
        }
    }
};

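// The process-wide SigIdSet, created in InitInstanceStaticData and guarded by
// the mutexid::WasmSigIdSet lock.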
ExclusiveData<SigIdSet>* sigIdSet = nullptr;

bool
js::wasm::InitInstanceStaticData()
{
    MOZ_ASSERT(!sigIdSet);
    sigIdSet = js_new<ExclusiveData<SigIdSet>>(mutexid::WasmSigIdSet);
    return sigIdSet != nullptr;
}

void
js::wasm::ShutDownInstanceStaticData()
{
    MOZ_ASSERT(sigIdSet);
    js_delete(sigIdSet);
    sigIdSet = nullptr;
}

const void**
Instance::addressOfSigId(const SigIdDesc& sigId) const
{
    MOZ_ASSERT(sigId.globalDataOffset() >= InitialGlobalDataBytes);
    return (const void**)(codeSegment().globalData() + sigId.globalDataOffset());
}

FuncImportTls&
Instance::funcImportTls(const FuncImport& fi)
{
    MOZ_ASSERT(fi.tlsDataOffset() >= InitialGlobalDataBytes);
    return *(FuncImportTls*)(codeSegment().globalData() + fi.tlsDataOffset());
}

TableTls&
Instance::tableTls(const TableDesc& td) const
{
    MOZ_ASSERT(td.globalDataOffset >= InitialGlobalDataBytes);
    return *(TableTls*)(codeSegment().globalData() + td.globalDataOffset);
}

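// callImport boxes the wasm arguments in argv into JS values, invokes the
// imported function, and then, when the signature allows it, tries to upgrade
// the import from the generic interpreter exit to the specialized jit exit.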
bool
Instance::callImport(JSContext* cx, uint32_t funcImportIndex, unsigned argc, const uint64_t* argv,
                     MutableHandleValue rval)
{
    const FuncImport& fi = metadata().funcImports[funcImportIndex];

    InvokeArgs args(cx);
    if (!args.init(cx, argc))
        return false;

    bool hasI64Arg = false;
    MOZ_ASSERT(fi.sig().args().length() == argc);
    for (size_t i = 0; i < argc; i++) {
        switch (fi.sig().args()[i]) {
          case ValType::I32:
            args[i].set(Int32Value(*(int32_t*)&argv[i]));
            break;
          case ValType::F32:
            args[i].set(JS::CanonicalizedDoubleValue(*(float*)&argv[i]));
            break;
          case ValType::F64:
            args[i].set(JS::CanonicalizedDoubleValue(*(double*)&argv[i]));
            break;
          case ValType::I64: {
            if (!JitOptions.wasmTestMode) {
                JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64);
                return false;
            }
            RootedObject obj(cx, CreateI64Object(cx, *(int64_t*)&argv[i]));
            if (!obj)
                return false;
            args[i].set(ObjectValue(*obj));
            hasI64Arg = true;
            break;
          }
          case ValType::I8x16:
          case ValType::I16x8:
          case ValType::I32x4:
          case ValType::F32x4:
          case ValType::B8x16:
          case ValType::B16x8:
          case ValType::B32x4:
            MOZ_CRASH("unhandled type in callImport");
        }
    }

    FuncImportTls& import = funcImportTls(fi);
    RootedFunction importFun(cx, &import.obj->as<JSFunction>());
    RootedValue fval(cx, ObjectValue(*import.obj));
    RootedValue thisv(cx, UndefinedValue());
    if (!Call(cx, fval, thisv, args, rval))
        return false;

    // Throw an error if returning i64 and not in test mode.
    if (!JitOptions.wasmTestMode && fi.sig().ret() == ExprType::I64) {
        JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64);
        return false;
    }

    // Don't try to optimize if the function has at least one i64 arg or if
    // it returns an i64. GenerateJitExit relies on this, as does the
    // type-inference code below in this function.
    if (hasI64Arg || fi.sig().ret() == ExprType::I64)
        return true;

    // The import may already have become optimized.
    void* jitExitCode = codeBase() + fi.jitExitCodeOffset();
    if (import.code == jitExitCode)
        return true;

    // Test if the function is JIT compiled.
    if (!importFun->hasScript())
        return true;

    JSScript* script = importFun->nonLazyScript();
    if (!script->hasBaselineScript()) {
        MOZ_ASSERT(!script->hasIonScript());
        return true;
    }

    // Don't enable the jit exit while the script has a pending Ion builder:
    // take the interpreter path, which will link the Ion script and enable
    // the fast path on the next call.
    if (script->baselineScript()->hasPendingIonBuilder())
        return true;

    // Currently we can't rectify arguments, so disable the fast path if the
    // JS callee expects more formal arguments than the wasm signature supplies.
    if (importFun->nargs() > fi.sig().args().length())
        return true;

    // Ensure the argument types are included in the argument TypeSets stored in
    // the TypeScript. This is necessary for Ion, because the import will use
    // the skip-arg-checks entry point.
    //
    // Note that the TypeScript is never discarded while the script has a
    // BaselineScript, so if those checks hold now they must hold at least until
    // the BaselineScript is discarded and when that happens the import is
    // patched back.
    if (!TypeScript::ThisTypes(script)->hasType(TypeSet::UndefinedType()))
        return true;
    for (uint32_t i = 0; i < importFun->nargs(); i++) {
        TypeSet::Type type = TypeSet::UnknownType();
        switch (fi.sig().args()[i]) {
          case ValType::I32:   type = TypeSet::Int32Type(); break;
          case ValType::I64:   MOZ_CRASH("can't happen because of above guard");
          case ValType::F32:   type = TypeSet::DoubleType(); break;
          case ValType::F64:   type = TypeSet::DoubleType(); break;
          case ValType::I8x16: MOZ_CRASH("NYI");
          case ValType::I16x8: MOZ_CRASH("NYI");
          case ValType::I32x4: MOZ_CRASH("NYI");
          case ValType::F32x4: MOZ_CRASH("NYI");
          case ValType::B8x16: MOZ_CRASH("NYI");
          case ValType::B16x8: MOZ_CRASH("NYI");
          case ValType::B32x4: MOZ_CRASH("NYI");
        }
        if (!TypeScript::ArgTypes(script, i)->hasType(type))
            return true;
    }

    // Let's optimize it!
    if (!script->baselineScript()->addDependentWasmImport(cx, *this, funcImportIndex))
        return false;

    import.code = jitExitCode;
    import.baselineScript = script->baselineScript();
    return true;
}

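// The callImport_* functions below are the C++ targets reached through each
// import's interpreter exit (the code installed via fi.interpExitCodeOffset()
// in the constructor); the coerced return value, if any, is written back over
// argv[0].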
/* static */ int32_t
Instance::callImport_void(Instance* instance, int32_t funcImportIndex, int32_t argc, uint64_t* argv)
{
    JSContext* cx = instance->cx();
    RootedValue rval(cx);
    return instance->callImport(cx, funcImportIndex, argc, argv, &rval);
}

/* static */ int32_t
Instance::callImport_i32(Instance* instance, int32_t funcImportIndex, int32_t argc, uint64_t* argv)
{
    JSContext* cx = instance->cx();
    RootedValue rval(cx);
    if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval))
        return false;

    return ToInt32(cx, rval, (int32_t*)argv);
}

/* static */ int32_t
Instance::callImport_i64(Instance* instance, int32_t funcImportIndex, int32_t argc, uint64_t* argv)
{
    JSContext* cx = instance->cx();
    RootedValue rval(cx);
    if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval))
        return false;

    return ReadI64Object(cx, rval, (int64_t*)argv);
}

/* static */ int32_t
Instance::callImport_f64(Instance* instance, int32_t funcImportIndex, int32_t argc, uint64_t* argv)
{
    JSContext* cx = instance->cx();
    RootedValue rval(cx);
    if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval))
        return false;

    return ToNumber(cx, rval, (double*)argv);
}

/* static */ uint32_t
Instance::growMemory_i32(Instance* instance, uint32_t delta)
{
    MOZ_ASSERT(!instance->isAsmJS());

    JSContext* cx = instance->cx();
    RootedWasmMemoryObject memory(cx, instance->memory_);

    uint32_t ret = WasmMemoryObject::grow(memory, delta, cx);

    // If there has been a moving grow, this Instance should have been notified.
    MOZ_RELEASE_ASSERT(instance->tlsData_.memoryBase ==
                       instance->memory_->buffer().dataPointerEither());

    return ret;
}

/* static */ uint32_t
Instance::currentMemory_i32(Instance* instance)
{
    uint32_t byteLength = instance->memoryLength();
    MOZ_ASSERT(byteLength % wasm::PageSize == 0);
    return byteLength / wasm::PageSize;
}

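// The constructor eagerly fills in the instance's TLS data and the
// per-import, per-table and per-global portions of the code segment's global
// data area; fallible initialization is deferred to Instance::init below.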
Instance::Instance(JSContext* cx,
                   Handle<WasmInstanceObject*> object,
                   UniqueCode code,
                   HandleWasmMemoryObject memory,
                   SharedTableVector&& tables,
                   Handle<FunctionVector> funcImports,
                   const ValVector& globalImports)
  : compartment_(cx->compartment()),
    object_(object),
    code_(Move(code)),
    memory_(memory),
    tables_(Move(tables))
{
    MOZ_ASSERT(funcImports.length() == metadata().funcImports.length());
    MOZ_ASSERT(tables_.length() == metadata().tables.length());

    tlsData_.cx = cx;
    tlsData_.instance = this;
    tlsData_.globalData = code_->segment().globalData();
    tlsData_.memoryBase = memory ? memory->buffer().dataPointerEither().unwrap() : nullptr;
    tlsData_.stackLimit = *(void**)cx->stackLimitAddressForJitCode(StackForUntrustedScript);

    for (size_t i = 0; i < metadata().funcImports.length(); i++) {
        HandleFunction f = funcImports[i];
        const FuncImport& fi = metadata().funcImports[i];
        FuncImportTls& import = funcImportTls(fi);
        if (!isAsmJS() && IsExportedWasmFunction(f)) {
            WasmInstanceObject* calleeInstanceObj = ExportedFunctionToInstanceObject(f);
            const CodeRange& codeRange = calleeInstanceObj->getExportedFunctionCodeRange(f);
            Instance& calleeInstance = calleeInstanceObj->instance();
            import.tls = &calleeInstance.tlsData_;
            import.code = calleeInstance.codeSegment().base() + codeRange.funcNonProfilingEntry();
            import.baselineScript = nullptr;
            import.obj = calleeInstanceObj;
        } else {
            import.tls = &tlsData_;
            import.code = codeBase() + fi.interpExitCodeOffset();
            import.baselineScript = nullptr;
            import.obj = f;
        }
    }

    for (size_t i = 0; i < tables_.length(); i++) {
        const TableDesc& td = metadata().tables[i];
        TableTls& table = tableTls(td);
        table.length = tables_[i]->length();
        table.base = tables_[i]->base();
    }

    uint8_t* globalData = code_->segment().globalData();

    for (size_t i = 0; i < metadata().globals.length(); i++) {
        const GlobalDesc& global = metadata().globals[i];
        if (global.isConstant())
            continue;

        uint8_t* globalAddr = globalData + global.offset();
        switch (global.kind()) {
          case GlobalKind::Import: {
            globalImports[global.importIndex()].writePayload(globalAddr);
            break;
          }
          case GlobalKind::Variable: {
            const InitExpr& init = global.initExpr();
            switch (init.kind()) {
              case InitExpr::Kind::Constant: {
                init.val().writePayload(globalAddr);
                break;
              }
              case InitExpr::Kind::GetGlobal: {
                const GlobalDesc& imported = metadata().globals[init.globalIndex()];
                globalImports[imported.importIndex()].writePayload(globalAddr);
                break;
              }
            }
            break;
          }
          case GlobalKind::Constant: {
            MOZ_CRASH("skipped at the top");
          }
        }
    }
}

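// Fallible post-construction initialization: register this instance as a
// moving-grow observer on its memory and tables, and allocate the shared,
// refcounted signature ids used for dynamic signature checks.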
bool
Instance::init(JSContext* cx)
{
    if (memory_ && memory_->movingGrowable() && !memory_->addMovingGrowObserver(cx, object_))
        return false;

    for (const SharedTable& table : tables_) {
        if (table->movingGrowable() && !table->addMovingGrowObserver(cx, object_))
            return false;
    }

    if (!metadata().sigIds.empty()) {
        ExclusiveData<SigIdSet>::Guard lockedSigIdSet = sigIdSet->lock();

        if (!lockedSigIdSet->ensureInitialized(cx))
            return false;

        for (const SigWithId& sig : metadata().sigIds) {
            const void* sigId;
            if (!lockedSigIdSet->allocateSigId(cx, sig, &sigId))
                return false;

            *addressOfSigId(sig.id) = sigId;
        }
    }

    return true;
}

Instance::~Instance()
{
    compartment_->wasm.unregisterInstance(*this);

    for (unsigned i = 0; i < metadata().funcImports.length(); i++) {
        FuncImportTls& import = funcImportTls(metadata().funcImports[i]);
        if (import.baselineScript)
            import.baselineScript->removeDependentWasmImport(*this, i);
    }

    if (!metadata().sigIds.empty()) {
        ExclusiveData<SigIdSet>::Guard lockedSigIdSet = sigIdSet->lock();

        for (const SigWithId& sig : metadata().sigIds) {
            if (const void* sigId = *addressOfSigId(sig.id))
                lockedSigIdSet->deallocateSigId(sig, sigId);
        }
    }
}

size_t
Instance::memoryMappedSize() const
{
    return memory_->buffer().wasmMappedSize();
}

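// Returns whether the access's last byte lands past the accessible bytes of
// memory but still within the mapped region, i.e. in the guard pages beyond
// the current memory length.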
bool
Instance::memoryAccessInGuardRegion(uint8_t* addr, unsigned numBytes) const
{
    MOZ_ASSERT(numBytes > 0);

    if (!metadata().usesMemory())
        return false;

    uint8_t* base = memoryBase().unwrap(/* comparison */);
    if (addr < base)
        return false;

    size_t lastByteOffset = addr - base + (numBytes - 1);
    return lastByteOffset >= memoryLength() && lastByteOffset < memoryMappedSize();
}

void
Instance::tracePrivate(JSTracer* trc)
{
    // This method is only called from WasmInstanceObject, so the only reason
    // TraceEdge is called here is so that the pointer can be updated during a
    // moving GC. TraceWeakEdge may sound better, but it is less efficient
    // given that we know object_ is already marked.
    MOZ_ASSERT(!gc::IsAboutToBeFinalized(&object_));
    TraceEdge(trc, &object_, "wasm instance object");

    for (const FuncImport& fi : metadata().funcImports)
        TraceNullableEdge(trc, &funcImportTls(fi).obj, "wasm import");

    for (const SharedTable& table : tables_)
        table->trace(trc);

    TraceNullableEdge(trc, &memory_, "wasm buffer");
}

void
Instance::trace(JSTracer* trc)
{
    // Technically, instead of having this method, the caller could use
    // Instance::object() to get the owning WasmInstanceObject to mark,
    // but this method is simpler and more efficient. The trace hook of
    // WasmInstanceObject will call Instance::tracePrivate at which point we
    // can mark the rest of the children.
    TraceEdge(trc, &object_, "wasm instance object");
}

SharedMem<uint8_t*>
Instance::memoryBase() const
{
    MOZ_ASSERT(metadata().usesMemory());
    MOZ_ASSERT(tlsData_.memoryBase == memory_->buffer().dataPointerEither());
    return memory_->buffer().dataPointerEither();
}

size_t
Instance::memoryLength() const
{
    return memory_->buffer().byteLength();
}

WasmInstanceObject*
Instance::objectUnbarriered() const
{
    return object_.unbarrieredGet();
}

WasmInstanceObject*
Instance::object() const
{
    return object_;
}

bool
Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args)
{
    // If there has been a moving grow, this Instance should have been notified.
    MOZ_RELEASE_ASSERT(!memory_ || tlsData_.memoryBase == memory_->buffer().dataPointerEither());

    if (!cx->compartment()->wasm.ensureProfilingState(cx))
        return false;

    const FuncExport& func = metadata().lookupFuncExport(funcIndex);

    // The calling convention for an external call into wasm is to pass an
    // array of 16-byte values where each value contains either a coerced int32
    // (in the low 4 bytes), a double value (in the low 8 bytes) or a SIMD
    // vector value, with the coercions specified by the wasm signature. The
    // external entry point unpacks this array into the system-ABI-specified
    // registers and stack memory and then calls into the internal entry point.
    // The return value is stored in the first element of the array (which,
    // therefore, must have length >= 1).
    Vector<ExportArg, 8> exportArgs(cx);
    if (!exportArgs.resize(Max<size_t>(1, func.sig().args().length())))
        return false;

    RootedValue v(cx);
    for (unsigned i = 0; i < func.sig().args().length(); ++i) {
        v = i < args.length() ? args[i] : UndefinedValue();
        switch (func.sig().arg(i)) {
          case ValType::I32:
            if (!ToInt32(cx, v, (int32_t*)&exportArgs[i]))
                return false;
            break;
          case ValType::I64:
            if (!JitOptions.wasmTestMode) {
                JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64);
                return false;
            }
            if (!ReadI64Object(cx, v, (int64_t*)&exportArgs[i]))
                return false;
            break;
          case ValType::F32:
            if (JitOptions.wasmTestMode && v.isObject()) {
                if (!ReadCustomFloat32NaNObject(cx, v, (uint32_t*)&exportArgs[i]))
                    return false;
                break;
            }
            if (!RoundFloat32(cx, v, (float*)&exportArgs[i]))
                return false;
            break;
          case ValType::F64:
            if (JitOptions.wasmTestMode && v.isObject()) {
                if (!ReadCustomDoubleNaNObject(cx, v, (uint64_t*)&exportArgs[i]))
                    return false;
                break;
            }
            if (!ToNumber(cx, v, (double*)&exportArgs[i]))
                return false;
            break;
          case ValType::I8x16: {
            SimdConstant simd;
            if (!ToSimdConstant<Int8x16>(cx, v, &simd))
                return false;
            memcpy(&exportArgs[i], simd.asInt8x16(), Simd128DataSize);
            break;
          }
          case ValType::I16x8: {
            SimdConstant simd;
            if (!ToSimdConstant<Int16x8>(cx, v, &simd))
                return false;
            memcpy(&exportArgs[i], simd.asInt16x8(), Simd128DataSize);
            break;
          }
          case ValType::I32x4: {
            SimdConstant simd;
            if (!ToSimdConstant<Int32x4>(cx, v, &simd))
                return false;
            memcpy(&exportArgs[i], simd.asInt32x4(), Simd128DataSize);
            break;
          }
          case ValType::F32x4: {
            SimdConstant simd;
            if (!ToSimdConstant<Float32x4>(cx, v, &simd))
                return false;
            memcpy(&exportArgs[i], simd.asFloat32x4(), Simd128DataSize);
            break;
          }
          case ValType::B8x16: {
            SimdConstant simd;
            if (!ToSimdConstant<Bool8x16>(cx, v, &simd))
                return false;
            // Bool8x16 uses the same representation as Int8x16.
            memcpy(&exportArgs[i], simd.asInt8x16(), Simd128DataSize);
            break;
          }
          case ValType::B16x8: {
            SimdConstant simd;
            if (!ToSimdConstant<Bool16x8>(cx, v, &simd))
                return false;
            // Bool16x8 uses the same representation as Int16x8.
            memcpy(&exportArgs[i], simd.asInt16x8(), Simd128DataSize);
            break;
          }
          case ValType::B32x4: {
            SimdConstant simd;
            if (!ToSimdConstant<Bool32x4>(cx, v, &simd))
                return false;
            // Bool32x4 uses the same representation as Int32x4.
            memcpy(&exportArgs[i], simd.asInt32x4(), Simd128DataSize);
            break;
          }
        }
    }

    {
        // Push a WasmActivation to describe the wasm frames we're about to push
        // when running this module. Additionally, push a JitActivation so that
        // the optimized wasm-to-Ion FFI call path (which we want to be very
        // fast) can avoid doing so. The JitActivation is marked as inactive so
        // stack iteration will skip over it.
        WasmActivation activation(cx);
        JitActivation jitActivation(cx, /* active */ false);

        // Call the per-exported-function trampoline created by GenerateEntry.
        auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, codeBase() + func.entryOffset());
        if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), &tlsData_))
            return false;
    }

    if (isAsmJS() && args.isConstructing()) {
        // By spec, when a JS function is called as a constructor and this
        // function returns a primitive value, which is the case for all asm.js
        // exported functions, the returned value is discarded and an empty
        // object is returned instead.
        PlainObject* obj = NewBuiltinClassInstance<PlainObject>(cx);
        if (!obj)
            return false;
        args.rval().set(ObjectValue(*obj));
        return true;
    }

    void* retAddr = &exportArgs[0];
    JSObject* retObj = nullptr;
    switch (func.sig().ret()) {
      case ExprType::Void:
        args.rval().set(UndefinedValue());
        break;
      case ExprType::I32:
        args.rval().set(Int32Value(*(int32_t*)retAddr));
        break;
      case ExprType::I64:
        if (!JitOptions.wasmTestMode) {
            JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64);
            return false;
        }
        retObj = CreateI64Object(cx, *(int64_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::F32:
        if (JitOptions.wasmTestMode && IsNaN(*(float*)retAddr)) {
            retObj = CreateCustomNaNObject(cx, (float*)retAddr);
            if (!retObj)
                return false;
            break;
        }
        args.rval().set(NumberValue(*(float*)retAddr));
        break;
      case ExprType::F64:
        if (JitOptions.wasmTestMode && IsNaN(*(double*)retAddr)) {
            retObj = CreateCustomNaNObject(cx, (double*)retAddr);
            if (!retObj)
                return false;
            break;
        }
        args.rval().set(NumberValue(*(double*)retAddr));
        break;
      case ExprType::I8x16:
        retObj = CreateSimd<Int8x16>(cx, (int8_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::I16x8:
        retObj = CreateSimd<Int16x8>(cx, (int16_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::I32x4:
        retObj = CreateSimd<Int32x4>(cx, (int32_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::F32x4:
        retObj = CreateSimd<Float32x4>(cx, (float*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::B8x16:
        retObj = CreateSimd<Bool8x16>(cx, (int8_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::B16x8:
        retObj = CreateSimd<Bool16x8>(cx, (int16_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::B32x4:
        retObj = CreateSimd<Bool32x4>(cx, (int32_t*)retAddr);
        if (!retObj)
            return false;
        break;
      case ExprType::Limit:
        MOZ_CRASH("Limit");
    }

    if (retObj)
        args.rval().set(ObjectValue(*retObj));

    return true;
}

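// Observer callbacks registered by Instance::init: when a memory or table
// grows by moving, re-sync the cached base pointer (and, for tables, the
// length) so subsequent accesses see the new location.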
void
Instance::onMovingGrowMemory(uint8_t* prevMemoryBase)
{
    MOZ_ASSERT(!isAsmJS());
    ArrayBufferObject& buffer = memory_->buffer().as<ArrayBufferObject>();
    tlsData_.memoryBase = buffer.dataPointer();
    code_->segment().onMovingGrow(prevMemoryBase, metadata(), buffer);
}

void
Instance::onMovingGrowTable()
{
    MOZ_ASSERT(!isAsmJS());
    MOZ_ASSERT(tables_.length() == 1);
    TableTls& table = tableTls(metadata().tables[0]);
    table.length = tables_[0]->length();
    table.base = tables_[0]->base();
}

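// Patch the import back to the generic interpreter exit and drop the
// BaselineScript dependency established by callImport (called, e.g., when
// that BaselineScript is discarded).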
void
Instance::deoptimizeImportExit(uint32_t funcImportIndex)
{
    const FuncImport& fi = metadata().funcImports[funcImportIndex];
    FuncImportTls& import = funcImportTls(fi);
    import.code = codeBase() + fi.interpExitCodeOffset();
    import.baselineScript = nullptr;
}

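// Flip a stored entry pointer between the profiling and non-profiling
// prologue of the function it targets; the assertion checks that the pointer
// previously referred to the other variant.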
static void
UpdateEntry(const Code& code, bool profilingEnabled, void** entry)
{
    const CodeRange& codeRange = *code.lookupRange(*entry);
    void* from = code.segment().base() + codeRange.funcNonProfilingEntry();
    void* to = code.segment().base() + codeRange.funcProfilingEntry();

    if (!profilingEnabled)
        Swap(from, to);

    MOZ_ASSERT(*entry == from);
    *entry = to;
}

bool
Instance::ensureProfilingState(JSContext* cx, bool newProfilingEnabled)
{
    if (code_->profilingEnabled() == newProfilingEnabled)
        return true;

    if (!code_->ensureProfilingState(cx, newProfilingEnabled))
        return false;

    // Imported wasm functions and typed function tables point directly to
    // either the profiling or non-profiling prologue and must therefore be
    // updated when the profiling mode is toggled.

    for (const FuncImport& fi : metadata().funcImports) {
        FuncImportTls& import = funcImportTls(fi);
        if (import.obj && import.obj->is<WasmInstanceObject>()) {
            Code& code = import.obj->as<WasmInstanceObject>().instance().code();
            UpdateEntry(code, newProfilingEnabled, &import.code);
        }
    }

    for (const SharedTable& table : tables_) {
        if (!table->isTypedFunction())
            continue;

        // This logic will have to be generalized to match the import logic
        // above if wasm ever gains the ability to create typed function
        // tables, since a single table can then contain elements from
        // multiple instances.
        MOZ_ASSERT(metadata().kind == ModuleKind::AsmJS);

        void** array = table->internalArray();
        uint32_t length = table->length();
        for (size_t i = 0; i < length; i++) {
            if (array[i])
                UpdateEntry(*code_, newProfilingEnabled, &array[i]);
        }
    }

    return true;
}

void
Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                        Metadata::SeenSet* seenMetadata,
                        ShareableBytes::SeenSet* seenBytes,
                        Table::SeenSet* seenTables,
                        size_t* code,
                        size_t* data) const
{
    *data += mallocSizeOf(this);

    code_->addSizeOfMisc(mallocSizeOf, seenMetadata, seenBytes, code, data);

    for (const SharedTable& table : tables_)
        *data += table->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenTables);
}